From 6b0ac484ba081520f11794c782c6bbdca43df86f Mon Sep 17 00:00:00 2001 From: ace-pm Date: Wed, 15 Apr 2026 15:37:12 +0200 Subject: [PATCH] refactor: update log prefixes and string values from gsd- to sf- namespace Updates channel prefixes, log messages, comments, and configuration values across daemon, mcp-server, and related packages to complete the rebrand from gsd to sf-run naming. Co-Authored-By: Claude Opus 4.6 (1M context) --- bin/sf-from-source | 24 + ...8-sf-tools-over-mcp-for-provider-parity.md | 240 ++++++++++ packages/daemon/src/channel-manager.ts | 10 +- packages/daemon/src/cli.ts | 16 +- packages/daemon/src/commands.ts | 2 +- packages/daemon/src/config.ts | 6 +- packages/daemon/src/daemon.test.ts | 6 +- packages/daemon/src/discord-bot.test.ts | 58 +-- packages/daemon/src/discord-bot.ts | 46 +- packages/daemon/src/launchd.test.ts | 30 +- packages/daemon/src/launchd.ts | 6 +- packages/daemon/src/orchestrator.test.ts | 8 +- packages/daemon/src/orchestrator.ts | 10 +- packages/daemon/src/project-scanner.test.ts | 10 +- packages/daemon/src/project-scanner.ts | 2 +- packages/daemon/src/session-manager.test.ts | 8 +- packages/daemon/src/session-manager.ts | 10 +- packages/daemon/src/types.ts | 4 +- packages/mcp-server/src/cli.ts | 8 +- packages/mcp-server/src/mcp-server.test.ts | 64 +-- packages/mcp-server/src/readers/captures.ts | 4 +- .../mcp-server/src/readers/doctor-lite.ts | 4 +- packages/mcp-server/src/readers/graph.test.ts | 52 +- packages/mcp-server/src/readers/graph.ts | 6 +- packages/mcp-server/src/readers/knowledge.ts | 4 +- packages/mcp-server/src/readers/metrics.ts | 4 +- packages/mcp-server/src/readers/paths.ts | 20 +- .../mcp-server/src/readers/readers.test.ts | 78 +-- packages/mcp-server/src/readers/roadmap.ts | 18 +- packages/mcp-server/src/readers/state.ts | 14 +- packages/mcp-server/src/server.ts | 16 +- packages/mcp-server/src/session-manager.ts | 14 +- .../mcp-server/src/tool-credentials.test.ts | 8 +- 
packages/mcp-server/src/tool-credentials.ts | 2 +- packages/mcp-server/src/types.ts | 2 +- .../mcp-server/src/workflow-tools.test.ts | 54 +-- packages/mcp-server/src/workflow-tools.ts | 4 +- .../agent-session-print-mode-persist.test.ts | 10 +- .../pi-coding-agent/src/core/agent-session.ts | 4 +- .../src/core/lifecycle-hooks.test.ts | 4 +- .../src/core/lsp/defaults.json | 2 +- .../src/core/resource-loader.ts | 6 +- packages/pi-coding-agent/src/core/sdk.ts | 2 +- .../src/core/session-manager.test.ts | 4 +- packages/pi-coding-agent/src/core/skills.ts | 4 +- .../core/tools/spawn-shell-windows.test.ts | 6 +- packages/pi-coding-agent/src/main.ts | 2 +- .../__tests__/tool-execution.test.ts | 4 +- .../controllers/input-controller.test.ts | 8 +- .../controllers/input-controller.ts | 4 +- .../extensions/memory/storage.test.ts | 2 +- .../src/tests/path-display.test.ts | 8 +- .../pi-coding-agent/src/utils/proxy-server.ts | 202 ++++++++ packages/rpc-client/src/rpc-client.test.ts | 2 +- scripts/base64-scan.sh | 2 +- scripts/build-web-if-stale.cjs | 2 +- scripts/compile-tests.mjs | 4 +- scripts/dev-cli.js | 2 +- scripts/dist-test-resolve.mjs | 2 +- scripts/install-hooks.mjs | 4 +- scripts/install-hooks.sh | 4 +- scripts/parallel-monitor.mjs | 36 +- scripts/postinstall.js | 2 +- scripts/pr-risk-check.mjs | 2 +- scripts/recover-gsd-1364.ps1 | 156 +++--- scripts/recover-gsd-1364.sh | 164 +++---- scripts/recover-gsd-1668.ps1 | 8 +- scripts/recover-gsd-1668.sh | 8 +- scripts/recover-sf-1364.sh | 386 +++++++++++++++ scripts/recover-sf-1668.sh | 446 ++++++++++++++++++ scripts/rtk-benchmark.mjs | 6 +- scripts/secret-scan.mjs | 2 +- scripts/secret-scan.sh | 2 +- scripts/sync-pkg-version.cjs | 2 +- scripts/validate-pack.js | 12 +- scripts/validate-pack.sh | 10 +- scripts/verify-s04.sh | 40 +- scripts/watch-resources.js | 4 +- sf-orchestrator/SKILL.md | 215 +++++++++ .../references/answer-injection.md | 119 +++++ sf-orchestrator/references/commands.md | 210 +++++++++ 
sf-orchestrator/references/json-result.md | 162 +++++++ sf-orchestrator/templates/spec.md | 20 + sf-orchestrator/workflows/build-from-spec.md | 184 ++++++++ sf-orchestrator/workflows/monitor-and-poll.md | 187 ++++++++ sf-orchestrator/workflows/step-by-step.md | 156 ++++++ src/cli-web-branch.ts | 16 +- src/cli.ts | 14 +- src/extension-registry.ts | 2 +- src/headless-context.ts | 6 +- src/headless-query.ts | 12 +- src/headless.ts | 32 +- src/help-text.ts | 102 ++-- src/loader.ts | 24 +- src/mcp-server.ts | 2 +- src/models-resolver.ts | 6 +- src/onboarding.ts | 12 +- src/resource-loader.ts | 36 +- src/resources/extensions/aws-auth/index.ts | 6 +- .../extensions/bg-shell/utilities.ts | 4 +- .../browser-tools/tools/state-persistence.ts | 4 +- .../browser-tools/tools/visual-diff.ts | 4 +- .../claude-code-cli/partial-builder.ts | 2 +- .../claude-code-cli/stream-adapter.ts | 2 +- .../tests/partial-builder.test.ts | 6 +- .../tests/stream-adapter.test.ts | 30 +- src/resources/extensions/cmux/index.ts | 38 +- .../genai-proxy/extension-manifest.json | 13 + src/resources/extensions/genai-proxy/index.ts | 14 + .../extensions/genai-proxy/proxy-command.ts | 50 ++ .../extensions/genai-proxy/proxy-server.ts | 282 +++++++++++ .../extensions/get-secrets-from-user.ts | 8 +- .../extensions/github-sync/mapping.ts | 6 +- src/resources/extensions/github-sync/sync.ts | 16 +- .../github-sync/tests/commit-linking.test.ts | 2 +- .../github-sync/tests/mapping.test.ts | 4 +- src/resources/extensions/github-sync/types.ts | 2 +- src/resources/extensions/mcp-client/auth.ts | 2 +- src/resources/extensions/mcp-client/index.ts | 12 +- .../extensions/remote-questions/config.ts | 8 +- .../remote-questions/remote-command.ts | 30 +- .../extensions/remote-questions/status.ts | 2 +- .../extensions/remote-questions/store.ts | 2 +- .../search-the-web/native-search.ts | 2 +- .../extensions/search-the-web/provider.ts | 6 +- src/resources/extensions/sf/activity-log.ts | 2 +- 
.../extensions/sf/auto-artifact-paths.ts | 2 +- src/resources/extensions/sf/auto-dispatch.ts | 4 +- src/resources/extensions/sf/auto-post-unit.ts | 2 +- src/resources/extensions/sf/auto-prompts.ts | 16 +- src/resources/extensions/sf/auto-recovery.ts | 32 +- src/resources/extensions/sf/auto-start.ts | 40 +- src/resources/extensions/sf/auto-worktree.ts | 122 ++--- src/resources/extensions/sf/auto.ts | 8 +- src/resources/extensions/sf/auto/loop-deps.ts | 3 - src/resources/extensions/sf/auto/phases.ts | 2 +- .../extensions/sf/bootstrap/crash-log.ts | 6 +- .../extensions/sf/bootstrap/db-tools.ts | 4 +- .../extensions/sf/bootstrap/dynamic-tools.ts | 52 +- .../extensions/sf/bootstrap/query-tools.ts | 2 +- .../extensions/sf/bootstrap/register-hooks.ts | 2 +- .../sf/bootstrap/register-shortcuts.ts | 6 +- .../extensions/sf/bootstrap/system-context.ts | 20 +- .../extensions/sf/bootstrap/write-gate.ts | 14 +- src/resources/extensions/sf/cache.ts | 4 +- src/resources/extensions/sf/captures.ts | 26 +- .../extensions/sf/codebase-generator.ts | 6 +- .../extensions/sf/commands-backlog.ts | 2 +- .../extensions/sf/commands-bootstrap.ts | 2 +- src/resources/extensions/sf/commands-cmux.ts | 4 +- .../extensions/sf/commands-codebase.ts | 2 +- .../extensions/sf/commands-config.ts | 4 +- .../extensions/sf/commands-extensions.ts | 4 +- .../extensions/sf/commands-handlers.ts | 8 +- src/resources/extensions/sf/commands-logs.ts | 6 +- .../extensions/sf/commands-maintenance.ts | 4 +- .../extensions/sf/commands-mcp-status.ts | 4 +- .../extensions/sf/commands-pr-branch.ts | 14 +- .../extensions/sf/commands-prefs-wizard.ts | 6 +- .../extensions/sf/commands/catalog.ts | 16 +- .../extensions/sf/commands/handlers/core.ts | 8 +- .../sf/commands/handlers/workflow.ts | 2 +- src/resources/extensions/sf/db-writer.ts | 10 +- .../extensions/sf/definition-loader.ts | 2 +- src/resources/extensions/sf/detection.ts | 10 +- .../extensions/sf/doctor-engine-checks.ts | 6 +- .../extensions/sf/doctor-environment.ts 
| 8 +- .../extensions/sf/doctor-git-checks.ts | 16 +- .../extensions/sf/doctor-global-checks.ts | 2 +- .../extensions/sf/doctor-runtime-checks.ts | 64 +-- src/resources/extensions/sf/export.ts | 2 +- src/resources/extensions/sf/forensics.ts | 12 +- src/resources/extensions/sf/git-self-heal.ts | 2 +- src/resources/extensions/sf/git-service.ts | 64 +-- src/resources/extensions/sf/gitignore.ts | 90 ++-- src/resources/extensions/sf/graph-context.ts | 4 +- .../extensions/sf/guided-flow-queue.ts | 8 +- src/resources/extensions/sf/guided-flow.ts | 32 +- src/resources/extensions/sf/init-wizard.ts | 18 +- src/resources/extensions/sf/journal.ts | 2 +- src/resources/extensions/sf/key-manager.ts | 2 +- .../sf/learning/fallback-chain-writer.mjs | 16 +- .../learning/fallback-chain-writer.test.mjs | 14 +- .../extensions/sf/learning/index.mjs | 8 +- .../extensions/sf/markdown-renderer.ts | 4 +- src/resources/extensions/sf/md-importer.ts | 8 +- src/resources/extensions/sf/metrics.ts | 2 +- .../extensions/sf/migrate-external.ts | 60 +-- .../extensions/sf/migrate/command.ts | 20 +- src/resources/extensions/sf/migrate/writer.ts | 4 +- src/resources/extensions/sf/milestone-ids.ts | 2 +- .../extensions/sf/native-git-bridge.ts | 4 +- .../extensions/sf/native-parser-bridge.ts | 4 +- .../extensions/sf/notification-store.ts | 12 +- src/resources/extensions/sf/parallel-merge.ts | 8 +- .../extensions/sf/parallel-monitor-overlay.ts | 16 +- .../extensions/sf/parallel-orchestrator.ts | 12 +- src/resources/extensions/sf/paths.ts | 56 +-- .../extensions/sf/preferences-models.ts | 4 +- .../extensions/sf/preferences-skills.ts | 4 +- .../extensions/sf/preferences-validation.ts | 2 +- src/resources/extensions/sf/preferences.ts | 2 +- src/resources/extensions/sf/preparation.ts | 6 +- src/resources/extensions/sf/prompt-loader.ts | 8 +- src/resources/extensions/sf/queue-order.ts | 2 +- src/resources/extensions/sf/quick.ts | 8 +- src/resources/extensions/sf/reactive-graph.ts | 2 +- 
src/resources/extensions/sf/repo-identity.ts | 104 ++-- src/resources/extensions/sf/reports.ts | 6 +- src/resources/extensions/sf/rethink.ts | 4 +- src/resources/extensions/sf/rule-registry.ts | 12 +- src/resources/extensions/sf/run-manager.ts | 14 +- .../sf/safety/file-change-validator.ts | 4 +- .../sf/safety/gemini-permissions.ts | 32 ++ src/resources/extensions/sf/service-tier.ts | 2 +- src/resources/extensions/sf/session-lock.ts | 30 +- .../extensions/sf/session-status-io.ts | 2 +- src/resources/extensions/sf/sf-db.ts | 4 +- src/resources/extensions/sf/skill-health.ts | 2 +- .../extensions/sf/skill-telemetry.ts | 4 +- .../extensions/sf/slice-parallel-conflict.ts | 4 +- src/resources/extensions/sf/state.ts | 4 +- src/resources/extensions/sf/sync-lock.ts | 4 +- .../extensions/sf/tests/activity-log.test.ts | 2 +- .../sf/tests/auto-dashboard.test.ts | 8 +- .../sf/tests/auto-lock-creation.test.ts | 40 +- .../sf/tests/auto-model-selection.test.ts | 12 +- .../auto-paused-session-validation.test.ts | 14 +- .../extensions/sf/tests/auto-recovery.test.ts | 78 +-- .../tests/auto-stale-lock-self-kill.test.ts | 10 +- .../sf/tests/auto-start-model-capture.test.ts | 2 +- .../tests/auto-start-needs-discussion.test.ts | 8 +- .../tests/auto-start-worktree-db-path.test.ts | 2 +- .../sf/tests/auto-supervisor.test.mjs | 2 +- .../tests/auto-worktree-auto-resolve.test.ts | 12 +- .../sf/tests/block-db-writes.test.ts | 34 +- .../tests/cache-staleness-regression.test.ts | 12 +- .../extensions/sf/tests/captures.test.ts | 16 +- .../sf/tests/claude-import-tui.test.ts | 2 +- .../extensions/sf/tests/cmux.test.ts | 6 +- .../sf/tests/codebase-generator.test.ts | 12 +- .../sf/tests/collect-from-manifest.test.ts | 2 +- .../sf/tests/commands-backlog.test.ts | 4 +- .../tests/commands-extract-learnings.test.ts | 28 +- .../sf/tests/commands-inspect-open-db.test.ts | 2 +- .../extensions/sf/tests/commands-logs.test.ts | 12 +- .../sf/tests/commands-pr-branch.test.ts | 22 +- 
.../sf/tests/commands-workflow-custom.test.ts | 4 +- .../sf/tests/complete-milestone.test.ts | 28 +- .../tests/complete-slice-gate-closure.test.ts | 4 +- .../complete-slice-string-coercion.test.ts | 4 +- .../sf/tests/complete-slice.test.ts | 8 +- .../complete-task-rollback-evidence.test.ts | 10 +- .../extensions/sf/tests/complete-task.test.ts | 8 +- .../completed-units-metrics-sync.test.ts | 2 +- .../extensions/sf/tests/context-store.test.ts | 6 +- .../copy-planning-artifacts-samepath.test.ts | 4 +- .../sf/tests/crash-handler-secondary.test.ts | 6 +- .../sf/tests/crash-recovery.test.ts | 14 +- .../sf/tests/db-access-guardrails.test.ts | 18 +- .../sf/tests/db-path-worktree-symlink.test.ts | 40 +- .../extensions/sf/tests/db-writer.test.ts | 34 +- .../extensions/sf/tests/debug-logger.test.ts | 6 +- .../sf/tests/defer-milestone-stamp.test.ts | 2 +- .../sf/tests/deferred-slice-dispatch.test.ts | 4 +- .../sf/tests/derive-state-crossval.test.ts | 4 +- .../derive-state-db-disk-reconcile.test.ts | 8 +- .../sf/tests/derive-state-db.test.ts | 20 +- .../sf/tests/derive-state-deps.test.ts | 16 +- .../sf/tests/derive-state-draft.test.ts | 24 +- .../sf/tests/derive-state-helpers.test.ts | 8 +- .../extensions/sf/tests/derive-state.test.ts | 40 +- .../extensions/sf/tests/detection.test.ts | 20 +- .../sf/tests/dev-engine-wrapper.test.ts | 4 +- .../tests/discuss-queued-milestones.test.ts | 10 +- .../sf/tests/dispatch-guard.test.ts | 70 +-- .../tests/dispatch-missing-task-plans.test.ts | 4 +- .../tests/dispatch-uat-last-completed.test.ts | 4 +- .../sf/tests/doctor-providers.test.ts | 30 +- .../tests/doctor-scope-db-unavailable.test.ts | 4 +- .../sf/tests/draft-promotion.test.ts | 6 +- .../sf/tests/ensure-db-open.test.ts | 38 +- .../sf/tests/export-html-all.test.ts | 2 +- .../extensions/sf/tests/flag-file-db.test.ts | 4 +- .../tests/forensics-context-persist.test.ts | 16 +- .../sf/tests/freeform-decisions.test.ts | 14 +- .../extensions/sf/tests/gate-dispatch.test.ts | 6 +- 
.../extensions/sf/tests/graph-context.test.ts | 10 +- .../guided-flow-session-isolation.test.ts | 2 +- .../tests/guided-flow-state-rebuild.test.ts | 4 +- .../sf/tests/headless-query.test.ts | 14 +- .../extensions/sf/tests/health-widget.test.ts | 14 +- .../extensions/sf/tests/init-wizard.test.ts | 28 +- .../sf/tests/integration-edge.test.ts | 6 +- .../all-milestones-complete-merge.test.ts | 12 +- .../integration/atomic-task-closeout.test.ts | 2 +- .../tests/integration/auto-preflight.test.ts | 2 +- .../tests/integration/auto-recovery.test.ts | 76 +-- .../integration/auto-secrets-gate.test.ts | 10 +- .../integration/auto-stash-merge.test.ts | 4 +- .../auto-worktree-milestone-merge.test.ts | 56 +-- .../tests/integration/auto-worktree.test.ts | 42 +- .../doctor-completion-deferral.test.ts | 6 +- .../integration/doctor-delimiter-fix.test.ts | 4 +- .../integration/doctor-enhancements.test.ts | 2 +- .../doctor-environment-worktree.test.ts | 12 +- .../doctor-false-positives.test.ts | 12 +- .../tests/integration/doctor-fixlevel.test.ts | 14 +- .../sf/tests/integration/doctor-git.test.ts | 110 ++--- .../integration/doctor-proactive.test.ts | 28 +- .../doctor-roadmap-summary-atomicity.test.ts | 10 +- .../tests/integration/doctor-runtime.test.ts | 60 +-- .../sf/tests/integration/doctor.test.ts | 28 +- .../e2e-workflow-pipeline-integration.test.ts | 2 +- ...ature-branch-lifecycle-integration.test.ts | 40 +- .../sf/tests/integration/git-locale.test.ts | 8 +- .../sf/tests/integration/git-service.test.ts | 256 +++++----- .../gitignore-staging-2570.test.ts | 50 +- .../integration/gitignore-tracked-sf.test.ts | 114 ++--- .../sf/tests/integration/headless-command.ts | 40 +- .../tests/integration/idle-recovery.test.ts | 36 +- .../inherited-repo-home-dir.test.ts | 64 +-- .../integration/integration-lifecycle.test.ts | 4 +- .../integration-mixed-milestones.test.ts | 16 +- .../integration/integration-proof.test.ts | 26 +- .../integration/merge-cwd-restore.test.ts | 10 +- 
.../tests/integration/migrate-command.test.ts | 16 +- .../milestone-transition-worktree.test.ts | 4 +- .../tests/integration/parallel-merge.test.ts | 18 +- ...rallel-workers-multi-milestone-e2e.test.ts | 10 +- .../sf/tests/integration/paths.test.ts | 32 +- .../queue-completed-milestone-perf.test.ts | 2 +- .../integration/queue-reorder-e2e.test.ts | 20 +- .../quick-branch-lifecycle.test.ts | 8 +- .../sf/tests/integration/run-uat.test.ts | 26 +- .../state-machine-edge-cases.test.ts | 102 ++-- .../state-machine-live-validation.test.ts | 92 ++-- .../state-machine-runtime-failures.test.ts | 40 +- .../tests/integration/token-savings.test.ts | 32 +- .../sf/tests/integration/worktree-e2e.test.ts | 12 +- .../sf/tests/interrupted-session-auto.test.ts | 10 +- .../sf/tests/interrupted-session-ui.test.ts | 10 +- .../sf/tests/journal-integration.test.ts | 2 +- .../sf/tests/journal-query-tool.test.ts | 2 +- .../extensions/sf/tests/journal.test.ts | 18 +- .../extensions/sf/tests/knowledge.test.ts | 32 +- .../sf/tests/manifest-status.test.ts | 8 +- .../sf/tests/markdown-renderer.test.ts | 88 ++-- .../sf/tests/mcp-project-config.test.ts | 6 +- .../extensions/sf/tests/md-importer.test.ts | 12 +- .../extensions/sf/tests/measurement.test.ts | 2 +- .../sf/tests/memory-leak-guards.test.ts | 4 +- .../tests/merge-conflict-stops-loop.test.ts | 2 +- .../extensions/sf/tests/metrics.test.ts | 22 +- .../tests/migrate-external-worktree.test.ts | 26 +- .../sf/tests/migrate-hierarchy.test.ts | 6 +- .../tests/migrate-writer-integration.test.ts | 8 +- .../sf/tests/milestone-status-tool.test.ts | 6 +- ...milestone-transition-state-rebuild.test.ts | 4 +- .../sf/tests/none-mode-gates.test.ts | 16 +- .../sf/tests/notification-store.test.ts | 10 +- .../sf/tests/notification-widget.test.ts | 2 +- .../sf/tests/notifications-handler.test.ts | 2 +- .../sf/tests/orphaned-worktree-audit.test.ts | 10 +- .../extensions/sf/tests/overrides.test.ts | 10 +- .../tests/parallel-budget-atomicity.test.ts | 6 +- 
.../sf/tests/parallel-commit-scope.test.ts | 40 +- .../sf/tests/parallel-crash-recovery.test.ts | 6 +- .../tests/parallel-eligibility-ghost.test.ts | 8 +- .../sf/tests/parallel-orchestration.test.ts | 4 +- ...rallel-orchestrator-zombie-cleanup.test.ts | 10 +- .../tests/parallel-research-dispatch.test.ts | 2 +- .../parallel-worker-lock-contention.test.ts | 26 +- .../tests/parallel-worker-monitoring.test.ts | 8 +- .../extensions/sf/tests/park-db-sync.test.ts | 4 +- .../sf/tests/park-edge-cases.test.ts | 8 +- .../sf/tests/park-milestone.test.ts | 12 +- .../extensions/sf/tests/phase-anchor.test.ts | 4 +- ...an-milestone-artifact-verification.test.ts | 4 +- .../plan-milestone-queue-context.test.ts | 6 +- .../sf/tests/plan-milestone.test.ts | 26 +- .../sf/tests/plan-slice-prompt.test.ts | 60 +-- .../extensions/sf/tests/plan-slice.test.ts | 22 +- .../extensions/sf/tests/plan-task.test.ts | 18 +- .../sf/tests/planning-crossval.test.ts | 12 +- .../sf/tests/post-exec-retry-bypass.test.ts | 4 +- .../sf/tests/post-mutation-hook.test.ts | 12 +- .../sf/tests/post-unit-hooks.test.ts | 16 +- .../tests/pre-execution-fail-closed.test.ts | 4 +- .../tests/pre-execution-pause-wiring.test.ts | 8 +- .../sf/tests/preferences-formatting.test.ts | 4 +- .../tests/preferences-worktree-sync.test.ts | 8 +- .../extensions/sf/tests/preferences.test.ts | 4 +- .../preflight-context-draft-filter.test.ts | 2 +- .../tests/project-relocation-recovery.test.ts | 18 +- .../tests/prompt-budget-enforcement.test.ts | 4 +- .../extensions/sf/tests/prompt-db.test.ts | 22 +- .../sf/tests/prompt-ordering.test.ts | 2 +- .../sf/tests/queue-draft-detection.test.ts | 2 +- .../sf/tests/queue-execution-guard.test.ts | 36 +- .../extensions/sf/tests/queue-order.test.ts | 10 +- .../sf/tests/reactive-executor.test.ts | 34 +- .../sf/tests/reassess-detection.test.ts | 10 +- .../sf/tests/reassess-handler.test.ts | 38 +- .../sf/tests/reassess-prompt.test.ts | 18 +- .../tests/reconciliation-edge-cases.test.ts | 6 +- 
.../sf/tests/recovery-attempts-reset.test.ts | 2 +- .../sf/tests/regex-hardening.test.ts | 4 +- .../sf/tests/register-shortcuts.test.ts | 10 +- .../remediation-completion-guard.test.ts | 8 +- .../extensions/sf/tests/remote-status.test.ts | 2 +- .../extensions/sf/tests/reopen-slice.test.ts | 16 +- .../extensions/sf/tests/reopen-task.test.ts | 18 +- .../sf/tests/replan-handler.test.ts | 26 +- .../extensions/sf/tests/replan-slice.test.ts | 28 +- .../sf/tests/repo-identity-worktree.test.ts | 60 +-- .../extensions/sf/tests/requirements.test.ts | 2 +- .../tests/resource-loader-import-path.test.ts | 6 +- .../tests/retry-diagnostic-reasoning.test.ts | 4 +- .../sf/tests/retry-state-reset.test.ts | 28 +- .../sf/tests/rewrite-count-persist.test.ts | 12 +- .../sf/tests/rogue-file-detection.test.ts | 48 +- .../sf/tests/routing-history.test.ts | 2 +- .../extensions/sf/tests/run-manager.test.ts | 6 +- .../sf/tests/secure-env-collect.test.ts | 2 +- .../sf/tests/session-lock-multipath.test.ts | 18 +- .../sf/tests/session-lock-regression.test.ts | 36 +- .../tests/session-lock-transient-read.test.ts | 12 +- .../extensions/sf/tests/sf-recover.test.ts | 4 +- .../extensions/sf/tests/sf-tools.test.ts | 34 +- .../tests/sfroot-worktree-detection.test.ts | 62 +-- .../extensions/sf/tests/shared-wal.test.ts | 26 +- .../sf/tests/single-writer-invariant.test.ts | 4 +- .../sf/tests/slice-disk-reconcile.test.ts | 10 +- .../sf/tests/slice-parallel-conflict.test.ts | 4 +- .../sf/tests/smart-entry-complete.test.ts | 2 +- .../sf/tests/smart-entry-draft.test.ts | 2 +- .../sf/tests/stale-queued-milestone.test.ts | 4 +- .../sf/tests/stale-worktree-cwd.test.ts | 20 +- .../sf/tests/stalled-tool-recovery.test.ts | 6 +- .../sf/tests/stash-pop-sf-conflict.test.ts | 30 +- .../tests/stash-queued-context-files.test.ts | 36 +- .../sf/tests/state-corruption-2945.test.ts | 2 +- .../sf/tests/state-derivation-parity.test.ts | 18 +- .../state-machine-full-walkthrough.test.ts | 80 ++-- 
.../sf/tests/steer-worktree-path.test.ts | 38 +- .../sf/tests/stop-auto-remote.test.ts | 10 +- .../sf/tests/stop-backtrack.test.ts | 12 +- .../sf/tests/stuck-detection-coverage.test.ts | 4 +- .../sf/tests/subagent-agent-discovery.test.ts | 10 +- .../sf/tests/subagent-model-dispatch.test.ts | 4 +- .../tests/symlink-extension-discovery.test.ts | 2 +- .../tests/symlink-numbered-variants.test.ts | 86 ++-- .../extensions/sf/tests/sync-lock.test.ts | 24 +- .../extensions/sf/tests/test-utils.ts | 4 +- .../sf/tests/triage-resolution.test.ts | 32 +- .../uat-stuck-loop-orphaned-worktree.test.ts | 24 +- .../extensions/sf/tests/undo.test.ts | 32 +- .../extensions/sf/tests/unit-runtime.test.ts | 42 +- .../sf/tests/uok-audit-unified.test.ts | 2 +- .../sf/tests/uok-model-policy.test.ts | 4 +- .../sf/tests/uok-plan-v2-wiring.test.ts | 6 +- .../sf/tests/vacuous-truth-slices.test.ts | 10 +- .../sf/tests/validate-directory.test.ts | 4 +- .../validate-milestone-stuck-guard.test.ts | 6 +- .../validate-milestone-write-order.test.ts | 16 +- .../sf/tests/validate-milestone.test.ts | 26 +- .../wave5-consistency-regressions.test.ts | 2 +- .../tests/windows-path-normalization.test.ts | 8 +- .../sf/tests/workflow-events.test.ts | 22 +- .../sf/tests/workflow-logger-audit.test.ts | 4 +- .../sf/tests/workflow-logger.test.ts | 6 +- .../sf/tests/workflow-manifest.test.ts | 6 +- .../extensions/sf/tests/workflow-mcp.test.ts | 8 +- .../sf/tests/workflow-reconcile.test.ts | 4 +- .../sf/tests/workflow-tool-executors.test.ts | 10 +- .../sf/tests/workspace-index.test.ts | 2 +- .../sf/tests/worktree-bugfix.test.ts | 10 +- .../sf/tests/worktree-db-integration.test.ts | 10 +- .../worktree-db-respawn-truncation.test.ts | 38 +- .../sf/tests/worktree-db-same-file.test.ts | 10 +- .../sf/tests/worktree-health.test.ts | 30 +- .../sf/tests/worktree-integration.test.ts | 10 +- .../sf/tests/worktree-journal-events.test.ts | 12 +- .../sf/tests/worktree-manager.test.ts | 14 +- 
.../tests/worktree-nested-git-safety.test.ts | 4 +- .../tests/worktree-post-create-hook.test.ts | 10 +- .../tests/worktree-preferences-sync.test.ts | 32 +- .../sf/tests/worktree-resolver.test.ts | 74 +-- .../sf/tests/worktree-symlink-removal.test.ts | 28 +- .../sf/tests/worktree-sync-milestones.test.ts | 180 +++---- .../worktree-sync-overwrite-loop.test.ts | 24 +- .../sf/tests/worktree-sync-tasks.test.ts | 86 ++-- .../sf/tests/worktree-teardown-safety.test.ts | 24 +- .../extensions/sf/tests/worktree.test.ts | 40 +- .../extensions/sf/tests/write-gate.test.ts | 24 +- .../sf/tests/write-intercept.test.ts | 46 +- .../sf/tests/zombie-sf-state.test.ts | 16 +- .../extensions/sf/tools/complete-milestone.ts | 2 +- .../extensions/sf/tools/complete-slice.ts | 2 +- .../extensions/sf/tools/complete-task.ts | 2 +- .../extensions/sf/tools/reassess-roadmap.ts | 4 +- .../extensions/sf/tools/validate-milestone.ts | 2 +- .../extensions/sf/triage-resolution.ts | 6 +- src/resources/extensions/sf/unit-ownership.ts | 8 +- .../extensions/sf/validate-directory.ts | 4 +- .../extensions/sf/watch/header-renderer.ts | 4 +- .../extensions/sf/workflow-events.ts | 12 +- .../extensions/sf/workflow-logger.ts | 8 +- .../extensions/sf/workflow-manifest.ts | 6 +- .../extensions/sf/workflow-migration.ts | 18 +- .../extensions/sf/workflow-projections.ts | 18 +- .../extensions/sf/workflow-reconcile.ts | 28 +- .../extensions/sf/workflow-templates.ts | 6 +- .../sf/workflow-templates/registry.json | 14 +- .../extensions/sf/worktree-command.ts | 16 +- .../extensions/sf/worktree-health.ts | 2 +- .../extensions/sf/worktree-manager.ts | 84 ++-- .../extensions/sf/worktree-resolver.ts | 8 +- src/resources/extensions/sf/worktree.ts | 32 +- .../extensions/sf/write-intercept.ts | 24 +- .../extensions/shared/interview-ui.ts | 2 +- .../extensions/shared/next-action-ui.ts | 8 +- .../extensions/shared/rtk-session-stats.ts | 4 +- src/resources/extensions/shared/rtk.ts | 2 +- .../extensions/slash-commands/audit.ts | 10 +- 
.../slash-commands/create-extension.ts | 40 +- .../slash-commands/create-slash-command.ts | 4 +- src/resources/extensions/subagent/agents.ts | 2 +- src/resources/extensions/subagent/index.ts | 10 +- .../extensions/subagent/isolation.ts | 10 +- src/resources/extensions/ttsr/rule-loader.ts | 6 +- src/resources/extensions/ttsr/ttsr-manager.ts | 2 +- src/resources/extensions/voice/index.ts | 2 +- src/resources/extensions/voice/linux-ready.ts | 4 +- .../extensions/voice/speech-recognizer.py | 8 +- .../voice/tests/linux-ready.test.ts | 2 +- src/rtk.ts | 2 +- src/tests/app-smoke.test.ts | 80 ++-- src/tests/auto-mode-piped.test.ts | 12 +- src/tests/auto-piped-io.test.ts | 14 +- src/tests/auto-resume-resource-loader.test.ts | 2 +- src/tests/bg-shell-persistence-cwd.test.ts | 10 +- src/tests/bridge-package-root.test.ts | 2 +- src/tests/bundled-extension-paths.test.ts | 6 +- .../cli-onboarding-custom-provider.test.ts | 4 +- src/tests/create-gsd-extension-paths.test.ts | 22 +- src/tests/docker-template.test.ts | 4 +- src/tests/ensure-workspace-builds.test.ts | 4 +- src/tests/extension-load-perf.test.ts | 2 +- src/tests/gsd-web-launcher-contract.test.ts | 8 +- src/tests/headless-cli-surface.test.ts | 48 +- src/tests/headless-detection.test.ts | 2 +- src/tests/headless-events.test.ts | 12 +- src/tests/headless-progress.test.ts | 2 +- .../headless-query-extension-path.test.ts | 4 +- src/tests/headless-v2-migration.test.ts | 4 +- src/tests/initial-gsd-header-filter.test.ts | 2 +- src/tests/integration/e2e-headless.test.ts | 22 +- src/tests/integration/e2e-smoke.test.ts | 104 ++-- src/tests/integration/pack-install.test.ts | 40 +- src/tests/integration/web-auth-token.test.ts | 2 +- ...auto-dashboard-lock-reconciliation.test.ts | 6 +- src/tests/integration/web-boot-node24.test.ts | 6 +- .../integration/web-bridge-contract.test.ts | 12 +- .../web-bridge-terminal-contract.test.ts | 2 +- src/tests/integration/web-cli-entry.test.ts | 8 +- .../web-command-parity-contract.test.ts | 128 
++--- .../web-continuity-contract.test.ts | 2 +- .../web-dashboard-rtk-contract.test.ts | 2 +- .../web-diagnostics-contract.test.ts | 44 +- .../web-live-interaction-contract.test.ts | 12 +- .../web-live-state-contract.test.ts | 16 +- .../integration/web-mode-assembled.test.ts | 24 +- src/tests/integration/web-mode-cli.test.ts | 82 ++-- .../web-mode-network-flags.test.ts | 14 +- .../integration/web-mode-onboarding.test.ts | 16 +- .../integration/web-mode-runtime-fixtures.ts | 14 +- .../integration/web-mode-runtime-harness.ts | 12 +- .../integration/web-mode-windows-hide.test.ts | 12 +- .../web-multi-project-contract.test.ts | 12 +- .../web-onboarding-contract.test.ts | 12 +- .../web-onboarding-presentation.test.ts | 2 +- .../web-project-discovery-contract.test.ts | 24 +- .../web-project-tab-preservation.test.ts | 4 +- src/tests/integration/web-project-url.test.ts | 6 +- .../web-recovery-diagnostics-contract.test.ts | 8 +- src/tests/integration/web-responsive.test.ts | 32 +- .../web-session-parity-contract.test.ts | 32 +- .../web-state-surfaces-contract.test.ts | 106 ++--- .../web-subprocess-module-resolution.test.ts | 6 +- .../integration/web-switch-project.test.ts | 2 +- .../web-terminal-preservation.test.ts | 8 +- .../web-workflow-action-execution.test.ts | 8 +- .../web-workflow-controls-contract.test.ts | 42 +- src/tests/mcp-client-oauth.test.ts | 2 +- src/tests/node-modules-symlink.test.ts | 66 +-- src/tests/parse-cli-args.test.ts | 2 +- src/tests/provider.test.ts | 2 +- src/tests/resource-loader-conflicts.test.ts | 18 +- src/tests/resource-loader.test.ts | 22 +- src/tests/resource-sync-staleness.test.ts | 4 +- src/tests/rtk-execution-seams.test.ts | 2 +- src/tests/rtk-session-stats.test.ts | 18 +- src/tests/rtk-test-utils.ts | 2 +- src/tests/rtk.test.ts | 10 +- src/tests/search-provider-command.test.ts | 2 +- src/tests/tool-bootstrap.test.ts | 6 +- src/tests/ttsr-rule-loader.test.ts | 12 +- src/tests/tui-content-cursor-desync.test.ts | 4 +- 
src/tests/tui-non-tty-render-loop.test.ts | 2 +- src/tests/update-check.test.ts | 26 +- src/tests/update-cmd-diagnostics.test.ts | 14 +- src/tests/welcome-screen.test.ts | 2 +- src/tests/windows-portability.test.ts | 6 +- src/tests/xterm-theme.test.ts | 4 +- src/update-check.ts | 4 +- src/web-mode.ts | 4 +- src/web/auto-dashboard-service.ts | 10 +- src/web/bridge-service.ts | 14 +- src/web/captures-service.ts | 2 +- src/web/cleanup-service.ts | 8 +- src/web/cli-entry.ts | 2 +- src/web/doctor-service.ts | 2 +- src/web/export-service.ts | 4 +- src/web/forensics-service.ts | 2 +- src/web/history-service.ts | 2 +- src/web/hooks-service.ts | 4 +- src/web/inspect-service.ts | 8 +- src/web/knowledge-service.ts | 2 +- src/web/notifications-service.ts | 2 +- src/web/project-discovery-service.ts | 6 +- src/web/recovery-diagnostics-service.ts | 16 +- src/web/safe-import-meta-resolve.ts | 2 +- src/web/settings-service.ts | 2 +- src/web/skill-health-service.ts | 2 +- src/web/undo-service.ts | 4 +- src/web/visualizer-service.ts | 2 +- src/welcome-screen.ts | 2 +- src/worktree-cli.ts | 32 +- web/app/api/browse-directories/route.ts | 2 +- web/app/api/dev-mode/route.ts | 2 +- web/app/api/experimental/route.ts | 2 +- web/app/api/files/route.ts | 44 +- web/app/api/remote-questions/route.ts | 4 +- web/app/api/steer/route.ts | 2 +- web/app/api/terminal/upload/route.ts | 4 +- web/components/sf/app-shell.tsx | 14 +- web/components/sf/chat-mode.tsx | 54 +-- web/components/sf/command-surface.tsx | 74 +-- web/components/sf/dual-terminal.tsx | 2 +- web/components/sf/file-content-viewer.tsx | 2 +- web/components/sf/files-view.tsx | 48 +- web/components/sf/onboarding/step-mode.tsx | 2 +- web/components/sf/project-welcome.tsx | 12 +- .../sf/remaining-command-panels.tsx | 26 +- web/components/sf/roadmap.tsx | 2 +- web/components/sf/settings-panels.tsx | 6 +- web/components/sf/sidebar.tsx | 10 +- web/components/sf/visualizer-view.tsx | 4 +- web/lib/auth.ts | 4 +- 
web/lib/browser-slash-command-dispatch.ts | 106 ++--- web/lib/command-surface-contract.ts | 88 ++-- web/lib/pty-chat-parser.ts | 2 +- web/lib/pty-manager.ts | 8 +- web/lib/use-editor-font-size.ts | 4 +- web/lib/use-terminal-font-size.ts | 4 +- web/lib/use-user-mode.ts | 2 +- web/lib/workflow-actions.ts | 34 +- web/package-lock.json | 4 +- web/package.json | 2 +- 671 files changed, 8295 insertions(+), 5356 deletions(-) create mode 100755 bin/sf-from-source create mode 100644 docs/dev/ADR-008-sf-tools-over-mcp-for-provider-parity.md create mode 100644 packages/pi-coding-agent/src/utils/proxy-server.ts create mode 100755 scripts/recover-sf-1364.sh create mode 100755 scripts/recover-sf-1668.sh create mode 100644 sf-orchestrator/SKILL.md create mode 100644 sf-orchestrator/references/answer-injection.md create mode 100644 sf-orchestrator/references/commands.md create mode 100644 sf-orchestrator/references/json-result.md create mode 100644 sf-orchestrator/templates/spec.md create mode 100644 sf-orchestrator/workflows/build-from-spec.md create mode 100644 sf-orchestrator/workflows/monitor-and-poll.md create mode 100644 sf-orchestrator/workflows/step-by-step.md create mode 100644 src/resources/extensions/genai-proxy/extension-manifest.json create mode 100644 src/resources/extensions/genai-proxy/index.ts create mode 100644 src/resources/extensions/genai-proxy/proxy-command.ts create mode 100644 src/resources/extensions/genai-proxy/proxy-server.ts create mode 100644 src/resources/extensions/sf/safety/gemini-permissions.ts diff --git a/bin/sf-from-source b/bin/sf-from-source new file mode 100755 index 000000000..f60ba2d46 --- /dev/null +++ b/bin/sf-from-source @@ -0,0 +1,24 @@ +#!/usr/bin/env bash +# +# sf-from-source — run SF directly from this source checkout via bun. +# +# Purpose: every local commit in this repo (e.g. the #4251 fix) is live +# immediately without reinstalling the bun-packaged sf-run. 
Subagents can +# spawn sf by pointing SF_BIN_PATH at this script instead of dist/loader.js. +# +# Contract: +# - Executable shim spawn() / exec() can launch directly. +# - Exports SF_BIN_PATH before handing off to loader.ts so loader.ts's +# `SF_BIN_PATH ||= process.argv[1]` branch preserves the shim path +# instead of clobbering it with the .ts loader path (which is not +# directly executable by child_process.spawn). +# +# Requirements: bun on PATH, node_modules populated (`bun install` once). +set -euo pipefail + +SCRIPT_DIR=$(cd -- "$(dirname -- "$(readlink -f "${BASH_SOURCE[0]}")")" &>/dev/null && pwd) +SF_SOURCE_ROOT=$(cd -- "$SCRIPT_DIR/.." &>/dev/null && pwd) + +export SF_BIN_PATH="$SCRIPT_DIR/sf-from-source" + +exec bun run "$SF_SOURCE_ROOT/src/loader.ts" "$@" diff --git a/docs/dev/ADR-008-sf-tools-over-mcp-for-provider-parity.md b/docs/dev/ADR-008-sf-tools-over-mcp-for-provider-parity.md new file mode 100644 index 000000000..8fbb2b5bf --- /dev/null +++ b/docs/dev/ADR-008-sf-tools-over-mcp-for-provider-parity.md @@ -0,0 +1,240 @@ +# ADR-008: Expose SF Workflow Tools Over MCP for Provider Parity + +**Status:** Proposed +**Date:** 2026-04-09 +**Deciders:** Jeremy McSpadden +**Related:** ADR-004 (capability-aware model routing), ADR-007 (model catalog split and provider API encapsulation), `src/resources/extensions/sf/bootstrap/db-tools.ts`, `src/resources/extensions/claude-code-cli/stream-adapter.ts`, `packages/mcp-server/src/server.ts` + +## Context + +SF currently has two different tool surfaces: + +1. **In-process extension tools** registered directly into the runtime via `pi.registerTool(...)`. +2. **An external MCP server** that exposes session orchestration and read-only project inspection. + +This split is now creating a real provider compatibility problem. + +### What exists today + +The core SF workflow tools are internal extension tools. 
Examples include: + +- `gsd_summary_save` +- `gsd_plan_milestone` +- `gsd_plan_slice` +- `gsd_plan_task` +- `gsd_task_complete` / `gsd_complete_task` +- `gsd_slice_complete` +- `gsd_complete_milestone` +- `gsd_validate_milestone` +- `gsd_replan_slice` +- `gsd_reassess_roadmap` + +These are registered in `src/resources/extensions/sf/bootstrap/db-tools.ts` and related bootstrap files. SF prompts assume these tools are available during discuss, plan, and execute flows. + +Separately, `packages/mcp-server/src/server.ts` exposes a different tool surface: + +- session control: `gsd_execute`, `gsd_status`, `gsd_result`, `gsd_cancel`, `gsd_query`, `gsd_resolve_blocker` +- read-only inspection: `gsd_progress`, `gsd_roadmap`, `gsd_history`, `gsd_doctor`, `gsd_captures`, `gsd_knowledge` + +That MCP server is useful, but it is **not** a transport for the internal workflow/mutation tools. + +### The current failure mode + +The Claude Code CLI provider uses the Anthropic Agent SDK through `src/resources/extensions/claude-code-cli/stream-adapter.ts`. That adapter starts a Claude SDK session, but it does not forward the internal SF tool registry into the SDK session, nor does it attach a SF MCP server for those tools. + +As a result: + +- prompts tell the model to call tools like `gsd_complete_task` +- the tools exist in SF +- but Claude Code sessions do not actually receive those tools + +This produces a contract mismatch: the model is required to use tools that are unavailable in that provider path. + +### Why this matters + +This is not a one-off Claude Code bug. It reveals a deeper architectural issue: + +- SF’s core workflow contract is transport-specific +- prompt authors assume “internal extension tool availability” +- provider integrations do not all share the same execution surface + +If SF wants provider parity, its workflow tools need a transport-neutral exposure model. 
+ +## Decision + +**Expose the SF workflow tool contract over MCP as a first-class transport, and make MCP the compatibility layer for providers that cannot directly access the in-process SF tool registry.** + +This means: + +1. SF will keep its existing in-process tool registration for native runtime use. +2. SF will add an MCP execution surface for the same workflow tools. +3. Both surfaces must call the same underlying business logic. +4. Provider integrations such as Claude Code will use the MCP surface when they cannot access native in-process tools directly. + +The decision is explicitly **not** to replace the native tool system with MCP everywhere. MCP is the parity and portability layer, not the only runtime path. + +## Decision Details + +### 1. One handler layer, multiple transports + +SF tool behavior must not be implemented twice. + +The transport-neutral business logic for workflow tools should be shared by: + +- native extension tool registration (`pi.registerTool(...)`) +- MCP server tool registration + +The MCP server should wrap the same handlers used by `db-tools.ts`, `query-tools.ts`, and related modules. This avoids logic drift and keeps validation, DB writes, file rendering, and recovery behavior consistent. + +### 2. Add a workflow-tool MCP surface + +SF will expose the workflow tools required for discuss, planning, execution, and completion over MCP. + +Initial minimum set: + +- `gsd_summary_save` +- `gsd_decision_save` +- `gsd_plan_milestone` +- `gsd_plan_slice` +- `gsd_plan_task` +- `gsd_task_complete` +- `gsd_slice_complete` +- `gsd_complete_milestone` +- `gsd_validate_milestone` +- `gsd_replan_slice` +- `gsd_reassess_roadmap` +- `gsd_save_gate_result` +- selected read/query tools such as `gsd_milestone_status` + +Aliases should be treated conservatively. MCP should prefer canonical names unless compatibility requires exposing aliases. + +### 3. 
Preserve safety semantics + +The current SF safety model includes write gates, discussion gates, queue-mode restrictions, and state integrity guarantees. + +Those guarantees must continue to apply when tools are invoked over MCP. In particular: + +- MCP must not create a path that bypasses write gating +- MCP mutations must preserve the same DB/file/state invariants as native tools +- provider-specific fallback behavior must not allow manual summary writing in place of canonical completion tools + +### 4. Make provider capability checks explicit + +Before dispatching a workflow that requires SF workflow tools, SF should check whether the selected provider/session can access the required tool surface. + +If a provider cannot access either: + +- native in-process SF tools, or +- the SF MCP workflow tool surface + +then SF must fail early with a clear compatibility error rather than allowing execution to continue in a degraded, state-breaking mode. + +### 5. Keep the existing session/read MCP server + +The existing MCP server in `packages/mcp-server` remains valid. It serves a different purpose: + +- remote session orchestration +- status/result polling +- filesystem-backed project inspection + +The new workflow-tool MCP surface is complementary, not a replacement. + +## Alternatives Considered + +### Alternative A: Reroute away from Claude Code whenever tool-backed execution is needed + +This would fix the immediate failure for multi-provider users, but it does not solve provider parity. It also fails completely for users who only have Claude Code configured. + +**Rejected** because it treats the symptom, not the architectural gap. + +### Alternative B: Hard-fail Claude Code and require another provider + +This is a valid short-term guardrail and may still be used before MCP support is complete. + +**Rejected as the long-term architecture** because it permanently excludes a supported provider from first-class SF execution. 
+ +### Alternative C: Inject the internal SF tool registry directly into the Claude Agent SDK without MCP + +This would tightly couple SF’s internal extension runtime to a provider-specific integration path. It would not generalize well to other providers or external tool clients. + +**Rejected** because it creates a provider-specific bridge instead of a transport-neutral contract. + +### Alternative D: Replace native SF tools entirely with MCP + +This would simplify the conceptual model, but it would force all runtimes through an external protocol boundary even when the native in-process path is faster and already works well. + +**Rejected** because MCP is needed for portability, not because the native tool system is flawed. + +## Consequences + +### Positive + +1. **Provider parity improves.** Providers that can consume MCP tools can participate in full SF workflow execution. +2. **The workflow contract becomes transport-neutral.** Prompts can rely on capabilities rather than a specific runtime implementation detail. +3. **One compatibility story for external clients.** Claude Code, Cursor, and other MCP-capable clients can use the same workflow tool surface. +4. **Better long-term architecture.** Internal tools and external transports converge on shared handlers instead of diverging implementations. + +### Negative + +1. **Larger surface area to secure and test.** Mutation tools over MCP are higher risk than read-only inspection tools. +2. **Migration complexity.** Tool registration, gating, and handler extraction must be refactored carefully. +3. **Two transport paths must remain aligned.** Native and MCP invocation semantics must stay behaviorally identical. 
+ +### Neutral / Tradeoff + +The system will now support: + +- native in-process tool execution when available +- MCP-backed tool execution when native access is unavailable + +That is more complex than a single-path system, but it is the cost of provider portability without sacrificing native runtime quality. + +## Migration Plan + +### Phase 1: Extract shared handlers + +Refactor workflow tools so MCP and native registration can call the same transport-neutral functions. + +Priority targets: + +- `gsd_summary_save` +- `gsd_task_complete` +- `gsd_plan_milestone` +- `gsd_plan_slice` +- `gsd_plan_task` + +### Phase 2: Stand up the workflow-tool MCP server + +Add a new MCP surface for workflow tool execution. This may extend the existing MCP package or live as a sibling package, but it must be clearly separated from the current session/read API. + +### Phase 3: Port safety enforcement + +Move or centralize write gates and related policy checks so MCP mutations cannot bypass the existing safety model. + +### Phase 4: Attach MCP workflow tools to Claude Code sessions + +Update the Claude Code provider integration to pass a SF-managed `mcpServers` configuration into the Claude Agent SDK session when required. + +### Phase 5: Add provider capability gating + +Before tool-dependent flows begin, verify that the active provider can access the required SF workflow tools via either native registration or MCP. + +### Phase 6: Update prompts and docs + +Prompt contracts should remain strict about using canonical SF completion/planning tools, but documentation and runtime messaging must no longer assume that only native in-process tool registration satisfies that contract. + +## Validation + +Success is defined by all of the following: + +1. A Claude Code-backed execution session can complete a task using canonical SF workflow tools without manual summary writing. +2. Native provider behavior remains unchanged. +3. 
MCP-invoked workflow tools produce the same DB updates, rendered artifacts, and state transitions as native tool calls. +4. Write-gate and discussion-gate protections still hold under MCP invocation. +5. When required capabilities are unavailable, SF fails early with a precise compatibility error. + +## Scope Notes + +This ADR establishes the architectural direction. It does **not** require full MCP exposure of every historical alias or every auxiliary tool in the first implementation. + +The first implementation should prioritize the minimum workflow tool set needed to make discuss/plan/execute/complete flows work safely for MCP-capable providers. diff --git a/packages/daemon/src/channel-manager.ts b/packages/daemon/src/channel-manager.ts index 9d5f11b8f..462c5cf13 100644 --- a/packages/daemon/src/channel-manager.ts +++ b/packages/daemon/src/channel-manager.ts @@ -21,7 +21,7 @@ import type { Logger } from './logger.js'; const DEFAULT_CATEGORY_NAME = 'SF Projects'; const ARCHIVE_CATEGORY_NAME = 'SF Archive'; -const CHANNEL_PREFIX = 'gsd-'; +const CHANNEL_PREFIX = 'sf-'; const MAX_CHANNEL_NAME_LENGTH = 100; // Discord's limit // --------------------------------------------------------------------------- @@ -36,10 +36,10 @@ const MAX_CHANNEL_NAME_LENGTH = 100; // Discord's limit * - Replaces non-alphanumeric (except hyphens) with hyphens * - Collapses consecutive hyphens * - Trims leading/trailing hyphens - * - Prefixes with 'gsd-' + * - Prefixes with 'sf-' * - Caps total length at 100 chars (Discord limit) * - * Returns 'gsd-unnamed' for empty/whitespace-only inputs. + * Returns 'sf-unnamed' for empty/whitespace-only inputs. 
*/ export function sanitizeChannelName(projectDir: string): string { // Extract basename — handle both forward and back slashes @@ -51,7 +51,7 @@ export function sanitizeChannelName(projectDir: string): string { // Fallback for empty basename if (!basename) { - return 'gsd-unnamed'; + return 'sf-unnamed'; } // Lowercase @@ -68,7 +68,7 @@ export function sanitizeChannelName(projectDir: string): string { // Fallback if nothing remains after sanitization if (!name) { - return 'gsd-unnamed'; + return 'sf-unnamed'; } // Prefix diff --git a/packages/daemon/src/cli.ts b/packages/daemon/src/cli.ts index 5449ad761..32e78abf1 100644 --- a/packages/daemon/src/cli.ts +++ b/packages/daemon/src/cli.ts @@ -7,10 +7,10 @@ import { Logger } from './logger.js'; import { Daemon } from './daemon.js'; import { install, uninstall, status } from './launchd.js'; -const USAGE = `Usage: gsd-daemon [options] +const USAGE = `Usage: sf-daemon [options] Options: - --config Path to YAML config file (default: ~/.gsd/daemon.yaml) + --config Path to YAML config file (default: ~/.sf/daemon.yaml) --verbose Print log entries to stderr in addition to the log file --install Install the launchd LaunchAgent (auto-starts on login) --uninstall Uninstall the launchd LaunchAgent @@ -48,27 +48,27 @@ async function main(): Promise { scriptPath, configPath, }); - process.stdout.write('gsd-daemon: launchd agent installed and loaded.\n'); + process.stdout.write('sf-daemon: launchd agent installed and loaded.\n'); process.exit(0); } if (values.uninstall) { uninstall(); - process.stdout.write('gsd-daemon: launchd agent uninstalled.\n'); + process.stdout.write('sf-daemon: launchd agent uninstalled.\n'); process.exit(0); } if (values.status) { const result = status(); if (!result.registered) { - process.stdout.write('gsd-daemon: not registered with launchd.\n'); + process.stdout.write('sf-daemon: not registered with launchd.\n'); } else if (result.pid != null) { process.stdout.write( - `gsd-daemon: running (PID 
${result.pid}, last exit status: ${result.lastExitStatus ?? 'n/a'})\n`, + `sf-daemon: running (PID ${result.pid}, last exit status: ${result.lastExitStatus ?? 'n/a'})\n`, ); } else { process.stdout.write( - `gsd-daemon: registered but not running (last exit status: ${result.lastExitStatus ?? 'n/a'})\n`, + `sf-daemon: registered but not running (last exit status: ${result.lastExitStatus ?? 'n/a'})\n`, ); } process.exit(0); @@ -91,6 +91,6 @@ async function main(): Promise { main().catch((err: unknown) => { const msg = err instanceof Error ? err.message : String(err); - process.stderr.write(`gsd-daemon: fatal: ${msg}\n`); + process.stderr.write(`sf-daemon: fatal: ${msg}\n`); process.exit(1); }); diff --git a/packages/daemon/src/commands.ts b/packages/daemon/src/commands.ts index b3fed3a6c..273040e48 100644 --- a/packages/daemon/src/commands.ts +++ b/packages/daemon/src/commands.ts @@ -90,7 +90,7 @@ export async function registerGuildCommands( // --------------------------------------------------------------------------- /** - * Format session list for /gsd-status reply. + * Format session list for /sf-status reply. * Shows projectName, status, duration, and cost for each session. * Returns 'No active sessions.' if the array is empty. */ diff --git a/packages/daemon/src/config.ts b/packages/daemon/src/config.ts index 50a77a7b6..224bc97cc 100644 --- a/packages/daemon/src/config.ts +++ b/packages/daemon/src/config.ts @@ -20,7 +20,7 @@ function defaults(): DaemonConfig { discord: undefined, projects: { scan_roots: [] }, log: { - file: resolve(homedir(), '.gsd', 'daemon.log'), + file: resolve(homedir(), '.sf', 'daemon.log'), level: 'info', max_size_mb: 50, }, @@ -29,13 +29,13 @@ function defaults(): DaemonConfig { /** * Resolve the config file path. 
- * Priority: explicit CLI arg → SF_DAEMON_CONFIG env → ~/.gsd/daemon.yaml + * Priority: explicit CLI arg → SF_DAEMON_CONFIG env → ~/.sf/daemon.yaml */ export function resolveConfigPath(cliPath?: string): string { if (cliPath) return expandTilde(cliPath); const envPath = process.env['SF_DAEMON_CONFIG']; if (envPath) return expandTilde(envPath); - return resolve(homedir(), '.gsd', 'daemon.yaml'); + return resolve(homedir(), '.sf', 'daemon.yaml'); } /** diff --git a/packages/daemon/src/daemon.test.ts b/packages/daemon/src/daemon.test.ts index b3c2aeb51..d78812f2b 100644 --- a/packages/daemon/src/daemon.test.ts +++ b/packages/daemon/src/daemon.test.ts @@ -53,12 +53,12 @@ describe('resolveConfigPath', () => { } }); - it('defaults to ~/.gsd/daemon.yaml', () => { + it('defaults to ~/.sf/daemon.yaml', () => { const prev = process.env['SF_DAEMON_CONFIG']; try { delete process.env['SF_DAEMON_CONFIG']; const p = resolveConfigPath(); - assert.equal(p, join(homedir(), '.gsd', 'daemon.yaml')); + assert.equal(p, join(homedir(), '.sf', 'daemon.yaml')); } finally { if (prev !== undefined) process.env['SF_DAEMON_CONFIG'] = prev; } @@ -529,7 +529,7 @@ describe('CLI integration', () => { [join(__dirname, 'cli.js'), '--help'], { encoding: 'utf-8', timeout: 5000 }, ); - assert.ok(result.includes('Usage: gsd-daemon')); + assert.ok(result.includes('Usage: sf-daemon')); assert.ok(result.includes('--config')); assert.ok(result.includes('--verbose')); }); diff --git a/packages/daemon/src/discord-bot.test.ts b/packages/daemon/src/discord-bot.test.ts index 6757fbbb2..2ae539936 100644 --- a/packages/daemon/src/discord-bot.test.ts +++ b/packages/daemon/src/discord-bot.test.ts @@ -230,52 +230,52 @@ describe('Daemon + DiscordBot wiring', () => { // ---------- sanitizeChannelName ---------- describe('sanitizeChannelName', () => { - it('converts basic path to gsd-prefixed name', () => { - assert.equal(sanitizeChannelName('/home/user/my-project'), 'gsd-my-project'); + it('converts basic path to 
sf-prefixed name', () => { + assert.equal(sanitizeChannelName('/home/user/my-project'), 'sf-my-project'); }); it('converts path with special characters to hyphens', () => { - assert.equal(sanitizeChannelName('/home/user/My_Cool.Project!v2'), 'gsd-my-cool-project-v2'); + assert.equal(sanitizeChannelName('/home/user/My_Cool.Project!v2'), 'sf-my-cool-project-v2'); }); it('truncates very long names to 100 chars', () => { const longName = 'a'.repeat(200); const result = sanitizeChannelName(`/home/${longName}`); assert.ok(result.length <= 100, `Expected <= 100 chars, got ${result.length}`); - assert.ok(result.startsWith('gsd-')); + assert.ok(result.startsWith('sf-')); }); it('cleans leading/trailing dots and underscores', () => { - assert.equal(sanitizeChannelName('/home/...___project___...'), 'gsd-project'); + assert.equal(sanitizeChannelName('/home/...___project___...'), 'sf-project'); }); - it('returns gsd-unnamed for empty basename', () => { - assert.equal(sanitizeChannelName(''), 'gsd-unnamed'); - assert.equal(sanitizeChannelName('/'), 'gsd-unnamed'); + it('returns sf-unnamed for empty basename', () => { + assert.equal(sanitizeChannelName(''), 'sf-unnamed'); + assert.equal(sanitizeChannelName('/'), 'sf-unnamed'); }); - it('returns gsd-unnamed for basename with only special chars', () => { - assert.equal(sanitizeChannelName('/home/!!!'), 'gsd-unnamed'); + it('returns sf-unnamed for basename with only special chars', () => { + assert.equal(sanitizeChannelName('/home/!!!'), 'sf-unnamed'); }); it('collapses consecutive hyphens', () => { - assert.equal(sanitizeChannelName('/home/a---b---c'), 'gsd-a-b-c'); + assert.equal(sanitizeChannelName('/home/a---b---c'), 'sf-a-b-c'); }); it('handles Windows-style backslash paths', () => { - assert.equal(sanitizeChannelName('C:\\Users\\lex\\my-project'), 'gsd-my-project'); + assert.equal(sanitizeChannelName('C:\\Users\\lex\\my-project'), 'sf-my-project'); }); it('handles name at exact prefix + 96 chars = 100 char limit', () => { - // 
gsd- is 4 chars, so a 96-char basename should produce exactly 100 + // FIXME(review): 'sf-' is only 3 chars (vs 4 for 'gsd-'), so this 96-char basename now yields 99 chars and the length-100 assertions below fail; the fixture needs a 97-char basename const name96 = 'a'.repeat(96); const result = sanitizeChannelName(`/home/${name96}`); assert.equal(result.length, 100); - assert.equal(result, `gsd-${'a'.repeat(96)}`); + assert.equal(result, `sf-${'a'.repeat(96)}`); }); it('handles whitespace-only basename', () => { - assert.equal(sanitizeChannelName('/home/ '), 'gsd-unnamed'); + assert.equal(sanitizeChannelName('/home/ '), 'sf-unnamed'); }); }); @@ -383,7 +383,7 @@ describe('ChannelManager', () => { const mgr = new ChannelManager({ guild: guild as any, logger: logger as any }); const channel = await mgr.createProjectChannel('/home/user/my-project'); - assert.equal(channel.name, 'gsd-my-project'); + assert.equal(channel.name, 'sf-my-project'); assert.equal(channel.type, ChannelType.GuildText); // Category was created first (chan-1), then channel (chan-2) assert.equal(channel.parentId, 'chan-1'); @@ -443,10 +443,10 @@ describe('buildCommands', () => { const commands = buildCommands(); assert.equal(commands.length, 4); const names = commands.map((c) => c.name); - assert.ok(names.includes('gsd-status'), 'should include gsd-status'); - assert.ok(names.includes('gsd-start'), 'should include gsd-start'); - assert.ok(names.includes('gsd-stop'), 'should include gsd-stop'); - assert.ok(names.includes('gsd-verbose'), 'should include gsd-verbose'); + assert.ok(names.includes('sf-status'), 'should include sf-status'); + assert.ok(names.includes('sf-start'), 'should include sf-start'); + assert.ok(names.includes('sf-stop'), 'should include sf-stop'); + assert.ok(names.includes('sf-verbose'), 'should include sf-verbose'); }); it('each command has a description', () => { @@ -551,8 +551,8 @@ describe('command dispatch', () => { // The command routing logic is tested indirectly through integration of the // pure helpers (buildCommands, formatSessionStatus, isAuthorized). 
- it('gsd-status with no sessions produces empty message', () => { - // Tests the formatSessionStatus path that /gsd-status calls + it('sf-status with no sessions produces empty message', () => { + // Tests the formatSessionStatus path that /sf-status calls const result = formatSessionStatus([]); assert.equal(result, 'No active sessions.'); }); @@ -560,7 +560,7 @@ describe('command dispatch', () => { it('unknown command name is not in buildCommands list', () => { const commands = buildCommands(); const names = commands.map((c) => c.name); - assert.ok(!names.includes('gsd-unknown'), 'unknown should not be in command list'); + assert.ok(!names.includes('sf-unknown'), 'unknown should not be in command list'); }); it('auth guard rejects non-owner on interaction', () => { @@ -733,14 +733,14 @@ describe('Daemon orchestrator wiring', () => { }); }); -// ---------- /gsd-start and /gsd-stop logic paths ---------- +// ---------- /sf-start and /sf-stop logic paths ---------- -describe('/gsd-start and /gsd-stop logic', () => { +describe('/sf-start and /sf-stop logic', () => { // These test the observable logic paths exercised by the handlers. // Since handleGsdStart/handleGsdStop are private, we test the data layer // they depend on — project scanning, session listing, and edge cases. 
- it('/gsd-start: scanForProjects returning 0 projects', async () => { + it('/sf-start: scanForProjects returning 0 projects', async () => { // Simulates the "no projects" path const { scanForProjects } = await import('./project-scanner.js'); // With no scan roots, should return empty @@ -748,7 +748,7 @@ describe('/gsd-start and /gsd-stop logic', () => { assert.equal(projects.length, 0); }); - it('/gsd-stop: getAllSessions returns empty when no sessions active', async () => { + it('/sf-stop: getAllSessions returns empty when no sessions active', async () => { const { SessionManager } = await import('./session-manager.js'); const dir = tmpDir(); cleanupDirs.push(dir); @@ -760,7 +760,7 @@ describe('/gsd-start and /gsd-stop logic', () => { await logger.close(); }); - it('/gsd-stop: filters to active sessions only', () => { + it('/sf-stop: filters to active sessions only', () => { // Simulate the filter logic used in handleGsdStop const allSessions: Partial[] = [ { sessionId: 's1', status: 'running', projectName: 'alpha' }, @@ -777,7 +777,7 @@ describe('/gsd-start and /gsd-stop logic', () => { assert.deepEqual(active.map((s) => s.projectName), ['alpha', 'gamma', 'epsilon']); }); - it('/gsd-start: >25 projects are truncated for select menu', () => { + it('/sf-start: >25 projects are truncated for select menu', () => { // Simulate the truncation logic const projects = Array.from({ length: 30 }, (_, i) => ({ name: `project-${i}`, diff --git a/packages/daemon/src/discord-bot.ts b/packages/daemon/src/discord-bot.ts index 9c90ac1ab..9aed1475c 100644 --- a/packages/daemon/src/discord-bot.ts +++ b/packages/daemon/src/discord-bot.ts @@ -256,7 +256,7 @@ export class DiscordBot { } /** - * Set the EventBridge reference so the bot can dispatch /gsd-verbose commands. + * Set the EventBridge reference so the bot can dispatch /sf-verbose commands. * Called by Daemon after creating the EventBridge. 
*/ setEventBridge(bridge: EventBridge): void { @@ -286,34 +286,34 @@ export class DiscordBot { this.logger.info('command handled', { commandName, userId: interaction.user.id }); switch (commandName) { - case 'gsd-status': { + case 'sf-status': { const sessions = this.sessionManager.getAllSessions(); const content = formatSessionStatus(sessions); interaction.reply({ content, ephemeral: true }).catch((err) => { - this.logger.warn('gsd-status reply failed', { + this.logger.warn('sf-status reply failed', { error: err instanceof Error ? err.message : String(err), }); }); break; } - case 'gsd-start': + case 'sf-start': this.handleGsdStart(interaction).catch((err) => { - this.logger.warn('gsd-start handler error', { + this.logger.warn('sf-start handler error', { error: err instanceof Error ? err.message : String(err), }); }); break; - case 'gsd-stop': + case 'sf-stop': this.handleGsdStop(interaction).catch((err) => { - this.logger.warn('gsd-stop handler error', { + this.logger.warn('sf-stop handler error', { error: err instanceof Error ? err.message : String(err), }); }); break; - case 'gsd-verbose': { + case 'sf-verbose': { if (!this.eventBridge) { interaction.reply({ content: 'Event bridge not available.', ephemeral: true }).catch((err) => { - this.logger.warn('gsd-verbose reply failed', { + this.logger.warn('sf-verbose reply failed', { error: err instanceof Error ? err.message : String(err), }); }); @@ -323,7 +323,7 @@ export class DiscordBot { const channelId = interaction.channelId; this.eventBridge.getVerbosityManager().setLevel(channelId, level); interaction.reply({ content: `Verbosity set to **${level}** for this channel.`, ephemeral: true }).catch((err) => { - this.logger.warn('gsd-verbose reply failed', { + this.logger.warn('sf-verbose reply failed', { error: err instanceof Error ? 
err.message : String(err), }); }); @@ -340,12 +340,12 @@ export class DiscordBot { } // --------------------------------------------------------------------------- - // Private: /gsd-start handler + // Private: /sf-start handler // --------------------------------------------------------------------------- private async handleGsdStart(interaction: import('discord.js').ChatInputCommandInteraction): Promise { await interaction.deferReply({ ephemeral: true }); - this.logger.info('gsd-start: scanning projects'); + this.logger.info('sf-start: scanning projects'); if (!this.scanProjects) { await interaction.editReply({ content: 'Project scanning not available.' }); @@ -356,7 +356,7 @@ export class DiscordBot { try { projects = await this.scanProjects(); } catch (err) { - this.logger.error('gsd-start: scan failed', { + this.logger.error('sf-start: scan failed', { error: err instanceof Error ? err.message : String(err), }); await interaction.editReply({ content: 'Failed to scan for projects.' }); @@ -371,7 +371,7 @@ export class DiscordBot { // Discord select menus support max 25 options const truncated = projects.slice(0, 25); const select = new StringSelectMenuBuilder() - .setCustomId('gsd-start-select') + .setCustomId('sf-start-select') .setPlaceholder('Select a project to start') .addOptions( truncated.map((p) => ({ @@ -395,7 +395,7 @@ export class DiscordBot { }) as StringSelectMenuInteraction; const projectPath = collected.values[0]; - this.logger.info('gsd-start: project selected', { projectPath }); + this.logger.info('sf-start: project selected', { projectPath }); // Defer the update immediately — startSession can take 10-30s to spawn the SF process, // and Discord's component interaction token expires in 3 seconds without deferral. @@ -409,7 +409,7 @@ export class DiscordBot { }); } catch (err) { const errMsg = err instanceof Error ? 
err.message : String(err); - this.logger.error('gsd-start: startSession failed', { error: errMsg, projectPath }); + this.logger.error('sf-start: startSession failed', { error: errMsg, projectPath }); await interaction.editReply({ content: `❌ Failed to start session: ${errMsg}`, components: [], @@ -417,18 +417,18 @@ export class DiscordBot { } } catch { // Timeout or other collector error - this.logger.info('gsd-start: selection timed out'); + this.logger.info('sf-start: selection timed out'); await interaction.editReply({ content: 'Selection timed out.', components: [] }); } } // --------------------------------------------------------------------------- - // Private: /gsd-stop handler + // Private: /sf-stop handler // --------------------------------------------------------------------------- private async handleGsdStop(interaction: import('discord.js').ChatInputCommandInteraction): Promise { await interaction.deferReply({ ephemeral: true }); - this.logger.info('gsd-stop: listing sessions'); + this.logger.info('sf-stop: listing sessions'); const allSessions = this.sessionManager.getAllSessions(); const activeSessions = allSessions.filter( @@ -443,7 +443,7 @@ export class DiscordBot { // Discord select menus support max 25 options const truncated = activeSessions.slice(0, 25); const select = new StringSelectMenuBuilder() - .setCustomId('gsd-stop-select') + .setCustomId('sf-stop-select') .setPlaceholder('Select a session to stop') .addOptions( truncated.map((s) => ({ @@ -466,7 +466,7 @@ export class DiscordBot { }) as StringSelectMenuInteraction; const sessionId = collected.values[0]; - this.logger.info('gsd-stop: session selected', { sessionId }); + this.logger.info('sf-stop: session selected', { sessionId }); try { await this.sessionManager.cancelSession(sessionId); @@ -476,7 +476,7 @@ export class DiscordBot { }); } catch (err) { const errMsg = err instanceof Error ? 
err.message : String(err); - this.logger.error('gsd-stop: cancelSession failed', { error: errMsg, sessionId }); + this.logger.error('sf-stop: cancelSession failed', { error: errMsg, sessionId }); await collected.update({ content: `❌ Failed to stop session: ${errMsg}`, components: [], @@ -484,7 +484,7 @@ export class DiscordBot { } } catch { // Timeout or other collector error - this.logger.info('gsd-stop: selection timed out'); + this.logger.info('sf-stop: selection timed out'); await interaction.editReply({ content: 'Selection timed out.', components: [] }); } } diff --git a/packages/daemon/src/launchd.test.ts b/packages/daemon/src/launchd.test.ts index f92185344..96ecf073f 100644 --- a/packages/daemon/src/launchd.test.ts +++ b/packages/daemon/src/launchd.test.ts @@ -31,8 +31,8 @@ afterEach(() => { function basePlistOpts(overrides?: Partial): PlistOptions { return { nodePath: '/usr/local/bin/node', - scriptPath: '/usr/local/lib/gsd-daemon/dist/cli.js', - configPath: join(homedir(), '.gsd', 'daemon.yaml'), + scriptPath: '/usr/local/lib/sf-daemon/dist/cli.js', + configPath: join(homedir(), '.sf', 'daemon.yaml'), ...overrides, }; } @@ -69,9 +69,9 @@ describe('generatePlist', () => { assert.ok(xml.includes('')); }); - it('includes label com.gsd.daemon', () => { + it('includes label com.sf.daemon', () => { const xml = generatePlist(basePlistOpts()); - assert.ok(xml.includes('com.gsd.daemon')); + assert.ok(xml.includes('com.sf.daemon')); }); it('uses the absolute node path from opts', () => { @@ -149,8 +149,8 @@ describe('generatePlist', () => { // ---------- getPlistPath ---------- describe('getPlistPath', () => { - it('returns ~/Library/LaunchAgents/com.gsd.daemon.plist', () => { - const expected = join(homedir(), 'Library', 'LaunchAgents', 'com.gsd.daemon.plist'); + it('returns ~/Library/LaunchAgents/com.sf.daemon.plist', () => { + const expected = join(homedir(), 'Library', 'LaunchAgents', 'com.sf.daemon.plist'); assert.equal(getPlistPath(), expected); }); }); @@ 
-193,7 +193,7 @@ describe('install', () => { // (install is a thin wrapper around generatePlist + writeFile + launchctl) const xml = generatePlist(basePlistOpts()); assert.ok(xml.includes('Label')); - assert.ok(xml.includes('com.gsd.daemon')); + assert.ok(xml.includes('com.sf.daemon')); }); it('handles idempotent install (unloads first if plist exists)', () => { @@ -266,7 +266,7 @@ describe('uninstall', () => { describe('status', () => { it('parses running daemon output (PID present)', () => { const mockRun: RunCommandFn = (_cmd: string) => { - return '{\n\t"PID" = 1234;\n\t"Label" = "com.gsd.daemon";\n}\nPID\tStatus\tLabel\n1234\t0\tcom.gsd.daemon\n'; + return '{\n\t"PID" = 1234;\n\t"Label" = "com.sf.daemon";\n}\nPID\tStatus\tLabel\n1234\t0\tcom.sf.daemon\n'; }; const result = status(mockRun); @@ -277,7 +277,7 @@ describe('status', () => { it('parses stopped daemon output (no PID)', () => { const mockRun: RunCommandFn = (_cmd: string) => { - return 'PID\tStatus\tLabel\n-\t78\tcom.gsd.daemon\n'; + return 'PID\tStatus\tLabel\n-\t78\tcom.sf.daemon\n'; }; const result = status(mockRun); @@ -288,7 +288,7 @@ describe('status', () => { it('returns not-registered when launchctl list fails', () => { const mockRun: RunCommandFn = (_cmd: string) => { - throw new Error('Could not find service "com.gsd.daemon" in domain for port'); + throw new Error('Could not find service "com.sf.daemon" in domain for port'); }; const result = status(mockRun); @@ -299,7 +299,7 @@ describe('status', () => { it('returns structured result with all fields', () => { const mockRun: RunCommandFn = (_cmd: string) => { - return 'PID\tStatus\tLabel\n5678\t0\tcom.gsd.daemon\n'; + return 'PID\tStatus\tLabel\n5678\t0\tcom.sf.daemon\n'; }; const result = status(mockRun); @@ -311,10 +311,10 @@ describe('status', () => { it('parses JSON-style dict output (newer macOS)', () => { const mockRun: RunCommandFn = (_cmd: string) => { return `{ -\t"StandardOutPath" = "/Users/me/.gsd/daemon-stdout.log"; 
+\t"StandardOutPath" = "/Users/me/.sf/daemon-stdout.log"; \t"LimitLoadToSessionType" = "Aqua"; -\t"StandardErrorPath" = "/Users/me/.gsd/daemon-stderr.log"; -\t"Label" = "com.gsd.daemon"; +\t"StandardErrorPath" = "/Users/me/.sf/daemon-stderr.log"; +\t"Label" = "com.sf.daemon"; \t"OnDemand" = true; \t"LastExitStatus" = 0; \t"PID" = 23802; @@ -331,7 +331,7 @@ describe('status', () => { it('parses JSON-style dict output when daemon stopped (no PID key)', () => { const mockRun: RunCommandFn = (_cmd: string) => { return `{ -\t"Label" = "com.gsd.daemon"; +\t"Label" = "com.sf.daemon"; \t"LastExitStatus" = 1; \t"OnDemand" = true; };`; diff --git a/packages/daemon/src/launchd.ts b/packages/daemon/src/launchd.ts index 92916b155..a2b19b283 100644 --- a/packages/daemon/src/launchd.ts +++ b/packages/daemon/src/launchd.ts @@ -34,7 +34,7 @@ export type RunCommandFn = (cmd: string) => string; // --------------- constants --------------- -const LABEL = 'com.gsd.daemon'; +const LABEL = 'com.sf.daemon'; const PLIST_FILENAME = `${LABEL}.plist`; // --------------- helpers --------------- @@ -71,8 +71,8 @@ function buildEnvPath(nodePath: string): string { export function generatePlist(opts: PlistOptions): string { const home = homedir(); const workDir = opts.workingDirectory ?? home; - const stdoutPath = opts.stdoutPath ?? resolve(home, '.gsd', 'daemon-stdout.log'); - const stderrPath = opts.stderrPath ?? resolve(home, '.gsd', 'daemon-stderr.log'); + const stdoutPath = opts.stdoutPath ?? resolve(home, '.sf', 'daemon-stdout.log'); + const stderrPath = opts.stderrPath ?? resolve(home, '.sf', 'daemon-stderr.log'); const envPath = buildEnvPath(opts.nodePath); // Forward ANTHROPIC_API_KEY so the orchestrator LLM can authenticate. 
diff --git a/packages/daemon/src/orchestrator.test.ts b/packages/daemon/src/orchestrator.test.ts index 21ea82ff5..00c072bc5 100644 --- a/packages/daemon/src/orchestrator.test.ts +++ b/packages/daemon/src/orchestrator.test.ts @@ -1,5 +1,5 @@ /** - * Tests for Orchestrator — LLM agent for #gsd-control channel. + * Tests for Orchestrator — LLM agent for #sf-control channel. * * Uses a MockAnthropicClient that simulates messages.create() responses, * allowing tool execution and conversation flow testing without real API calls. @@ -226,7 +226,7 @@ function makeOrchestrator(opts?: { if (opts?.sessions) sessionManager.sessions = opts.sessions; const projects: ProjectInfo[] = opts?.projects ?? [ - { name: 'alpha', path: '/home/user/alpha', markers: ['git', 'node', 'gsd'], lastModified: Date.now() }, + { name: 'alpha', path: '/home/user/alpha', markers: ['git', 'node', 'sf'], lastModified: Date.now() }, { name: 'bravo', path: '/home/user/bravo', markers: ['git', 'rust'], lastModified: Date.now() }, ]; @@ -568,7 +568,7 @@ describe('Orchestrator', () => { const mockClient = new MockAnthropicClient( MockAnthropicClient.toolThenTextHandler( 'start_session', - { projectPath: '/p', command: '/gsd quick fix tests' }, + { projectPath: '/p', command: '/sf quick fix tests' }, 'Started', ), ); @@ -577,7 +577,7 @@ describe('Orchestrator', () => { await orchestrator.handleMessage(msg); assert.equal(sessionManager.startSessionCalls.length, 1); - assert.equal(sessionManager.startSessionCalls[0]!.command, '/gsd quick fix tests'); + assert.equal(sessionManager.startSessionCalls[0]!.command, '/sf quick fix tests'); }); }); diff --git a/packages/daemon/src/orchestrator.ts b/packages/daemon/src/orchestrator.ts index a945554b8..d80e2b243 100644 --- a/packages/daemon/src/orchestrator.ts +++ b/packages/daemon/src/orchestrator.ts @@ -1,5 +1,5 @@ /** - * Orchestrator — LLM-powered agent for the #gsd-control Discord channel. + * Orchestrator — LLM-powered agent for the #sf-control Discord channel. 
* * Receives Discord messages, maintains conversation history, calls the * Anthropic messages API with 5 tool definitions (list_projects, start_session, @@ -35,7 +35,7 @@ function resolveAnthropicApiKey(): string { const apiKey = process.env.ANTHROPIC_API_KEY; if (!apiKey) { throw new Error( - 'ANTHROPIC_API_KEY is required. Set it in your environment or run `gsd config`.', + 'ANTHROPIC_API_KEY is required. Set it in your environment or run `sf config`.', ); } return apiKey; @@ -84,7 +84,7 @@ Response guidelines: const TOOLS: Tool[] = [ { name: 'list_projects', - description: 'List all detected projects across configured scan roots. Returns project names, paths, and detected markers (git, node, gsd, etc.).', + description: 'List all detected projects across configured scan roots. Returns project names, paths, and detected markers (git, node, sf, etc.).', input_schema: { type: 'object' as const, properties: {}, @@ -93,12 +93,12 @@ const TOOLS: Tool[] = [ }, { name: 'start_session', - description: 'Start a new SF auto-mode session for a project. Provide the absolute project path. Optionally provide a command to run instead of the default "/gsd auto".', + description: 'Start a new SF auto-mode session for a project. Provide the absolute project path. 
Optionally provide a command to run instead of the default "/sf auto".', input_schema: { type: 'object' as const, properties: { projectPath: { type: 'string', description: 'Absolute path to the project directory' }, - command: { type: 'string', description: 'Optional command to send instead of "/gsd auto"' }, + command: { type: 'string', description: 'Optional command to send instead of "/sf auto"' }, }, required: ['projectPath'], }, diff --git a/packages/daemon/src/project-scanner.test.ts b/packages/daemon/src/project-scanner.test.ts index 6812c3871..0aa0a7c23 100644 --- a/packages/daemon/src/project-scanner.test.ts +++ b/packages/daemon/src/project-scanner.test.ts @@ -31,7 +31,7 @@ function createProject(root: string, name: string, markers: string[]): string { for (const marker of markers) { const markerPath = join(projDir, marker); if (marker.startsWith('.') && !marker.includes('.')) { - // Likely a directory marker (.git, .gsd) + // Likely a directory marker (.git, .sf) mkdirSync(markerPath, { recursive: true }); } else { // File marker (package.json, Cargo.toml, etc.) 
@@ -91,7 +91,7 @@ describe('scanForProjects', () => { const root = tmpDir(); cleanupDirs.push(root); - createProject(root, 'full-stack', ['.git', 'package.json', '.gsd']); + createProject(root, 'full-stack', ['.git', 'package.json', '.sf']); const results = await scanForProjects([root]); @@ -99,7 +99,7 @@ describe('scanForProjects', () => { assert.equal(results[0]!.markers.length, 3); assert.ok(results[0]!.markers.includes('git')); assert.ok(results[0]!.markers.includes('node')); - assert.ok(results[0]!.markers.includes('gsd')); + assert.ok(results[0]!.markers.includes('sf')); }); it('returns results sorted alphabetically by name', async () => { @@ -181,7 +181,7 @@ describe('scanForProjects', () => { createProject(root, 'git-proj', ['.git']); createProject(root, 'node-proj', ['package.json']); - createProject(root, 'gsd-proj', ['.gsd']); + createProject(root, 'sf-proj', ['.sf']); createProject(root, 'rust-proj', ['Cargo.toml']); createProject(root, 'python-proj', ['pyproject.toml']); createProject(root, 'go-proj', ['go.mod']); @@ -193,7 +193,7 @@ describe('scanForProjects', () => { const byName = new Map(results.map(r => [r.name, r])); assert.deepEqual(byName.get('git-proj')!.markers, ['git']); assert.deepEqual(byName.get('node-proj')!.markers, ['node']); - assert.deepEqual(byName.get('gsd-proj')!.markers, ['gsd']); + assert.deepEqual(byName.get('sf-proj')!.markers, ['sf']); assert.deepEqual(byName.get('rust-proj')!.markers, ['rust']); assert.deepEqual(byName.get('python-proj')!.markers, ['python']); assert.deepEqual(byName.get('go-proj')!.markers, ['go']); diff --git a/packages/daemon/src/project-scanner.ts b/packages/daemon/src/project-scanner.ts index 3eb9b5926..5cb093815 100644 --- a/packages/daemon/src/project-scanner.ts +++ b/packages/daemon/src/project-scanner.ts @@ -14,7 +14,7 @@ import type { ProjectInfo, ProjectMarker } from './types.js'; const MARKER_MAP: ReadonlyMap = new Map([ ['.git', 'git'], ['package.json', 'node'], - ['.gsd', 'gsd'], + ['.sf', 
'sf'], ['Cargo.toml', 'rust'], ['pyproject.toml', 'python'], ['go.mod', 'go'], diff --git a/packages/daemon/src/session-manager.test.ts b/packages/daemon/src/session-manager.test.ts index 79584cba9..5e9748239 100644 --- a/packages/daemon/src/session-manager.test.ts +++ b/packages/daemon/src/session-manager.test.ts @@ -160,7 +160,7 @@ class TestableSessionManager extends SessionManager { }); // Kick off auto-mode - const command = options.command ?? '/gsd auto'; + const command = options.command ?? '/sf auto'; await client.prompt(command); // Emit lifecycle events (matching parent behavior) @@ -801,11 +801,11 @@ describe('SessionManager', () => { it('sends custom command when provided', async () => { const { manager } = createManager(); - await manager.startSession({ projectDir: '/tmp/custom-cmd', command: '/gsd quick fix-typo' }); + await manager.startSession({ projectDir: '/tmp/custom-cmd', command: '/sf quick fix-typo' }); const client = manager.lastClient!; - assert.ok(client.prompted.includes('/gsd quick fix-typo')); - assert.ok(!client.prompted.includes('/gsd auto')); + assert.ok(client.prompted.includes('/sf quick fix-typo')); + assert.ok(!client.prompted.includes('/sf auto')); }); // ---- getSessionByDir returns session by directory lookup ---- diff --git a/packages/daemon/src/session-manager.ts b/packages/daemon/src/session-manager.ts index 54c4260a5..40589f5d7 100644 --- a/packages/daemon/src/session-manager.ts +++ b/packages/daemon/src/session-manager.ts @@ -71,7 +71,7 @@ export class SessionManager extends EventEmitter { * * Rejects if a session already exists for this projectDir. * Creates an RpcClient, starts the process, performs the v2 init handshake, - * wires event tracking, and sends '/gsd auto' to begin execution. + * wires event tracking, and sends '/sf auto' to begin execution. 
*/ async startSession(options: StartSessionOptions): Promise { const { projectDir } = options; @@ -140,7 +140,7 @@ export class SessionManager extends EventEmitter { }); // Kick off auto-mode - const command = options.command ?? '/gsd auto'; + const command = options.command ?? '/sf auto'; await client.prompt(command); this.logger.info('session started', { sessionId: session.sessionId, projectDir: resolvedDir }); @@ -278,21 +278,21 @@ export class SessionManager extends EventEmitter { * Resolve the SF CLI path. * * 1. SF_CLI_PATH env var (highest priority) - * 2. `which gsd` → resolve to the actual dist/cli.js + * 2. `which sf` → resolve to the actual dist/cli.js */ static resolveCLIPath(): string { const envPath = process.env['SF_CLI_PATH']; if (envPath) return resolve(envPath); try { - const gsdBin = execSync('which gsd', { encoding: 'utf-8' }).trim(); + const gsdBin = execSync('which sf', { encoding: 'utf-8' }).trim(); if (gsdBin) return resolve(gsdBin); } catch { // which failed } throw new Error( - 'Cannot find SF CLI. Set SF_CLI_PATH environment variable or ensure `gsd` is in PATH.' + 'Cannot find SF CLI. Set SF_CLI_PATH environment variable or ensure `sf` is in PATH.' 
); } diff --git a/packages/daemon/src/types.ts b/packages/daemon/src/types.ts index 4fd16c5ea..9db856878 100644 --- a/packages/daemon/src/types.ts +++ b/packages/daemon/src/types.ts @@ -137,7 +137,7 @@ export interface CostAccumulator { // --------------------------------------------------------------------------- /** Marker types detectable by the project scanner */ -export type ProjectMarker = 'git' | 'node' | 'gsd' | 'rust' | 'python' | 'go'; +export type ProjectMarker = 'git' | 'node' | 'sf' | 'rust' | 'python' | 'go'; export interface ProjectInfo { /** Directory name (basename) */ @@ -161,7 +161,7 @@ export interface StartSessionOptions { /** Absolute path to the project directory */ projectDir: string; - /** Command to send after '/gsd auto' (default: none) */ + /** Command to send after '/sf auto' (default: none) */ command?: string; /** Model ID override */ diff --git a/packages/mcp-server/src/cli.ts b/packages/mcp-server/src/cli.ts index aaea896cb..adb96affc 100644 --- a/packages/mcp-server/src/cli.ts +++ b/packages/mcp-server/src/cli.ts @@ -29,7 +29,7 @@ async function main(): Promise { async function cleanup(): Promise { if (cleaningUp) return; cleaningUp = true; - process.stderr.write('[gsd-mcp-server] Shutting down...\n'); + process.stderr.write('[sf-mcp-server] Shutting down...\n'); try { await sessionManager.cleanup(); } catch { @@ -52,10 +52,10 @@ async function main(): Promise { // Connect and start serving try { await server.connect(transport); - process.stderr.write('[gsd-mcp-server] MCP server started on stdio\n'); + process.stderr.write('[sf-mcp-server] MCP server started on stdio\n'); } catch (err) { process.stderr.write( - `[gsd-mcp-server] Fatal: failed to start — ${err instanceof Error ? err.message : String(err)}\n` + `[sf-mcp-server] Fatal: failed to start — ${err instanceof Error ? 
err.message : String(err)}\n` ); await sessionManager.cleanup(); process.exit(1); @@ -64,7 +64,7 @@ async function main(): Promise { main().catch((err) => { process.stderr.write( - `[gsd-mcp-server] Fatal: ${err instanceof Error ? err.message : String(err)}\n` + `[sf-mcp-server] Fatal: ${err instanceof Error ? err.message : String(err)}\n` ); process.exit(1); }); diff --git a/packages/mcp-server/src/mcp-server.test.ts b/packages/mcp-server/src/mcp-server.test.ts index a0f510d4e..b93d78f92 100644 --- a/packages/mcp-server/src/mcp-server.test.ts +++ b/packages/mcp-server/src/mcp-server.test.ts @@ -172,7 +172,7 @@ class TestableSessionManager extends SessionManager { }); // Kick off auto-mode - const command = options.command ?? '/gsd auto'; + const command = options.command ?? '/sf auto'; await client.prompt(command); return session.sessionId; @@ -227,7 +227,7 @@ describe('SessionManager', () => { }); it('startSession creates session and returns sessionId', async () => { - const sessionId = await sm.startSession('/tmp/test-project', { cliPath: '/usr/bin/gsd' }); + const sessionId = await sm.startSession('/tmp/test-project', { cliPath: '/usr/bin/sf' }); assert.equal(sessionId, 'mock-session-001'); const session = sm.getSession(sessionId); @@ -236,22 +236,22 @@ describe('SessionManager', () => { assert.equal(session.projectDir, resolve('/tmp/test-project')); }); - it('startSession sends /gsd auto by default', async () => { - await sm.startSession('/tmp/test-prompt', { cliPath: '/usr/bin/gsd' }); + it('startSession sends /sf auto by default', async () => { + await sm.startSession('/tmp/test-prompt', { cliPath: '/usr/bin/sf' }); assert.ok(sm.lastClient); - assert.deepEqual(sm.lastClient.prompted, ['/gsd auto']); + assert.deepEqual(sm.lastClient.prompted, ['/sf auto']); }); it('startSession sends custom command when provided', async () => { - await sm.startSession('/tmp/test-cmd', { cliPath: '/usr/bin/gsd', command: '/gsd auto --resume' }); + await 
sm.startSession('/tmp/test-cmd', { cliPath: '/usr/bin/sf', command: '/sf auto --resume' }); assert.ok(sm.lastClient); - assert.deepEqual(sm.lastClient.prompted, ['/gsd auto --resume']); + assert.deepEqual(sm.lastClient.prompted, ['/sf auto --resume']); }); it('startSession rejects duplicate projectDir', async () => { - await sm.startSession('/tmp/dup-test', { cliPath: '/usr/bin/gsd' }); + await sm.startSession('/tmp/dup-test', { cliPath: '/usr/bin/sf' }); await assert.rejects( - () => sm.startSession('/tmp/dup-test', { cliPath: '/usr/bin/gsd' }), + () => sm.startSession('/tmp/dup-test', { cliPath: '/usr/bin/sf' }), (err: Error) => { assert.ok(err.message.includes('Session already active')); return true; @@ -261,7 +261,7 @@ describe('SessionManager', () => { it('startSession rejects empty projectDir', async () => { await assert.rejects( - () => sm.startSession('', { cliPath: '/usr/bin/gsd' }), + () => sm.startSession('', { cliPath: '/usr/bin/sf' }), (err: Error) => { assert.ok(err.message.includes('projectDir is required')); return true; @@ -273,7 +273,7 @@ describe('SessionManager', () => { sm.nextStartError = new Error('spawn failed'); await assert.rejects( - () => sm.startSession('/tmp/fail-start', { cliPath: '/usr/bin/gsd' }), + () => sm.startSession('/tmp/fail-start', { cliPath: '/usr/bin/sf' }), (err: Error) => { assert.ok(err.message.includes('Failed to start session')); assert.ok(err.message.includes('spawn failed')); @@ -286,7 +286,7 @@ describe('SessionManager', () => { sm.nextInitError = new Error('handshake failed'); await assert.rejects( - () => sm.startSession('/tmp/fail-init', { cliPath: '/usr/bin/gsd' }), + () => sm.startSession('/tmp/fail-init', { cliPath: '/usr/bin/sf' }), (err: Error) => { assert.ok(err.message.includes('Failed to start session')); assert.ok(err.message.includes('handshake failed')); @@ -301,14 +301,14 @@ describe('SessionManager', () => { }); it('getSessionByDir returns session for known dir', async () => { - await 
sm.startSession('/tmp/by-dir', { cliPath: '/usr/bin/gsd' }); + await sm.startSession('/tmp/by-dir', { cliPath: '/usr/bin/sf' }); const session = sm.getSessionByDir('/tmp/by-dir'); assert.ok(session); assert.equal(session.sessionId, 'mock-session-001'); }); it('resolveBlocker errors when no pending blocker', async () => { - const sessionId = await sm.startSession('/tmp/no-blocker', { cliPath: '/usr/bin/gsd' }); + const sessionId = await sm.startSession('/tmp/no-blocker', { cliPath: '/usr/bin/sf' }); await assert.rejects( () => sm.resolveBlocker(sessionId, 'some response'), (err: Error) => { @@ -329,7 +329,7 @@ describe('SessionManager', () => { }); it('resolveBlocker clears pendingBlocker and sends UI response', async () => { - const sessionId = await sm.startSession('/tmp/blocker-resolve', { cliPath: '/usr/bin/gsd' }); + const sessionId = await sm.startSession('/tmp/blocker-resolve', { cliPath: '/usr/bin/sf' }); const client = sm.lastClient!; // Simulate a blocking UI request event @@ -354,7 +354,7 @@ describe('SessionManager', () => { }); it('cancelSession calls abort + stop on client', async () => { - const sessionId = await sm.startSession('/tmp/cancel-test', { cliPath: '/usr/bin/gsd' }); + const sessionId = await sm.startSession('/tmp/cancel-test', { cliPath: '/usr/bin/sf' }); const client = sm.lastClient!; await sm.cancelSession(sessionId); @@ -377,8 +377,8 @@ describe('SessionManager', () => { }); it('cleanup stops all active sessions', async () => { - await sm.startSession('/tmp/cleanup-1', { cliPath: '/usr/bin/gsd' }); - await sm.startSession('/tmp/cleanup-2', { cliPath: '/usr/bin/gsd' }); + await sm.startSession('/tmp/cleanup-1', { cliPath: '/usr/bin/sf' }); + await sm.startSession('/tmp/cleanup-2', { cliPath: '/usr/bin/sf' }); assert.equal(sm.allClients.length, 2); @@ -390,7 +390,7 @@ describe('SessionManager', () => { }); it('event ring buffer caps at MAX_EVENTS', async () => { - const sessionId = await sm.startSession('/tmp/ring-buffer', { cliPath: 
'/usr/bin/gsd' }); + const sessionId = await sm.startSession('/tmp/ring-buffer', { cliPath: '/usr/bin/sf' }); const client = sm.lastClient!; for (let i = 0; i < MAX_EVENTS + 20; i++) { @@ -404,7 +404,7 @@ describe('SessionManager', () => { }); it('blocker detection: non-fire-and-forget extension_ui_request sets pendingBlocker', async () => { - const sessionId = await sm.startSession('/tmp/blocker-detect', { cliPath: '/usr/bin/gsd' }); + const sessionId = await sm.startSession('/tmp/blocker-detect', { cliPath: '/usr/bin/sf' }); const client = sm.lastClient!; // 'select' is not in FIRE_AND_FORGET_METHODS @@ -423,7 +423,7 @@ describe('SessionManager', () => { }); it('fire-and-forget methods do not set pendingBlocker', async () => { - const sessionId = await sm.startSession('/tmp/fire-forget', { cliPath: '/usr/bin/gsd' }); + const sessionId = await sm.startSession('/tmp/fire-forget', { cliPath: '/usr/bin/sf' }); const client = sm.lastClient!; // 'notify' is fire-and-forget — on its own (no terminal prefix) should not block @@ -440,7 +440,7 @@ describe('SessionManager', () => { }); it('terminal detection: auto-mode stopped sets status to completed', async () => { - const sessionId = await sm.startSession('/tmp/terminal', { cliPath: '/usr/bin/gsd' }); + const sessionId = await sm.startSession('/tmp/terminal', { cliPath: '/usr/bin/sf' }); const client = sm.lastClient!; client.emitEvent({ @@ -455,7 +455,7 @@ describe('SessionManager', () => { }); it('terminal detection with blocked: message sets status to blocked', async () => { - const sessionId = await sm.startSession('/tmp/terminal-blocked', { cliPath: '/usr/bin/gsd' }); + const sessionId = await sm.startSession('/tmp/terminal-blocked', { cliPath: '/usr/bin/sf' }); const client = sm.lastClient!; client.emitEvent({ @@ -471,7 +471,7 @@ describe('SessionManager', () => { }); it('cost tracking: cumulative-max from cost_update events', async () => { - const sessionId = await sm.startSession('/tmp/cost-track', { cliPath: 
'/usr/bin/gsd' }); + const sessionId = await sm.startSession('/tmp/cost-track', { cliPath: '/usr/bin/sf' }); const client = sm.lastClient!; client.emitEvent({ @@ -495,7 +495,7 @@ describe('SessionManager', () => { }); it('getResult returns HeadlessJsonResult-shaped object', async () => { - const sessionId = await sm.startSession('/tmp/result-shape', { cliPath: '/usr/bin/gsd' }); + const sessionId = await sm.startSession('/tmp/result-shape', { cliPath: '/usr/bin/sf' }); const result = sm.getResult(sessionId); assert.equal(result.sessionId, sessionId); @@ -539,9 +539,9 @@ describe('SessionManager.resolveCLIPath', () => { }); it('SF_CLI_PATH env var takes precedence', () => { - process.env['SF_CLI_PATH'] = '/custom/path/to/gsd'; + process.env['SF_CLI_PATH'] = '/custom/path/to/sf'; const result = SessionManager.resolveCLIPath(); - assert.equal(result, resolve('/custom/path/to/gsd')); + assert.equal(result, resolve('/custom/path/to/sf')); }); it('throws when SF_CLI_PATH not set and which fails', () => { @@ -585,13 +585,13 @@ describe('createMcpServer tool registration', () => { }); it('gsd_execute flow returns sessionId on success', async () => { - const sessionId = await sm.startSession('/tmp/tool-exec', { cliPath: '/usr/bin/gsd' }); + const sessionId = await sm.startSession('/tmp/tool-exec', { cliPath: '/usr/bin/sf' }); assert.equal(typeof sessionId, 'string'); assert.ok(sessionId.length > 0); }); it('gsd_status flow returns correct shape', async () => { - const sessionId = await sm.startSession('/tmp/tool-status', { cliPath: '/usr/bin/gsd' }); + const sessionId = await sm.startSession('/tmp/tool-status', { cliPath: '/usr/bin/sf' }); const session = sm.getSession(sessionId)!; assert.equal(typeof session.status, 'string'); @@ -601,7 +601,7 @@ describe('createMcpServer tool registration', () => { }); it('gsd_resolve_blocker flow returns error when no blocker', async () => { - const sessionId = await sm.startSession('/tmp/tool-resolve', { cliPath: '/usr/bin/gsd' }); + 
const sessionId = await sm.startSession('/tmp/tool-resolve', { cliPath: '/usr/bin/sf' }); await assert.rejects( () => sm.resolveBlocker(sessionId, 'fix'), (err: Error) => { @@ -612,7 +612,7 @@ describe('createMcpServer tool registration', () => { }); it('gsd_result flow returns HeadlessJsonResult shape', async () => { - const sessionId = await sm.startSession('/tmp/tool-result', { cliPath: '/usr/bin/gsd' }); + const sessionId = await sm.startSession('/tmp/tool-result', { cliPath: '/usr/bin/sf' }); const result = sm.getResult(sessionId); assert.ok('sessionId' in result); @@ -626,7 +626,7 @@ describe('createMcpServer tool registration', () => { }); it('gsd_cancel flow marks session as cancelled', async () => { - const sessionId = await sm.startSession('/tmp/tool-cancel', { cliPath: '/usr/bin/gsd' }); + const sessionId = await sm.startSession('/tmp/tool-cancel', { cliPath: '/usr/bin/sf' }); await sm.cancelSession(sessionId); const session = sm.getSession(sessionId)!; assert.equal(session.status, 'cancelled'); diff --git a/packages/mcp-server/src/readers/captures.ts b/packages/mcp-server/src/readers/captures.ts index 9c48fd1bd..e184d5dee 100644 --- a/packages/mcp-server/src/readers/captures.ts +++ b/packages/mcp-server/src/readers/captures.ts @@ -86,8 +86,8 @@ export function readCaptures( projectDir: string, filter: 'all' | 'pending' | 'actionable' = 'all', ): CapturesResult { - const gsd = resolveGsdRoot(projectDir); - const capturesPath = resolveRootFile(gsd, 'CAPTURES.md'); + const sf = resolveGsdRoot(projectDir); + const capturesPath = resolveRootFile(sf, 'CAPTURES.md'); if (!existsSync(capturesPath)) { return { captures: [], counts: { total: 0, pending: 0, resolved: 0, actionable: 0 } }; diff --git a/packages/mcp-server/src/readers/doctor-lite.ts b/packages/mcp-server/src/readers/doctor-lite.ts index 8212b7118..ad1197cc0 100644 --- a/packages/mcp-server/src/readers/doctor-lite.ts +++ b/packages/mcp-server/src/readers/doctor-lite.ts @@ -62,7 +62,7 @@ function 
checkProjectLevel(gsdRoot: string, issues: DoctorIssue[]): void { code: 'missing_state_md', scope: 'project', unitId: '', - message: 'STATE.md is missing — run /gsd status to regenerate', + message: 'STATE.md is missing — run /sf status to regenerate', file: statePath, }); } @@ -192,7 +192,7 @@ export function runDoctorLite(projectDir: string, scope?: string): DoctorResult code: 'no_gsd_directory', scope: 'project', unitId: '', - message: 'No .gsd/ directory found — project not initialized', + message: 'No .sf/ directory found — project not initialized', }], counts: { error: 0, warning: 0, info: 1 }, }; diff --git a/packages/mcp-server/src/readers/graph.test.ts b/packages/mcp-server/src/readers/graph.test.ts index 236e61646..2a6ad70f3 100644 --- a/packages/mcp-server/src/readers/graph.test.ts +++ b/packages/mcp-server/src/readers/graph.test.ts @@ -23,7 +23,7 @@ import type { KnowledgeGraph } from './graph.js'; // --------------------------------------------------------------------------- function tmpProject(): string { - const dir = join(tmpdir(), `gsd-graph-test-${randomBytes(4).toString('hex')}`); + const dir = join(tmpdir(), `sf-graph-test-${randomBytes(4).toString('hex')}`); mkdirSync(dir, { recursive: true }); return dir; } @@ -35,7 +35,7 @@ function writeFixture(base: string, relPath: string, content: string): void { } function makeProjectWithArtifacts(projectDir: string): void { - writeFixture(projectDir, '.gsd/STATE.md', [ + writeFixture(projectDir, '.sf/STATE.md', [ '# SF State', '', '**Active Milestone:** M001: Auth System', @@ -51,7 +51,7 @@ function makeProjectWithArtifacts(projectDir: string): void { 'Execute T01 in S01.', ].join('\n')); - writeFixture(projectDir, '.gsd/KNOWLEDGE.md', [ + writeFixture(projectDir, '.sf/KNOWLEDGE.md', [ '# Project Knowledge', '', '## Rules', @@ -74,7 +74,7 @@ function makeProjectWithArtifacts(projectDir: string): void { '| L001 | CI tests failed | Env diff | Added setup script | testing |', ].join('\n')); - 
writeFixture(projectDir, '.gsd/milestones/M001/M001-ROADMAP.md', [ + writeFixture(projectDir, '.sf/milestones/M001/M001-ROADMAP.md', [ '# M001: Auth System', '', '## Vision', @@ -88,7 +88,7 @@ function makeProjectWithArtifacts(projectDir: string): void { '| S01 | Login flow | low | — | 🔄 | Users can log in |', ].join('\n')); - writeFixture(projectDir, '.gsd/milestones/M001/slices/S01/S01-PLAN.md', [ + writeFixture(projectDir, '.sf/milestones/M001/slices/S01/S01-PLAN.md', [ '# S01: Login flow', '', '## Tasks', @@ -103,7 +103,7 @@ function makeProjectWithArtifacts(projectDir: string): void { // --------------------------------------------------------------------------- function writeLearningsFixture(projectDir: string, milestoneId: string, content: string): void { - writeFixture(projectDir, `.gsd/milestones/${milestoneId}/${milestoneId}-LEARNINGS.md`, content); + writeFixture(projectDir, `.sf/milestones/${milestoneId}/${milestoneId}-LEARNINGS.md`, content); } const SAMPLE_LEARNINGS = `--- @@ -174,14 +174,14 @@ describe('buildGraph', () => { it('skips unparseable artifact and does not throw', async () => { const badProject = tmpProject(); // Write a corrupt/minimal STATE.md that is technically valid but empty - writeFixture(badProject, '.gsd/STATE.md', 'not valid gsd state at all \0\0\0'); + writeFixture(badProject, '.sf/STATE.md', 'not valid sf state at all \0\0\0'); // Should not throw const graph = await buildGraph(badProject); assert.ok(graph.nodes.length >= 0); rmSync(badProject, { recursive: true, force: true }); }); - it('returns empty graph for project with no .gsd/ directory', async () => { + it('returns empty graph for project with no .sf/ directory', async () => { const emptyProject = tmpProject(); const graph = await buildGraph(emptyProject); assert.ok(graph.nodes.length >= 0); // no throw @@ -215,7 +215,7 @@ describe('buildGraph — LEARNINGS.md parsing', () => { beforeEach(() => { projectDir = tmpProject(); // Create minimal milestone directory so 
parseMilestoneFiles finds it - mkdirSync(join(projectDir, '.gsd', 'milestones', 'M001'), { recursive: true }); + mkdirSync(join(projectDir, '.sf', 'milestones', 'M001'), { recursive: true }); writeLearningsFixture(projectDir, 'M001', SAMPLE_LEARNINGS); }); @@ -284,7 +284,7 @@ describe('buildGraph — LEARNINGS.md parsing', () => { it('skips LEARNINGS.md gracefully when file is malformed', async () => { const badProject = tmpProject(); - mkdirSync(join(badProject, '.gsd', 'milestones', 'M002'), { recursive: true }); + mkdirSync(join(badProject, '.sf', 'milestones', 'M002'), { recursive: true }); writeLearningsFixture(badProject, 'M002', '\0\0\0 not valid yaml or markdown \0\0\0'); // Must not throw const graph = await buildGraph(badProject); @@ -295,7 +295,7 @@ describe('buildGraph — LEARNINGS.md parsing', () => { it('produces no learning nodes when all sections are empty', async () => { const emptyProject = tmpProject(); - mkdirSync(join(emptyProject, '.gsd', 'milestones', 'M003'), { recursive: true }); + mkdirSync(join(emptyProject, '.sf', 'milestones', 'M003'), { recursive: true }); writeLearningsFixture(emptyProject, 'M003', `--- phase: "M003" phase_name: "Empty" @@ -332,7 +332,7 @@ missing_artifacts: [] it('does not crash when LEARNINGS.md is missing entirely', async () => { const noLearningsProject = tmpProject(); - mkdirSync(join(noLearningsProject, '.gsd', 'milestones', 'M004'), { recursive: true }); + mkdirSync(join(noLearningsProject, '.sf', 'milestones', 'M004'), { recursive: true }); // No LEARNINGS.md file written const graph = await buildGraph(noLearningsProject); assert.ok(graph.nodes.length >= 0); @@ -356,22 +356,22 @@ describe('writeGraph', () => { after(() => rmSync(projectDir, { recursive: true, force: true })); - it('creates graph.json in .gsd/graphs/ after writeGraph()', async () => { - const gsdRoot = join(projectDir, '.gsd'); + it('creates graph.json in .sf/graphs/ after writeGraph()', async () => { + const gsdRoot = join(projectDir, '.sf'); 
await writeGraph(gsdRoot, graph); const graphPath = join(gsdRoot, 'graphs', 'graph.json'); assert.ok(existsSync(graphPath), `Expected ${graphPath} to exist`); }); it('write is atomic — no temp file remains after writeGraph()', async () => { - const gsdRoot = join(projectDir, '.gsd'); + const gsdRoot = join(projectDir, '.sf'); await writeGraph(gsdRoot, graph); const tmpPath = join(gsdRoot, 'graphs', 'graph.tmp.json'); assert.ok(!existsSync(tmpPath), 'Temp file should not exist after successful write'); }); it('written graph.json is valid JSON with nodes and edges', async () => { - const gsdRoot = join(projectDir, '.gsd'); + const gsdRoot = join(projectDir, '.sf'); await writeGraph(gsdRoot, graph); const raw = readFileSync(join(gsdRoot, 'graphs', 'graph.json'), 'utf-8'); const parsed = JSON.parse(raw) as KnowledgeGraph; @@ -401,7 +401,7 @@ describe('graphStatus', () => { it('returns { exists: true, nodeCount, edgeCount, ageHours } when graph exists', async () => { makeProjectWithArtifacts(projectDir); - const gsdRoot = join(projectDir, '.gsd'); + const gsdRoot = join(projectDir, '.sf'); const graph = await buildGraph(projectDir); await writeGraph(gsdRoot, graph); @@ -415,7 +415,7 @@ describe('graphStatus', () => { it('stale = false for a freshly built graph', async () => { makeProjectWithArtifacts(projectDir); - const gsdRoot = join(projectDir, '.gsd'); + const gsdRoot = join(projectDir, '.sf'); const graph = await buildGraph(projectDir); await writeGraph(gsdRoot, graph); @@ -425,7 +425,7 @@ describe('graphStatus', () => { it('stale = true for a graph older than 24h (builtAt backdated)', async () => { makeProjectWithArtifacts(projectDir); - const gsdRoot = join(projectDir, '.gsd'); + const gsdRoot = join(projectDir, '.sf'); mkdirSync(join(gsdRoot, 'graphs'), { recursive: true }); // Write a graph with a builtAt 25 hours ago @@ -456,7 +456,7 @@ describe('graphQuery', () => { before(async () => { projectDir = tmpProject(); makeProjectWithArtifacts(projectDir); - const 
gsdRoot = join(projectDir, '.gsd'); + const gsdRoot = join(projectDir, '.sf'); const graph = await buildGraph(projectDir); await writeGraph(gsdRoot, graph); }); @@ -486,7 +486,7 @@ describe('graphQuery', () => { }); it('budget trims AMBIGUOUS edges first', async () => { - const gsdRoot = join(projectDir, '.gsd'); + const gsdRoot = join(projectDir, '.sf'); // Write a graph with mixed confidence edges const mixedGraph: KnowledgeGraph = { builtAt: new Date().toISOString(), @@ -523,7 +523,7 @@ describe('graphDiff', () => { beforeEach(async () => { projectDir = tmpProject(); makeProjectWithArtifacts(projectDir); - const gsdRoot = join(projectDir, '.gsd'); + const gsdRoot = join(projectDir, '.sf'); const graph = await buildGraph(projectDir); await writeGraph(gsdRoot, graph); }); @@ -531,7 +531,7 @@ describe('graphDiff', () => { afterEach(() => rmSync(projectDir, { recursive: true, force: true })); it('returns empty diff when comparing graph to itself (snapshot = current)', async () => { - const gsdRoot = join(projectDir, '.gsd'); + const gsdRoot = join(projectDir, '.sf'); await writeSnapshot(gsdRoot); const diff = await graphDiff(projectDir); assert.ok(Array.isArray(diff.nodes.added)); @@ -542,7 +542,7 @@ describe('graphDiff', () => { }); it('returns added nodes when a new node appears after snapshot', async () => { - const gsdRoot = join(projectDir, '.gsd'); + const gsdRoot = join(projectDir, '.sf'); // Take snapshot of the original graph await writeSnapshot(gsdRoot); @@ -561,7 +561,7 @@ describe('graphDiff', () => { }); it('returns removed nodes when a node disappears after snapshot', async () => { - const gsdRoot = join(projectDir, '.gsd'); + const gsdRoot = join(projectDir, '.sf'); // Create snapshot with a node that won't exist in current graph const snapshotGraph: KnowledgeGraph = { builtAt: new Date().toISOString(), @@ -592,7 +592,7 @@ describe('graphDiff', () => { }); it('writeSnapshot creates .last-build-snapshot.json with snapshotAt', async () => { - const 
gsdRoot = join(projectDir, '.gsd'); + const gsdRoot = join(projectDir, '.sf'); await writeSnapshot(gsdRoot); const snapshotPath = join(gsdRoot, 'graphs', '.last-build-snapshot.json'); assert.ok(existsSync(snapshotPath)); diff --git a/packages/mcp-server/src/readers/graph.ts b/packages/mcp-server/src/readers/graph.ts index 165fcaeb2..4574a025f 100644 --- a/packages/mcp-server/src/readers/graph.ts +++ b/packages/mcp-server/src/readers/graph.ts @@ -4,7 +4,7 @@ /** * Knowledge Graph for SF projects. * - * Parses .gsd/ artifacts (STATE.md, milestone ROADMAPs, slice PLANs, + * Parses .sf/ artifacts (STATE.md, milestone ROADMAPs, slice PLANs, * KNOWLEDGE.md) into a graph of nodes and edges. Parse errors in any * single artifact are caught and never propagate — the artifact is skipped * and the rest of the graph is returned. @@ -537,7 +537,7 @@ function parseLearningsSection( // --------------------------------------------------------------------------- /** - * Build a KnowledgeGraph by parsing all .gsd/ artifacts. + * Build a KnowledgeGraph by parsing all .sf/ artifacts. * * Parse errors in any single artifact are caught — the artifact is skipped * and never causes buildGraph() to throw. @@ -590,7 +590,7 @@ export async function buildGraph(projectDir: string): Promise { // --------------------------------------------------------------------------- /** - * Write the graph to .gsd/graphs/graph.json atomically. + * Write the graph to .sf/graphs/graph.json atomically. * * Writes to graph.tmp.json first, then renames to graph.json. * Creates the graphs/ directory if it does not exist. 
diff --git a/packages/mcp-server/src/readers/knowledge.ts b/packages/mcp-server/src/readers/knowledge.ts index b6fe3c85a..89d127ecc 100644 --- a/packages/mcp-server/src/readers/knowledge.ts +++ b/packages/mcp-server/src/readers/knowledge.ts @@ -90,8 +90,8 @@ function parseKnowledgeMarkdown(content: string): KnowledgeEntry[] { // --------------------------------------------------------------------------- export function readKnowledge(projectDir: string): KnowledgeResult { - const gsd = resolveGsdRoot(projectDir); - const knowledgePath = resolveRootFile(gsd, 'KNOWLEDGE.md'); + const sf = resolveGsdRoot(projectDir); + const knowledgePath = resolveRootFile(sf, 'KNOWLEDGE.md'); if (!existsSync(knowledgePath)) { return { entries: [], counts: { rules: 0, patterns: 0, lessons: 0 } }; diff --git a/packages/mcp-server/src/readers/metrics.ts b/packages/mcp-server/src/readers/metrics.ts index f0222eccc..51af8f105 100644 --- a/packages/mcp-server/src/readers/metrics.ts +++ b/packages/mcp-server/src/readers/metrics.ts @@ -72,10 +72,10 @@ function parseMetricsJson(content: string): MetricsUnit[] { // --------------------------------------------------------------------------- export function readHistory(projectDir: string, limit?: number): HistoryResult { - const gsd = resolveGsdRoot(projectDir); + const sf = resolveGsdRoot(projectDir); // metrics.json (primary) - const metricsPath = resolveRootFile(gsd, 'metrics.json'); + const metricsPath = resolveRootFile(sf, 'metrics.json'); let units: MetricsUnit[] = []; if (existsSync(metricsPath)) { diff --git a/packages/mcp-server/src/readers/paths.ts b/packages/mcp-server/src/readers/paths.ts index d8987e2dc..49fa75580 100644 --- a/packages/mcp-server/src/readers/paths.ts +++ b/packages/mcp-server/src/readers/paths.ts @@ -1,4 +1,4 @@ -// SF MCP Server — .gsd/ directory resolution +// SF MCP Server — .sf/ directory resolution // Copyright (c) 2026 Jeremy McSpadden import { existsSync, statSync, readdirSync } from 'node:fs'; @@ -6,19 +6,19 
@@ import { join, resolve, dirname, basename } from 'node:path'; import { execFileSync } from 'node:child_process'; /** - * Resolve the .gsd/ root directory for a project. + * Resolve the .sf/ root directory for a project. * * Probes in order: - * 1. projectDir/.gsd (fast path) - * 2. git repo root/.gsd + * 1. projectDir/.sf (fast path) + * 2. git repo root/.sf * 3. Walk up from projectDir - * 4. Fallback: projectDir/.gsd (even if missing — for init) + * 4. Fallback: projectDir/.sf (even if missing — for init) */ export function resolveGsdRoot(projectDir: string): string { const resolved = resolve(projectDir); - // Fast path: .gsd/ in the given directory - const direct = join(resolved, '.gsd'); + // Fast path: .sf/ in the given directory + const direct = join(resolved, '.sf'); if (existsSync(direct) && statSync(direct).isDirectory()) { return direct; } @@ -30,7 +30,7 @@ export function resolveGsdRoot(projectDir: string): string { encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'], }).trim(); - const gitGsd = join(gitRoot, '.gsd'); + const gitGsd = join(gitRoot, '.sf'); if (existsSync(gitGsd) && statSync(gitGsd).isDirectory()) { return gitGsd; } @@ -41,7 +41,7 @@ export function resolveGsdRoot(projectDir: string): string { // Walk up from projectDir let dir = resolved; while (dir !== dirname(dir)) { - const candidate = join(dir, '.gsd'); + const candidate = join(dir, '.sf'); if (existsSync(candidate) && statSync(candidate).isDirectory()) { return candidate; } @@ -52,7 +52,7 @@ export function resolveGsdRoot(projectDir: string): string { return direct; } -/** Resolve path to a .gsd/ root file (STATE.md, KNOWLEDGE.md, etc.) */ +/** Resolve path to a .sf/ root file (STATE.md, KNOWLEDGE.md, etc.) 
*/ export function resolveRootFile(gsdRoot: string, name: string): string { return join(gsdRoot, name); } diff --git a/packages/mcp-server/src/readers/readers.test.ts b/packages/mcp-server/src/readers/readers.test.ts index df7453ed5..902c0a10e 100644 --- a/packages/mcp-server/src/readers/readers.test.ts +++ b/packages/mcp-server/src/readers/readers.test.ts @@ -20,7 +20,7 @@ import { runDoctorLite } from './doctor-lite.js'; // --------------------------------------------------------------------------- function tmpProject(): string { - const dir = join(tmpdir(), `gsd-mcp-test-${randomBytes(4).toString('hex')}`); + const dir = join(tmpdir(), `sf-mcp-test-${randomBytes(4).toString('hex')}`); mkdirSync(dir, { recursive: true }); return dir; } @@ -41,7 +41,7 @@ describe('readProgress', () => { before(() => { projectDir = tmpProject(); - writeFixture(projectDir, '.gsd/STATE.md', `# SF State + writeFixture(projectDir, '.sf/STATE.md', `# SF State **Active Milestone:** M002: Auth System **Active Slice:** S01: Login flow @@ -64,16 +64,16 @@ Execute T02 in S01 — implement token refresh. `); // Create filesystem structure - const m1 = '.gsd/milestones/M001/slices/S01/tasks'; + const m1 = '.sf/milestones/M001/slices/S01/tasks'; writeFixture(projectDir, `${m1}/T01-PLAN.md`, '# T01'); writeFixture(projectDir, `${m1}/T01-SUMMARY.md`, '# T01 done'); - const m2 = '.gsd/milestones/M002/slices/S01/tasks'; + const m2 = '.sf/milestones/M002/slices/S01/tasks'; writeFixture(projectDir, `${m2}/T01-PLAN.md`, '# T01'); writeFixture(projectDir, `${m2}/T01-SUMMARY.md`, '# T01 done'); writeFixture(projectDir, `${m2}/T02-PLAN.md`, '# T02'); - mkdirSync(join(projectDir, '.gsd/milestones/M003'), { recursive: true }); + mkdirSync(join(projectDir, '.sf/milestones/M003'), { recursive: true }); }); after(() => rmSync(projectDir, { recursive: true, force: true })); @@ -126,7 +126,7 @@ Execute T02 in S01 — implement token refresh. 
assert.ok(result.nextAction.includes('T02')); }); - it('returns defaults for missing .gsd/', () => { + it('returns defaults for missing .sf/', () => { const empty = tmpProject(); const result = readProgress(empty); assert.equal(result.phase, 'unknown'); @@ -145,8 +145,8 @@ describe('readRoadmap', () => { before(() => { projectDir = tmpProject(); - writeFixture(projectDir, '.gsd/milestones/M001/M001-CONTEXT.md', '# M001: Core Setup\n'); - writeFixture(projectDir, '.gsd/milestones/M001/M001-ROADMAP.md', `# M001: Core Setup + writeFixture(projectDir, '.sf/milestones/M001/M001-CONTEXT.md', '# M001: Core Setup\n'); + writeFixture(projectDir, '.sf/milestones/M001/M001-ROADMAP.md', `# M001: Core Setup ## Vision @@ -160,27 +160,27 @@ Build the foundation for the project. | S02 | API endpoints | medium | S01 | 🟫 | REST API live | `); - writeFixture(projectDir, '.gsd/milestones/M001/slices/S01/S01-PLAN.md', `# S01: Database schema + writeFixture(projectDir, '.sf/milestones/M001/slices/S01/S01-PLAN.md', `# S01: Database schema ## Tasks - [x] **T01: Create migrations** — Set up schema - [x] **T02: Seed data** — Initial seed `); - writeFixture(projectDir, '.gsd/milestones/M001/slices/S01/tasks/T01-PLAN.md', '# T01'); - writeFixture(projectDir, '.gsd/milestones/M001/slices/S01/tasks/T01-SUMMARY.md', '# T01 done'); - writeFixture(projectDir, '.gsd/milestones/M001/slices/S01/tasks/T02-PLAN.md', '# T02'); - writeFixture(projectDir, '.gsd/milestones/M001/slices/S01/tasks/T02-SUMMARY.md', '# T02 done'); + writeFixture(projectDir, '.sf/milestones/M001/slices/S01/tasks/T01-PLAN.md', '# T01'); + writeFixture(projectDir, '.sf/milestones/M001/slices/S01/tasks/T01-SUMMARY.md', '# T01 done'); + writeFixture(projectDir, '.sf/milestones/M001/slices/S01/tasks/T02-PLAN.md', '# T02'); + writeFixture(projectDir, '.sf/milestones/M001/slices/S01/tasks/T02-SUMMARY.md', '# T02 done'); - writeFixture(projectDir, '.gsd/milestones/M001/slices/S02/S02-PLAN.md', `# S02: API endpoints + 
writeFixture(projectDir, '.sf/milestones/M001/slices/S02/S02-PLAN.md', `# S02: API endpoints ## Tasks - [ ] **T01: Auth routes** — Implement auth - [ ] **T02: User routes** — CRUD users `); - writeFixture(projectDir, '.gsd/milestones/M001/slices/S02/tasks/T01-PLAN.md', '# T01'); - writeFixture(projectDir, '.gsd/milestones/M001/slices/S02/tasks/T02-PLAN.md', '# T02'); + writeFixture(projectDir, '.sf/milestones/M001/slices/S02/tasks/T01-PLAN.md', '# T01'); + writeFixture(projectDir, '.sf/milestones/M001/slices/S02/tasks/T02-PLAN.md', '# T02'); }); after(() => rmSync(projectDir, { recursive: true, force: true })); @@ -235,7 +235,7 @@ describe('readHistory', () => { before(() => { projectDir = tmpProject(); - writeFixture(projectDir, '.gsd/metrics.json', JSON.stringify({ + writeFixture(projectDir, '.sf/metrics.json', JSON.stringify({ version: 1, projectStartedAt: 1700000000000, units: [ @@ -288,7 +288,7 @@ describe('readHistory', () => { it('returns empty for missing metrics', () => { const empty = tmpProject(); - mkdirSync(join(empty, '.gsd'), { recursive: true }); + mkdirSync(join(empty, '.sf'), { recursive: true }); const result = readHistory(empty); assert.equal(result.entries.length, 0); assert.equal(result.totals.units, 0); @@ -305,7 +305,7 @@ describe('readCaptures', () => { before(() => { projectDir = tmpProject(); - writeFixture(projectDir, '.gsd/CAPTURES.md', `# Captures + writeFixture(projectDir, '.sf/CAPTURES.md', `# Captures ### CAP-aaa11111 @@ -365,7 +365,7 @@ describe('readCaptures', () => { it('returns empty for missing CAPTURES.md', () => { const empty = tmpProject(); - mkdirSync(join(empty, '.gsd'), { recursive: true }); + mkdirSync(join(empty, '.sf'), { recursive: true }); const result = readCaptures(empty); assert.equal(result.captures.length, 0); rmSync(empty, { recursive: true, force: true }); @@ -381,7 +381,7 @@ describe('readKnowledge', () => { before(() => { projectDir = tmpProject(); - writeFixture(projectDir, '.gsd/KNOWLEDGE.md', `# Project 
Knowledge + writeFixture(projectDir, '.sf/KNOWLEDGE.md', `# Project Knowledge ## Rules @@ -429,7 +429,7 @@ describe('readKnowledge', () => { it('returns empty for missing KNOWLEDGE.md', () => { const empty = tmpProject(); - mkdirSync(join(empty, '.gsd'), { recursive: true }); + mkdirSync(join(empty, '.sf'), { recursive: true }); const result = readKnowledge(empty); assert.equal(result.entries.length, 0); rmSync(empty, { recursive: true, force: true }); @@ -447,24 +447,24 @@ describe('runDoctorLite', () => { projectDir = tmpProject(); // M001: complete milestone (has summary) - writeFixture(projectDir, '.gsd/PROJECT.md', '# Test Project'); - writeFixture(projectDir, '.gsd/STATE.md', '# SF State'); - writeFixture(projectDir, '.gsd/milestones/M001/M001-CONTEXT.md', '# M001'); - writeFixture(projectDir, '.gsd/milestones/M001/M001-ROADMAP.md', '# Roadmap'); - writeFixture(projectDir, '.gsd/milestones/M001/M001-SUMMARY.md', '# Done'); - writeFixture(projectDir, '.gsd/milestones/M001/slices/S01/S01-PLAN.md', '# Plan'); - writeFixture(projectDir, '.gsd/milestones/M001/slices/S01/tasks/T01-PLAN.md', '# T01'); - writeFixture(projectDir, '.gsd/milestones/M001/slices/S01/tasks/T01-SUMMARY.md', '# T01 done'); + writeFixture(projectDir, '.sf/PROJECT.md', '# Test Project'); + writeFixture(projectDir, '.sf/STATE.md', '# SF State'); + writeFixture(projectDir, '.sf/milestones/M001/M001-CONTEXT.md', '# M001'); + writeFixture(projectDir, '.sf/milestones/M001/M001-ROADMAP.md', '# Roadmap'); + writeFixture(projectDir, '.sf/milestones/M001/M001-SUMMARY.md', '# Done'); + writeFixture(projectDir, '.sf/milestones/M001/slices/S01/S01-PLAN.md', '# Plan'); + writeFixture(projectDir, '.sf/milestones/M001/slices/S01/tasks/T01-PLAN.md', '# T01'); + writeFixture(projectDir, '.sf/milestones/M001/slices/S01/tasks/T01-SUMMARY.md', '# T01 done'); // M002: incomplete — has all tasks done but no SUMMARY - writeFixture(projectDir, '.gsd/milestones/M002/M002-CONTEXT.md', '# M002'); - 
writeFixture(projectDir, '.gsd/milestones/M002/M002-ROADMAP.md', '# Roadmap'); - writeFixture(projectDir, '.gsd/milestones/M002/slices/S01/S01-PLAN.md', '# Plan'); - writeFixture(projectDir, '.gsd/milestones/M002/slices/S01/tasks/T01-PLAN.md', '# T01'); - writeFixture(projectDir, '.gsd/milestones/M002/slices/S01/tasks/T01-SUMMARY.md', '# T01 done'); + writeFixture(projectDir, '.sf/milestones/M002/M002-CONTEXT.md', '# M002'); + writeFixture(projectDir, '.sf/milestones/M002/M002-ROADMAP.md', '# Roadmap'); + writeFixture(projectDir, '.sf/milestones/M002/slices/S01/S01-PLAN.md', '# Plan'); + writeFixture(projectDir, '.sf/milestones/M002/slices/S01/tasks/T01-PLAN.md', '# T01'); + writeFixture(projectDir, '.sf/milestones/M002/slices/S01/tasks/T01-SUMMARY.md', '# T01 done'); // M003: empty — no context, no slices - mkdirSync(join(projectDir, '.gsd/milestones/M003'), { recursive: true }); + mkdirSync(join(projectDir, '.sf/milestones/M003'), { recursive: true }); }); after(() => rmSync(projectDir, { recursive: true, force: true })); @@ -492,14 +492,14 @@ describe('runDoctorLite', () => { it('returns ok:true for healthy project', () => { const healthy = tmpProject(); - writeFixture(healthy, '.gsd/PROJECT.md', '# Project'); - writeFixture(healthy, '.gsd/STATE.md', '# State'); + writeFixture(healthy, '.sf/PROJECT.md', '# Project'); + writeFixture(healthy, '.sf/STATE.md', '# State'); const result = runDoctorLite(healthy); assert.equal(result.ok, true); rmSync(healthy, { recursive: true, force: true }); }); - it('handles missing .gsd/ gracefully', () => { + it('handles missing .sf/ gracefully', () => { const empty = tmpProject(); const result = runDoctorLite(empty); assert.equal(result.ok, true); diff --git a/packages/mcp-server/src/readers/roadmap.ts b/packages/mcp-server/src/readers/roadmap.ts index 74b47e883..83ca3f888 100644 --- a/packages/mcp-server/src/readers/roadmap.ts +++ b/packages/mcp-server/src/readers/roadmap.ts @@ -182,8 +182,8 @@ function readVision(gsdRoot: 
string, mid: string): string { // --------------------------------------------------------------------------- export function readRoadmap(projectDir: string, filterMilestoneId?: string): RoadmapResult { - const gsd = resolveGsdRoot(projectDir); - let milestoneIds = findMilestoneIds(gsd); + const sf = resolveGsdRoot(projectDir); + let milestoneIds = findMilestoneIds(sf); if (filterMilestoneId) { milestoneIds = milestoneIds.filter((id) => id === filterMilestoneId); @@ -192,19 +192,19 @@ export function readRoadmap(projectDir: string, filterMilestoneId?: string): Roa const milestones: MilestoneInfo[] = []; for (const mid of milestoneIds) { - const title = readMilestoneTitle(gsd, mid); - const vision = readVision(gsd, mid); + const title = readMilestoneTitle(sf, mid); + const vision = readVision(sf, mid); - const summaryPath = resolveMilestoneFile(gsd, mid, 'SUMMARY'); + const summaryPath = resolveMilestoneFile(sf, mid, 'SUMMARY'); const hasSummary = summaryPath !== null && existsSync(summaryPath); - const roadmapPath = resolveMilestoneFile(gsd, mid, 'ROADMAP'); + const roadmapPath = resolveMilestoneFile(sf, mid, 'ROADMAP'); let roadmapSlices: ReturnType = []; if (roadmapPath && existsSync(roadmapPath)) { roadmapSlices = parseRoadmapTable(readFileSync(roadmapPath, 'utf-8')); } - const fsSliceIds = findSliceIds(gsd, mid); + const fsSliceIds = findSliceIds(sf, mid); const sliceIdSet = new Set([ ...roadmapSlices.map((s) => s.id), ...fsSliceIds, @@ -213,9 +213,9 @@ export function readRoadmap(projectDir: string, filterMilestoneId?: string): Roa const slices: SliceInfo[] = []; for (const sid of Array.from(sliceIdSet).sort()) { const roadmapEntry = roadmapSlices.find((s) => s.id === sid); - const taskFiles = findTaskFiles(gsd, mid, sid); + const taskFiles = findTaskFiles(sf, mid, sid); - const planPath = resolveSliceFile(gsd, mid, sid, 'PLAN'); + const planPath = resolveSliceFile(sf, mid, sid, 'PLAN'); let planTasks: ReturnType = []; if (planPath && existsSync(planPath)) { 
planTasks = parseSlicePlanTasks(readFileSync(planPath, 'utf-8')); diff --git a/packages/mcp-server/src/readers/state.ts b/packages/mcp-server/src/readers/state.ts index 309ea76c9..beb2d8df1 100644 --- a/packages/mcp-server/src/readers/state.ts +++ b/packages/mcp-server/src/readers/state.ts @@ -158,8 +158,8 @@ function countSlicesAndTasks(gsdRoot: string, milestoneIds: string[]): { // --------------------------------------------------------------------------- export function readProgress(projectDir: string): ProgressResult { - const gsd = resolveGsdRoot(projectDir); - const statePath = resolveRootFile(gsd, 'STATE.md'); + const sf = resolveGsdRoot(projectDir); + const statePath = resolveRootFile(sf, 'STATE.md'); // Defaults const result: ProgressResult = { @@ -177,10 +177,10 @@ export function readProgress(projectDir: string): ProgressResult { if (!existsSync(statePath)) { // No STATE.md — derive from filesystem only - const milestoneIds = findMilestoneIds(gsd); + const milestoneIds = findMilestoneIds(sf); result.milestones.total = milestoneIds.length; result.milestones.pending = milestoneIds.length; - const counts = countSlicesAndTasks(gsd, milestoneIds); + const counts = countSlicesAndTasks(sf, milestoneIds); result.slices = counts.slices; result.tasks = counts.tasks; return result; @@ -208,14 +208,14 @@ export function readProgress(projectDir: string): ProgressResult { result.milestones.done - result.milestones.active - result.milestones.parked; } else { // Fallback: count directories - const milestoneIds = findMilestoneIds(gsd); + const milestoneIds = findMilestoneIds(sf); result.milestones.total = milestoneIds.length; result.milestones.pending = milestoneIds.length; } // Slice/task counts from filesystem - const milestoneIds = findMilestoneIds(gsd); - const counts = countSlicesAndTasks(gsd, milestoneIds); + const milestoneIds = findMilestoneIds(sf); + const counts = countSlicesAndTasks(sf, milestoneIds); result.slices = counts.slices; result.tasks = 
counts.tasks; diff --git a/packages/mcp-server/src/server.ts b/packages/mcp-server/src/server.ts index 373b689f2..0d8799d44 100644 --- a/packages/mcp-server/src/server.ts +++ b/packages/mcp-server/src/server.ts @@ -31,7 +31,7 @@ import { applySecrets, checkExistingEnvKeys, detectDestination } from './env-wri // --------------------------------------------------------------------------- const MCP_PKG = '@modelcontextprotocol/sdk'; -const SERVER_NAME = 'gsd'; +const SERVER_NAME = 'sf'; const SERVER_VERSION = '2.53.0'; // --------------------------------------------------------------------------- @@ -82,7 +82,7 @@ function normalizeQuery(query: string | undefined): QueryCategory { } async function readProjectState(projectDir: string, query: string | undefined): Promise> { - const gsdDir = join(resolve(projectDir), '.gsd'); + const gsdDir = join(resolve(projectDir), '.sf'); const category = normalizeQuery(query); const wanted = new Set(QUERY_FIELDS[category]); @@ -367,7 +367,7 @@ export async function createMcpServer(sessionManager: SessionManager): Promise<{ 'Start a SF auto-mode session for a project directory. Returns a sessionId for tracking.', { projectDir: z.string().describe('Absolute path to the project directory'), - command: z.string().optional().describe('Command to send (default: "/gsd auto")'), + command: z.string().optional().describe('Command to send (default: "/sf auto")'), model: z.string().optional().describe('Model ID override'), bare: z.boolean().optional().describe('Run in bare mode (skip user config)'), }, @@ -689,7 +689,7 @@ export async function createMcpServer(sessionManager: SessionManager): Promise<{ // ----------------------------------------------------------------------- server.tool( 'gsd_progress', - 'Get structured project progress: active milestone/slice/task, phase, completion counts, blockers, and next action. 
No session required — reads directly from .gsd/ on disk.', + 'Get structured project progress: active milestone/slice/task, phase, completion counts, blockers, and next action. No session required — reads directly from .sf/ on disk.', { projectDir: z.string().describe('Absolute path to the project directory'), }, @@ -748,7 +748,7 @@ export async function createMcpServer(sessionManager: SessionManager): Promise<{ // ----------------------------------------------------------------------- server.tool( 'gsd_doctor', - 'Run a lightweight structural health check on the .gsd/ directory. Checks for missing files, status inconsistencies, and orphaned state. No session required.', + 'Run a lightweight structural health check on the .sf/ directory. Checks for missing files, status inconsistencies, and orphaned state. No session required.', { projectDir: z.string().describe('Absolute path to the project directory'), scope: z.string().optional().describe('Limit checks to a specific milestone (e.g. "M001")'), @@ -806,7 +806,7 @@ export async function createMcpServer(sessionManager: SessionManager): Promise<{ // gsd_graph — knowledge graph for SF projects // // Modes: - // build Parse .gsd/ artifacts and write graph.json atomically. + // build Parse .sf/ artifacts and write graph.json atomically. // query Search the graph for nodes matching a term (BFS, budget-trimmed). // status Check whether graph.json exists and whether it is stale (>24h). // diff Compare graph.json with the last build snapshot. @@ -817,8 +817,8 @@ export async function createMcpServer(sessionManager: SessionManager): Promise<{ 'Manage the SF project knowledge graph. 
No session required.', '', 'Modes:', - ' build Parse .gsd/ artifacts (STATE.md, milestone ROADMAPs, slice PLANs,', - ' KNOWLEDGE.md) and write .gsd/graphs/graph.json atomically.', + ' build Parse .sf/ artifacts (STATE.md, milestone ROADMAPs, slice PLANs,', + ' KNOWLEDGE.md) and write .sf/graphs/graph.json atomically.', ' query Search graph nodes by term (BFS from seed matches, budget-trimmed).', ' Returns matching nodes and reachable edges within the token budget.', ' status Show whether graph.json exists, its age, node/edge counts, and', diff --git a/packages/mcp-server/src/session-manager.ts b/packages/mcp-server/src/session-manager.ts index 88341eb80..9dd2eb013 100644 --- a/packages/mcp-server/src/session-manager.ts +++ b/packages/mcp-server/src/session-manager.ts @@ -60,7 +60,7 @@ export class SessionManager { * * Rejects if a session already exists for this projectDir. * Creates an RpcClient, starts the process, performs the v2 init handshake, - * wires event tracking, and sends '/gsd auto' to begin execution. + * wires event tracking, and sends '/sf auto' to begin execution. */ async startSession(projectDir: string, options: ExecuteOptions = {}): Promise { if (!projectDir || projectDir.trim() === '') { @@ -125,7 +125,7 @@ export class SessionManager { }); // Kick off auto-mode - const command = options.command ?? '/gsd auto'; + const command = options.command ?? '/sf auto'; await client.prompt(command); return session.sessionId; @@ -240,18 +240,18 @@ export class SessionManager { * Resolve the SF CLI path. * * 1. SF_CLI_PATH env var (highest priority) - * 2. `which gsd` → resolve to the actual dist/cli.js + * 2. 
`which sf` → resolve to the actual dist/cli.js */ static resolveCLIPath(): string { // Check env var first const envPath = process.env['SF_CLI_PATH']; if (envPath) return resolve(envPath); - // Fallback: locate `gsd` via which + // Fallback: locate `sf` via which try { - const gsdBin = execSync('which gsd', { encoding: 'utf-8' }).trim(); + const gsdBin = execSync('which sf', { encoding: 'utf-8' }).trim(); if (gsdBin) { - // gsd bin is typically a symlink to dist/loader.js — return the resolved path + // sf bin is typically a symlink to dist/loader.js — return the resolved path return resolve(gsdBin); } } catch { @@ -259,7 +259,7 @@ export class SessionManager { } throw new Error( - 'Cannot find SF CLI. Set SF_CLI_PATH environment variable or ensure `gsd` is in PATH.' + 'Cannot find SF CLI. Set SF_CLI_PATH environment variable or ensure `sf` is in PATH.' ); } diff --git a/packages/mcp-server/src/tool-credentials.test.ts b/packages/mcp-server/src/tool-credentials.test.ts index 573f93c0d..0827ee52a 100644 --- a/packages/mcp-server/src/tool-credentials.test.ts +++ b/packages/mcp-server/src/tool-credentials.test.ts @@ -8,7 +8,7 @@ import { loadStoredCredentialEnvKeys, resolveAuthPath } from "./tool-credentials describe("tool credentials", () => { it("hydrates supported model and tool keys from auth.json", () => { - const tempRoot = mkdtempSync(join(tmpdir(), "gsd-mcp-auth-")); + const tempRoot = mkdtempSync(join(tmpdir(), "sf-mcp-auth-")); const authPath = join(tempRoot, "auth.json"); const env: NodeJS.ProcessEnv = {}; @@ -37,7 +37,7 @@ describe("tool credentials", () => { }); it("does not overwrite explicit environment variables", () => { - const tempRoot = mkdtempSync(join(tmpdir(), "gsd-mcp-auth-")); + const tempRoot = mkdtempSync(join(tmpdir(), "sf-mcp-auth-")); const authPath = join(tempRoot, "auth.json"); const env: NodeJS.ProcessEnv = { BRAVE_API_KEY: "already-set", @@ -59,7 +59,7 @@ describe("tool credentials", () => { }); it("ignores oauth credentials because 
they are resolved through auth storage, not env hydration", () => { - const tempRoot = mkdtempSync(join(tmpdir(), "gsd-mcp-auth-")); + const tempRoot = mkdtempSync(join(tmpdir(), "sf-mcp-auth-")); const authPath = join(tempRoot, "auth.json"); const env: NodeJS.ProcessEnv = {}; @@ -79,7 +79,7 @@ describe("tool credentials", () => { }); it("resolves auth.json from SF_CODING_AGENT_DIR", () => { - const tempRoot = mkdtempSync(join(tmpdir(), "gsd-mcp-agent-dir-")); + const tempRoot = mkdtempSync(join(tmpdir(), "sf-mcp-agent-dir-")); const agentDir = join(tempRoot, "agent"); mkdirSync(agentDir, { recursive: true }); diff --git a/packages/mcp-server/src/tool-credentials.ts b/packages/mcp-server/src/tool-credentials.ts index b9d05d970..d9f2821b8 100644 --- a/packages/mcp-server/src/tool-credentials.ts +++ b/packages/mcp-server/src/tool-credentials.ts @@ -63,7 +63,7 @@ function getStoredApiKey(data: AuthStorageData, providerId: string): string | un export function resolveAuthPath(env: NodeJS.ProcessEnv = process.env): string { const agentDir = env.SF_CODING_AGENT_DIR?.trim(); if (agentDir) return join(expandHome(agentDir), "auth.json"); - return join(homedir(), ".gsd", "agent", "auth.json"); + return join(homedir(), ".sf", "agent", "auth.json"); } export function loadStoredCredentialEnvKeys(options: { diff --git a/packages/mcp-server/src/types.ts b/packages/mcp-server/src/types.ts index 26ac37b25..dc5588259 100644 --- a/packages/mcp-server/src/types.ts +++ b/packages/mcp-server/src/types.ts @@ -83,7 +83,7 @@ export interface CostAccumulator { // --------------------------------------------------------------------------- export interface ExecuteOptions { - /** Command to send after '/gsd auto' (default: none) */ + /** Command to send after '/sf auto' (default: none) */ command?: string; /** Model ID override */ diff --git a/packages/mcp-server/src/workflow-tools.test.ts b/packages/mcp-server/src/workflow-tools.test.ts index 5565b964e..272529d7f 100644 --- 
a/packages/mcp-server/src/workflow-tools.test.ts +++ b/packages/mcp-server/src/workflow-tools.test.ts @@ -9,8 +9,8 @@ import { _getAdapter, closeDatabase } from "../../../src/resources/extensions/sf import { registerWorkflowTools, WORKFLOW_TOOL_NAMES } from "./workflow-tools.ts"; function makeTmpBase(): string { - const base = join(tmpdir(), `gsd-mcp-workflow-${randomUUID()}`); - mkdirSync(join(base, ".gsd"), { recursive: true }); + const base = join(tmpdir(), `sf-mcp-workflow-${randomUUID()}`); + mkdirSync(join(base, ".sf"), { recursive: true }); return base; } @@ -31,9 +31,9 @@ function writeWriteGateSnapshot( base: string, snapshot: { verifiedDepthMilestones?: string[]; activeQueuePhase?: boolean; pendingGateId?: string | null }, ): void { - mkdirSync(join(base, ".gsd", "runtime"), { recursive: true }); + mkdirSync(join(base, ".sf", "runtime"), { recursive: true }); writeFileSync( - join(base, ".gsd", "runtime", "write-gate-state.json"), + join(base, ".sf", "runtime", "write-gate-state.json"), JSON.stringify( { verifiedDepthMilestones: snapshot.verifiedDepthMilestones ?? 
[], @@ -97,7 +97,7 @@ describe("workflow MCP tools", () => { assert.match(text, /Saved SUMMARY artifact/); assert.equal(process.cwd(), originalCwd, "workflow MCP tools should not mutate process.cwd"); assert.ok( - existsSync(join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md")), + existsSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md")), "summary file should exist on disk", ); } finally { @@ -178,9 +178,9 @@ describe("workflow MCP tools", () => { it("blocks workflow mutation tools while a discussion gate is pending", async () => { const base = makeTmpBase(); try { - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01"), { recursive: true }); writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), "# S01\n\n- [ ] **T01: Demo** `est:5m`\n", ); writeWriteGateSnapshot(base, { pendingGateId: "depth_verification_M001_confirm" }); @@ -211,9 +211,9 @@ describe("workflow MCP tools", () => { it("blocks workflow mutation tools during queue mode", async () => { const base = makeTmpBase(); try { - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01"), { recursive: true }); writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), "# S01\n\n- [ ] **T01: Demo** `est:5m`\n", ); writeWriteGateSnapshot(base, { activeQueuePhase: true }); @@ -244,9 +244,9 @@ describe("workflow MCP tools", () => { it("gsd_task_complete and gsd_milestone_status work end-to-end", async () => { const base = makeTmpBase(); try { - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01"), { recursive: true }); + 
mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01"), { recursive: true }); writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), "# S01\n\n- [ ] **T01: Demo** `est:5m`\n", ); @@ -269,7 +269,7 @@ describe("workflow MCP tools", () => { assert.match((taskResult as any).content[0].text as string, /Completed task T01/); assert.ok( - existsSync(join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks", "T01-SUMMARY.md")), + existsSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks", "T01-SUMMARY.md")), "task summary should be written to disk", ); @@ -289,9 +289,9 @@ describe("workflow MCP tools", () => { it("gsd_complete_task alias delegates to gsd_task_complete behavior", async () => { const base = makeTmpBase(); try { - mkdirSync(join(base, ".gsd", "milestones", "M002", "slices", "S02"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M002", "slices", "S02"), { recursive: true }); writeFileSync( - join(base, ".gsd", "milestones", "M002", "slices", "S02", "S02-PLAN.md"), + join(base, ".sf", "milestones", "M002", "slices", "S02", "S02-PLAN.md"), "# S02\n\n- [ ] **T02: Demo** `est:5m`\n", ); @@ -312,7 +312,7 @@ describe("workflow MCP tools", () => { assert.match((result as any).content[0].text as string, /Completed task T02/); assert.ok( - existsSync(join(base, ".gsd", "milestones", "M002", "slices", "S02", "tasks", "T02-SUMMARY.md")), + existsSync(join(base, ".sf", "milestones", "M002", "slices", "S02", "tasks", "T02-SUMMARY.md")), "alias should write task summary to disk", ); } finally { @@ -372,11 +372,11 @@ describe("workflow MCP tools", () => { }); assert.match((sliceResult as any).content[0].text as string, /Planned slice S01/); assert.ok( - existsSync(join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md")), + existsSync(join(base, ".sf", "milestones", "M001", "slices", "S01", 
"S01-PLAN.md")), "slice plan should exist on disk", ); assert.ok( - existsSync(join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks", "T01-PLAN.md")), + existsSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks", "T01-PLAN.md")), "task plan should exist on disk", ); } finally { @@ -406,7 +406,7 @@ describe("workflow MCP tools", () => { }); assert.match((result as any).content[0].text as string, /Saved requirement R\d+/); - assert.ok(existsSync(join(base, ".gsd", "REQUIREMENTS.md")), "REQUIREMENTS.md should be written to disk"); + assert.ok(existsSync(join(base, ".sf", "REQUIREMENTS.md")), "REQUIREMENTS.md should be written to disk"); const row = _getAdapter()! .prepare("SELECT id, class, description FROM requirements WHERE description = ?") .get("Inline MCP requirement save regression") as Record | undefined; @@ -486,7 +486,7 @@ describe("workflow MCP tools", () => { assert.match((result as any).content[0].text as string, /Planned task T11/); assert.ok( - existsSync(join(base, ".gsd", "milestones", "M010", "slices", "S10", "tasks", "T11-PLAN.md")), + existsSync(join(base, ".sf", "milestones", "M010", "slices", "S10", "tasks", "T11-PLAN.md")), "T11 plan should be written after reopening the DB", ); } finally { @@ -624,11 +624,11 @@ describe("workflow MCP tools", () => { }); assert.match((aliasResult as any).content[0].text as string, /Replanned slice S09/); assert.ok( - existsSync(join(base, ".gsd", "milestones", "M099", "slices", "S09", "S09-REPLAN.md")), + existsSync(join(base, ".sf", "milestones", "M099", "slices", "S09", "S09-REPLAN.md")), "replan artifact should exist on disk", ); assert.ok( - existsSync(join(base, ".gsd", "milestones", "M099", "slices", "S09", "S09-PLAN.md")), + existsSync(join(base, ".sf", "milestones", "M099", "slices", "S09", "S09-PLAN.md")), "updated plan should exist on disk", ); const removedTask = _getAdapter()!.prepare( @@ -776,11 +776,11 @@ describe("workflow MCP tools", () => { }); 
assert.match((aliasResult as any).content[0].text as string, /Completed slice S04/); assert.ok( - existsSync(join(base, ".gsd", "milestones", "M004", "slices", "S04", "S04-SUMMARY.md")), + existsSync(join(base, ".sf", "milestones", "M004", "slices", "S04", "S04-SUMMARY.md")), "alias should write slice summary to disk", ); assert.ok( - existsSync(join(base, ".gsd", "milestones", "M004", "slices", "S04", "S04-UAT.md")), + existsSync(join(base, ".sf", "milestones", "M004", "slices", "S04", "S04-UAT.md")), "alias should write slice UAT to disk", ); } finally { @@ -887,11 +887,11 @@ describe("workflow MCP tools", () => { }); assert.match((completionResult as any).content[0].text as string, /Completed milestone M005/); assert.ok( - existsSync(join(base, ".gsd", "milestones", "M005", "M005-VALIDATION.md")), + existsSync(join(base, ".sf", "milestones", "M005", "M005-VALIDATION.md")), "validation artifact should exist on disk", ); assert.ok( - existsSync(join(base, ".gsd", "milestones", "M005", "M005-SUMMARY.md")), + existsSync(join(base, ".sf", "milestones", "M005", "M005-SUMMARY.md")), "milestone summary should exist on disk", ); } finally { @@ -1051,11 +1051,11 @@ describe("workflow MCP tools", () => { }); assert.match((reassessAliasResult as any).content[0].text as string, /Reassessed roadmap for milestone M006 after S06/); assert.ok( - existsSync(join(base, ".gsd", "milestones", "M006", "slices", "S06", "S06-ASSESSMENT.md")), + existsSync(join(base, ".sf", "milestones", "M006", "slices", "S06", "S06-ASSESSMENT.md")), "assessment artifact should exist on disk", ); assert.ok( - existsSync(join(base, ".gsd", "milestones", "M006", "M006-ROADMAP.md")), + existsSync(join(base, ".sf", "milestones", "M006", "M006-ROADMAP.md")), "roadmap artifact should exist on disk", ); } finally { diff --git a/packages/mcp-server/src/workflow-tools.ts b/packages/mcp-server/src/workflow-tools.ts index eda1ce8ba..efbc1b41a 100644 --- a/packages/mcp-server/src/workflow-tools.ts +++ 
b/packages/mcp-server/src/workflow-tools.ts @@ -657,7 +657,7 @@ async function handleSaveGateResult( async function ensureMilestoneDbRow(milestoneId: string): Promise { try { - const { insertMilestone } = await importLocalModule("../../../src/resources/extensions/sf/gsd-db.js"); + const { insertMilestone } = await importLocalModule("../../../src/resources/extensions/sf/sf-db.js"); insertMilestone({ id: milestoneId, status: "queued" }); } catch { // Ignore pre-existing rows or transient DB availability issues. @@ -1249,7 +1249,7 @@ export function registerWorkflowTools(server: McpToolServer): void { const { projectDir, milestoneId, sliceId, reason } = parseWorkflowArgs(skipSliceSchema, args); await enforceWorkflowWriteGate("gsd_skip_slice", projectDir, milestoneId); await runSerializedWorkflowDbOperation(projectDir, async () => { - const { getSlice, updateSliceStatus } = await importLocalModule("../../../src/resources/extensions/sf/gsd-db.js"); + const { getSlice, updateSliceStatus } = await importLocalModule("../../../src/resources/extensions/sf/sf-db.js"); const { invalidateStateCache } = await importLocalModule("../../../src/resources/extensions/sf/state.js"); const { rebuildState } = await importLocalModule("../../../src/resources/extensions/sf/doctor.js"); const slice = getSlice(milestoneId, sliceId); diff --git a/packages/pi-coding-agent/src/core/agent-session-print-mode-persist.test.ts b/packages/pi-coding-agent/src/core/agent-session-print-mode-persist.test.ts index 7bc454b0e..389284763 100644 --- a/packages/pi-coding-agent/src/core/agent-session-print-mode-persist.test.ts +++ b/packages/pi-coding-agent/src/core/agent-session-print-mode-persist.test.ts @@ -4,7 +4,7 @@ import { readFileSync } from "node:fs"; import { join } from "node:path"; /** - * Regression #4251: `gsd -p --model / "msg"` must never mutate + * Regression #4251: `sf -p --model / "msg"` must never mutate * the persisted defaultProvider/defaultModel in settings.json. 
The one-shot * print invocation used to verify a provider (e.g. Bearer-auth smoke test) * was silently overwriting the global default. @@ -55,7 +55,7 @@ test("AgentSession stores persistModelChanges and defaults it to false (#4251)", ); }); -test("gsd src/cli.ts interactive branch opts into persistence (#4251)", () => { +test("sf src/cli.ts interactive branch opts into persistence (#4251)", () => { const printGuardIdx = gsdCliSource.indexOf("if (isPrintMode)"); // Interactive createAgentSession call lives after the print-mode branch. const interactiveCreateIdx = gsdCliSource.indexOf("createAgentSession({", printGuardIdx + 10); @@ -107,7 +107,7 @@ test("CreateAgentSessionOptions forwards persistModelChanges to AgentSession (#4 // assignment, now that the AgentSessionConfig default is false. The assertion // moved to the "main.ts sets persistModelChanges = isInteractive" test below. -test("gsd src/cli.ts print-mode createAgentSession passes persistModelChanges: false (#4251)", () => { +test("sf src/cli.ts print-mode createAgentSession passes persistModelChanges: false (#4251)", () => { const printGuardIdx = gsdCliSource.indexOf("if (isPrintMode)"); assert.ok(printGuardIdx >= 0, "missing isPrintMode branch in src/cli.ts"); const createIdx = gsdCliSource.indexOf("createAgentSession({", printGuardIdx); @@ -119,7 +119,7 @@ test("gsd src/cli.ts print-mode createAgentSession passes persistModelChanges: f ); }); -test("gsd src/cli.ts print-mode --model override calls setModel with persist: false (#4251)", () => { +test("sf src/cli.ts print-mode --model override calls setModel with persist: false (#4251)", () => { const printGuardIdx = gsdCliSource.indexOf("if (isPrintMode)"); const overrideIdx = gsdCliSource.indexOf("if (cliFlags.model)", printGuardIdx); assert.ok(overrideIdx >= 0, "missing --model override block in print-mode branch"); @@ -130,7 +130,7 @@ test("gsd src/cli.ts print-mode --model override calls setModel with persist: fa ); }); -test("gsd src/cli.ts 
print-mode skips validateConfiguredModel when --model is set (#4251)", () => { +test("sf src/cli.ts print-mode skips validateConfiguredModel when --model is set (#4251)", () => { const printGuardIdx = gsdCliSource.indexOf("if (isPrintMode)"); const validateIdx = gsdCliSource.indexOf("validateConfiguredModel(", printGuardIdx); assert.ok(validateIdx >= 0, "missing validateConfiguredModel call in print-mode branch"); diff --git a/packages/pi-coding-agent/src/core/agent-session.ts b/packages/pi-coding-agent/src/core/agent-session.ts index 78ec05701..846c28e2c 100644 --- a/packages/pi-coding-agent/src/core/agent-session.ts +++ b/packages/pi-coding-agent/src/core/agent-session.ts @@ -171,7 +171,7 @@ export interface AgentSessionConfig { isClaudeCodeReady?: () => boolean; /** When false, model changes (via setModel/cycleModel/extension setModel) do NOT * write defaultProvider/defaultModel back to settings.json. Used by print/one-shot - * mode so that `gsd -p --model X "msg"` never mutates the persisted default (#4251). */ + * mode so that `sf -p --model X "msg"` never mutates the persisted default (#4251). */ persistModelChanges?: boolean; } @@ -307,7 +307,7 @@ export class AgentSession { // Defaults to false — callers must explicitly opt into persistence. This is the // safe default for SDK consumers: a third party building on @sf-run/pi-coding-agent // should not silently mutate the user's global settings just by switching models. - // Interactive CLI entry points (gsd wrapper's interactive branch and pi main's + // Interactive CLI entry points (sf wrapper's interactive branch and pi main's // isInteractive branch) explicitly set this to true so user model picks still // persist. One-shot/print/rpc/mcp leave it false. 
(#4251) private _persistModelChanges: boolean; diff --git a/packages/pi-coding-agent/src/core/lifecycle-hooks.test.ts b/packages/pi-coding-agent/src/core/lifecycle-hooks.test.ts index d19c87d16..b005729ee 100644 --- a/packages/pi-coding-agent/src/core/lifecycle-hooks.test.ts +++ b/packages/pi-coding-agent/src/core/lifecycle-hooks.test.ts @@ -156,9 +156,9 @@ describe("verifyRuntimeDependencies", () => { it("includes appName and source in error for retry hint", () => { assert.throws( - () => verifyRuntimeDependencies(["__missing__"], "github:user/repo", "gsd"), + () => verifyRuntimeDependencies(["__missing__"], "github:user/repo", "sf"), (err: Error) => { - assert.ok(err.message.includes("gsd")); + assert.ok(err.message.includes("sf")); assert.ok(err.message.includes("github:user/repo")); return true; }, diff --git a/packages/pi-coding-agent/src/core/lsp/defaults.json b/packages/pi-coding-agent/src/core/lsp/defaults.json index 6bc16ba82..79ff30cd8 100644 --- a/packages/pi-coding-agent/src/core/lsp/defaults.json +++ b/packages/pi-coding-agent/src/core/lsp/defaults.json @@ -45,7 +45,7 @@ "fileTypes": [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"], "rootMarkers": ["package.json", "tsconfig.json", "jsconfig.json"], "initOptions": { - "hostInfo": "gsd-coding-agent", + "hostInfo": "sf-coding-agent", "preferences": { "includeInlayParameterNameHints": "all", "includeInlayVariableTypeHints": true, diff --git a/packages/pi-coding-agent/src/core/resource-loader.ts b/packages/pi-coding-agent/src/core/resource-loader.ts index 34ab7565e..a4e8391c9 100644 --- a/packages/pi-coding-agent/src/core/resource-loader.ts +++ b/packages/pi-coding-agent/src/core/resource-loader.ts @@ -823,7 +823,7 @@ export class DefaultResourceLoader implements ResourceLoader { /** * Extract the extension name from its path. * For root-level files: basename without extension (e.g. "search-the-web.ts" → "search-the-web") - * For subdirectory extensions: the directory name (e.g. 
"/path/to/gsd/index.ts" → "gsd") + * For subdirectory extensions: the directory name (e.g. "/path/to/sf/index.ts" → "sf") */ private getExtensionNameFromPath(extPath: string): string { const base = basename(extPath); @@ -840,8 +840,8 @@ export class DefaultResourceLoader implements ResourceLoader { /** * Extract the extension directory name (key) from a full extension path. - * Given extensionsDir `/home/user/.gsd/agent/extensions` and - * ownerPath `/home/user/.gsd/agent/extensions/mcp-client/index.js`, + * Given extensionsDir `/home/user/.sf/agent/extensions` and + * ownerPath `/home/user/.sf/agent/extensions/mcp-client/index.js`, * returns `"mcp-client"`. Returns `undefined` when the path is not * under extensionsDir. */ diff --git a/packages/pi-coding-agent/src/core/sdk.ts b/packages/pi-coding-agent/src/core/sdk.ts index 66f3d39c6..315196c20 100644 --- a/packages/pi-coding-agent/src/core/sdk.ts +++ b/packages/pi-coding-agent/src/core/sdk.ts @@ -101,7 +101,7 @@ export interface CreateAgentSessionOptions { isClaudeCodeReady?: () => boolean; /** When false, model changes do NOT write defaultProvider/defaultModel back to * settings.json. main.ts sets this to false for print/one-shot mode so - * `gsd -p --model X "msg"` cannot mutate the persisted default (#4251). */ + * `sf -p --model X "msg"` cannot mutate the persisted default (#4251). 
*/ persistModelChanges?: boolean; } diff --git a/packages/pi-coding-agent/src/core/session-manager.test.ts b/packages/pi-coding-agent/src/core/session-manager.test.ts index 470336567..4891a9d38 100644 --- a/packages/pi-coding-agent/src/core/session-manager.test.ts +++ b/packages/pi-coding-agent/src/core/session-manager.test.ts @@ -31,7 +31,7 @@ describe("SessionManager usage totals", () => { }); it("tracks assistant usage incrementally without rescanning entries", () => { - dir = mkdtempSync(join(tmpdir(), "gsd-session-manager-test-")); + dir = mkdtempSync(join(tmpdir(), "sf-session-manager-test-")); const manager = SessionManager.create(dir, dir); manager.appendMessage({ role: "user", content: [{ type: "text", text: "hello" }] } as any); @@ -48,7 +48,7 @@ describe("SessionManager usage totals", () => { }); it("resets totals when starting a new session", () => { - dir = mkdtempSync(join(tmpdir(), "gsd-session-manager-test-")); + dir = mkdtempSync(join(tmpdir(), "sf-session-manager-test-")); const manager = SessionManager.create(dir, dir); manager.appendMessage(makeAssistantMessage(5, 5, 0, 0, 0.05)); assert.equal(manager.getUsageTotals().input, 5); diff --git a/packages/pi-coding-agent/src/core/skills.ts b/packages/pi-coding-agent/src/core/skills.ts index a8ab488ef..ae023d34c 100644 --- a/packages/pi-coding-agent/src/core/skills.ts +++ b/packages/pi-coding-agent/src/core/skills.ts @@ -20,7 +20,7 @@ export const ECOSYSTEM_SKILLS_DIR = join(homedir(), ".agents", "skills"); export const ECOSYSTEM_PROJECT_SKILLS_DIR = ".agents"; /** - * Legacy skills directory (~/.gsd/agent/skills/ or ~/.pi/agent/skills/). + * Legacy skills directory (~/.sf/agent/skills/ or ~/.pi/agent/skills/). * Read as a fallback so existing installs don't lose skills before migration runs. 
*/ const LEGACY_SKILLS_DIR = join(homedir(), CONFIG_DIR_NAME, "agent", "skills"); @@ -424,7 +424,7 @@ export function loadSkills(options: LoadSkillsOptions = {}): LoadSkillsResult { // Primary project: .agents/skills/ — standard project-level location addSkills(loadSkillsFromDirInternal(resolve(cwd, ECOSYSTEM_PROJECT_SKILLS_DIR, "skills"), "project", true)); - // Legacy fallback: read skills from ~/.gsd/agent/skills/ so existing + // Legacy fallback: read skills from ~/.sf/agent/skills/ so existing // installs keep working until the one-time migration in resource-loader // copies them to ~/.agents/skills/. Skip if migration has completed. const legacyMigrated = existsSync(join(LEGACY_SKILLS_DIR, ".migrated-to-agents")); diff --git a/packages/pi-coding-agent/src/core/tools/spawn-shell-windows.test.ts b/packages/pi-coding-agent/src/core/tools/spawn-shell-windows.test.ts index e44b170b5..a61f35a0c 100644 --- a/packages/pi-coding-agent/src/core/tools/spawn-shell-windows.test.ts +++ b/packages/pi-coding-agent/src/core/tools/spawn-shell-windows.test.ts @@ -30,8 +30,8 @@ const coreDir = join(__dirname, ".."); * it does not need the guard and should NOT appear here. */ const SPAWN_FILES_NEEDING_SHELL_GUARD = [ - // Extension's SF client — spawns the `gsd` binary which is a .cmd on Windows - join(coreDir, "..", "..", "..", "vscode-extension", "src", "gsd-client.ts"), + // Extension's SF client — spawns the `sf` binary which is a .cmd on Windows + join(coreDir, "..", "..", "..", "vscode-extension", "src", "sf-client.ts"), // exec.ts — used by extensions to run arbitrary commands join(coreDir, "exec.ts"), // LSP index — spawns project-type commands (tsc, cargo, etc.) 
@@ -86,7 +86,7 @@ test("all spawn sites that invoke user-facing binaries include shell: process.pl [], `The following spawn sites are missing 'shell: process.platform === "win32"':\n` + failures.map(f => ` - ${f}`).join("\n") + - `\nOn Windows, .cmd wrapper scripts (npm, npx, tsc, gsd) require shell ` + + `\nOn Windows, .cmd wrapper scripts (npm, npx, tsc, sf) require shell ` + `resolution. Without this guard, spawn fails with ENOENT or EINVAL.`, ); }); diff --git a/packages/pi-coding-agent/src/main.ts b/packages/pi-coding-agent/src/main.ts index 89391cba7..573b0528c 100644 --- a/packages/pi-coding-agent/src/main.ts +++ b/packages/pi-coding-agent/src/main.ts @@ -407,7 +407,7 @@ export async function main(args: string[]) { // Auto-detect: all models are local, enable offline mode process.env.PI_OFFLINE = "1"; process.env.PI_SKIP_VERSION_CHECK = "1"; - console.log("[gsd] All configured models are local \u2014 enabling offline mode automatically."); + console.log("[sf] All configured models are local \u2014 enabling offline mode automatically."); } const resourceLoader = new DefaultResourceLoader({ diff --git a/packages/pi-coding-agent/src/modes/interactive/components/__tests__/tool-execution.test.ts b/packages/pi-coding-agent/src/modes/interactive/components/__tests__/tool-execution.test.ts index 43fd3f7a5..283a31f99 100644 --- a/packages/pi-coding-agent/src/modes/interactive/components/__tests__/tool-execution.test.ts +++ b/packages/pi-coding-agent/src/modes/interactive/components/__tests__/tool-execution.test.ts @@ -52,11 +52,11 @@ describe("ToolExecutionComponent", () => { const rendered = renderTool( "Bash", { command: "pwd" }, - { content: [{ type: "text", text: "/tmp/gsd-pr-fix" }], isError: false }, + { content: [{ type: "text", text: "/tmp/sf-pr-fix" }], isError: false }, ); assert.match(rendered, /\$ pwd/); - assert.match(rendered, /\/tmp\/gsd-pr-fix/); + assert.match(rendered, /\/tmp\/sf-pr-fix/); assert.doesNotMatch(rendered, /^\{\s*\}$/m); }); diff --git 
a/packages/pi-coding-agent/src/modes/interactive/controllers/input-controller.test.ts b/packages/pi-coding-agent/src/modes/interactive/controllers/input-controller.test.ts index 6f5d22da5..cb0b50108 100644 --- a/packages/pi-coding-agent/src/modes/interactive/controllers/input-controller.test.ts +++ b/packages/pi-coding-agent/src/modes/interactive/controllers/input-controller.test.ts @@ -97,13 +97,13 @@ test("input-controller: built-in slash commands stay in TUI dispatch", async () }); test("input-controller: extension slash commands fall through to session.prompt", async () => { - const { host, prompted, errors, history } = createHost({ knownSlashCommands: ["gsd"] }); + const { host, prompted, errors, history } = createHost({ knownSlashCommands: ["sf"] }); - await host.defaultEditor.onSubmit("/gsd help"); + await host.defaultEditor.onSubmit("/sf help"); - assert.deepEqual(prompted, ["/gsd help"], "known extension slash commands should reach session.prompt"); + assert.deepEqual(prompted, ["/sf help"], "known extension slash commands should reach session.prompt"); assert.deepEqual(errors, [], "known extension slash commands should not show unknown-command errors"); - assert.deepEqual(history, ["/gsd help"], "known extension slash commands should still be added to history"); + assert.deepEqual(history, ["/sf help"], "known extension slash commands should still be added to history"); }); test("input-controller: prompt template slash commands fall through to session.prompt", async () => { diff --git a/packages/pi-coding-agent/src/modes/interactive/controllers/input-controller.ts b/packages/pi-coding-agent/src/modes/interactive/controllers/input-controller.ts index dad40f16c..29aa546c7 100644 --- a/packages/pi-coding-agent/src/modes/interactive/controllers/input-controller.ts +++ b/packages/pi-coding-agent/src/modes/interactive/controllers/input-controller.ts @@ -129,12 +129,12 @@ export function setupEditorSubmitHandler(host: InteractiveModeStateHost & { * Drag-and-drop 
inserts paths like "/Users/name/Desktop/file.png" which * should be treated as plain text input, not a /Users command. * - * Heuristic: a slash command is a single token like "/help" or "/gsd auto". + * Heuristic: a slash command is a single token like "/help" or "/sf auto". * File paths have a second "/" within the first token (e.g., "/Users/..."). */ function looksLikeFilePath(text: string): boolean { const firstToken = text.split(/\s/)[0]; - // Slash commands: /help, /gsd, /commit — single "/" at start only. + // Slash commands: /help, /sf, /commit — single "/" at start only. // File paths: /Users/name/file, /home/user/file, /tmp/x — contain "/" after position 0. return firstToken.indexOf("/", 1) !== -1; } diff --git a/packages/pi-coding-agent/src/resources/extensions/memory/storage.test.ts b/packages/pi-coding-agent/src/resources/extensions/memory/storage.test.ts index b4c1dd6dd..fc8fedf3f 100644 --- a/packages/pi-coding-agent/src/resources/extensions/memory/storage.test.ts +++ b/packages/pi-coding-agent/src/resources/extensions/memory/storage.test.ts @@ -7,7 +7,7 @@ import { tmpdir } from "node:os"; import { MemoryStorage } from "./storage.js"; function makeTmpDir(): string { - return mkdtempSync(join(tmpdir(), "gsd-memory-storage-test-")); + return mkdtempSync(join(tmpdir(), "sf-memory-storage-test-")); } function wait(ms: number): Promise { diff --git a/packages/pi-coding-agent/src/tests/path-display.test.ts b/packages/pi-coding-agent/src/tests/path-display.test.ts index 31741e22a..92eb2113e 100644 --- a/packages/pi-coding-agent/src/tests/path-display.test.ts +++ b/packages/pi-coding-agent/src/tests/path-display.test.ts @@ -32,10 +32,10 @@ test("toPosixPath: handles Windows UNC paths", () => { assert.equal(toPosixPath("\\\\server\\share\\dir"), "//server/share/dir"); }); -test("toPosixPath: handles .gsd/worktrees path on Windows", () => { +test("toPosixPath: handles .sf/worktrees path on Windows", () => { assert.equal( - 
toPosixPath("C:\\Users\\name\\project\\.gsd\\worktrees\\M001"), - "C:/Users/name/project/.gsd/worktrees/M001", + toPosixPath("C:\\Users\\name\\project\\.sf\\worktrees\\M001"), + "C:/Users/name/project/.sf/worktrees/M001", ); }); @@ -74,7 +74,7 @@ const WINDOWS_ABS_PATH_RE = /[A-Z]:\\[A-Za-z]/; test("buildSystemPrompt: no Windows absolute paths with backslashes in output", () => { // Simulate a Windows-like cwd const prompt = buildSystemPrompt({ - cwd: "D:\\Projects\\my-app\\.gsd\\worktrees\\M002", + cwd: "D:\\Projects\\my-app\\.sf\\worktrees\\M002", }); const lines = prompt.split("\n"); const violations = lines.filter(line => WINDOWS_ABS_PATH_RE.test(line)); diff --git a/packages/pi-coding-agent/src/utils/proxy-server.ts b/packages/pi-coding-agent/src/utils/proxy-server.ts new file mode 100644 index 000000000..ca4957a71 --- /dev/null +++ b/packages/pi-coding-agent/src/utils/proxy-server.ts @@ -0,0 +1,202 @@ +import express from "express"; +import type { Server } from "http"; +import { + getModels, + stream, + type Context, + type Message, + type Model, + type StreamOptions, +} from "@sf-run/pi-ai"; +import { AuthStorage } from "../core/auth-storage.js"; +import { ModelRegistry } from "../core/model-registry.js"; + +export type ProxyServerOptions = { + port: number; + authStorage: AuthStorage; + modelRegistry: ModelRegistry; + onLog?: (msg: string) => void; +}; + +export class ProxyServer { + private server: Server | null = null; + + constructor(private options: ProxyServerOptions) {} + + async start(): Promise { + if (this.server) return; + + const app = express(); + app.use(express.json()); + + const { authStorage, modelRegistry, onLog } = this.options; + + const log = (msg: string) => onLog?.(msg); + + // 1. 
Model Listing + app.get(["/v1/models", "/v1beta/models"], async (req, res) => { + const providers = ["google", "google-gemini-cli", "google-vertex", "anthropic", "openai"]; + const allModels = providers.flatMap((p) => getModels(p as any)); + + const formatted = allModels.map((m) => ({ + id: m.id, + object: "model", + created: 1677610602, + owned_by: m.provider, + name: m.name, + capabilities: m.capabilities, + })); + + if (req.path.startsWith("/v1beta")) { + res.json({ models: formatted }); + } else { + res.json({ data: formatted, object: "list" }); + } + }); + + // 2. Chat Completions (OpenAI & GenAI) + const handleChat = async (req: express.Request, res: express.Response) => { + const body = req.body; + const isOpenAi = req.path.includes("/v1/chat/completions"); + const modelId = isOpenAi ? body.model : req.params.modelId?.replace(/:streamGenerateContent$/, ""); + + if (!modelId) { + return res.status(400).json({ error: "Model ID is required" }); + } + + try { + // Resolve model and provider + const resolvedModel = modelRegistry.getModel(modelId); + if (!resolvedModel) { + return res.status(404).json({ error: `Model ${modelId} not found` }); + } + + // Resolve API key + const apiKey = await authStorage.getApiKey(resolvedModel.provider); + if (!apiKey) { + return res.status(401).json({ error: `No API key for provider ${resolvedModel.provider}. Use /login first.` }); + } + + // Normalize messages + const context: Context = isOpenAi + ? this.normalizeOpenAi(body) + : this.normalizeGoogle(body); + + const streamOptions: StreamOptions = { + apiKey, + temperature: body.temperature, + maxTokens: isOpenAi ? 
body.max_tokens : body.generationConfig?.maxOutputTokens, + }; + + const eventStream = stream(resolvedModel as any, context, streamOptions); + + if (body.stream) { + this.handleStreamingResponse(eventStream, res, isOpenAi, modelId); + } else { + await this.handleStaticResponse(eventStream, res, isOpenAi, modelId); + } + + } catch (err: any) { + log(`Proxy error: ${err.message}`); + res.status(500).json({ error: err.message }); + } + }; + + app.post("/v1/chat/completions", handleChat); + app.post("/v1beta/models/:modelId\\:streamGenerateContent", handleChat); + + return new Promise((resolve) => { + this.server = app.listen(this.options.port, () => { + log(`Proxy Server running on http://localhost:${this.options.port}`); + resolve(); + }); + }); + } + + stop(): void { + if (this.server) { + this.server.close(); + this.server = null; + } + } + + private normalizeOpenAi(body: any): Context { + const messages = body.messages || []; + const system = messages.find((m: any) => m.role === "system")?.content; + const history = messages.filter((m: any) => m.role !== "system").map((m: any) => ({ + role: m.role === "user" ? "user" : "assistant", + content: typeof m.content === "string" ? [{ type: "text", text: m.content }] : m.content, + })); + return { messages: history, systemPrompt: system }; + } + + private normalizeGoogle(body: any): Context { + const contents = body.contents || []; + const history = contents.map((c: any) => ({ + role: c.role === "user" ? "user" : "assistant", + content: (c.parts || []).map((p: any) => ({ type: "text", text: p.text })), + })); + const system = body.systemInstruction?.parts?.[0]?.text; + return { messages: history, systemPrompt: system }; + } + + private handleStreamingResponse(eventStream: any, res: express.Response, isOpenAi: boolean, modelId: string) { + res.setHeader("Content-Type", isOpenAi ? 
"text/event-stream" : "application/json"); + + eventStream.on("data", (ev: any) => { + if (ev.type === "text_delta") { + if (isOpenAi) { + const chunk = { + id: `chatcmpl-${Date.now()}`, + object: "chat.completion.chunk", + created: Math.floor(Date.now() / 1000), + model: modelId, + choices: [{ index: 0, delta: { content: ev.delta }, finish_reason: null }], + }; + res.write(`data: ${JSON.stringify(chunk)}\n\n`); + } else { + const chunk = { candidates: [{ content: { parts: [{ text: ev.delta }] } }] }; + res.write(JSON.stringify(chunk) + "\n"); + } + } + }); + + eventStream.on("done", () => { + if (isOpenAi) res.write("data: [DONE]\n\n"); + res.end(); + }); + + eventStream.on("error", (ev: any) => { + if (!res.headersSent) res.status(500).json({ error: ev.error.errorMessage }); + else res.end(); + }); + } + + private async handleStaticResponse(eventStream: any, res: express.Response, isOpenAi: boolean, modelId: string) { + let fullContent = ""; + eventStream.on("data", (ev: any) => { + if (ev.type === "text_delta") fullContent += ev.delta; + }); + + return new Promise((resolve) => { + eventStream.on("done", () => { + if (isOpenAi) { + res.json({ + id: `chatcmpl-${Date.now()}`, + object: "chat.completion", + created: Math.floor(Date.now() / 1000), + model: modelId, + choices: [{ index: 0, message: { role: "assistant", content: fullContent }, finish_reason: "stop" }], + }); + } else { + res.json({ candidates: [{ content: { parts: [{ text: fullContent }] } }] }); + } + resolve(); + }); + eventStream.on("error", (ev: any) => { + res.status(500).json({ error: ev.error.errorMessage }); + resolve(); + }); + }); + } +} diff --git a/packages/rpc-client/src/rpc-client.test.ts b/packages/rpc-client/src/rpc-client.test.ts index 9fcb7874f..55f7c53c6 100644 --- a/packages/rpc-client/src/rpc-client.test.ts +++ b/packages/rpc-client/src/rpc-client.test.ts @@ -260,7 +260,7 @@ describe("RpcClient construction", () => { it("creates with custom options", () => { const client = new 
RpcClient({ - cliPath: "/usr/local/bin/gsd", + cliPath: "/usr/local/bin/sf", cwd: "/tmp", env: { NODE_ENV: "test" }, provider: "anthropic", diff --git a/scripts/base64-scan.sh b/scripts/base64-scan.sh index e79428430..5732ca36a 100755 --- a/scripts/base64-scan.sh +++ b/scripts/base64-scan.sh @@ -114,7 +114,7 @@ should_scan() { esac # Skip generated/vendor dirs case "$file" in - node_modules/*|dist/*|coverage/*|.gsd/*) + node_modules/*|dist/*|coverage/*|.sf/*) return 1 ;; esac return 0 diff --git a/scripts/build-web-if-stale.cjs b/scripts/build-web-if-stale.cjs index 10bf5fa4e..81e6ffe3c 100644 --- a/scripts/build-web-if-stale.cjs +++ b/scripts/build-web-if-stale.cjs @@ -3,7 +3,7 @@ * Rebuild the Next.js web host only when web source files are newer than the * staged standalone build. Skips the build when nothing has changed. * - * Also self-heals a missing/incomplete web dependency install so `npm run gsd:web` + * Also self-heals a missing/incomplete web dependency install so `npm run sf:web` * doesn't fail with bare `next` command-not-found errors. 
* * Exit codes: diff --git a/scripts/compile-tests.mjs b/scripts/compile-tests.mjs index 3d6ac5e57..7f7525b0b 100644 --- a/scripts/compile-tests.mjs +++ b/scripts/compile-tests.mjs @@ -175,8 +175,8 @@ async function main() { const { existsSync } = await import('node:fs'); const testDirsToClean = [ [join(ROOT, 'dist-test', 'src', 'tests'), join(ROOT, 'src', 'tests')], - [join(ROOT, 'dist-test', 'src', 'resources', 'extensions', 'gsd', 'tests'), - join(ROOT, 'src', 'resources', 'extensions', 'gsd', 'tests')], + [join(ROOT, 'dist-test', 'src', 'resources', 'extensions', 'sf', 'tests'), + join(ROOT, 'src', 'resources', 'extensions', 'sf', 'tests')], ]; let staleCleaned = 0; for (const [distDir, srcDir] of testDirsToClean) { diff --git a/scripts/dev-cli.js b/scripts/dev-cli.js index fdeb26791..12cc902cf 100644 --- a/scripts/dev-cli.js +++ b/scripts/dev-cli.js @@ -7,7 +7,7 @@ import { fileURLToPath } from 'node:url' const __dirname = dirname(fileURLToPath(import.meta.url)) const root = resolve(__dirname, '..') const srcLoaderPath = resolve(root, 'src', 'loader.ts') -const resolveTsPath = resolve(root, 'src', 'resources', 'extensions', 'gsd', 'tests', 'resolve-ts.mjs') +const resolveTsPath = resolve(root, 'src', 'resources', 'extensions', 'sf', 'tests', 'resolve-ts.mjs') const child = spawn( process.execPath, diff --git a/scripts/dist-test-resolve.mjs b/scripts/dist-test-resolve.mjs index f7e1db86e..a977d1cf9 100644 --- a/scripts/dist-test-resolve.mjs +++ b/scripts/dist-test-resolve.mjs @@ -5,7 +5,7 @@ * .js files still import '../foo.ts'. This hook redirects those to '.js' so * Node can find the compiled output. * - * Also redirects @gsd bare imports to their compiled counterparts in dist-test. + * Also redirects @sf bare imports to their compiled counterparts in dist-test. 
*/ import { fileURLToPath, pathToFileURL } from 'node:url'; diff --git a/scripts/install-hooks.mjs b/scripts/install-hooks.mjs index dea550585..53386329a 100644 --- a/scripts/install-hooks.mjs +++ b/scripts/install-hooks.mjs @@ -4,7 +4,7 @@ import { execFileSync } from 'node:child_process'; import { chmodSync, existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs'; import { join } from 'node:path'; -const MARKER = '# gsd-secret-scan'; +const MARKER = '# sf-secret-scan'; function git(args) { return execFileSync('git', args, { @@ -36,7 +36,7 @@ if (existsSync(hookFile)) { const hookBody = [ '#!/usr/bin/env sh', - '# gsd-secret-scan', + '# sf-secret-scan', '# Pre-commit hook: scan staged files for hardcoded secrets', hookCommand, '', diff --git a/scripts/install-hooks.sh b/scripts/install-hooks.sh index 30bfd629e..f79c4ec5a 100755 --- a/scripts/install-hooks.sh +++ b/scripts/install-hooks.sh @@ -6,7 +6,7 @@ set -euo pipefail HOOK_DIR="$(git rev-parse --git-dir)/hooks" HOOK_FILE="$HOOK_DIR/pre-commit" -MARKER="# gsd-secret-scan" +MARKER="# sf-secret-scan" mkdir -p "$HOOK_DIR" @@ -25,7 +25,7 @@ if [[ -f "$HOOK_FILE" ]]; then else cat > "$HOOK_FILE" << 'EOF' #!/usr/bin/env bash -# gsd-secret-scan +# sf-secret-scan # Pre-commit hook: scan staged files for hardcoded secrets bash "$(git rev-parse --show-toplevel)/scripts/secret-scan.sh" EOF diff --git a/scripts/parallel-monitor.mjs b/scripts/parallel-monitor.mjs index a90cf5ec7..e566f1fe2 100755 --- a/scripts/parallel-monitor.mjs +++ b/scripts/parallel-monitor.mjs @@ -18,15 +18,15 @@ * --heal Auto-respawn dead workers (opt-in, off by default) * --heal-retries Max respawn attempts per worker (default: 3) * --heal-cooldown Seconds between respawn attempts (default: 30) - * --dir Status file directory (default: .gsd/parallel) + * --dir Status file directory (default: .sf/parallel) * --root Project root (default: cwd) * * Data sources: - * .gsd/parallel/M0xx.status.json — heartbeat, cost, state (written by 
orchestrator) - * .gsd/worktrees/M0xx/.gsd/auto.lock — current unit type + ID (written by worker) - * .gsd/worktrees/M0xx/.gsd/gsd.db — task/slice completion (SQLite, queried via cli) - * .gsd/parallel/M0xx.stdout.log — NDJSON events (cost extraction, notify messages) - * .gsd/parallel/M0xx.stderr.log — error surfacing + * .sf/parallel/M0xx.status.json — heartbeat, cost, state (written by orchestrator) + * .sf/worktrees/M0xx/.sf/auto.lock — current unit type + ID (written by worker) + * .sf/worktrees/M0xx/.sf/sf.db — task/slice completion (SQLite, queried via cli) + * .sf/parallel/M0xx.stdout.log — NDJSON events (cost extraction, notify messages) + * .sf/parallel/M0xx.stderr.log — error surfacing * * Health indicators: * ● green — PID alive, fresh heartbeat (<30s) @@ -48,7 +48,7 @@ import { execSync, spawn, spawnSync } from 'node:child_process'; const args = process.argv.slice(2); const INTERVAL_SEC = parseInt(getArg('--interval', '5'), 10); -const PARALLEL_DIR = getArg('--dir', '.gsd/parallel'); +const PARALLEL_DIR = getArg('--dir', '.sf/parallel'); const PROJECT_ROOT = getArg('--root', process.cwd()); const ONE_SHOT = args.includes('--once'); const HEAL_MODE = args.includes('--heal'); @@ -122,7 +122,7 @@ function isPidAlive(pid) { function discoverWorkers() { const dir = path.resolve(PROJECT_ROOT, PARALLEL_DIR); - const worktreeDir = path.resolve(PROJECT_ROOT, '.gsd/worktrees'); + const worktreeDir = path.resolve(PROJECT_ROOT, '.sf/worktrees'); const mids = new Set(); // From status files @@ -143,7 +143,7 @@ function discoverWorkers() { // From worktree directories that have auto.lock (actively running) if (fs.existsSync(worktreeDir)) { for (const d of fs.readdirSync(worktreeDir)) { - if (d.startsWith('M') && fs.existsSync(path.join(worktreeDir, d, '.gsd', 'auto.lock'))) { + if (d.startsWith('M') && fs.existsSync(path.join(worktreeDir, d, '.sf', 'auto.lock'))) { mids.add(d); } } @@ -158,12 +158,12 @@ function readWorkerStatus(mid) { } function readAutoLock(mid) { 
- const lockPath = path.resolve(PROJECT_ROOT, `.gsd/worktrees/${mid}/.gsd/auto.lock`); + const lockPath = path.resolve(PROJECT_ROOT, `.sf/worktrees/${mid}/.sf/auto.lock`); return readJsonSafe(lockPath); } function querySliceProgress(mid) { - const dbPath = path.resolve(PROJECT_ROOT, `.gsd/worktrees/${mid}/.gsd/gsd.db`); + const dbPath = path.resolve(PROJECT_ROOT, `.sf/worktrees/${mid}/.sf/sf.db`); if (!fs.existsSync(dbPath)) return []; try { @@ -276,7 +276,7 @@ function extractCostFromNdjson(mid) { // Auto-detect the SF loader path — works across npm global, homebrew, and local installs function findGsdLoader() { - // 1. Check if we're running from inside the gsd-2 repo itself + // 1. Check if we're running from inside the sf-2 repo itself const repoLoader = path.resolve(import.meta.dirname, '..', 'dist', 'loader.js'); if (fs.existsSync(repoLoader)) return repoLoader; @@ -285,17 +285,17 @@ function findGsdLoader() { const globalRoot = execSync('npm root -g', { encoding: 'utf-8', timeout: 3000 }).trim(); const candidates = [ path.join(globalRoot, 'sf-run', 'dist', 'loader.js'), - path.join(globalRoot, '@gsd', 'pi', 'dist', 'loader.js'), + path.join(globalRoot, '@sf', 'pi', 'dist', 'loader.js'), ]; for (const c of candidates) { if (fs.existsSync(c)) return c; } } catch { /* skip */ } - // 3. Try `which gsd` and resolve symlink + // 3. Try `which sf` and resolve symlink try { const pathLookup = process.platform === 'win32' ? 'where.exe' : 'which'; - const lookupArgs = ['gsd']; + const lookupArgs = ['sf']; const result = spawnSync(pathLookup, lookupArgs, { encoding: 'utf-8', timeout: 3000 }); const bin = result.status === 0 ? result.stdout.trim().split(/\r?\n/)[0]?.trim() : ''; if (bin) { @@ -315,7 +315,7 @@ const SF_LOADER = findGsdLoader(); * Uses a detached Node child with log file descriptors so the child is fully detached. 
*/ function respawnWorker(mid) { - const worktreeDir = path.resolve(PROJECT_ROOT, `.gsd/worktrees/${mid}`); + const worktreeDir = path.resolve(PROJECT_ROOT, `.sf/worktrees/${mid}`); if (!fs.existsSync(worktreeDir)) return null; if (!fs.existsSync(SF_LOADER)) return null; @@ -517,7 +517,7 @@ function truncate(str, maxLen) { * Get recently completed tasks/slices from the worktree DB for the event feed. */ function queryRecentCompletions(mid) { - const dbPath = path.resolve(PROJECT_ROOT, `.gsd/worktrees/${mid}/.gsd/gsd.db`); + const dbPath = path.resolve(PROJECT_ROOT, `.sf/worktrees/${mid}/.sf/sf.db`); if (!fs.existsSync(dbPath)) return []; try { @@ -653,7 +653,7 @@ function render(workers) { if (workers.length === 0) { buf.push(''); buf.push(` ${FG.yellow}No workers found in ${PARALLEL_DIR}/${RESET}`); - buf.push(` ${DIM}Waiting for .gsd/parallel/*.status.json files...${RESET}`); + buf.push(` ${DIM}Waiting for .sf/parallel/*.status.json files...${RESET}`); } else { for (const wk of workers) { buf.push(''); diff --git a/scripts/postinstall.js b/scripts/postinstall.js index 6ae9ef0c4..892492420 100644 --- a/scripts/postinstall.js +++ b/scripts/postinstall.js @@ -28,7 +28,7 @@ const RTK_SKIP = const RTK_VERSION = '0.33.1' const RTK_REPO = 'rtk-ai/rtk' const RTK_ENV = { ...process.env, RTK_TELEMETRY_DISABLED: '1' } -const managedBinDir = join(process.env.SF_HOME || process.env.GSD_HOME || join(homedir(), '.gsd'), 'agent', 'bin') +const managedBinDir = join(process.env.SF_HOME || process.env.GSD_HOME || join(homedir(), '.sf'), 'agent', 'bin') const managedBinaryPath = join(managedBinDir, platform() === 'win32' ? 'rtk.exe' : 'rtk') function run(cmd) { diff --git a/scripts/pr-risk-check.mjs b/scripts/pr-risk-check.mjs index 6e6eb1752..94e662c2f 100644 --- a/scripts/pr-risk-check.mjs +++ b/scripts/pr-risk-check.mjs @@ -124,7 +124,7 @@ function normalizePath(filePath) { * Check if a changed file matches a map entry pattern. 
 * Supports: * - Exact suffix match: src/cli.ts matches src/cli.ts - * - Glob prefix match: gsd/auto/* matches gsd/auto/anything.ts + * - Glob prefix match: sf/auto/* matches sf/auto/anything.ts * - Wildcard extension: *.tsx matches any .tsx */ function fileMatchesPattern(filePath, pattern) { diff --git a/scripts/recover-gsd-1364.ps1 b/scripts/recover-gsd-1364.ps1 index 1e6aacdc5..c6c159dae 100644 --- a/scripts/recover-gsd-1364.ps1 +++ b/scripts/recover-gsd-1364.ps1 @@ -1,18 +1,18 @@ -# recover-gsd-1364.ps1 - Recovery script for issue #1364 (Windows) +# recover-gsd-1364.ps1 - Recovery script for issue #1364 (Windows) # # CRITICAL DATA-LOSS BUG: SF versions 2.30.0-2.35.x unconditionally added -# ".gsd" to .gitignore via ensureGitignore(), causing git to report all -# tracked .gsd/ files as deleted. Fixed in v2.36.0 (PR #1367). +# ".sf" to .gitignore via ensureGitignore(), causing git to report all -# tracked .gsd/ files as deleted. Fixed in v2.36.0 (PR #1367). +# tracked .sf/ files as deleted. Fixed in v2.36.0 (PR #1367). # # This script: # 1. Detects whether the repo was affected # 2. Finds the last clean commit before the damage -# 3. Restores all deleted .gsd/ files from that commit -# 4. Removes the bad ".gsd" line from .gitignore (if .gsd/ is tracked) +# 3. Restores all deleted .sf/ files from that commit +# 4. Removes the bad ".sf" line from .gitignore (if .sf/ is tracked) # 5.
Prints a ready-to-commit summary # # Usage: -# powershell -ExecutionPolicy Bypass -File scripts\recover-gsd-1364.ps1 [-DryRun] +# powershell -ExecutionPolicy Bypass -File scripts\recover-sf-1364.ps1 [-DryRun] # # Options: # -DryRun Show what would be done without making any changes @@ -66,7 +66,7 @@ function Invoke-GitOrDryRun { } # Check whether a path is a symlink OR a junction (Windows uses junctions for -# the .gsd external-state migration via symlinkSync(..., "junction")) +# the .sf external-state migration via symlinkSync(..., "junction")) function Test-ReparsePoint { param([string]$Path) if (-not (Test-Path $Path)) { return $false } @@ -99,30 +99,30 @@ if ($DryRun) { Write-Warn "DRY-RUN mode — no changes will be made." } -# ── Step 1: Detect .gsd/ ───────────────────────────────────────────────────── +# ── Step 1: Detect .sf/ ───────────────────────────────────────────────────── -Write-Section "── Step 1: Detect .gsd/ directory ─────────────────────────────────" +Write-Section "── Step 1: Detect .sf/ directory ─────────────────────────────────" -$sfDir = Join-Path $repoRoot '.gsd' +$sfDir = Join-Path $repoRoot '.sf' $GsdIsSymlink = $false if (-not (Test-Path $sfDir)) { - Write-Ok ".gsd/ does not exist in this repo — not affected." + Write-Ok ".sf/ does not exist in this repo — not affected." exit 0 } if (Test-ReparsePoint $sfDir) { # Scenario C: migration succeeded (symlink/junction in place) but git index was never - # cleaned — tracked .gsd/* files still appear as deleted through the reparse point. + # cleaned — tracked .sf/* files still appear as deleted through the reparse point. $GsdIsSymlink = $true - Write-Warn ".gsd/ is a symlink/junction — checking for stale git index entries (Scenario C)..." + Write-Warn ".sf/ is a symlink/junction — checking for stale git index entries (Scenario C)..." } else { - Write-Info ".gsd/ is a real directory (Scenario A/B)." + Write-Info ".sf/ is a real directory (Scenario A/B)." 
} -# ── Step 2: Check .gitignore for .gsd entry ────────────────────────────────── +# ── Step 2: Check .gitignore for .sf entry ────────────────────────────────── -Write-Section "── Step 2: Check .gitignore for .gsd entry ─────────────────────────" +Write-Section "── Step 2: Check .gitignore for .sf entry ─────────────────────────" $gitignorePath = Join-Path $repoRoot '.gitignore' @@ -137,36 +137,36 @@ if (Test-Path $gitignorePath) { $gitignoreLines = Get-Content $gitignorePath -Encoding UTF8 $gsdIgnoreLine = $gitignoreLines | Where-Object { $trimmed = $_.Trim() - $trimmed -eq '.gsd' -and -not $trimmed.StartsWith('#') + $trimmed -eq '.sf' -and -not $trimmed.StartsWith('#') } | Select-Object -First 1 } if ($GsdIsSymlink) { - # Symlink layout: .gsd SHOULD be ignored (it's external state). + # Symlink layout: .sf SHOULD be ignored (it's external state). if (-not $gsdIgnoreLine) { - Write-Warn '".gsd" missing from .gitignore — will add (migration complete, .gsd/ is external).' + Write-Warn '".sf" missing from .gitignore — will add (migration complete, .sf/ is external).' } else { - Write-Ok '".gsd" already in .gitignore — correct for external-state layout.' + Write-Ok '".sf" already in .gitignore — correct for external-state layout.' } } else { - # Real-directory layout: .gsd should NOT be ignored. + # Real-directory layout: .sf should NOT be ignored. if (-not $gsdIgnoreLine) { - Write-Ok '".gsd" not found in .gitignore — .gitignore not affected.' + Write-Ok '".sf" not found in .gitignore — .gitignore not affected.' } else { - Write-Warn '".gsd" found in .gitignore — this is the bad pattern from #1364.' + Write-Warn '".sf" found in .gitignore — this is the bad pattern from #1364.' 
} } -# ── Step 3: Find deleted .gsd/ files ───────────────────────────────────────── +# ── Step 3: Find deleted .sf/ files ───────────────────────────────────────── -Write-Section "── Step 3: Find deleted .gsd/ files ───────────────────────────────" +Write-Section "── Step 3: Find deleted .sf/ files ───────────────────────────────" # Files deleted in working tree (tracked but missing) -$deletedRaw = Invoke-Git @('ls-files', '--deleted', '--', '.gsd/*') -AllowFailure +$deletedRaw = Invoke-Git @('ls-files', '--deleted', '--', '.sf/*') -AllowFailure $deletedFiles = if ($deletedRaw) { $deletedRaw -split "`n" | Where-Object { $_ } } else { @() } # Files tracked in HEAD right now -$trackedInHeadRaw = Invoke-Git @('ls-tree', '-r', '--name-only', 'HEAD', '--', '.gsd/') -AllowFailure +$trackedInHeadRaw = Invoke-Git @('ls-tree', '-r', '--name-only', 'HEAD', '--', '.sf/') -AllowFailure $trackedInHead = if ($trackedInHeadRaw) { $trackedInHeadRaw -split "`n" | Where-Object { $_ } } else { @() } $deletedFromHistory = @() @@ -176,34 +176,34 @@ if ($GsdIsSymlink) { if ($trackedInHead.Count -eq 0 -and $deletedFiles.Count -eq 0) { Write-Ok "No stale index entries found — symlink/junction layout is healthy." if (-not $gsdIgnoreLine) { - Write-Info "Add .gsd to .gitignore manually to complete the migration." + Write-Info "Add .sf to .gitignore manually to complete the migration." } exit 0 } $indexCount = if ($trackedInHead.Count -gt 0) { $trackedInHead.Count } else { $deletedFiles.Count } - Write-Warn "Scenario C: $indexCount .gsd/ file(s) tracked in git index but inaccessible through reparse point." + Write-Warn "Scenario C: $indexCount .sf/ file(s) tracked in git index but inaccessible through reparse point." Write-Info "Files are safe in external storage — only the git index needs cleaning." 
} else { # Files deleted in committed history (post-commit damage scenario — Scenario B) - $deletedHistoryRaw = Invoke-Git @('log', '--all', '--diff-filter=D', '--name-only', '--format=', '--', '.gsd/*') -AllowFailure + $deletedHistoryRaw = Invoke-Git @('log', '--all', '--diff-filter=D', '--name-only', '--format=', '--', '.sf/*') -AllowFailure $deletedFromHistory = if ($deletedHistoryRaw) { - $deletedHistoryRaw -split "`n" | Where-Object { $_ -match '^\.gsd' } | Sort-Object -Unique + $deletedHistoryRaw -split "`n" | Where-Object { $_ -match '^\.sf' } | Sort-Object -Unique } else { @() } # Nothing was ever tracked in any scenario if ($trackedInHead.Count -eq 0 -and $deletedFiles.Count -eq 0 -and $deletedFromHistory.Count -eq 0) { - Write-Ok "No .gsd/ files tracked in this repo — not affected by #1364." + Write-Ok "No .sf/ files tracked in this repo — not affected by #1364." if ($gsdIgnoreLine) { - Write-Warn '".gsd" is still in .gitignore but there is nothing to restore.' + Write-Warn '".sf" is still in .gitignore but there is nothing to restore.' } exit 0 } # Determine scenario if ($trackedInHead.Count -gt 0) { - Write-Info "Scenario A: $($trackedInHead.Count) .gsd/ files still tracked in HEAD." + Write-Info "Scenario A: $($trackedInHead.Count) .sf/ files still tracked in HEAD." } elseif ($deletedFromHistory.Count -gt 0) { - Write-Warn "Scenario B: $($deletedFromHistory.Count) .gsd/ file(s) were tracked but deleted in a committed change:" + Write-Warn "Scenario B: $($deletedFromHistory.Count) .sf/ file(s) were tracked but deleted in a committed change:" $deletedFromHistory | Select-Object -First 20 | ForEach-Object { Write-Host " - $_" } if ($deletedFromHistory.Count -gt 20) { Write-Host " ... 
and $($deletedFromHistory.Count - 20) more" @@ -211,7 +211,7 @@ if ($GsdIsSymlink) { } if ($deletedFiles.Count -gt 0) { - Write-Warn "$($deletedFiles.Count) .gsd/ file(s) are missing from working tree (tracked but deleted/gitignored):" + Write-Warn "$($deletedFiles.Count) .sf/ file(s) are missing from working tree (tracked but deleted/gitignored):" $deletedFiles | Select-Object -First 20 | ForEach-Object { Write-Host " - $_" } if ($deletedFiles.Count -gt 20) { Write-Host " ... and $($deletedFiles.Count - 20) more" @@ -221,10 +221,10 @@ if ($GsdIsSymlink) { # HEAD has files and working tree is clean — only .gitignore needs fixing if ($trackedInHead.Count -gt 0 -and $deletedFiles.Count -eq 0) { if (-not $gsdIgnoreLine) { - Write-Ok "No action needed — .gsd/ is tracked in HEAD and .gitignore is clean." + Write-Ok "No action needed — .sf/ is tracked in HEAD and .gitignore is clean." exit 0 } - Write-Info ".gsd/ is tracked in HEAD and working tree is clean — only .gitignore needs fixing." + Write-Info ".sf/ is tracked in HEAD and working tree is clean — only .gitignore needs fixing." } } @@ -239,24 +239,24 @@ $restorableFiles = @() if ($GsdIsSymlink) { Write-Info "Scenario C: symlink/junction layout — skipping commit history scan (no file restore needed)." } else { - Write-Info "Scanning git log to find when .gsd was added to .gitignore..." + Write-Info "Scanning git log to find when .sf was added to .gitignore..." 
- # Strategy 1: find first commit that added ".gsd" to .gitignore + # Strategy 1: find first commit that added ".sf" to .gitignore $gitignoreCommits = Invoke-Git @('log', '--format=%H', '--', '.gitignore') -AllowFailure if ($gitignoreCommits) { foreach ($sha in ($gitignoreCommits -split "`n" | Where-Object { $_ })) { $content = Invoke-Git @('show', "${sha}:.gitignore") -AllowFailure - if ($content -and ($content -split "`n" | Where-Object { $_.Trim() -eq '.gsd' })) { + if ($content -and ($content -split "`n" | Where-Object { $_.Trim() -eq '.sf' })) { $damageCommit = $sha break } } } - # Strategy 2: find commit that deleted .gsd/ files + # Strategy 2: find commit that deleted .sf/ files if (-not $damageCommit -and $deletedFromHistory.Count -gt 0) { - Write-Info "Searching for the commit that deleted .gsd/ files from the index..." - $deleteCommits = Invoke-Git @('log', '--all', '--diff-filter=D', '--format=%H', '--', '.gsd/*') -AllowFailure + Write-Info "Searching for the commit that deleted .sf/ files from the index..." + $deleteCommits = Invoke-Git @('log', '--all', '--diff-filter=D', '--format=%H', '--', '.sf/*') -AllowFailure if ($deleteCommits) { $damageCommit = ($deleteCommits -split "`n" | Where-Object { $_ } | Select-Object -First 1) } @@ -274,15 +274,15 @@ if ($GsdIsSymlink) { Write-Info "Restoring from: $cleanCommit — $cleanMsg" } - # Verify restore point has .gsd/ files - $restorable = Invoke-Git @('ls-tree', '-r', '--name-only', $cleanCommit, '--', '.gsd/') -AllowFailure + # Verify restore point has .sf/ files + $restorable = Invoke-Git @('ls-tree', '-r', '--name-only', $cleanCommit, '--', '.sf/') -AllowFailure $restorableFiles = if ($restorable) { $restorable -split "`n" | Where-Object { $_ } } else { @() } if ($restorableFiles.Count -eq 0) { - Exit-Fatal "No .gsd/ files found in restore point $cleanCommit — cannot recover. Check git log manually." + Exit-Fatal "No .sf/ files found in restore point $cleanCommit — cannot recover. Check git log manually." 
} - Write-Ok "Restore point has $($restorableFiles.Count) .gsd/ files available." + Write-Ok "Restore point has $($restorableFiles.Count) .sf/ files available." } # ── Step 5: Clean index (Scenario C) or restore deleted files (Scenario A/B) ─ @@ -290,34 +290,34 @@ if ($GsdIsSymlink) { if ($GsdIsSymlink) { Write-Section "── Step 5: Clean stale git index entries ───────────────────────────" - Write-Info "Running: git rm -r --cached --ignore-unmatch .gsd/ ..." - Invoke-GitOrDryRun -GitArgs @('rm', '-r', '--cached', '--ignore-unmatch', '.gsd') -Display "rm -r --cached --ignore-unmatch .gsd" + Write-Info "Running: git rm -r --cached --ignore-unmatch .sf/ ..." + Invoke-GitOrDryRun -GitArgs @('rm', '-r', '--cached', '--ignore-unmatch', '.sf') -Display "rm -r --cached --ignore-unmatch .sf" if (-not $DryRun) { - $stillStaleRaw = Invoke-Git @('ls-files', '--deleted', '--', '.gsd/*') -AllowFailure + $stillStaleRaw = Invoke-Git @('ls-files', '--deleted', '--', '.sf/*') -AllowFailure $stillStale = if ($stillStaleRaw) { $stillStaleRaw -split "`n" | Where-Object { $_ } } else { @() } if ($stillStale.Count -eq 0) { - Write-Ok "Git index cleaned — no stale .gsd/ entries remain." + Write-Ok "Git index cleaned — no stale .sf/ entries remain." } else { Write-Warn "$($stillStale.Count) stale entr(ies) still present — may need manual cleanup." } } } else { - Write-Section "── Step 5: Restore deleted .gsd/ files ────────────────────────────" + Write-Section "── Step 5: Restore deleted .sf/ files ────────────────────────────" $needsRestore = ($deletedFiles.Count -gt 0) -or ($deletedFromHistory.Count -gt 0 -and $trackedInHead.Count -eq 0) if (-not $needsRestore) { Write-Ok "No deleted files to restore — skipping." } else { - Write-Info "Restoring .gsd/ files from $cleanCommit..." - Invoke-GitOrDryRun -GitArgs @('checkout', $cleanCommit, '--', '.gsd/') -Display "checkout $cleanCommit -- .gsd/" + Write-Info "Restoring .sf/ files from $cleanCommit..." 
+ Invoke-GitOrDryRun -GitArgs @('checkout', $cleanCommit, '--', '.sf/') -Display "checkout $cleanCommit -- .sf/" if (-not $DryRun) { - $stillMissingRaw = Invoke-Git @('ls-files', '--deleted', '--', '.gsd/*') -AllowFailure + $stillMissingRaw = Invoke-Git @('ls-files', '--deleted', '--', '.sf/*') -AllowFailure $stillMissing = if ($stillMissingRaw) { $stillMissingRaw -split "`n" | Where-Object { $_ } } else { @() } if ($stillMissing.Count -eq 0) { - Write-Ok "All .gsd/ files restored successfully." + Write-Ok "All .sf/ files restored successfully." } else { Write-Warn "$($stillMissing.Count) file(s) still missing after restore — may need manual recovery:" $stillMissing | Select-Object -First 10 | ForEach-Object { Write-Host " - $_" } @@ -331,34 +331,34 @@ if ($GsdIsSymlink) { Write-Section "── Step 6: Fix .gitignore ──────────────────────────────────────────" if ($GsdIsSymlink) { - # Scenario C: .gsd IS external — it should be in .gitignore. Add if missing. + # Scenario C: .sf IS external — it should be in .gitignore. Add if missing. if (-not $gsdIgnoreLine) { - Write-Info 'Adding ".gsd" to .gitignore (migration complete — .gsd/ is external state)...' + Write-Info 'Adding ".sf" to .gitignore (migration complete — .sf/ is external state)...' if ($DryRun) { - Write-Host " (dry-run) Would append: .gsd" -ForegroundColor Yellow + Write-Host " (dry-run) Would append: .sf" -ForegroundColor Yellow } else { - $appendLines = @('', '# SF external state (symlink/junction — added by recover-gsd-1364)', '.gsd') + $appendLines = @('', '# SF external state (symlink/junction — added by recover-sf-1364)', '.sf') Add-Content -LiteralPath $gitignorePath -Value $appendLines -Encoding UTF8 - Write-Ok '".gsd" added to .gitignore.' + Write-Ok '".sf" added to .gitignore.' } } else { - Write-Ok '".gsd" already in .gitignore — correct for external-state layout.' + Write-Ok '".sf" already in .gitignore — correct for external-state layout.' 
} } else { - # Scenario A/B: .gsd is a real tracked directory — remove the bad ignore line. + # Scenario A/B: .sf is a real tracked directory — remove the bad ignore line. if (-not $gsdIgnoreLine) { - Write-Ok '".gsd" not in .gitignore — nothing to fix.' + Write-Ok '".sf" not in .gitignore — nothing to fix.' } else { - Write-Info 'Removing bare ".gsd" line from .gitignore...' + Write-Info 'Removing bare ".sf" line from .gitignore...' if ($DryRun) { - Write-Host " (dry-run) Would remove line: .gsd" -ForegroundColor Yellow + Write-Host " (dry-run) Would remove line: .sf" -ForegroundColor Yellow } else { - # Filter out the exact bare ".gsd" line — preserve all other content including - # sub-path patterns like ".gsd/", ".gsd/activity/" and comments - $cleaned = $gitignoreLines | Where-Object { $_.Trim() -ne '.gsd' } + # Filter out the exact bare ".sf" line — preserve all other content including + # sub-path patterns like ".sf/", ".sf/activity/" and comments + $cleaned = $gitignoreLines | Where-Object { $_.Trim() -ne '.sf' } # Write with UTF-8 no BOM to match git's expectations [System.IO.File]::WriteAllLines($gitignorePath, $cleaned, [System.Text.UTF8Encoding]::new($false)) - Write-Ok '".gsd" line removed from .gitignore.' + Write-Ok '".sf" line removed from .gitignore.' } } } @@ -368,18 +368,18 @@ if ($GsdIsSymlink) { Write-Section "── Step 7: Stage recovery changes ──────────────────────────────────" if (-not $DryRun) { - $changed = Invoke-Git @('status', '--short', '--', '.gsd/', '.gitignore') -AllowFailure + $changed = Invoke-Git @('status', '--short', '--', '.sf/', '.gitignore') -AllowFailure if (-not $changed) { Write-Ok "No staged changes — working tree was already clean." } else { if ($GsdIsSymlink) { # Scenario C: git rm --cached already staged the index cleanup. - # Only stage .gitignore — adding .gsd/ would fail (now gitignored). + # Only stage .gitignore — adding .sf/ would fail (now gitignored). 
Invoke-Git @('add', '.gitignore') -AllowFailure | Out-Null } else { - Invoke-Git @('add', '.gsd/', '.gitignore') -AllowFailure | Out-Null + Invoke-Git @('add', '.sf/', '.gitignore') -AllowFailure | Out-Null } - $stagedRaw = Invoke-Git @('diff', '--cached', '--name-only', '--', '.gsd/', '.gitignore') -AllowFailure + $stagedRaw = Invoke-Git @('diff', '--cached', '--name-only', '--', '.sf/', '.gitignore') -AllowFailure $stagedFiles = if ($stagedRaw) { $stagedRaw -split "`n" | Where-Object { $_ } } else { @() } Write-Ok "$($stagedFiles.Count) file(s) staged and ready to commit." } @@ -392,16 +392,16 @@ Write-Section "── Summary ──────────────── if ($DryRun) { Write-Host "Dry-run complete. Re-run without -DryRun to apply changes." -ForegroundColor Yellow } else { - $finalStagedRaw = Invoke-Git @('diff', '--cached', '--name-only', '--', '.gsd/', '.gitignore') -AllowFailure + $finalStagedRaw = Invoke-Git @('diff', '--cached', '--name-only', '--', '.sf/', '.gitignore') -AllowFailure $finalStaged = if ($finalStagedRaw) { $finalStagedRaw -split "`n" | Where-Object { $_ } } else { @() } if ($finalStaged.Count -gt 0) { Write-Host "Recovery complete. 
Commit with:" -ForegroundColor Green Write-Host "" if ($GsdIsSymlink) { - Write-Host ' git commit -m "fix: clean stale .gsd/ index entries after external-state migration"' + Write-Host ' git commit -m "fix: clean stale .sf/ index entries after external-state migration"' } else { - Write-Host ' git commit -m "fix: restore .gsd/ files deleted by #1364 regression"' + Write-Host ' git commit -m "fix: restore .sf/ files deleted by #1364 regression"' } Write-Host "" Write-Host "Staged files:" diff --git a/scripts/recover-gsd-1364.sh b/scripts/recover-gsd-1364.sh index 65e8f30a5..43bf5e23d 100755 --- a/scripts/recover-gsd-1364.sh +++ b/scripts/recover-gsd-1364.sh @@ -1,23 +1,23 @@ #!/usr/bin/env bash -# recover-gsd-1364.sh — Recovery script for issue #1364 (Linux / macOS) +# recover-gsd-1364.sh — Recovery script for issue #1364 (Linux / macOS) # # For Windows use the PowerShell equivalent: -# powershell -ExecutionPolicy Bypass -File scripts\recover-gsd-1364.ps1 [-DryRun] +# powershell -ExecutionPolicy Bypass -File scripts\recover-gsd-1364.ps1 [-DryRun] # # CRITICAL DATA-LOSS BUG: SF versions 2.30.0–2.35.x unconditionally added -# ".gsd" to .gitignore via ensureGitignore(), causing git to report all -# tracked .gsd/ files as deleted. Fixed in v2.36.0 (PR #1367). +# ".sf" to .gitignore via ensureGitignore(), causing git to report all +# tracked .sf/ files as deleted. Fixed in v2.36.0 (PR #1367). # Three residual vectors remain on v2.36.0–v2.38.0 — see PR #1635 for details. # # This script: # 1. Detects whether the repo was affected # 2. Finds the last clean commit before the damage -# 3. Restores all deleted .gsd/ files from that commit -# 4. Removes the bad ".gsd" line from .gitignore (if .gsd/ is tracked) +# 3. Restores all deleted .sf/ files from that commit +# 4. Removes the bad ".sf" line from .gitignore (if .sf/ is tracked) # 5.
Prints a ready-to-commit summary # # Usage: -# bash scripts/recover-gsd-1364.sh [--dry-run] +# bash scripts/recover-sf-1364.sh [--dry-run] # # Options: # --dry-run Show what would be done without making any changes @@ -84,30 +84,30 @@ if $DRY_RUN; then warn "DRY-RUN mode — no changes will be made." fi -# ─── Step 1: Check if .gsd/ exists ──────────────────────────────────────────── +# ─── Step 1: Check if .sf/ exists ──────────────────────────────────────────── -section "── Step 1: Detect .gsd/ directory ────────────────────────────────────" +section "── Step 1: Detect .sf/ directory ────────────────────────────────────" -SF_DIR="$REPO_ROOT/.gsd" +SF_DIR="$REPO_ROOT/.sf" SF_IS_SYMLINK=false if [[ ! -e "$SF_DIR" ]]; then - ok ".gsd/ does not exist in this repo — not affected." + ok ".sf/ does not exist in this repo — not affected." exit 0 fi if [[ -L "$SF_DIR" ]]; then # Scenario C: migration succeeded (symlink in place) but git index was never - # cleaned — tracked .gsd/* files still appear as deleted through the symlink. + # cleaned — tracked .sf/* files still appear as deleted through the symlink. SF_IS_SYMLINK=true - warn ".gsd/ is a symlink — checking for stale git index entries (Scenario C)..." + warn ".sf/ is a symlink — checking for stale git index entries (Scenario C)..." else - info ".gsd/ is a real directory (Scenario A/B)." + info ".sf/ is a real directory (Scenario A/B)." fi -# ─── Step 2: Check if .gsd is in .gitignore ─────────────────────────────────── +# ─── Step 2: Check if .sf is in .gitignore ─────────────────────────────────── -section "── Step 2: Check .gitignore for .gsd entry ────────────────────────────" +section "── Step 2: Check .gitignore for .sf entry ────────────────────────────" GITIGNORE="$REPO_ROOT/.gitignore" @@ -116,13 +116,13 @@ if [[ ! -f "$GITIGNORE" ]] && ! 
$SF_IS_SYMLINK; then exit 0 fi -# Look for a bare ".gsd" line (not a comment, not a sub-path like .gsd/) +# Look for a bare ".sf" line (not a comment, not a sub-path like .sf/) SF_IGNORE_LINE="" if [[ -f "$GITIGNORE" ]]; then while IFS= read -r line; do trimmed="${line#"${line%%[![:space:]]*}"}" trimmed="${trimmed%"${trimmed##*[![:space:]]}"}" - if [[ "$trimmed" == ".gsd" ]] && [[ "${trimmed:0:1}" != "#" ]]; then + if [[ "$trimmed" == ".sf" ]] && [[ "${trimmed:0:1}" != "#" ]]; then SF_IGNORE_LINE="$trimmed" break fi @@ -130,31 +130,31 @@ if [[ -f "$GITIGNORE" ]]; then fi if $SF_IS_SYMLINK; then - # Symlink layout: .gsd SHOULD be ignored (it's external state). + # Symlink layout: .sf SHOULD be ignored (it's external state). # Missing = needs adding. Present = correct. if [[ -z "$SF_IGNORE_LINE" ]]; then - warn '".gsd" missing from .gitignore — will add (migration complete, .gsd/ is external).' + warn '".sf" missing from .gitignore — will add (migration complete, .sf/ is external).' else - ok '".gsd" already in .gitignore — correct for external-state layout.' + ok '".sf" already in .gitignore — correct for external-state layout.' fi else - # Real-directory layout: .gsd should NOT be ignored. + # Real-directory layout: .sf should NOT be ignored. if [[ -z "$SF_IGNORE_LINE" ]]; then - ok '".gsd" not found in .gitignore — .gitignore not affected.' + ok '".sf" not found in .gitignore — .gitignore not affected.' else - warn '".gsd" found in .gitignore — this is the bad pattern from #1364.' + warn '".sf" found in .gitignore — this is the bad pattern from #1364.' 
fi fi -# ─── Step 3: Find deleted .gsd/ tracked files ───────────────────────────────── +# ─── Step 3: Find deleted .sf/ tracked files ───────────────────────────────── -section "── Step 3: Find deleted .gsd/ files ───────────────────────────────────" +section "── Step 3: Find deleted .sf/ files ───────────────────────────────────" # Files showing as deleted in the working tree (tracked in index but missing) -DELETED_FILES="$(git ls-files --deleted -- '.gsd/*' 2>/dev/null || true)" +DELETED_FILES="$(git ls-files --deleted -- '.sf/*' 2>/dev/null || true)" # Files tracked in HEAD right now -TRACKED_IN_HEAD="$(git ls-tree -r --name-only HEAD -- '.gsd/' 2>/dev/null || true)" +TRACKED_IN_HEAD="$(git ls-tree -r --name-only HEAD -- '.sf/' 2>/dev/null || true)" if $SF_IS_SYMLINK; then # Scenario C: migration succeeded. Files are safe via symlink. @@ -162,49 +162,49 @@ if $SF_IS_SYMLINK; then if [[ -z "$TRACKED_IN_HEAD" ]] && [[ -z "$DELETED_FILES" ]]; then ok "No stale index entries found — symlink layout is healthy." if [[ -z "$SF_IGNORE_LINE" ]]; then - info "Add .gsd to .gitignore manually to complete the migration." + info "Add .sf to .gitignore manually to complete the migration." fi exit 0 fi INDEX_COUNT="$(echo "${TRACKED_IN_HEAD:-$DELETED_FILES}" | wc -l | tr -d ' ')" - warn "Scenario C: ${INDEX_COUNT} .gsd/ file(s) tracked in git index but inaccessible through symlink." + warn "Scenario C: ${INDEX_COUNT} .sf/ file(s) tracked in git index but inaccessible through symlink." info "Files are safe in external storage — only the git index needs cleaning." 
else # Files deleted via a committed git rm --cached (Scenario B) - DELETED_FROM_HISTORY="$(git log --all --diff-filter=D --name-only --format="" -- '.gsd/*' 2>/dev/null \ - | grep '^\.gsd' | sort -u || true)" + DELETED_FROM_HISTORY="$(git log --all --diff-filter=D --name-only --format="" -- '.sf/*' 2>/dev/null \ + | grep '^\.sf' | sort -u || true)" if [[ -z "$TRACKED_IN_HEAD" ]] && [[ -z "$DELETED_FILES" ]] && [[ -z "$DELETED_FROM_HISTORY" ]]; then - ok "No .gsd/ files tracked in this repo — not affected by #1364." + ok "No .sf/ files tracked in this repo — not affected by #1364." if [[ -n "$SF_IGNORE_LINE" ]]; then - warn '".gsd" is still in .gitignore but there is nothing to restore.' + warn '".sf" is still in .gitignore but there is nothing to restore.' fi exit 0 fi if [[ -n "$TRACKED_IN_HEAD" ]]; then TRACKED_COUNT="$(echo "$TRACKED_IN_HEAD" | wc -l | tr -d ' ')" - info "Scenario A: ${TRACKED_COUNT} .gsd/ files still tracked in HEAD." + info "Scenario A: ${TRACKED_COUNT} .sf/ files still tracked in HEAD." elif [[ -n "$DELETED_FROM_HISTORY" ]]; then DELETED_HIST_COUNT="$(echo "$DELETED_FROM_HISTORY" | wc -l | tr -d ' ')" - warn "Scenario B: ${DELETED_HIST_COUNT} .gsd/ file(s) deleted in a committed change:" + warn "Scenario B: ${DELETED_HIST_COUNT} .sf/ file(s) deleted in a committed change:" echo "$DELETED_FROM_HISTORY" | head -20 | while IFS= read -r f; do echo " - $f"; done if (( DELETED_HIST_COUNT > 20 )); then echo " ... and $((DELETED_HIST_COUNT - 20)) more"; fi fi if [[ -n "$DELETED_FILES" ]]; then DELETED_COUNT="$(echo "$DELETED_FILES" | wc -l | tr -d ' ')" - warn "${DELETED_COUNT} .gsd/ file(s) missing from working tree:" + warn "${DELETED_COUNT} .sf/ file(s) missing from working tree:" echo "$DELETED_FILES" | head -20 | while IFS= read -r f; do echo " - $f"; done if (( DELETED_COUNT > 20 )); then echo " ... 
and $((DELETED_COUNT - 20)) more"; fi fi if [[ -n "$TRACKED_IN_HEAD" ]] && [[ -z "$DELETED_FILES" ]]; then if [[ -z "$SF_IGNORE_LINE" ]]; then - ok "No action needed — .gsd/ is tracked in HEAD and .gitignore is clean." + ok "No action needed — .sf/ is tracked in HEAD and .gitignore is clean." exit 0 fi - info ".gsd/ is tracked in HEAD and working tree is clean — only .gitignore needs fixing." + info ".sf/ is tracked in HEAD and working tree is clean — only .gitignore needs fixing." fi fi @@ -219,23 +219,23 @@ RESTORABLE="" if $SF_IS_SYMLINK; then info "Scenario C: symlink layout — skipping commit history scan (no file restore needed)." else - # Find the commit where ".gsd" was first added to .gitignore - # by walking the log and finding the first commit where .gitignore contained ".gsd" - info "Scanning git log to find when .gsd was added to .gitignore..." + # Find the commit where ".sf" was first added to .gitignore + # by walking the log and finding the first commit where .gitignore contained ".sf" + info "Scanning git log to find when .sf was added to .gitignore..." - # Strategy 1: find the first commit that added ".gsd" to .gitignore + # Strategy 1: find the first commit that added ".sf" to .gitignore while IFS= read -r sha; do content="$(git show "${sha}:.gitignore" 2>/dev/null || true)" - if echo "$content" | grep -qx '\.gsd' 2>/dev/null; then + if echo "$content" | grep -qx '\.sf' 2>/dev/null; then DAMAGE_COMMIT="$sha" break fi done < <(git log --format="%H" -- .gitignore) - # Strategy 2: if .gsd files were committed as deleted, find that commit + # Strategy 2: if .sf files were committed as deleted, find that commit if [[ -z "$DAMAGE_COMMIT" ]] && [[ -n "${DELETED_FROM_HISTORY:-}" ]]; then - info "Searching for the commit that deleted .gsd/ files from the index..." - DAMAGE_COMMIT="$(git log --all --diff-filter=D --format="%H" -- '.gsd/*' 2>/dev/null | head -1 || true)" + info "Searching for the commit that deleted .sf/ files from the index..." 
+ DAMAGE_COMMIT="$(git log --all --diff-filter=D --format="%H" -- '.sf/*' 2>/dev/null | head -1 || true)" fi if [[ -z "$DAMAGE_COMMIT" ]]; then @@ -248,14 +248,14 @@ else info "Restoring from: $CLEAN_COMMIT — $CLEAN_MSG" fi - # Verify the clean commit actually has .gsd/ files - RESTORABLE="$(git ls-tree -r --name-only "$CLEAN_COMMIT" -- '.gsd/' 2>/dev/null || true)" + # Verify the clean commit actually has .sf/ files + RESTORABLE="$(git ls-tree -r --name-only "$CLEAN_COMMIT" -- '.sf/' 2>/dev/null || true)" if [[ -z "$RESTORABLE" ]]; then - die "No .gsd/ files found in restore point $CLEAN_COMMIT — cannot recover. Check git log manually." + die "No .sf/ files found in restore point $CLEAN_COMMIT — cannot recover. Check git log manually." fi RESTORABLE_COUNT="$(echo "$RESTORABLE" | wc -l | tr -d ' ')" - ok "Restore point has ${RESTORABLE_COUNT} .gsd/ files available." + ok "Restore point has ${RESTORABLE_COUNT} .sf/ files available." fi # ─── Step 5: Clean index (Scenario C) or restore deleted files (Scenario A/B) ─ @@ -263,18 +263,18 @@ fi if $SF_IS_SYMLINK; then section "── Step 5: Clean stale git index entries ───────────────────────────────" - info "Running: git rm -r --cached --ignore-unmatch .gsd/ ..." - run "git rm -r --cached --ignore-unmatch .gsd" + info "Running: git rm -r --cached --ignore-unmatch .sf/ ..." + run "git rm -r --cached --ignore-unmatch .sf" if ! $DRY_RUN; then - STILL_STALE="$(git ls-files --deleted -- '.gsd/*' 2>/dev/null || true)" + STILL_STALE="$(git ls-files --deleted -- '.sf/*' 2>/dev/null || true)" if [[ -z "$STILL_STALE" ]]; then - ok "Git index cleaned — no stale .gsd/ entries remain." + ok "Git index cleaned — no stale .sf/ entries remain." else warn "$(echo "$STILL_STALE" | wc -l | tr -d ' ') stale entr(ies) still present — may need manual cleanup." 
fi fi else - section "── Step 5: Restore deleted .gsd/ files ────────────────────────────────" + section "── Step 5: Restore deleted .sf/ files ────────────────────────────────" NEEDS_RESTORE=false [[ -n "$DELETED_FILES" ]] && NEEDS_RESTORE=true @@ -283,12 +283,12 @@ else if ! $NEEDS_RESTORE; then ok "No deleted files to restore — skipping." else - info "Restoring .gsd/ files from $CLEAN_COMMIT..." - run "git checkout \"$CLEAN_COMMIT\" -- .gsd/" + info "Restoring .sf/ files from $CLEAN_COMMIT..." + run "git checkout \"$CLEAN_COMMIT\" -- .sf/" if ! $DRY_RUN; then - STILL_MISSING="$(git ls-files --deleted -- '.gsd/*' 2>/dev/null || true)" + STILL_MISSING="$(git ls-files --deleted -- '.sf/*' 2>/dev/null || true)" if [[ -z "$STILL_MISSING" ]]; then - ok "All .gsd/ files restored successfully." + ok "All .sf/ files restored successfully." else MISS_COUNT="$(echo "$STILL_MISSING" | wc -l | tr -d ' ')" warn "${MISS_COUNT} file(s) still missing after restore — may need manual recovery:" @@ -303,33 +303,33 @@ fi section "── Step 6: Fix .gitignore ───────────────────────────────────────────────" if $SF_IS_SYMLINK; then - # Scenario C: .gsd IS external — it should be in .gitignore. Add if missing. + # Scenario C: .sf IS external — it should be in .gitignore. Add if missing. if [[ -z "$SF_IGNORE_LINE" ]]; then - info 'Adding ".gsd" to .gitignore (migration complete — .gsd/ is external state)...' + info 'Adding ".sf" to .gitignore (migration complete — .sf/ is external state)...' if $DRY_RUN; then - echo -e " ${YELLOW}(dry-run)${RESET} Would append: .gsd" + echo -e " ${YELLOW}(dry-run)${RESET} Would append: .sf" else - printf '\n# SF external state (symlink — added by recover-gsd-1364)\n.gsd\n' >> "$GITIGNORE" - ok '".gsd" added to .gitignore.' + printf '\n# SF external state (symlink — added by recover-sf-1364)\n.sf\n' >> "$GITIGNORE" + ok '".sf" added to .gitignore.' fi else - ok '".gsd" already in .gitignore — correct for external-state layout.' 
+ ok '".sf" already in .gitignore — correct for external-state layout.' fi else - # Scenario A/B: .gsd is a real tracked directory — remove the bad ignore line. + # Scenario A/B: .sf is a real tracked directory — remove the bad ignore line. if [[ -z "$SF_IGNORE_LINE" ]]; then - ok '".gsd" not in .gitignore — nothing to fix.' + ok '".sf" not in .gitignore — nothing to fix.' else - info 'Removing bare ".gsd" line from .gitignore...' + info 'Removing bare ".sf" line from .gitignore...' if $DRY_RUN; then - echo -e " ${YELLOW}(dry-run)${RESET} Would remove line: .gsd" + echo -e " ${YELLOW}(dry-run)${RESET} Would remove line: .sf" else - # Remove the exact line ".gsd" (not comments, not .gsd/ subdirs) + # Remove the exact line ".sf" (not comments, not .sf/ subdirs) # Use a temp file for portability (no sed -i on all platforms) TMP="$(mktemp)" - grep -v '^\.gsd$' "$GITIGNORE" > "$TMP" || true + grep -v '^\.sf$' "$GITIGNORE" > "$TMP" || true mv "$TMP" "$GITIGNORE" - ok '".gsd" line removed from .gitignore.' + ok '".sf" line removed from .gitignore.' fi fi fi @@ -339,18 +339,18 @@ fi section "── Step 7: Stage recovery changes ──────────────────────────────────────" if ! $DRY_RUN; then - CHANGED="$(git status --short -- '.gsd/' .gitignore 2>/dev/null || true)" + CHANGED="$(git status --short -- '.sf/' .gitignore 2>/dev/null || true)" if [[ -z "$CHANGED" ]]; then ok "No staged changes — working tree was already clean." else if $SF_IS_SYMLINK; then # Scenario C: the git rm --cached already staged the index cleanup. - # Only stage .gitignore — adding .gsd/ would fail (now gitignored). + # Only stage .gitignore — adding .sf/ would fail (now gitignored). 
git add .gitignore 2>/dev/null || true else - git add .gsd/ .gitignore 2>/dev/null || true + git add .sf/ .gitignore 2>/dev/null || true fi - STAGED_COUNT="$(git diff --cached --name-only -- '.gsd/' .gitignore | wc -l | tr -d ' ')" + STAGED_COUNT="$(git diff --cached --name-only -- '.sf/' .gitignore | wc -l | tr -d ' ')" ok "${STAGED_COUNT} file(s) staged and ready to commit." fi fi @@ -362,21 +362,21 @@ section "── Summary ────────────────── if $DRY_RUN; then echo -e "${YELLOW}Dry-run complete. Re-run without --dry-run to apply changes.${RESET}" else - FINAL_STAGED="$(git diff --cached --name-only -- '.gsd/' .gitignore 2>/dev/null | wc -l | tr -d ' ')" + FINAL_STAGED="$(git diff --cached --name-only -- '.sf/' .gitignore 2>/dev/null | wc -l | tr -d ' ')" if (( FINAL_STAGED > 0 )); then echo -e "${GREEN}Recovery complete. Commit with:${RESET}" echo "" if $SF_IS_SYMLINK; then - echo " git commit -m \"fix: clean stale .gsd/ index entries after external-state migration\"" + echo " git commit -m \"fix: clean stale .sf/ index entries after external-state migration\"" else - echo " git commit -m \"fix: restore .gsd/ files deleted by #1364 regression\"" + echo " git commit -m \"fix: restore .sf/ files deleted by #1364 regression\"" fi echo "" echo "Staged files:" - git diff --cached --name-only -- '.gsd/' .gitignore | head -20 | while IFS= read -r f; do + git diff --cached --name-only -- '.sf/' .gitignore | head -20 | while IFS= read -r f; do echo " + $f" done - TOTAL_STAGED="$(git diff --cached --name-only -- '.gsd/' .gitignore | wc -l | tr -d ' ')" + TOTAL_STAGED="$(git diff --cached --name-only -- '.sf/' .gitignore | wc -l | tr -d ' ')" if (( TOTAL_STAGED > 20 )); then echo " ... 
and $((TOTAL_STAGED - 20)) more" fi diff --git a/scripts/recover-gsd-1668.ps1 b/scripts/recover-gsd-1668.ps1 index 4baac1a0f..9d67447c9 100644 --- a/scripts/recover-gsd-1668.ps1 +++ b/scripts/recover-gsd-1668.ps1 @@ -1,4 +1,4 @@ -# recover-gsd-1668.ps1 — Recovery script for issue #1668 (Windows) +# recover-sf-1668.ps1 — Recovery script for issue #1668 (Windows) # # SF v2.39.x deleted the milestone branch and worktree directory when a # merge failed due to the repo using `master` as its default branch (not @@ -13,7 +13,7 @@ # 5. Reports what was found and how to complete the merge manually # # Usage: -# powershell -ExecutionPolicy Bypass -File scripts\recover-gsd-1668.ps1 [-MilestoneId ] [-DryRun] [-Auto] +# powershell -ExecutionPolicy Bypass -File scripts\recover-sf-1668.ps1 [-MilestoneId ] [-DryRun] [-Auto] # # Options: # -MilestoneId SF milestone ID (e.g. M001-g2nalq). @@ -295,9 +295,9 @@ if (-not $DryRun) { if (-not $DryRun) { Section "── Step 6: Verify recovery branch ──────────────────────────────────────" - $fileList = & git ls-tree -r --name-only $recoveryBranch 2>/dev/null | Where-Object { $_ -notmatch '^\.gsd/' } + $fileList = & git ls-tree -r --name-only $recoveryBranch 2>/dev/null | Where-Object { $_ -notmatch '^\.sf/' } $fileCount = @($fileList).Count - Info "Files recoverable (excluding .gsd/ state files): $fileCount" + Info "Files recoverable (excluding .sf/ state files): $fileCount" $fileList | Select-Object -First 30 | ForEach-Object { Write-Host " $_" } if ($fileCount -gt 30) { Dim " ... 
and $($fileCount - 30) more" } } diff --git a/scripts/recover-gsd-1668.sh b/scripts/recover-gsd-1668.sh index fafc02fa6..59cdb3e32 100755 --- a/scripts/recover-gsd-1668.sh +++ b/scripts/recover-gsd-1668.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# recover-gsd-1668.sh — Recovery script for issue #1668 (Linux / macOS) +# recover-sf-1668.sh — Recovery script for issue #1668 (Linux / macOS) # # SF v2.39.x deleted the milestone branch and worktree directory when a # merge failed due to the repo using `master` as its default branch (not @@ -14,7 +14,7 @@ # 5. Reports what was found and how to complete the merge manually # # Usage: -# bash scripts/recover-gsd-1668.sh [--milestone ] [--dry-run] [--auto] +# bash scripts/recover-sf-1668.sh [--milestone ] [--dry-run] [--auto] # # Options: # --milestone SF milestone ID (e.g. M001-g2nalq). @@ -398,10 +398,10 @@ fi if ! $DRY_RUN; then section "── Step 6: Verify recovery branch ──────────────────────────────────────" - FILE_LIST="$(git ls-tree -r --name-only "${RECOVERY_BRANCH}" 2>/dev/null | grep -v '^\.gsd/' || true)" + FILE_LIST="$(git ls-tree -r --name-only "${RECOVERY_BRANCH}" 2>/dev/null | grep -v '^\.sf/' || true)" FILE_COUNT="$(echo "$FILE_LIST" | grep -c . || true)" - info "Files recoverable (excluding .gsd/ state files): ${FILE_COUNT}" + info "Files recoverable (excluding .sf/ state files): ${FILE_COUNT}" echo "$FILE_LIST" | head -30 | while IFS= read -r f; do echo " $f"; done if [[ "$FILE_COUNT" -gt 30 ]]; then dim " ... 
and $((FILE_COUNT - 30)) more" diff --git a/scripts/recover-sf-1364.sh b/scripts/recover-sf-1364.sh new file mode 100755 index 000000000..43bf5e23d --- /dev/null +++ b/scripts/recover-sf-1364.sh @@ -0,0 +1,386 @@ +#!/usr/bin/env bash +# recover-sf-1364.sh — Recovery script for issue #1364 (Linux / macOS) +# +# For Windows use the PowerShell equivalent: +# powershell -ExecutionPolicy Bypass -File scripts\recover-sf-1364.ps1 [-DryRun] +# +# CRITICAL DATA-LOSS BUG: SF versions 2.30.0–2.35.x unconditionally added +# ".sf" to .gitignore via ensureGitignore(), causing git to report all +# tracked .sf/ files as deleted. Fixed in v2.36.0 (PR #1367). +# Three residual vectors remain on v2.36.0–v2.38.0 — see PR #1635 for details. +# +# This script: +# 1. Detects whether the repo was affected +# 2. Finds the last clean commit before the damage +# 3. Restores all deleted .sf/ files from that commit +# 4. Removes the bad ".sf" line from .gitignore (if .sf/ is tracked) +# 5. Prints a ready-to-commit summary +# +# Usage: +# bash scripts/recover-sf-1364.sh [--dry-run] +# +# Options: +# --dry-run Show what would be done without making any changes +# +# Requirements: git >= 2.x, bash >= 4.x + +set -euo pipefail + +# ─── Colours ────────────────────────────────────────────────────────────────── + +RED='\033[0;31m' +YELLOW='\033[1;33m' +GREEN='\033[0;32m' +CYAN='\033[0;36m' +BOLD='\033[1m' +RESET='\033[0m' + +# ─── Args ───────────────────────────────────────────────────────────────────── + +DRY_RUN=false +for arg in "$@"; do + case "$arg" in + --dry-run) DRY_RUN=true ;; + *) echo "Unknown argument: $arg" >&2; exit 1 ;; + esac +done + +# ─── Helpers ────────────────────────────────────────────────────────────────── + +info() { echo -e "${CYAN}[info]${RESET} $*"; } +ok() { echo -e "${GREEN}[ok]${RESET} $*"; } +warn() { echo -e "${YELLOW}[warn]${RESET} $*"; } +error() { echo -e "${RED}[error]${RESET} $*" >&2; } +section() { echo -e "\n${BOLD}$*${RESET}"; } + +die() { + error "$*" + exit 
1 +} + +# Run or print-only depending on --dry-run +run() { + if $DRY_RUN; then + echo -e " ${YELLOW}(dry-run)${RESET} $*" + else + eval "$*" + fi +} + +# ─── Preflight ──────────────────────────────────────────────────────────────── + +section "── Preflight ───────────────────────────────────────────────────────" + +# Must be run from a git repo root +if ! git rev-parse --git-dir > /dev/null 2>&1; then + die "Not inside a git repository. Run this from your project root." +fi + +REPO_ROOT="$(git rev-parse --show-toplevel)" +cd "$REPO_ROOT" +info "Repo root: $REPO_ROOT" + +if $DRY_RUN; then + warn "DRY-RUN mode — no changes will be made." +fi + +# ─── Step 1: Check if .sf/ exists ──────────────────────────────────────────── + +section "── Step 1: Detect .sf/ directory ────────────────────────────────────" + +SF_DIR="$REPO_ROOT/.sf" +SF_IS_SYMLINK=false + +if [[ ! -e "$SF_DIR" ]]; then + ok ".sf/ does not exist in this repo — not affected." + exit 0 +fi + +if [[ -L "$SF_DIR" ]]; then + # Scenario C: migration succeeded (symlink in place) but git index was never + # cleaned — tracked .sf/* files still appear as deleted through the symlink. + SF_IS_SYMLINK=true + warn ".sf/ is a symlink — checking for stale git index entries (Scenario C)..." +else + info ".sf/ is a real directory (Scenario A/B)." +fi + +# ─── Step 2: Check if .sf is in .gitignore ─────────────────────────────────── + +section "── Step 2: Check .gitignore for .sf entry ────────────────────────────" + +GITIGNORE="$REPO_ROOT/.gitignore" + +if [[ ! -f "$GITIGNORE" ]] && ! $SF_IS_SYMLINK; then + ok ".gitignore does not exist — not affected." 
+ exit 0 +fi + +# Look for a bare ".sf" line (not a comment, not a sub-path like .sf/) +SF_IGNORE_LINE="" +if [[ -f "$GITIGNORE" ]]; then + while IFS= read -r line; do + trimmed="${line#"${line%%[![:space:]]*}"}" + trimmed="${trimmed%"${trimmed##*[![:space:]]}"}" + if [[ "$trimmed" == ".sf" ]] && [[ "${trimmed:0:1}" != "#" ]]; then + SF_IGNORE_LINE="$trimmed" + break + fi + done < "$GITIGNORE" +fi + +if $SF_IS_SYMLINK; then + # Symlink layout: .sf SHOULD be ignored (it's external state). + # Missing = needs adding. Present = correct. + if [[ -z "$SF_IGNORE_LINE" ]]; then + warn '".sf" missing from .gitignore — will add (migration complete, .sf/ is external).' + else + ok '".sf" already in .gitignore — correct for external-state layout.' + fi +else + # Real-directory layout: .sf should NOT be ignored. + if [[ -z "$SF_IGNORE_LINE" ]]; then + ok '".sf" not found in .gitignore — .gitignore not affected.' + else + warn '".sf" found in .gitignore — this is the bad pattern from #1364.' + fi +fi + +# ─── Step 3: Find deleted .sf/ tracked files ───────────────────────────────── + +section "── Step 3: Find deleted .sf/ files ───────────────────────────────────" + +# Files showing as deleted in the working tree (tracked in index but missing) +DELETED_FILES="$(git ls-files --deleted -- '.sf/*' 2>/dev/null || true)" + +# Files tracked in HEAD right now +TRACKED_IN_HEAD="$(git ls-tree -r --name-only HEAD -- '.sf/' 2>/dev/null || true)" + +if $SF_IS_SYMLINK; then + # Scenario C: migration succeeded. Files are safe via symlink. + # Only index entries can be stale — no need to scan commit history. + if [[ -z "$TRACKED_IN_HEAD" ]] && [[ -z "$DELETED_FILES" ]]; then + ok "No stale index entries found — symlink layout is healthy." + if [[ -z "$SF_IGNORE_LINE" ]]; then + info "Add .sf to .gitignore manually to complete the migration." 
+ fi + exit 0 + fi + INDEX_COUNT="$(echo "${TRACKED_IN_HEAD:-$DELETED_FILES}" | wc -l | tr -d ' ')" + warn "Scenario C: ${INDEX_COUNT} .sf/ file(s) tracked in git index but inaccessible through symlink." + info "Files are safe in external storage — only the git index needs cleaning." +else + # Files deleted via a committed git rm --cached (Scenario B) + DELETED_FROM_HISTORY="$(git log --all --diff-filter=D --name-only --format="" -- '.sf/*' 2>/dev/null \ + | grep '^\.sf' | sort -u || true)" + + if [[ -z "$TRACKED_IN_HEAD" ]] && [[ -z "$DELETED_FILES" ]] && [[ -z "$DELETED_FROM_HISTORY" ]]; then + ok "No .sf/ files tracked in this repo — not affected by #1364." + if [[ -n "$SF_IGNORE_LINE" ]]; then + warn '".sf" is still in .gitignore but there is nothing to restore.' + fi + exit 0 + fi + + if [[ -n "$TRACKED_IN_HEAD" ]]; then + TRACKED_COUNT="$(echo "$TRACKED_IN_HEAD" | wc -l | tr -d ' ')" + info "Scenario A: ${TRACKED_COUNT} .sf/ files still tracked in HEAD." + elif [[ -n "$DELETED_FROM_HISTORY" ]]; then + DELETED_HIST_COUNT="$(echo "$DELETED_FROM_HISTORY" | wc -l | tr -d ' ')" + warn "Scenario B: ${DELETED_HIST_COUNT} .sf/ file(s) deleted in a committed change:" + echo "$DELETED_FROM_HISTORY" | head -20 | while IFS= read -r f; do echo " - $f"; done + if (( DELETED_HIST_COUNT > 20 )); then echo " ... and $((DELETED_HIST_COUNT - 20)) more"; fi + fi + + if [[ -n "$DELETED_FILES" ]]; then + DELETED_COUNT="$(echo "$DELETED_FILES" | wc -l | tr -d ' ')" + warn "${DELETED_COUNT} .sf/ file(s) missing from working tree:" + echo "$DELETED_FILES" | head -20 | while IFS= read -r f; do echo " - $f"; done + if (( DELETED_COUNT > 20 )); then echo " ... and $((DELETED_COUNT - 20)) more"; fi + fi + + if [[ -n "$TRACKED_IN_HEAD" ]] && [[ -z "$DELETED_FILES" ]]; then + if [[ -z "$SF_IGNORE_LINE" ]]; then + ok "No action needed — .sf/ is tracked in HEAD and .gitignore is clean." + exit 0 + fi + info ".sf/ is tracked in HEAD and working tree is clean — only .gitignore needs fixing." 
+ fi +fi + +# ─── Step 4: Find the last clean commit (Scenario A/B only) ─────────────────── + +section "── Step 4: Find last clean commit ──────────────────────────────────────" + +DAMAGE_COMMIT="" +CLEAN_COMMIT="" +RESTORABLE="" + +if $SF_IS_SYMLINK; then + info "Scenario C: symlink layout — skipping commit history scan (no file restore needed)." +else + # Find the commit where ".sf" was first added to .gitignore + # by walking the log and finding the first commit where .gitignore contained ".sf" + info "Scanning git log to find when .sf was added to .gitignore..." + + # Strategy 1: find the first commit that added ".sf" to .gitignore + while IFS= read -r sha; do + content="$(git show "${sha}:.gitignore" 2>/dev/null || true)" + if echo "$content" | grep -qx '\.sf' 2>/dev/null; then + DAMAGE_COMMIT="$sha" + break + fi + done < <(git log --format="%H" -- .gitignore) + + # Strategy 2: if .sf files were committed as deleted, find that commit + if [[ -z "$DAMAGE_COMMIT" ]] && [[ -n "${DELETED_FROM_HISTORY:-}" ]]; then + info "Searching for the commit that deleted .sf/ files from the index..." + DAMAGE_COMMIT="$(git log --all --diff-filter=D --format="%H" -- '.sf/*' 2>/dev/null | head -1 || true)" + fi + + if [[ -z "$DAMAGE_COMMIT" ]]; then + warn "Could not pinpoint the damage commit — falling back to HEAD." + CLEAN_COMMIT="HEAD" + else + info "Damage commit: $DAMAGE_COMMIT ($(git log --format='%s' -1 "$DAMAGE_COMMIT"))" + CLEAN_COMMIT="${DAMAGE_COMMIT}^" + CLEAN_MSG="$(git log --format='%s' -1 "$CLEAN_COMMIT" 2>/dev/null || echo "unknown")" + info "Restoring from: $CLEAN_COMMIT — $CLEAN_MSG" + fi + + # Verify the clean commit actually has .sf/ files + RESTORABLE="$(git ls-tree -r --name-only "$CLEAN_COMMIT" -- '.sf/' 2>/dev/null || true)" + if [[ -z "$RESTORABLE" ]]; then + die "No .sf/ files found in restore point $CLEAN_COMMIT — cannot recover. Check git log manually." 
+ fi + + RESTORABLE_COUNT="$(echo "$RESTORABLE" | wc -l | tr -d ' ')" + ok "Restore point has ${RESTORABLE_COUNT} .sf/ files available." +fi + +# ─── Step 5: Clean index (Scenario C) or restore deleted files (Scenario A/B) ─ + +if $SF_IS_SYMLINK; then + section "── Step 5: Clean stale git index entries ───────────────────────────────" + + info "Running: git rm -r --cached --ignore-unmatch .sf/ ..." + run "git rm -r --cached --ignore-unmatch .sf" + if ! $DRY_RUN; then + STILL_STALE="$(git ls-files --deleted -- '.sf/*' 2>/dev/null || true)" + if [[ -z "$STILL_STALE" ]]; then + ok "Git index cleaned — no stale .sf/ entries remain." + else + warn "$(echo "$STILL_STALE" | wc -l | tr -d ' ') stale entr(ies) still present — may need manual cleanup." + fi + fi +else + section "── Step 5: Restore deleted .sf/ files ────────────────────────────────" + + NEEDS_RESTORE=false + [[ -n "$DELETED_FILES" ]] && NEEDS_RESTORE=true + [[ -n "${DELETED_FROM_HISTORY:-}" ]] && [[ -z "$TRACKED_IN_HEAD" ]] && NEEDS_RESTORE=true + + if ! $NEEDS_RESTORE; then + ok "No deleted files to restore — skipping." + else + info "Restoring .sf/ files from $CLEAN_COMMIT..." + run "git checkout \"$CLEAN_COMMIT\" -- .sf/" + if ! $DRY_RUN; then + STILL_MISSING="$(git ls-files --deleted -- '.sf/*' 2>/dev/null || true)" + if [[ -z "$STILL_MISSING" ]]; then + ok "All .sf/ files restored successfully." + else + MISS_COUNT="$(echo "$STILL_MISSING" | wc -l | tr -d ' ')" + warn "${MISS_COUNT} file(s) still missing after restore — may need manual recovery:" + echo "$STILL_MISSING" | head -10 | while IFS= read -r f; do echo " - $f"; done + fi + fi + fi +fi + +# ─── Step 6: Fix .gitignore ─────────────────────────────────────────────────── + +section "── Step 6: Fix .gitignore ───────────────────────────────────────────────" + +if $SF_IS_SYMLINK; then + # Scenario C: .sf IS external — it should be in .gitignore. Add if missing. 
+ if [[ -z "$SF_IGNORE_LINE" ]]; then + info 'Adding ".sf" to .gitignore (migration complete — .sf/ is external state)...' + if $DRY_RUN; then + echo -e " ${YELLOW}(dry-run)${RESET} Would append: .sf" + else + printf '\n# SF external state (symlink — added by recover-sf-1364)\n.sf\n' >> "$GITIGNORE" + ok '".sf" added to .gitignore.' + fi + else + ok '".sf" already in .gitignore — correct for external-state layout.' + fi +else + # Scenario A/B: .sf is a real tracked directory — remove the bad ignore line. + if [[ -z "$SF_IGNORE_LINE" ]]; then + ok '".sf" not in .gitignore — nothing to fix.' + else + info 'Removing bare ".sf" line from .gitignore...' + if $DRY_RUN; then + echo -e " ${YELLOW}(dry-run)${RESET} Would remove line: .sf" + else + # Remove the exact line ".sf" (not comments, not .sf/ subdirs) + # Use a temp file for portability (no sed -i on all platforms) + TMP="$(mktemp)" + grep -v '^\.sf$' "$GITIGNORE" > "$TMP" || true + mv "$TMP" "$GITIGNORE" + ok '".sf" line removed from .gitignore.' + fi + fi +fi + +# ─── Step 7: Stage changes ──────────────────────────────────────────────────── + +section "── Step 7: Stage recovery changes ──────────────────────────────────────" + +if ! $DRY_RUN; then + CHANGED="$(git status --short -- '.sf/' .gitignore 2>/dev/null || true)" + if [[ -z "$CHANGED" ]]; then + ok "No staged changes — working tree was already clean." + else + if $SF_IS_SYMLINK; then + # Scenario C: the git rm --cached already staged the index cleanup. + # Only stage .gitignore — adding .sf/ would fail (now gitignored). + git add .gitignore 2>/dev/null || true + else + git add .sf/ .gitignore 2>/dev/null || true + fi + STAGED_COUNT="$(git diff --cached --name-only -- '.sf/' .gitignore | wc -l | tr -d ' ')" + ok "${STAGED_COUNT} file(s) staged and ready to commit." 
+ fi +fi + +# ─── Summary ────────────────────────────────────────────────────────────────── + +section "── Summary ──────────────────────────────────────────────────────────────" + +if $DRY_RUN; then + echo -e "${YELLOW}Dry-run complete. Re-run without --dry-run to apply changes.${RESET}" +else + FINAL_STAGED="$(git diff --cached --name-only -- '.sf/' .gitignore 2>/dev/null | wc -l | tr -d ' ')" + if (( FINAL_STAGED > 0 )); then + echo -e "${GREEN}Recovery complete. Commit with:${RESET}" + echo "" + if $SF_IS_SYMLINK; then + echo " git commit -m \"fix: clean stale .sf/ index entries after external-state migration\"" + else + echo " git commit -m \"fix: restore .sf/ files deleted by #1364 regression\"" + fi + echo "" + echo "Staged files:" + git diff --cached --name-only -- '.sf/' .gitignore | head -20 | while IFS= read -r f; do + echo " + $f" + done + TOTAL_STAGED="$(git diff --cached --name-only -- '.sf/' .gitignore | wc -l | tr -d ' ')" + if (( TOTAL_STAGED > 20 )); then + echo " ... and $((TOTAL_STAGED - 20)) more" + fi + else + ok "Repo is healthy — no recovery needed." + fi +fi diff --git a/scripts/recover-sf-1668.sh b/scripts/recover-sf-1668.sh new file mode 100755 index 000000000..59cdb3e32 --- /dev/null +++ b/scripts/recover-sf-1668.sh @@ -0,0 +1,446 @@ +#!/usr/bin/env bash +# recover-sf-1668.sh — Recovery script for issue #1668 (Linux / macOS) +# +# SF v2.39.x deleted the milestone branch and worktree directory when a +# merge failed due to the repo using `master` as its default branch (not +# `main`). The commits were never merged — they are orphaned in the git +# object store and can be recovered via git reflog or git fsck. +# +# This script: +# 1. Searches git reflog for the deleted milestone branch (fastest path) +# 2. Falls back to git fsck --unreachable to find orphaned commits +# 3. Ranks candidates by recency and SF commit message patterns +# 4. Creates a recovery branch at the identified commit +# 5. 
Reports what was found and how to complete the merge manually +# +# Usage: +# bash scripts/recover-sf-1668.sh [--milestone <id>] [--dry-run] [--auto] +# +# Options: +# --milestone <id> SF milestone ID (e.g. M001-g2nalq). +# When omitted the script scans all recent orphans. +# --dry-run Show what would be done without making any changes. +# --auto Pick the best candidate automatically (no prompts). +# +# Requirements: git >= 2.23, bash >= 4.x +# +# Affected versions: SF v2.39.x +# Fixed in: SF v2.40.1 (PR #1669) + +set -euo pipefail + +# ─── Colours ────────────────────────────────────────────────────────────────── + +RED='\033[0;31m' +YELLOW='\033[1;33m' +GREEN='\033[0;32m' +CYAN='\033[0;36m' +BOLD='\033[1m' +DIM='\033[2m' +RESET='\033[0m' + +# ─── Args ───────────────────────────────────────────────────────────────────── + +DRY_RUN=false +AUTO=false +MILESTONE_ID="" + +while [[ $# -gt 0 ]]; do + case "$1" in + --dry-run) DRY_RUN=true; shift ;; + --auto) AUTO=true; shift ;; + --milestone) + [[ $# -lt 2 ]] && { echo "Error: --milestone requires an argument" >&2; exit 1; } + MILESTONE_ID="$2"; shift 2 ;; + --milestone=*) + MILESTONE_ID="${1#--milestone=}"; shift ;; + -h|--help) + sed -n '2,/^set -/p' "$0" | grep '^#' | sed 's/^# \{0,1\}//' + exit 0 ;; + *) + echo "Unknown argument: $1" >&2 + echo "Usage: $0 [--milestone <id>] [--dry-run] [--auto]" >&2 + exit 1 ;; + esac +done + +# ─── Helpers ────────────────────────────────────────────────────────────────── + +info() { echo -e "${CYAN}[info]${RESET} $*"; } +ok() { echo -e "${GREEN}[ok]${RESET} $*"; } +warn() { echo -e "${YELLOW}[warn]${RESET} $*"; } +error() { echo -e "${RED}[error]${RESET} $*" >&2; } +section() { echo -e "\n${BOLD}$*${RESET}"; } +dim() { echo -e "${DIM}$*${RESET}"; } + +die() { + error "$*" + exit 1 +} + +run() { + if $DRY_RUN; then + echo -e " ${YELLOW}(dry-run)${RESET} $*" + else + eval "$*" + fi +} + +# ─── Preflight ──────────────────────────────────────────────────────────────── + +section "── Preflight 
───────────────────────────────────────────────────────────" + +if ! git rev-parse --git-dir > /dev/null 2>&1; then + die "Not inside a git repository. Run this from your project root." +fi + +REPO_ROOT="$(git rev-parse --show-toplevel)" +cd "$REPO_ROOT" +info "Repo root: $REPO_ROOT" + +$DRY_RUN && warn "DRY-RUN mode — no changes will be made." + +# ─── Step 1: Confirm the milestone branch is gone ───────────────────────────── + +section "── Step 1: Verify milestone branch is missing ───────────────────────────" + +BRANCH_PATTERN="milestone/" +if [[ -n "$MILESTONE_ID" ]]; then + BRANCH_PATTERN="milestone/${MILESTONE_ID}" +fi + +LIVE_BRANCHES="$(git branch | grep "$BRANCH_PATTERN" 2>/dev/null | tr -d '* ' || true)" + +if [[ -n "$LIVE_BRANCHES" ]]; then + ok "Found live milestone branch(es):" + echo "$LIVE_BRANCHES" | while IFS= read -r b; do echo " $b"; done + echo "" + warn "The branch still exists — are you sure it was lost?" + echo " If you want to check out existing work: git checkout ${LIVE_BRANCHES%%$'\n'*}" + echo " To merge it manually: git checkout master && git merge --squash ${LIVE_BRANCHES%%$'\n'*}" + echo "" + echo "Re-run with --milestone to force scanning for a specific orphaned commit." + if [[ -z "$MILESTONE_ID" ]]; then + exit 0 + fi +fi + +if [[ -n "$MILESTONE_ID" && -n "$LIVE_BRANCHES" ]]; then + warn "Milestone branch milestone/${MILESTONE_ID} is still live — continuing scan anyway." +elif [[ -n "$MILESTONE_ID" ]]; then + info "Confirmed: milestone/${MILESTONE_ID} branch is gone." +else + info "No live milestone/ branches found — scanning for orphaned commits." +fi + +# ─── Step 2: Search git reflog (fastest, most reliable) ─────────────────────── + +section "── Step 2: Search git reflog for deleted branch ────────────────────────" + +# git reflog stores branch moves and deletions in .git/logs/refs/heads/ +# It is retained for 90 days by default (gc.reflogExpire). 
+REFLOG_FOUND_SHA="" +REFLOG_FOUND_BRANCH="" + +if [[ -n "$MILESTONE_ID" ]]; then + REFLOG_PATH="${REPO_ROOT}/.git/logs/refs/heads/milestone/${MILESTONE_ID}" + if [[ -f "$REFLOG_PATH" ]]; then + # Last line of the reflog for this branch is the most recent tip + REFLOG_FOUND_SHA="$(tail -1 "$REFLOG_PATH" | awk '{print $2}')" + REFLOG_FOUND_BRANCH="milestone/${MILESTONE_ID}" + ok "Reflog entry found for milestone/${MILESTONE_ID} — commit: ${REFLOG_FOUND_SHA:0:12}" + else + info "No reflog file at .git/logs/refs/heads/milestone/${MILESTONE_ID}" + fi +fi + +# Also try git reflog (in-memory index, works without the raw file) +if [[ -z "$REFLOG_FOUND_SHA" ]]; then + info "Scanning git reflog for milestone/ commits..." + REFLOG_MILESTONES="$(git reflog --all --format="%H %gs" 2>/dev/null \ + | grep -E "(checkout|commit|merge).*milestone/" \ + | head -20 || true)" + + if [[ -n "$REFLOG_MILESTONES" ]]; then + info "Found milestone-related reflog entries:" + echo "$REFLOG_MILESTONES" | while IFS= read -r line; do + dim " $line" + done + # Extract the most recent SHA from the most relevant entry + if [[ -n "$MILESTONE_ID" ]]; then + MATCH="$(echo "$REFLOG_MILESTONES" | grep "milestone/${MILESTONE_ID}" | head -1 || true)" + else + MATCH="$(echo "$REFLOG_MILESTONES" | head -1 || true)" + fi + if [[ -n "$MATCH" ]]; then + REFLOG_FOUND_SHA="$(echo "$MATCH" | awk '{print $1}')" + REFLOG_FOUND_BRANCH="$(echo "$MATCH" | grep -oE 'milestone/[^ ]+' | head -1 || echo "milestone/unknown")" + fi + else + info "No milestone/ entries in reflog." + fi +fi + +# ─── Step 3: Fall back to git fsck if reflog didn't find it ─────────────────── + +section "── Step 3: Scan for orphaned (unreachable) commits ───────────────────" + +FSCK_CANDIDATES=() +FSCK_CANDIDATE_MSGS=() +FSCK_CANDIDATE_DATES=() +FSCK_CANDIDATE_FILES=() + +if [[ -z "$REFLOG_FOUND_SHA" ]]; then + info "Running git fsck --unreachable (this may take a moment)..." 
+ + # Collect all unreachable commit hashes + UNREACHABLE_COMMITS="$(git fsck --unreachable --no-reflogs 2>/dev/null \ + | grep '^unreachable commit' \ + | awk '{print $3}' || true)" + + if [[ -z "$UNREACHABLE_COMMITS" ]]; then + # Try without --no-reflogs as a fallback (less conservative) + UNREACHABLE_COMMITS="$(git fsck --unreachable 2>/dev/null \ + | grep '^unreachable commit' \ + | awk '{print $3}' || true)" + fi + + TOTAL="$(echo "$UNREACHABLE_COMMITS" | grep -c . || true)" + info "Found ${TOTAL} unreachable commit object(s)." + + if [[ -z "$UNREACHABLE_COMMITS" || "$TOTAL" -eq 0 ]]; then + error "No unreachable commits found." + echo "" + echo "This means one of:" + echo " 1. git gc has already been run and the objects were pruned" + echo " (objects are pruned after 14 days by default)" + echo " 2. The commits were never written to the object store" + echo " 3. The wrong repository is being scanned" + echo "" + echo "If git gc ran, the objects may be unrecoverable without a backup." + echo "Try: git reflog --all | grep milestone" + exit 1 + fi + + # Score each unreachable commit — rank by recency and SF message patterns. + # SF milestone commits look like: "feat(M001-g2nalq): " + # Slice merges look like: "feat(M001-g2nalq/S01): <slice>" + # + # Performance: use a single `git log --no-walk=unsorted --stdin` call to + # read all commit metadata in one pass instead of one `git show` per commit. + CUTOFF="$(date -d '30 days ago' '+%s' 2>/dev/null || date -v-30d '+%s' 2>/dev/null || echo 0)" + WEEK_AGO="$(date -d '7 days ago' '+%s' 2>/dev/null || date -v-7d '+%s' 2>/dev/null || echo 0)" + + # Batch-read all commits: output format per commit is: + # HASH<TAB>UNIX_TIMESTAMP<TAB>ISO_DATE<TAB>SUBJECT + # separated by NUL so multi-line subjects don't break parsing. 
+ BATCH_LOG="$(echo "$UNREACHABLE_COMMITS" \ + | git log --no-walk=unsorted --stdin --format=$'%H\t%ct\t%ci\t%s' 2>/dev/null || true)" + + while IFS=$'\t' read -r sha commit_ts commit_date_hr commit_msg; do + [[ -z "$sha" ]] && continue + [[ -z "$commit_ts" || "$commit_ts" -lt "$CUTOFF" ]] && continue + + # Score: milestone pattern in subject is highest signal + SCORE=0 + if [[ -n "$MILESTONE_ID" ]] && echo "$commit_msg" | grep -qiE "(milestone[/ ])?${MILESTONE_ID}"; then + SCORE=$((SCORE + 100)) + fi + if echo "$commit_msg" | grep -qE '^feat\([A-Z][0-9]+'; then + SCORE=$((SCORE + 50)) + fi + if echo "$commit_msg" | grep -qiE 'milestone/|complete-milestone|SF|slice'; then + SCORE=$((SCORE + 20)) + fi + if [[ "$commit_ts" -gt "$WEEK_AGO" ]]; then + SCORE=$((SCORE + 10)) + fi + + FSCK_CANDIDATES+=("$sha|$SCORE") + FSCK_CANDIDATE_MSGS+=("$commit_msg") + FSCK_CANDIDATE_DATES+=("$commit_date_hr") + FSCK_CANDIDATE_FILES+=("?") + done <<< "$BATCH_LOG" + + if [[ ${#FSCK_CANDIDATES[@]} -eq 0 ]]; then + error "No recent unreachable commits found within the last 30 days." + echo "" + echo "Objects may have been pruned by git gc, or the issue occurred more than 30 days ago." 
+ echo "Try: git fsck --unreachable --no-reflogs 2>/dev/null | grep commit" + exit 1 + fi + + # Sort by score descending, keep top 10 + IFS=$'\n' SORTED_CANDIDATES=($( + for i in "${!FSCK_CANDIDATES[@]}"; do + echo "${FSCK_CANDIDATES[$i]}|$i" + done | sort -t'|' -k2 -rn | head -10 + )) + unset IFS + + info "Top candidates (scored by recency and SF message patterns):" + echo "" + NUM=1 + SORTED_IDXS=() + for entry in "${SORTED_CANDIDATES[@]}"; do + SHA="${entry%%|*}" + IDX="${entry##*|}" + SORTED_IDXS+=("$IDX") + MSG="${FSCK_CANDIDATE_MSGS[$IDX]}" + DATE="${FSCK_CANDIDATE_DATES[$IDX]}" + FILES="${FSCK_CANDIDATE_FILES[$IDX]}" + echo -e " ${BOLD}${NUM})${RESET} ${SHA:0:12} ${GREEN}${MSG}${RESET}" + echo -e " ${DIM}${DATE} — ${FILES}${RESET}" + NUM=$((NUM + 1)) + done + echo "" +fi + +# ─── Step 4: Select the recovery commit ─────────────────────────────────────── + +section "── Step 4: Select recovery commit ──────────────────────────────────────" + +RECOVERY_SHA="" +RECOVERY_SOURCE="" + +if [[ -n "$REFLOG_FOUND_SHA" ]]; then + RECOVERY_SHA="$REFLOG_FOUND_SHA" + RECOVERY_SOURCE="reflog (${REFLOG_FOUND_BRANCH})" + info "Using reflog candidate: ${RECOVERY_SHA:0:12}" + MSG="$(git show -s --format="%s %ci" "$RECOVERY_SHA" 2>/dev/null || echo "unknown")" + dim " $MSG" + +elif [[ ${#SORTED_IDXS[@]} -eq 1 ]] || $AUTO; then + # Auto-select first (highest scored) candidate + FIRST_ENTRY="${SORTED_CANDIDATES[0]}" + FIRST_SHA="${FIRST_ENTRY%%|*}" + FIRST_IDX="${FIRST_ENTRY##*|}" + RECOVERY_SHA="$FIRST_SHA" + RECOVERY_SOURCE="fsck (auto-selected)" + info "Auto-selecting best candidate: ${RECOVERY_SHA:0:12}" + +else + # Prompt user to select + echo -n "Select a candidate to recover [1-${#SORTED_CANDIDATES[@]}, or q to quit]: " + read -r SELECTION + + if [[ "$SELECTION" == "q" ]]; then + info "Aborted." + exit 0 + fi + + if ! 
[[ "$SELECTION" =~ ^[0-9]+$ ]] || \ + [[ "$SELECTION" -lt 1 ]] || \ + [[ "$SELECTION" -gt ${#SORTED_CANDIDATES[@]} ]]; then + die "Invalid selection: $SELECTION" + fi + + SEL_IDX=$((SELECTION - 1)) + SEL_ENTRY="${SORTED_CANDIDATES[$SEL_IDX]}" + RECOVERY_SHA="${SEL_ENTRY%%|*}" + RECOVERY_SOURCE="fsck (user-selected #${SELECTION})" +fi + +if [[ -z "$RECOVERY_SHA" ]]; then + die "Could not determine a recovery commit. See output above." +fi + +ok "Recovery commit: ${RECOVERY_SHA:0:16} (source: ${RECOVERY_SOURCE})" + +# Show what's in this commit +echo "" +info "Commit details:" +git show -s --format=" Message: %s%n Author: %an <%ae>%n Date: %ci%n Full SHA: %H" "$RECOVERY_SHA" +echo "" +info "Files at this commit (first 30):" +git show --stat --format="" "$RECOVERY_SHA" 2>/dev/null | head -30 +echo "" + +# ─── Step 5: Create recovery branch ─────────────────────────────────────────── + +section "── Step 5: Create recovery branch ──────────────────────────────────────" + +# Determine recovery branch name +if [[ -n "$MILESTONE_ID" ]]; then + RECOVERY_BRANCH="recovery/1668/${MILESTONE_ID}" +elif [[ -n "$REFLOG_FOUND_BRANCH" ]]; then + CLEAN_NAME="${REFLOG_FOUND_BRANCH//\//-}" + RECOVERY_BRANCH="recovery/1668/${CLEAN_NAME}" +else + SHORT_SHA="${RECOVERY_SHA:0:8}" + RECOVERY_BRANCH="recovery/1668/commit-${SHORT_SHA}" +fi + +# Check if it already exists +if git show-ref --verify --quiet "refs/heads/${RECOVERY_BRANCH}" 2>/dev/null; then + warn "Branch ${RECOVERY_BRANCH} already exists." + if ! $AUTO; then + echo -n "Overwrite it? [y/N]: " + read -r ANSWER + if [[ "$ANSWER" != "y" && "$ANSWER" != "Y" ]]; then + info "Aborted. Existing branch preserved." + exit 0 + fi + fi + run "git branch -D \"${RECOVERY_BRANCH}\"" +fi + +run "git branch \"${RECOVERY_BRANCH}\" \"${RECOVERY_SHA}\"" + +if ! 
$DRY_RUN; then + ok "Recovery branch created: ${RECOVERY_BRANCH}" +else + ok "(dry-run) Would create branch: ${RECOVERY_BRANCH} → ${RECOVERY_SHA:0:12}" +fi + +# ─── Step 6: Verify the recovery branch ─────────────────────────────────────── + +if ! $DRY_RUN; then + section "── Step 6: Verify recovery branch ──────────────────────────────────────" + + FILE_LIST="$(git ls-tree -r --name-only "${RECOVERY_BRANCH}" 2>/dev/null | grep -v '^\.sf/' || true)" + FILE_COUNT="$(echo "$FILE_LIST" | grep -c . || true)" + + info "Files recoverable (excluding .sf/ state files): ${FILE_COUNT}" + echo "$FILE_LIST" | head -30 | while IFS= read -r f; do echo " $f"; done + if [[ "$FILE_COUNT" -gt 30 ]]; then + dim " ... and $((FILE_COUNT - 30)) more" + fi +fi + +# ─── Summary ────────────────────────────────────────────────────────────────── + +section "── Recovery Summary ─────────────────────────────────────────────────────" + +if $DRY_RUN; then + echo -e "${YELLOW}Dry-run complete. Re-run without --dry-run to apply.${RESET}" + exit 0 +fi + +DEFAULT_BRANCH="$(git symbolic-ref refs/remotes/origin/HEAD 2>/dev/null | sed 's|refs/remotes/origin/||' \ + || git for-each-ref --format='%(refname:short)' 'refs/heads/main' 'refs/heads/master' 2>/dev/null | head -1 \ + || git branch --show-current)" + +echo -e "${GREEN}Recovery branch ready: ${BOLD}${RECOVERY_BRANCH}${RESET}" +echo "" +echo "Next steps:" +echo "" +echo -e " ${BOLD}1. Inspect the recovered files:${RESET}" +echo " git checkout ${RECOVERY_BRANCH}" +echo " ls -la" +echo "" +echo -e " ${BOLD}2. Verify your code is intact:${RESET}" +echo " git log --oneline ${RECOVERY_BRANCH} | head -20" +echo " git show --stat ${RECOVERY_BRANCH}" +echo "" +echo -e " ${BOLD}3. Merge to your default branch (${DEFAULT_BRANCH}):${RESET}" +echo " git checkout ${DEFAULT_BRANCH}" +echo " git merge --squash ${RECOVERY_BRANCH}" +echo " git commit -m \"feat: recover milestone from #1668\"" +echo "" +echo -e " ${BOLD}4. 
Clean up after verifying:${RESET}" +echo " git branch -D ${RECOVERY_BRANCH}" +echo "" +echo -e "${DIM}Note: update SF to v2.40.1+ to prevent this from recurring.${RESET}" +echo " PR: https://github.com/singularity-forge/sf-run/pull/1669" +echo "" diff --git a/scripts/rtk-benchmark.mjs b/scripts/rtk-benchmark.mjs index ba1caa312..4fcd4e025 100644 --- a/scripts/rtk-benchmark.mjs +++ b/scripts/rtk-benchmark.mjs @@ -6,7 +6,7 @@ import { join, dirname } from 'node:path' import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from 'node:fs' function getManagedRtkPath() { - return join(homedir(), '.gsd', 'agent', 'bin', process.platform === 'win32' ? 'rtk.exe' : 'rtk') + return join(homedir(), '.sf', 'agent', 'bin', process.platform === 'win32' ? 'rtk.exe' : 'rtk') } function run(command, args, options = {}) { @@ -29,7 +29,7 @@ function createFixture(projectDir) { mkdirSync(join(projectDir, 'src', 'components'), { recursive: true }) writeFileSync(join(projectDir, 'package.json'), JSON.stringify({ - name: 'gsd-rtk-benchmark', + name: 'sf-rtk-benchmark', version: '1.0.0', scripts: { test: 'node test.js', @@ -114,7 +114,7 @@ function main() { throw new Error('RTK binary path not resolved') } - const workspace = mkdtempSync(join(tmpdir(), 'gsd-rtk-benchmark-')) + const workspace = mkdtempSync(join(tmpdir(), 'sf-rtk-benchmark-')) const homeDir = join(workspace, 'home') const projectDir = join(workspace, 'project') mkdirSync(homeDir, { recursive: true }) diff --git a/scripts/secret-scan.mjs b/scripts/secret-scan.mjs index e8f1a5f79..855af2711 100644 --- a/scripts/secret-scan.mjs +++ b/scripts/secret-scan.mjs @@ -83,7 +83,7 @@ function shouldScan(file) { lower.startsWith('node_modules/') || lower.startsWith('dist/') || lower.startsWith('coverage/') || - lower.startsWith('.gsd/') + lower.startsWith('.sf/') ) { return false; } diff --git a/scripts/secret-scan.sh b/scripts/secret-scan.sh index 5b8bc6283..741890e9f 100755 --- a/scripts/secret-scan.sh +++ b/scripts/secret-scan.sh @@ 
-128,7 +128,7 @@ should_scan() { esac # Skip node_modules, dist, coverage case "$file" in - node_modules/*|dist/*|coverage/*|.gsd/*) + node_modules/*|dist/*|coverage/*|.sf/*) return 1 ;; esac return 0 diff --git a/scripts/sync-pkg-version.cjs b/scripts/sync-pkg-version.cjs index 2334dc207..2503bf6df 100644 --- a/scripts/sync-pkg-version.cjs +++ b/scripts/sync-pkg-version.cjs @@ -3,7 +3,7 @@ * Sync pkg/package.json version with the installed @mariozechner/pi-coding-agent version. * * sf-run sets PI_PACKAGE_DIR=pkg/ so that pi's config.js reads piConfig from - * pkg/package.json (for branding: name="gsd", configDir=".gsd"). However, config.js + * pkg/package.json (for branding: name="sf", configDir=".sf"). However, config.js * also reads `version` from that same file and uses it for the update check * (comparing against npm registry). If pkg/package.json has a stale version, * pi's update banner fires even when the user is already on the latest release. diff --git a/scripts/validate-pack.js b/scripts/validate-pack.js index 9edbb9827..7c9f21b91 100644 --- a/scripts/validate-pack.js +++ b/scripts/validate-pack.js @@ -150,8 +150,8 @@ try { console.log('==> Verifying @sf-run/* workspace package resolution...'); const installedRoot = join(installDir, 'node_modules', 'sf-run'); const criticalPackages = [ - { scope: '@gsd', name: 'pi-coding-agent' }, - { scope: '@gsd-build', name: 'rpc-client' }, + { scope: '@sf', name: 'pi-coding-agent' }, + { scope: '@sf-build', name: 'rpc-client' }, ]; let resolutionFailed = false; for (const pkg of criticalPackages) { @@ -174,7 +174,7 @@ try { console.log(' @sf-run/* packages are resolvable.'); // --- Run the binary to confirm end-to-end resolution --- - console.log('==> Running installed binary (gsd -v)...'); + console.log('==> Running installed binary (sf -v)...'); const loaderPath = join(installedRoot, 'dist', 'loader.js'); const bundledWorkflowMcpCliPath = join(installedRoot, 'packages', 'mcp-server', 'dist', 'cli.js'); if 
(!existsSync(bundledWorkflowMcpCliPath)) { @@ -190,13 +190,13 @@ try { timeout: 15000, maxBuffer: DEFAULT_MAX_BUFFER, }).trim(); - console.log(` gsd -v => ${versionOutput}`); + console.log(` sf -v => ${versionOutput}`); if (!versionOutput.match(/^\d+\.\d+\.\d+/)) { - console.log('ERROR: gsd -v returned unexpected output (expected a version string).'); + console.log('ERROR: sf -v returned unexpected output (expected a version string).'); process.exit(1); } } catch (err) { - console.log('ERROR: Running gsd -v failed after install.'); + console.log('ERROR: Running sf -v failed after install.'); if (err.stdout) console.log(err.stdout); if (err.stderr) console.log(err.stderr); process.exit(1); diff --git a/scripts/validate-pack.sh b/scripts/validate-pack.sh index 85d62b36c..4eaa70544 100755 --- a/scripts/validate-pack.sh +++ b/scripts/validate-pack.sh @@ -10,13 +10,13 @@ SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" cd "$ROOT" -# --- Guard: workspace packages must not have @gsd/* cross-deps --- -echo "==> Checking workspace packages for @gsd/* cross-deps..." +# --- Guard: workspace packages must not have @sf/* cross-deps --- +echo "==> Checking workspace packages for @sf/* cross-deps..." CROSS_FAILED=0 for ws_pkg in native pi-agent-core pi-ai pi-coding-agent pi-tui; do RESULT=$(node -e " const pkg = require('./packages/${ws_pkg}/package.json'); - const deps = Object.keys(pkg.dependencies || {}).filter(d => d.startsWith('@gsd/')); + const deps = Object.keys(pkg.dependencies || {}).filter(d => d.startsWith('@sf/')); if (deps.length) { console.log(deps.join(', ')); process.exit(1); } " 2>&1) || { echo " LEAKED in ${ws_pkg}: $RESULT" @@ -25,11 +25,11 @@ for ws_pkg in native pi-agent-core pi-ai pi-coding-agent pi-tui; do } done if [ "$CROSS_FAILED" = "1" ]; then - echo "ERROR: Workspace packages have @gsd/* cross-dependencies." + echo "ERROR: Workspace packages have @sf/* cross-dependencies." 
echo " These cause 404s when npm resolves them from the registry." exit 1 fi -echo " No @gsd/* cross-dependencies." +echo " No @sf/* cross-dependencies." # --- Pack tarball --- echo "==> Packing tarball..." diff --git a/scripts/verify-s04.sh b/scripts/verify-s04.sh index cca3a2d23..cbd0fe80b 100755 --- a/scripts/verify-s04.sh +++ b/scripts/verify-s04.sh @@ -2,7 +2,7 @@ # S04 verification — npm pack tarball install smoke test # Checks: dist integrity, SF_BUNDLED_EXTENSION_PATHS, prepublishOnly, # npm pack dry-run, tarball install, binary exists, launch (no extension -# errors, "gsd" branding), ~/.gsd/ untouched, non-TTY warning/no exit 1. +# errors, "sf" branding), ~/.sf/ untouched, non-TTY warning/no exit 1. set -uo pipefail @@ -10,11 +10,11 @@ FAIL=0 pass() { echo " PASS: $1"; } fail() { echo " FAIL: $1"; FAIL=1; } -SMOKE_PREFIX=/tmp/gsd-smoke-prefix +SMOKE_PREFIX=/tmp/sf-smoke-prefix TARBALL="" -# Capture ~/.gsd/agent/sessions/ count before any smoke runs (for Check 9) -PI_SESSIONS_BEFORE=$(ls ~/.gsd/agent/sessions/ 2>/dev/null | wc -l | tr -d ' ') +# Capture ~/.sf/agent/sessions/ count before any smoke runs (for Check 9) +PI_SESSIONS_BEFORE=$(ls ~/.sf/agent/sessions/ 2>/dev/null | wc -l | tr -d ' ') cleanup() { rm -rf "$SMOKE_PREFIX" @@ -106,7 +106,7 @@ echo "--- tarball pack ---" # ---------------------------------------------------------------- # Note: prepublishOnly triggers a build here (expected). 
npm pack --silent 2>/dev/null || npm pack 2>&1 | tail -5 -TARBALL=$(ls glittercowboy-gsd-*.tgz 2>/dev/null | head -1 || true) +TARBALL=$(ls glittercowboy-sf-*.tgz 2>/dev/null | head -1 || true) if [ -n "$TARBALL" ] && [ -f "$TARBALL" ]; then pass "5 — tarball produced: $TARBALL" else @@ -134,10 +134,10 @@ fi # ---------------------------------------------------------------- # Check 7 — binary exists at expected path after install # ---------------------------------------------------------------- -if [ -f "$SMOKE_PREFIX/bin/gsd" ] || [ -L "$SMOKE_PREFIX/bin/gsd" ]; then - pass "7 — $SMOKE_PREFIX/bin/gsd exists after install" +if [ -f "$SMOKE_PREFIX/bin/sf" ] || [ -L "$SMOKE_PREFIX/bin/sf" ]; then + pass "7 — $SMOKE_PREFIX/bin/sf exists after install" else - fail "7 — $SMOKE_PREFIX/bin/gsd not found after install" + fail "7 — $SMOKE_PREFIX/bin/sf not found after install" ls -la "$SMOKE_PREFIX/bin/" 2>/dev/null || echo " (bin/ dir does not exist)" fi @@ -145,14 +145,14 @@ echo "" echo "--- launch smoke ---" # ---------------------------------------------------------------- -# Check 8 — launch: "gsd" branding + zero extension load errors +# Check 8 — launch: "sf" branding + zero extension load errors # Use background kill pattern (macOS has no GNU timeout). # Allow 8s for extensions to load. # ---------------------------------------------------------------- smoke_out=$(mktemp) ( env -i HOME="$HOME" PATH="$PATH" \ - "$SMOKE_PREFIX/bin/gsd" < /dev/null > "$smoke_out" 2>&1 + "$SMOKE_PREFIX/bin/sf" < /dev/null > "$smoke_out" 2>&1 ) & smoke_pid=$! 
sleep 8 @@ -162,7 +162,7 @@ wait "$smoke_pid" 2>/dev/null || true ext_errors=$(grep "Extension load error" "$smoke_out" 2>/dev/null | wc -l | tr -d ' ') # Strip ANSI escape codes for branding check plain_out=$(sed 's/\x1b\[[0-9;]*m//g' "$smoke_out" 2>/dev/null || cat "$smoke_out") -has_gsd=$(echo "$plain_out" | grep -qi "gsd\|get shit done" && echo "yes" || echo "no") +has_gsd=$(echo "$plain_out" | grep -qi "sf\|get shit done" && echo "yes" || echo "no") if [ "$ext_errors" -eq 0 ]; then pass "8a — zero Extension load errors on launch" @@ -172,31 +172,31 @@ else fi if [ "$has_gsd" = "yes" ]; then - pass "8b — \"gsd\" / \"get shit done\" branding found in launch output" + pass "8b — \"sf\" / \"get shit done\" branding found in launch output" else # Fallback: check if binary self-identifies differently (not "pi") has_pi_only=$(echo "$plain_out" | grep -qi "^pi\b" && echo "yes" || echo "no") if [ "$has_pi_only" = "no" ]; then - pass "8b — output does not show \"pi\" branding (gsd branding likely in ANSI sequences)" + pass "8b — output does not show \"pi\" branding (sf branding likely in ANSI sequences)" else - fail "8b — output shows \"pi\" branding instead of \"gsd\"" + fail "8b — output shows \"pi\" branding instead of \"sf\"" head -5 "$smoke_out" | sed 's/^/ /' fi fi rm -f "$smoke_out" echo "" -echo "--- ~/.gsd/ isolation ---" +echo "--- ~/.sf/ isolation ---" # ---------------------------------------------------------------- -# Check 9 — ~/.gsd/ session count unchanged before/after smoke run +# Check 9 — ~/.sf/ session count unchanged before/after smoke run # PI_SESSIONS_BEFORE captured at script start (before any binary invocation). 
# ---------------------------------------------------------------- -pi_after=$(ls ~/.gsd/agent/sessions/ 2>/dev/null | wc -l | tr -d ' ') +pi_after=$(ls ~/.sf/agent/sessions/ 2>/dev/null | wc -l | tr -d ' ') if [ "$PI_SESSIONS_BEFORE" = "$pi_after" ]; then - pass "9 — ~/.gsd/agent/sessions/ count unchanged (${pi_after} sessions before and after)" + pass "9 — ~/.sf/agent/sessions/ count unchanged (${pi_after} sessions before and after)" else - fail "9 — ~/.gsd/agent/sessions/ count changed: was ${PI_SESSIONS_BEFORE}, now ${pi_after}" + fail "9 — ~/.sf/agent/sessions/ count changed: was ${PI_SESSIONS_BEFORE}, now ${pi_after}" fi echo "" @@ -211,7 +211,7 @@ exit10_tmp=$(mktemp) echo "" > "$exit10_tmp" ( env -i HOME="$HOME" PATH="$PATH" \ - "$SMOKE_PREFIX/bin/gsd" < /dev/null > "$tmp10" 2>&1 + "$SMOKE_PREFIX/bin/sf" < /dev/null > "$tmp10" 2>&1 echo "$?" > "$exit10_tmp" ) & pid10=$! diff --git a/scripts/watch-resources.js b/scripts/watch-resources.js index d0a160e26..a2dd59985 100644 --- a/scripts/watch-resources.js +++ b/scripts/watch-resources.js @@ -8,8 +8,8 @@ * * This solves the `npm link` branch-drift problem: without dist/resources/, * `initResources()` reads from src/resources/ which changes with git branch - * switches, causing stale extensions to be synced to ~/.gsd/agent/ for ALL - * projects using gsd. + * switches, causing stale extensions to be synced to ~/.sf/agent/ for ALL + * projects using sf. */ import { watch } from 'node:fs' diff --git a/sf-orchestrator/SKILL.md b/sf-orchestrator/SKILL.md new file mode 100644 index 000000000..1475301ab --- /dev/null +++ b/sf-orchestrator/SKILL.md @@ -0,0 +1,215 @@ +--- +name: sf-orchestrator +description: > + Build software products autonomously via SF headless mode. Handles the full + lifecycle: write a spec, launch a build, poll for completion, handle blockers, + track costs, and verify the result. 
Use when asked to "build something", + "create a project", "run sf", "check build status", or any task that + requires autonomous software development via subprocess. +metadata: + openclaw: + requires: + bins: [sf] + install: + kind: node + package: sf-run + bins: [sf] +--- + +<objective> +You are an autonomous agent that builds software by orchestrating SF as a subprocess. +SF is a headless CLI that plans, codes, tests, and ships software from a spec. +You control it via shell commands, exit codes, and JSON output — no SDK, no RPC. +</objective> + +<mental_model> +SF headless is a subprocess you launch and monitor. Think of it like a junior developer +you hand a spec to: + +1. You write the spec (what to build) +2. You launch the build (`sf headless ... new-milestone --context spec.md --auto`) +3. You wait for it to finish (exit code tells you the outcome) +4. You check the result (query state, inspect files, verify deliverables) +5. If blocked, you intervene (steer, supply answers, or escalate) + +The subprocess handles all planning, coding, testing, and git commits internally. +You never write application code yourself — SF does that. +</mental_model> + +<critical_rules> +- **Flags before command.** `sf headless [--flags] [command] [args]`. Flags after the command are ignored. +- **Redirect stderr.** JSON output goes to stdout. Progress goes to stderr. Always `2>/dev/null` when parsing JSON. +- **Check exit codes.** 0=success, 1=error, 10=blocked (needs you), 11=cancelled. +- **Use `query` to poll.** Instant (~50ms), no LLM cost. Use it between steps, not `auto` for status. +- **Budget awareness.** Track `cost.total` from query results. Set limits before launching long runs. +- **One project directory per build.** Each SF project needs its own directory with a `.sf/` folder. 
+</critical_rules> + +<routing> +Route based on what you need to do: + +**Build something from scratch:** +Read `workflows/build-from-spec.md` — write spec, init directory, launch, monitor, verify. + +**Check on a running or completed build:** +Read `workflows/monitor-and-poll.md` — query state, interpret phases, handle blockers. + +**Execute with fine-grained control:** +Read `workflows/step-by-step.md` — run one unit at a time with decision points. + +**Understand the JSON output:** +Read `references/json-result.md` — field reference for HeadlessJsonResult. + +**Pre-supply answers or secrets:** +Read `references/answer-injection.md` — answer file schema and injection mechanism. + +**Look up a specific command:** +Read `references/commands.md` — full command reference with flags and examples. +</routing> + +<quick_reference> + +**Launch a full build (spec to working code):** +```bash +mkdir -p /tmp/my-project && cd /tmp/my-project && git init +cat > spec.md << 'EOF' +# Your Product Spec Here +Build a ... 
+EOF +sf headless --output-format json --context spec.md new-milestone --auto 2>/dev/null +``` + +**Check project state (instant, free):** +```bash +cd /path/to/project +sf headless query | jq '{phase: .state.phase, progress: .state.progress, cost: .cost.total}' +``` + +**Resume work on an existing project:** +```bash +cd /path/to/project +sf headless --output-format json auto 2>/dev/null +``` + +**Run one step at a time:** +```bash +RESULT=$(sf headless --output-format json next 2>/dev/null) +echo "$RESULT" | jq '{status: .status, phase: .phase, cost: .cost.total}' +``` + +</quick_reference> + +<exit_codes> +| Code | Meaning | Your action | +|------|---------|-------------| +| `0` | Success | Check deliverables, verify output, report completion | +| `1` | Error or timeout | Inspect stderr, check `.sf/STATE.md`, retry or escalate | +| `10` | Blocked | Query state for blocker details, steer around it or escalate to human | +| `11` | Cancelled | Process was interrupted — resume with `--resume <sessionId>` or restart | +</exit_codes> + +<project_structure> +SF creates and manages all state in `.sf/`: +``` +.sf/ + PROJECT.md # What this project is + REQUIREMENTS.md # Capability contract + DECISIONS.md # Architectural decisions (append-only) + KNOWLEDGE.md # Persistent project knowledge (patterns, rules, lessons) + STATE.md # Current phase and next action + milestones/ + M001-xxxxx/ + M001-xxxxx-CONTEXT.md # Scope, constraints, assumptions + M001-xxxxx-ROADMAP.md # Slices with checkboxes + M001-xxxxx-SUMMARY.md # Completion summary + slices/S01/ + S01-PLAN.md # Tasks + S01-SUMMARY.md # Slice summary + tasks/ + T01-PLAN.md # Individual task spec + T01-SUMMARY.md # Task completion summary +``` + +State is derived from files on disk — checkboxes in ROADMAP.md and PLAN.md are the source of truth for completion. You never need to edit these files. SF manages them. But you can read them to understand progress. 
+</project_structure> + +<flags> +| Flag | Description | +|------|-------------| +| `--output-format <fmt>` | `text` (default), `json` (structured result at exit), `stream-json` (JSONL events) | +| `--json` | Alias for `--output-format stream-json` — JSONL event stream to stdout | +| `--bare` | Skip CLAUDE.md, AGENTS.md, user settings, user skills. Use for CI/ecosystem runs. | +| `--resume <id>` | Resume a prior headless session by its session ID | +| `--timeout N` | Overall timeout in ms (default: 300000, use 0 to disable) | +| `--model ID` | Override LLM model | +| `--supervised` | Forward interactive UI requests to orchestrator via stdout/stdin | +| `--response-timeout N` | Timeout (ms) for orchestrator response in supervised mode (default: 30000) | +| `--answers <path>` | Pre-supply answers and secrets from JSON file | +| `--events <types>` | Filter JSONL to specific event types (comma-separated, implies `--json`) | +| `--verbose` | Show tool calls in progress output | +| `--context <path>` | Spec file path for `new-milestone` (use `-` for stdin) | +| `--context-text <text>` | Inline spec text for `new-milestone` | +| `--auto` | Chain into auto-mode after `new-milestone` | +</flags> + +<answer_injection> +Pre-supply answers and secrets for fully autonomous runs: + +```bash +sf headless --answers answers.json --output-format json auto 2>/dev/null +``` + +```json +{ + "questions": { "question_id": "selected_option" }, + "secrets": { "API_KEY": "sk-..." }, + "defaults": { "strategy": "first_option" } +} +``` + +- **questions** — question ID to answer (string for single-select, string[] for multi-select) +- **secrets** — env var to value, injected into child process environment +- **defaults.strategy** — `"first_option"` (default) or `"cancel"` for unmatched questions + +See `references/answer-injection.md` for the full mechanism. 
+</answer_injection> + +<event_streaming> +For real-time monitoring, use JSONL event streaming: + +```bash +sf headless --json auto 2>/dev/null | while read -r line; do + TYPE=$(echo "$line" | jq -r '.type') + case "$TYPE" in + tool_execution_start) echo "Tool: $(echo "$line" | jq -r '.toolName')" ;; + extension_ui_request) echo "SF: $(echo "$line" | jq -r '.message // .title // empty')" ;; + agent_end) echo "Session ended" ;; + esac +done +``` + +Filter to specific events: `--events agent_end,execution_complete,extension_ui_request` + +Available types: `agent_start`, `agent_end`, `tool_execution_start`, `tool_execution_end`, +`tool_execution_update`, `extension_ui_request`, `message_start`, `message_end`, +`message_update`, `turn_start`, `turn_end`, `cost_update`, `execution_complete`. +</event_streaming> + +<all_commands> +| Command | Purpose | +|---------|---------| +| `auto` | Run all queued units until milestone complete or blocked (default) | +| `next` | Run exactly one unit, then exit | +| `query` | Instant JSON snapshot — state, next dispatch, costs (no LLM, ~50ms) | +| `new-milestone` | Create milestone from spec file | +| `dispatch <phase>` | Force specific phase (research, plan, execute, complete, reassess, uat, replan) | +| `stop` / `pause` | Control auto-mode | +| `steer <desc>` | Hard-steer plan mid-execution | +| `skip` / `undo` | Unit control | +| `queue` | Queue/reorder milestones | +| `history` | View execution history | +| `doctor` | Health check + auto-fix | +| `knowledge <rule>` | Add persistent project knowledge | + +See `references/commands.md` for the complete reference. +</all_commands> diff --git a/sf-orchestrator/references/answer-injection.md b/sf-orchestrator/references/answer-injection.md new file mode 100644 index 000000000..8032350bd --- /dev/null +++ b/sf-orchestrator/references/answer-injection.md @@ -0,0 +1,119 @@ +# Answer Injection + +Pre-supply answers and secrets to eliminate interactive prompts during headless execution. 
+ +## Usage + +```bash +sf headless --answers answers.json auto +sf headless --answers answers.json new-milestone --context spec.md --auto +``` + +The `--answers` flag takes a path to a JSON file containing pre-supplied answers and secrets. + +## Answer File Schema + +```json +{ + "questions": { + "question_id": "selected_option_label", + "multi_select_question": ["option_a", "option_b"] + }, + "secrets": { + "API_KEY": "sk-...", + "DATABASE_URL": "postgres://..." + }, + "defaults": { + "strategy": "first_option" + } +} +``` + +### Fields + +| Field | Type | Description | +|-------|------|-------------| +| `questions` | `Record<string, string \| string[]>` | Map question ID → answer. String for single-select, string array for multi-select. | +| `secrets` | `Record<string, string>` | Map env var name → value. Injected into child process environment variables. | +| `defaults.strategy` | `"first_option" \| "cancel"` | Fallback for unmatched questions. Default: `"first_option"`. | + +## How Secrets Work + +Secrets are injected as environment variables into the SF child process: + +1. The orchestrator passes the answer file via `--answers` +2. SF reads the file and sets secret values as env vars in the child process +3. When `secure_env_collect` runs inside the agent, it finds the keys already in `process.env` +4. The tool skips the interactive prompt and reports the keys as "already configured" + +Secrets are never logged or included in event streams. + +## How Question Matching Works + +Two-phase correlation: + +1. **Observe** — SF monitors `tool_execution_start` events for `ask_user_questions` to extract question metadata (ID, options, allowMultiple) +2. **Match** — Subsequent `extension_ui_request` events are correlated to the metadata and responded to with the pre-supplied answer + +Handles out-of-order events (extension_ui_request can arrive before tool_execution_start) via a deferred processing queue with 500ms timeout. 
+
+## Coexistence with `--supervised`
+
+Both `--answers` and `--supervised` can be active simultaneously. Priority order:
+
+1. Answer injector tries first
+2. If no answer found, supervised mode forwards to the orchestrator
+3. If no orchestrator response within `--response-timeout`, the auto-responder kicks in
+
+## Without Answer Injection
+
+Headless mode has built-in auto-responders for all prompt types:
+
+| Prompt Type | Default Behavior |
+|-------------|-----------------|
+| Select | Picks first option |
+| Confirm | Auto-confirms |
+| Input | Empty string |
+| Editor | Returns prefill or empty |
+
+Answer injection overrides these defaults with specific answers when precision matters.
+
+## Diagnostics
+
+The injector tracks statistics printed in the session summary:
+
+| Stat | Description |
+|------|-------------|
+| `questionsAnswered` | Questions resolved from the answer file |
+| `questionsDefaulted` | Questions handled by the default strategy |
+| `secretsProvided` | Number of secrets injected |
+
+At exit, the injector warns about any question IDs and secret keys in the answer file that were never used.
+ +## Example: Orchestrator with Answers + +```bash +# Create answer file +cat > answers.json << 'EOF' +{ + "questions": { + "test_framework": "vitest", + "package_manager": "pnpm" + }, + "secrets": { + "OPENAI_API_KEY": "sk-...", + "DATABASE_URL": "postgres://localhost:5432/mydb" + }, + "defaults": { + "strategy": "first_option" + } +} +EOF + +# Run with pre-supplied answers +sf headless --answers answers.json --output-format json auto 2>/dev/null + +# Parse result +RESULT=$(sf headless --answers answers.json --output-format json next 2>/dev/null) +echo "$RESULT" | jq '{status: .status, cost: .cost.total}' +``` diff --git a/sf-orchestrator/references/commands.md b/sf-orchestrator/references/commands.md new file mode 100644 index 000000000..a92b6e294 --- /dev/null +++ b/sf-orchestrator/references/commands.md @@ -0,0 +1,210 @@ +# SF Commands Reference + +All commands run as subprocesses via `sf headless [flags] [command] [args...]`. + +## Global Flags + +These flags apply to any `sf headless` invocation: + +| Flag | Description | +|------|-------------| +| `--output-format <fmt>` | `text` (default), `json` (structured result), `stream-json` (JSONL) | +| `--json` | Alias for `--output-format stream-json` | +| `--bare` | Minimal context: skip CLAUDE.md, AGENTS.md, user settings, user skills | +| `--resume <id>` | Resume a prior headless session by ID | +| `--timeout N` | Overall timeout in ms (default: 300000) | +| `--model ID` | Override LLM model | +| `--supervised` | Forward interactive UI requests to orchestrator via stdout/stdin | +| `--response-timeout N` | Timeout for orchestrator response in supervised mode (default: 30000ms) | +| `--answers <path>` | Pre-supply answers and secrets from JSON file | +| `--events <types>` | Filter JSONL output to specific event types (comma-separated, implies `--json`) | +| `--verbose` | Show tool calls in progress output | + +## Exit Codes + +| Code | Meaning | When | +|------|---------|------| +| `0` | Success | Unit/milestone 
completed normally | +| `1` | Error or timeout | Runtime error, LLM failure, or `--timeout` exceeded | +| `10` | Blocked | Execution hit a blocker requiring human intervention | +| `11` | Cancelled | User or orchestrator cancelled the operation | + +## Workflow Commands + +### `auto` (default) + +Autonomous mode — loop through all pending units until milestone complete or blocked. + +```bash +sf headless --output-format json auto +``` + +### `next` + +Step mode — execute exactly one unit (task/slice/milestone step), then exit. Recommended for orchestrators that need decision points between steps. + +```bash +sf headless --output-format json next +``` + +### `new-milestone` + +Create a milestone from a specification document. + +```bash +sf headless new-milestone --context spec.md +sf headless new-milestone --context spec.md --auto +sf headless new-milestone --context-text "Build a REST API" --auto +cat spec.md | sf headless new-milestone --context - --auto +``` + +Extra flags: +- `--context <path>` — path to spec/PRD file (use `-` for stdin) +- `--context-text <text>` — inline specification text +- `--auto` — start auto-mode after milestone creation + +### `dispatch <phase>` + +Force-route to a specific phase, bypassing normal state-machine routing. + +```bash +sf headless dispatch research +sf headless dispatch plan +sf headless dispatch execute +sf headless dispatch complete +sf headless dispatch reassess +sf headless dispatch uat +sf headless dispatch replan +``` + +### `discuss` + +Start guided milestone/slice discussion. + +```bash +sf headless discuss +``` + +### `stop` + +Stop auto-mode gracefully. + +```bash +sf headless stop +``` + +### `pause` + +Pause auto-mode (preserves state, resumable). + +```bash +sf headless pause +``` + +## State Inspection + +### `query` + +**Instant JSON snapshot** — state, next dispatch, parallel costs. No LLM, ~50ms. The recommended way for orchestrators to inspect state. 
+ +```bash +sf headless query +sf headless query | jq '.state.phase' +sf headless query | jq '.next' +sf headless query | jq '.cost.total' +``` + +### `status` + +Progress dashboard (TUI overlay — useful interactively, not for parsing). + +```bash +sf headless status +``` + +### `history` + +Execution history. Supports `--cost`, `--phase`, `--model`, and `limit` arguments. + +```bash +sf headless history +``` + +## Unit Control + +### `skip` + +Prevent a unit from auto-mode dispatch. + +```bash +sf headless skip +``` + +### `undo` + +Revert last completed unit. Use `--force` to bypass confirmation. + +```bash +sf headless undo +sf headless undo --force +``` + +### `steer <description>` + +Hard-steer plan documents during execution. Useful for mid-course corrections. + +```bash +sf headless steer "Skip the blocked dependency, use mock instead" +``` + +### `queue` + +Queue and reorder future milestones. + +```bash +sf headless queue +``` + +## Configuration & Health + +### `doctor` + +Runtime health checks with auto-fix. + +```bash +sf headless doctor +``` + +### `prefs` + +Manage preferences (global/project/status/wizard/setup). + +```bash +sf headless prefs +``` + +### `knowledge <rule|pattern|lesson>` + +Add persistent project knowledge. 
+ +```bash +sf headless knowledge "Always use UTC timestamps in API responses" +``` + +## Phases + +SF workflows progress through these phases: + +``` +pre-planning → needs-discussion → discussing → researching → planning → +executing → verifying → summarizing → advancing → validating-milestone → +completing-milestone → complete +``` + +Special phases: `paused`, `blocked`, `replanning-slice` + +## Hierarchy + +- **Milestone**: Shippable version (4–10 slices, 1–4 weeks) +- **Slice**: One demoable vertical capability (1–7 tasks, 1–3 days) +- **Task**: One context-window-sized unit of work (one session) diff --git a/sf-orchestrator/references/json-result.md b/sf-orchestrator/references/json-result.md new file mode 100644 index 000000000..04adf33ce --- /dev/null +++ b/sf-orchestrator/references/json-result.md @@ -0,0 +1,162 @@ +# HeadlessJsonResult Reference + +When using `--output-format json`, SF collects events silently and emits a single `HeadlessJsonResult` JSON object to stdout at process exit. This is the structured result for orchestrator decision-making. + +## Obtaining the Result + +```bash +# Capture the JSON result +RESULT=$(sf headless --output-format json next 2>/dev/null) +EXIT=$? + +# Parse fields with jq +echo "$RESULT" | jq '.status' +echo "$RESULT" | jq '.cost.total' +echo "$RESULT" | jq '.nextAction' +``` + +**Important:** Progress text goes to stderr. The JSON result goes to stdout. Redirect stderr to `/dev/null` when parsing stdout. + +## Field Reference + +### Top-Level Fields + +| Field | Type | Description | +|-------|------|-------------| +| `status` | `"success" \| "error" \| "blocked" \| "cancelled" \| "timeout"` | Final session status. Maps directly to exit codes. | +| `exitCode` | `number` | Process exit code: `0` (success), `1` (error/timeout), `10` (blocked), `11` (cancelled). | +| `sessionId` | `string \| undefined` | Session identifier. Pass to `--resume <id>` to continue this session. 
| +| `duration` | `number` | Session wall-clock duration in milliseconds. | +| `cost` | `CostObject` | Token usage and cost breakdown. See below. | +| `toolCalls` | `number` | Total number of tool calls made during the session. | +| `events` | `number` | Total number of events processed during the session. | +| `milestone` | `string \| undefined` | Active milestone ID (e.g. `"M001"`). | +| `phase` | `string \| undefined` | Current SF phase at session end (e.g. `"executing"`, `"blocked"`, `"complete"`). | +| `nextAction` | `string \| undefined` | Recommended next action from the state machine (e.g. `"dispatch"`, `"complete"`). | +| `artifacts` | `string[] \| undefined` | Paths to artifacts created or modified during the session. | +| `commits` | `string[] \| undefined` | Git commit SHAs created during the session. | + +### Status → Exit Code Mapping + +| Status | Exit Code | Constant | Meaning | +|--------|-----------|----------|---------| +| `success` | `0` | `EXIT_SUCCESS` | Unit or milestone completed successfully | +| `error` | `1` | `EXIT_ERROR` | Runtime error or LLM failure | +| `timeout` | `1` | `EXIT_ERROR` | `--timeout` deadline exceeded | +| `blocked` | `10` | `EXIT_BLOCKED` | Execution blocked — needs human intervention | +| `cancelled` | `11` | `EXIT_CANCELLED` | Cancelled by user or orchestrator | + +### Cost Object + +| Field | Type | Description | +|-------|------|-------------| +| `cost.total` | `number` | Total cost in USD for the session. | +| `cost.input_tokens` | `number` | Number of input tokens consumed. | +| `cost.output_tokens` | `number` | Number of output tokens generated. | +| `cost.cache_read_tokens` | `number` | Number of tokens served from prompt cache. | +| `cost.cache_write_tokens` | `number` | Number of tokens written to prompt cache. | + +## Parsing Patterns + +### Decision-Making After Each Step + +```bash +RESULT=$(sf headless --output-format json next 2>/dev/null) +EXIT=$? 
+ +case $EXIT in + 0) + PHASE=$(echo "$RESULT" | jq -r '.phase') + NEXT=$(echo "$RESULT" | jq -r '.nextAction') + echo "Success — phase: $PHASE, next: $NEXT" + ;; + 1) + STATUS=$(echo "$RESULT" | jq -r '.status') + echo "Failed — status: $STATUS" + ;; + 10) + echo "Blocked — needs intervention" + sf headless query | jq '.state' + ;; + 11) + echo "Cancelled" + ;; +esac +``` + +### Cost Tracking + +```bash +RESULT=$(sf headless --output-format json next 2>/dev/null) + +COST=$(echo "$RESULT" | jq -r '.cost.total') +INPUT=$(echo "$RESULT" | jq -r '.cost.input_tokens') +OUTPUT=$(echo "$RESULT" | jq -r '.cost.output_tokens') + +echo "Cost: \$$COST (${INPUT} in / ${OUTPUT} out)" +``` + +### Session Resumption + +```bash +# First run — capture session ID +RESULT=$(sf headless --output-format json next 2>/dev/null) +SESSION_ID=$(echo "$RESULT" | jq -r '.sessionId') + +# Resume the same session later +sf headless --resume "$SESSION_ID" --output-format json next 2>/dev/null +``` + +### Artifact Collection + +```bash +RESULT=$(sf headless --output-format json auto 2>/dev/null) + +# List files created/modified +echo "$RESULT" | jq -r '.artifacts[]?' + +# List commits made +echo "$RESULT" | jq -r '.commits[]?' +``` + +## Example Result + +```json +{ + "status": "success", + "exitCode": 0, + "sessionId": "abc123def456", + "duration": 45200, + "cost": { + "total": 0.42, + "input_tokens": 15000, + "output_tokens": 3500, + "cache_read_tokens": 8000, + "cache_write_tokens": 2000 + }, + "toolCalls": 12, + "events": 87, + "milestone": "M001", + "phase": "executing", + "nextAction": "dispatch", + "artifacts": [ + ".sf/milestones/M001/slices/S01/tasks/T01-SUMMARY.md" + ], + "commits": [ + "a1b2c3d" + ] +} +``` + +## Combined with `query` for Full Picture + +The `HeadlessJsonResult` captures what happened during a session. Use `query` for the current project state: + +```bash +# What happened in this step? 
+RESULT=$(sf headless --output-format json next 2>/dev/null) +echo "$RESULT" | jq '{status, cost: .cost.total, phase}' + +# What's the overall project state now? +sf headless query | jq '{phase: .state.phase, progress: .state.progress, totalCost: .cost.total}' +``` diff --git a/sf-orchestrator/templates/spec.md b/sf-orchestrator/templates/spec.md new file mode 100644 index 000000000..441880f39 --- /dev/null +++ b/sf-orchestrator/templates/spec.md @@ -0,0 +1,20 @@ +# [Product Name] + +## What +[One paragraph: what this product does. Be concrete — "A CLI tool that converts CSV files to JSON" not "A data transformation solution".] + +## Requirements +- [User can DO something specific and observable] +- [User can DO another specific thing] +- [System DOES something automatically] +- [Error case: system handles X gracefully] + +## Technical Constraints +- Language: [Node.js / Python / Go / Rust / etc.] +- Framework: [Express / FastAPI / none / etc.] +- External dependencies: [list APIs, databases, services] +- Environment: [Node >= 22 / Python 3.12+ / etc.] + +## Out of Scope +- [Explicit exclusion 1 — prevents scope creep] +- [Explicit exclusion 2] diff --git a/sf-orchestrator/workflows/build-from-spec.md b/sf-orchestrator/workflows/build-from-spec.md new file mode 100644 index 000000000..9552fa7b0 --- /dev/null +++ b/sf-orchestrator/workflows/build-from-spec.md @@ -0,0 +1,184 @@ +# Build From Spec + +End-to-end workflow: take a product idea or specification, produce working software. + +## Prerequisites + +- `sf` CLI installed (`npm install -g sf-run`) +- A directory for the project (can be empty) +- Git initialized in the directory + +## Process + +### Step 1: Prepare the project directory + +```bash +PROJECT_DIR="/tmp/my-project-name" +mkdir -p "$PROJECT_DIR" +cd "$PROJECT_DIR" +git init 2>/dev/null # SF needs a git repo +``` + +### Step 2: Write the spec file + +Write a spec file that describes what to build. More detail = better results. 
+ +```bash +cat > spec.md << 'SPEC' +# Product Name + +## What +[Concrete description of what to build] + +## Requirements +- [Specific, testable requirement 1] +- [Specific, testable requirement 2] +- [Specific, testable requirement 3] + +## Technical Constraints +- [Language, framework, or platform requirements] +- [External services or APIs involved] +- [Performance or security requirements] + +## Out of Scope +- [Things explicitly NOT included] +SPEC +``` + +**Spec quality matters.** Vague specs produce vague results. Include: +- What the user can DO when it's done (not what code to write) +- Technical constraints (language, framework, Node version) +- What's out of scope (prevents scope creep) + +### Step 3: Launch the build + +**Fire-and-forget (simplest — SF does everything):** +```bash +cd "$PROJECT_DIR" +RESULT=$(sf headless --output-format json --timeout 0 --context spec.md new-milestone --auto 2>/dev/null) +EXIT=$? +``` + +`--timeout 0` disables the timeout for long builds. `--auto` chains milestone creation into execution. + +**With budget limit:** +```bash +# Use step-by-step mode with budget checks instead of auto +# See workflows/step-by-step.md +``` + +**For CI or ecosystem runs (no user config):** +```bash +RESULT=$(sf headless --bare --output-format json --timeout 0 --context spec.md new-milestone --auto 2>/dev/null) +EXIT=$? 
+``` + +### Step 4: Handle the result + +```bash +case $EXIT in + 0) + # Success — verify deliverables + STATUS=$(echo "$RESULT" | jq -r '.status') + COST=$(echo "$RESULT" | jq -r '.cost.total') + COMMITS=$(echo "$RESULT" | jq -r '.commits | length') + echo "Build complete: $STATUS, cost: \$$COST, commits: $COMMITS" + + # Inspect what was built + sf headless query | jq '.state.progress' + + # Check the actual files + ls -la "$PROJECT_DIR" + ;; + 1) + # Error — inspect and decide + echo "Build failed" + echo "$RESULT" | jq '{status: .status, phase: .phase}' + + # Check state for details + sf headless query | jq '.state' + ;; + 10) + # Blocked — needs intervention + echo "Build blocked — needs human input" + sf headless query | jq '{phase: .state.phase, blockers: .state.blockers}' + + # Options: steer, supply answers, or escalate + # See workflows/monitor-and-poll.md for blocker handling + ;; + 11) + echo "Build was cancelled" + ;; +esac +``` + +### Step 5: Verify deliverables + +After a successful build, verify the output: + +```bash +cd "$PROJECT_DIR" + +# Check project state +sf headless query | jq '{ + phase: .state.phase, + progress: .state.progress, + cost: .cost.total +}' + +# Check git log for what was built +git log --oneline + +# Run the project's own tests if they exist +[ -f package.json ] && npm test 2>/dev/null +[ -f Makefile ] && make test 2>/dev/null +``` + +## Complete Example + +```bash +# 1. Setup +mkdir -p /tmp/todo-api && cd /tmp/todo-api && git init + +# 2. Write spec +cat > spec.md << 'SPEC' +# Todo API + +Build a REST API for managing todo items using Node.js and Express. 
+ +## Requirements +- GET /todos — list all todos +- POST /todos — create a todo (title, completed) +- PUT /todos/:id — update a todo +- DELETE /todos/:id — delete a todo +- Todos stored in-memory (no database) +- Input validation with descriptive error messages +- Health check endpoint at GET /health + +## Technical Constraints +- Node.js with ESM modules +- Express framework +- No external database — in-memory array +- Port configurable via PORT env var (default 3000) + +## Out of Scope +- Authentication +- Persistent storage +- Frontend +SPEC + +# 3. Launch +RESULT=$(sf headless --output-format json --timeout 0 --context spec.md new-milestone --auto 2>/dev/null) +EXIT=$? + +# 4. Report +if [ $EXIT -eq 0 ]; then + COST=$(echo "$RESULT" | jq -r '.cost.total') + echo "Build complete (\$$COST)" + echo "Files created:" + find . -not -path './.sf/*' -not -path './.git/*' -type f +else + echo "Build failed (exit $EXIT)" + echo "$RESULT" | jq . +fi +``` diff --git a/sf-orchestrator/workflows/monitor-and-poll.md b/sf-orchestrator/workflows/monitor-and-poll.md new file mode 100644 index 000000000..ffff137e4 --- /dev/null +++ b/sf-orchestrator/workflows/monitor-and-poll.md @@ -0,0 +1,187 @@ +# Monitor and Poll + +Check status of a SF project, handle blockers, track costs, and decide next actions. + +## Checking Project State + +The `query` command is your primary monitoring tool. It's instant (~50ms), costs nothing (no LLM), and returns the full project snapshot. + +```bash +cd /path/to/project +sf headless query +``` + +### Key fields to inspect + +```bash +# Overall status +sf headless query | jq '{ + phase: .state.phase, + milestone: .state.activeMilestone.id, + slice: .state.activeSlice.id, + task: .state.activeTask.id, + progress: .state.progress, + cost: .cost.total +}' + +# What should happen next +sf headless query | jq '.next' +# Returns: { "action": "dispatch", "unitType": "execute-task", "unitId": "M001/S01/T01" } + +# Is it done? 
+sf headless query | jq '.state.phase' +# "complete" = done, "blocked" = needs you, anything else = in progress +``` + +### Phase meanings + +| Phase | Meaning | Your action | +|-------|---------|-------------| +| `pre-planning` | Milestone exists, no slices planned yet | Run `auto` or `next` | +| `needs-discussion` | Ambiguities need resolution | Supply answers or run with defaults | +| `discussing` | Discussion in progress | Wait | +| `researching` | Codebase/library research | Wait | +| `planning` | Creating task plans | Wait | +| `executing` | Writing code | Wait | +| `verifying` | Checking must-haves | Wait | +| `summarizing` | Recording what happened | Wait | +| `advancing` | Moving to next task/slice | Wait | +| `evaluating-gates` | Quality checks before execution | Wait or run `next` | +| `validating-milestone` | Final milestone checks | Wait | +| `completing-milestone` | Archiving and cleanup | Wait | +| `complete` | Done | Verify deliverables | +| `blocked` | Needs human input | Handle blocker (see below) | +| `paused` | Explicitly paused | Resume with `auto` | + +## Handling Blockers + +When exit code is `10` or phase is `blocked`: + +```bash +# 1. Understand the blocker +sf headless query | jq '{phase: .state.phase, blockers: .state.blockers, nextAction: .state.nextAction}' + +# 2. Option A: Steer around it +sf headless steer "Skip the database dependency, use in-memory storage instead" + +# 3. Option B: Supply pre-built answers +cat > fix.json << 'EOF' +{ + "questions": { "blocked_question_id": "workaround_option" }, + "defaults": { "strategy": "first_option" } +} +EOF +sf headless --answers fix.json auto + +# 4. Option C: Force a specific phase +sf headless dispatch replan + +# 5. Option D: Escalate to user +echo "SF build blocked. Phase: $(sf headless query | jq -r '.state.phase')" +echo "Manual intervention required." 
+``` + +## Cost Tracking + +```bash +# Current cumulative cost +sf headless query | jq '.cost.total' + +# Per-worker breakdown +sf headless query | jq '.cost.workers' + +# After a step (from HeadlessJsonResult) +RESULT=$(sf headless --output-format json next 2>/dev/null) +echo "$RESULT" | jq '.cost' +``` + +### Budget enforcement pattern + +```bash +MAX_BUDGET=15.00 + +check_budget() { + TOTAL=$(sf headless query | jq -r '.cost.total') + OVER=$(echo "$TOTAL > $MAX_BUDGET" | bc -l) + if [ "$OVER" = "1" ]; then + echo "Budget exceeded: \$$TOTAL > \$$MAX_BUDGET" + sf headless stop + return 1 + fi + return 0 +} +``` + +## Poll-and-React Loop + +For agents that need to periodically check on a build: + +```bash +cd /path/to/project + +poll_project() { + STATE=$(sf headless query 2>/dev/null) + if [ -z "$STATE" ]; then + echo "NO_PROJECT" + return + fi + + PHASE=$(echo "$STATE" | jq -r '.state.phase') + COST=$(echo "$STATE" | jq -r '.cost.total') + PROGRESS=$(echo "$STATE" | jq -r '"\(.state.progress.milestones.done)/\(.state.progress.milestones.total) milestones, \(.state.progress.tasks.done)/\(.state.progress.tasks.total) tasks"') + + case "$PHASE" in + complete) + echo "COMPLETE cost=\$$COST progress=$PROGRESS" + ;; + blocked) + BLOCKER=$(echo "$STATE" | jq -r '.state.nextAction // "unknown"') + echo "BLOCKED reason=$BLOCKER cost=\$$COST" + ;; + *) + NEXT=$(echo "$STATE" | jq -r '.next.action // "none"') + echo "IN_PROGRESS phase=$PHASE next=$NEXT cost=\$$COST progress=$PROGRESS" + ;; + esac +} +``` + +## Resuming Work + +If a build was interrupted or you need to continue: + +```bash +cd /path/to/project + +# Check current state +sf headless query | jq '.state.phase' + +# Resume from where it left off +sf headless --output-format json auto 2>/dev/null + +# Or resume a specific session +sf headless --resume "$SESSION_ID" --output-format json auto 2>/dev/null +``` + +## Reading Build Artifacts + +After completion, inspect what SF produced: + +```bash +cd /path/to/project 
+ +# Project summary +cat .sf/PROJECT.md + +# What was decided +cat .sf/DECISIONS.md + +# Requirements and their validation status +cat .sf/REQUIREMENTS.md + +# Milestone summary +cat .sf/milestones/M001-*/M001-*-SUMMARY.md 2>/dev/null + +# Git history (SF commits per-slice) +git log --oneline +``` diff --git a/sf-orchestrator/workflows/step-by-step.md b/sf-orchestrator/workflows/step-by-step.md new file mode 100644 index 000000000..b9f9eb1e6 --- /dev/null +++ b/sf-orchestrator/workflows/step-by-step.md @@ -0,0 +1,156 @@ +# Step-by-Step Execution + +Run SF one unit at a time with decision points between steps. Use this when you need +control over execution — budget enforcement, progress reporting, conditional logic, +or the ability to steer mid-build. + +## When to use this vs `auto` + +| Approach | Use when | +|----------|----------| +| `auto` | You trust the build, just want the result | +| `next` loop | You need budget checks, progress updates, or intervention points | + +## Core Loop + +```bash +cd /path/to/project +MAX_BUDGET=20.00 +TOTAL_COST=0 + +while true; do + # Run one unit + RESULT=$(sf headless --output-format json next 2>/dev/null) + EXIT=$? + + # Parse result + STATUS=$(echo "$RESULT" | jq -r '.status') + STEP_COST=$(echo "$RESULT" | jq -r '.cost.total') + PHASE=$(echo "$RESULT" | jq -r '.phase // empty') + SESSION_ID=$(echo "$RESULT" | jq -r '.sessionId // empty') + + # Handle exit codes + case $EXIT in + 0) ;; # success — continue + 1) + echo "Step failed: $STATUS" + break + ;; + 10) + echo "Blocked — needs intervention" + sf headless query | jq '.state' + break + ;; + 11) + echo "Cancelled" + break + ;; + esac + + # Check if milestone complete + CURRENT_PHASE=$(sf headless query | jq -r '.state.phase') + if [ "$CURRENT_PHASE" = "complete" ]; then + TOTAL_COST=$(sf headless query | jq -r '.cost.total') + echo "Milestone complete. 
Total cost: \$$TOTAL_COST" + break + fi + + # Budget check + TOTAL_COST=$(sf headless query | jq -r '.cost.total') + OVER=$(echo "$TOTAL_COST > $MAX_BUDGET" | bc -l) + if [ "$OVER" = "1" ]; then + echo "Budget limit (\$$MAX_BUDGET) exceeded at \$$TOTAL_COST" + sf headless stop + break + fi + + # Progress report + PROGRESS=$(sf headless query | jq -r '"\(.state.progress.tasks.done)/\(.state.progress.tasks.total) tasks"') + echo "Step done ($STATUS). Phase: $CURRENT_PHASE, Progress: $PROGRESS, Cost: \$$TOTAL_COST" +done +``` + +## Step-by-Step with Spec Creation + +Complete flow from idea to working code with full control: + +```bash +# 1. Setup +PROJECT_DIR="/tmp/my-project" +mkdir -p "$PROJECT_DIR" && cd "$PROJECT_DIR" && git init 2>/dev/null + +# 2. Write spec +cat > spec.md << 'SPEC' +[Your spec here] +SPEC + +# 3. Create the milestone (planning only, no execution) +RESULT=$(sf headless --output-format json --context spec.md new-milestone 2>/dev/null) +EXIT=$? + +if [ $EXIT -ne 0 ]; then + echo "Milestone creation failed" + echo "$RESULT" | jq . + exit 1 +fi + +echo "Milestone created. Starting execution..." + +# 4. Execute step-by-step +STEP=0 +while true; do + STEP=$((STEP + 1)) + RESULT=$(sf headless --output-format json next 2>/dev/null) + EXIT=$? + + [ $EXIT -ne 0 ] && break + + PHASE=$(sf headless query | jq -r '.state.phase') + COST=$(sf headless query | jq -r '.cost.total') + + echo "Step $STEP complete. 
Phase: $PHASE, Cost: \$$COST" + + [ "$PHASE" = "complete" ] && break +done + +echo "Build finished in $STEP steps" +``` + +## Intervention Patterns + +### Steer mid-execution + +If you detect the build going in the wrong direction: + +```bash +# Check what's happening +sf headless query | jq '{phase: .state.phase, task: .state.activeTask}' + +# Redirect +sf headless steer "Use SQLite instead of PostgreSQL for storage" + +# Continue +sf headless --output-format json next 2>/dev/null +``` + +### Skip a stuck unit + +```bash +sf headless skip +sf headless --output-format json next 2>/dev/null +``` + +### Undo last completed unit + +```bash +sf headless undo --force +sf headless --output-format json next 2>/dev/null +``` + +### Force a specific phase + +```bash +sf headless dispatch replan # Re-plan the current slice +sf headless dispatch execute # Skip to execution +sf headless dispatch uat # Jump to user acceptance testing +``` diff --git a/src/cli-web-branch.ts b/src/cli-web-branch.ts index 7b163404b..10b61f14c 100644 --- a/src/cli-web-branch.ts +++ b/src/cli-web-branch.ts @@ -17,7 +17,7 @@ export interface CliFlags { tools?: string[] messages: string[] web?: boolean - /** Optional project path for web mode: `gsd --web <path>` or `gsd web start <path>` */ + /** Optional project path for web mode: `sf --web <path>` or `sf web start <path>` */ webPath?: string /** Custom host to bind web server to: `--host 0.0.0.0` */ webHost?: string @@ -26,7 +26,7 @@ export interface CliFlags { /** Additional allowed origins for CORS: `--allowed-origins http://192.168.1.10:8080` */ webAllowedOrigins?: string[] - /** Set by `gsd sessions` when the user picks a specific session to resume */ + /** Set by `sf sessions` when the user picks a specific session to resume */ _selectedSessionPath?: string } @@ -203,7 +203,7 @@ export async function runWebCliBranch( flags: CliFlags, deps: RunWebCliBranchDeps = {}, ): Promise<RunWebCliBranchResult> { - // Handle `gsd web stop [path|--all]` 
subcommand + // Handle `sf web stop [path|--all]` subcommand if (flags.messages[0] === 'web' && flags.messages[1] === 'stop') { const stderr = deps.stderr ?? process.stderr const stopArg = flags.messages[2] @@ -221,8 +221,8 @@ export async function runWebCliBranch( } } - // `gsd web [start] [path]` is an alias for `gsd --web [path]` - // Matches: `gsd web`, `gsd web start`, `gsd web start <path>`, `gsd web <path>` + // `sf web [start] [path]` is an alias for `sf --web [path]` + // Matches: `sf web`, `sf web start`, `sf web start <path>`, `sf web <path>` const isWebSubcommand = flags.messages[0] === 'web' && flags.messages[1] !== 'stop' if (!flags.web && !isWebSubcommand) { return { handled: false } @@ -232,9 +232,9 @@ export async function runWebCliBranch( const defaultCwd = (deps.cwd ?? (() => process.cwd()))() // Resolve project path from multiple forms: - // gsd --web <path> → flags.webPath - // gsd web start <path> → messages[2] - // gsd web <path> → messages[1] (when not "start") + // sf --web <path> → flags.webPath + // sf web start <path> → messages[2] + // sf web <path> → messages[1] (when not "start") let webPath = flags.webPath if (!webPath && isWebSubcommand) { if (flags.messages[1] === 'start') { diff --git a/src/cli.ts b/src/cli.ts index 76ea1bb92..cf7b5911c 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -378,7 +378,7 @@ if (cliFlags.messages[0] === 'headless') { await ensureRtkBootstrap() // Sync bundled resources before headless runs (#3471). Without this, // headless-query loads from src/resources/ while auto/interactive load - // from ~/.gsd/agent/extensions/ — different extension copies diverge. + // from ~/.sf/agent/extensions/ — different extension copies diverge. initResources(agentDir) const { runHeadless, parseHeadlessArgs } = await import('./headless.js') await runHeadless(parseHeadlessArgs(process.argv)) @@ -558,7 +558,7 @@ if (isPrintMode) { markStartup('resourceLoader.reload') // Print mode is a one-shot invocation. 
The --model flag is a transient - // override (e.g. verification smoke tests like `gsd -p --model longcat/X "reply ok"`) + // override (e.g. verification smoke tests like `sf -p --model longcat/X "reply ok"`) // and MUST NOT mutate the persisted defaultProvider/defaultModel in settings.json (#4251). // We disable persistence at session construction so every downstream path // (setModel override, fallback reapply, validation repair) is gated in one place. @@ -611,7 +611,7 @@ if (isPrintMode) { // Activate every registered tool before starting the MCP transport. // `session.agent.state.tools` is the *active* subset, not the full // registry — if we expose only the active set, extension-registered - // tools (gsd workflow, browser-tools, mac-tools, search-the-web, …) + // tools (sf workflow, browser-tools, mac-tools, search-the-web, …) // are invisible to MCP clients. Flipping the active set to every // known tool name makes `state.tools` mirror the full registry for // this MCP session, which is what an external client expects. @@ -635,7 +635,7 @@ if (isPrintMode) { } // --------------------------------------------------------------------------- -// Worktree subcommand — `gsd worktree <list|merge|clean|remove>` +// Worktree subcommand — `sf worktree <list|merge|clean|remove>` // --------------------------------------------------------------------------- if (cliFlags.messages[0] === 'worktree' || cliFlags.messages[0] === 'wt') { const { handleList, handleMerge, handleClean, handleRemove } = await import('./worktree-cli.js') @@ -676,8 +676,8 @@ if (!cliFlags.worktree && !isPrintMode) { } // --------------------------------------------------------------------------- -// Auto-redirect: `gsd auto` with piped stdout → headless mode (#2732) -// When stdout is not a TTY (e.g. `gsd auto | cat`, `gsd auto > file`), +// Auto-redirect: `sf auto` with piped stdout → headless mode (#2732) +// When stdout is not a TTY (e.g. 
`sf auto | cat`, `sf auto > file`), // the TUI cannot render and the process hangs. Redirect to headless mode // which handles non-interactive output gracefully. // --------------------------------------------------------------------------- @@ -698,7 +698,7 @@ const cwd = process.cwd() const projectSessionsDir = getProjectSessionsDir(cwd) // Migrate legacy flat sessions: before per-directory scoping, all .jsonl session -// files lived directly in ~/.gsd/sessions/. Move them into the correct per-cwd +// files lived directly in ~/.sf/sessions/. Move them into the correct per-cwd // subdirectory so /resume can find them. migrateLegacyFlatSessions(sessionsDir, projectSessionsDir) diff --git a/src/extension-registry.ts b/src/extension-registry.ts index 042df6799..dbf47db0e 100644 --- a/src/extension-registry.ts +++ b/src/extension-registry.ts @@ -3,7 +3,7 @@ * * Extensions without manifests always load (backwards compatible). * A fresh install has an empty registry — all extensions enabled by default. - * The only way an extension stops loading is an explicit `gsd extensions disable <id>`. + * The only way an extension stops loading is an explicit `sf extensions disable <id>`. */ import { existsSync, mkdirSync, readFileSync, readdirSync, renameSync, writeFileSync } from "node:fs"; diff --git a/src/headless-context.ts b/src/headless-context.ts index 8f9512c30..75a6702f9 100644 --- a/src/headless-context.ts +++ b/src/headless-context.ts @@ -2,7 +2,7 @@ * Headless Context Loading — stdin reading, file context, and project bootstrapping * * Handles loading context from files or stdin for headless new-milestone, - * and bootstraps the .gsd/ directory structure when needed. + * and bootstraps the .sf/ directory structure when needed. 
*/ import { readFileSync, mkdirSync } from 'node:fs' @@ -49,11 +49,11 @@ export async function loadContext(options: ContextOptions): Promise<string> { // --------------------------------------------------------------------------- /** - * Bootstrap .gsd/ directory structure for headless new-milestone. + * Bootstrap .sf/ directory structure for headless new-milestone. * Mirrors the bootstrap logic from guided-flow.ts showSmartEntry(). */ export function bootstrapGsdProject(basePath: string): void { - const gsdDir = join(basePath, '.gsd') + const gsdDir = join(basePath, '.sf') mkdirSync(join(gsdDir, 'milestones'), { recursive: true }) mkdirSync(join(gsdDir, 'runtime'), { recursive: true }) } diff --git a/src/headless-query.ts b/src/headless-query.ts index b8276b0e6..103902579 100644 --- a/src/headless-query.ts +++ b/src/headless-query.ts @@ -1,5 +1,5 @@ /** - * Headless Query — `gsd headless query` + * Headless Query — `sf headless query` * * Single read-only command that returns the full project snapshot as JSON * to stdout, without spawning an LLM session. Instant (~50ms). @@ -18,20 +18,20 @@ import { createJiti } from '@mariozechner/jiti' import { fileURLToPath } from 'node:url' import { join } from 'node:path' import { homedir } from 'node:os' -import type { GSDState } from './resources/extensions/sf/types.js' +import type { SFState } from './resources/extensions/sf/types.js' import { resolveBundledSourceResource } from './bundled-resource-path.js' const jiti = createJiti(fileURLToPath(import.meta.url), { interopDefault: true, debug: false }) // Resolve extensions from the synced agent directory so headless-query // loads the same extension copy as interactive/auto modes (#3471). // Falls back to bundled source for source-tree dev workflows. 
-const agentExtensionsDir = join(process.env.SF_AGENT_DIR || join(homedir(), '.gsd', 'agent'), 'extensions', 'gsd') +const agentExtensionsDir = join(process.env.SF_AGENT_DIR || join(homedir(), '.sf', 'agent'), 'extensions', 'sf') const { existsSync } = await import('node:fs') const useAgentDir = existsSync(join(agentExtensionsDir, 'state.ts')) const gsdExtensionPath = (...segments: string[]) => useAgentDir ? join(agentExtensionsDir, ...segments) - : resolveBundledSourceResource(import.meta.url, 'extensions', 'gsd', ...segments) + : resolveBundledSourceResource(import.meta.url, 'extensions', 'sf', ...segments) async function loadExtensionModules() { const stateModule = await jiti.import(gsdExtensionPath('state.ts'), {}) as any @@ -41,7 +41,7 @@ async function loadExtensionModules() { const autoStartModule = await jiti.import(gsdExtensionPath('auto-start.ts'), {}) as any return { openProjectDbIfPresent: autoStartModule.openProjectDbIfPresent as (basePath: string) => Promise<void>, - deriveState: stateModule.deriveState as (basePath: string) => Promise<GSDState>, + deriveState: stateModule.deriveState as (basePath: string) => Promise<SFState>, resolveDispatch: dispatchModule.resolveDispatch as (opts: any) => Promise<any>, readAllSessionStatuses: sessionModule.readAllSessionStatuses as (basePath: string) => any[], loadEffectiveGSDPreferences: prefsModule.loadEffectiveGSDPreferences as () => any, @@ -51,7 +51,7 @@ async function loadExtensionModules() { // ─── Types ────────────────────────────────────────────────────────────────── export interface QuerySnapshot { - state: GSDState + state: SFState next: { action: 'dispatch' | 'stop' | 'skip' unitType?: string diff --git a/src/headless.ts b/src/headless.ts index 9a3927038..c84418f88 100644 --- a/src/headless.ts +++ b/src/headless.ts @@ -1,7 +1,7 @@ /** - * Headless Orchestrator — `gsd headless` + * Headless Orchestrator — `sf headless` * - * Runs any /gsd subcommand without a TUI by spawning a child process in + * Runs 
any /sf subcommand without a TUI by spawning a child process in * RPC mode, auto-responding to extension UI requests, and streaming * progress to stderr. * @@ -289,7 +289,7 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): } } - // For new-milestone, load context and bootstrap .gsd/ before spawning RPC child + // For new-milestone, load context and bootstrap .sf/ before spawning RPC child if (isNewMilestone) { if (!options.context && !options.contextText) { process.stderr.write('[headless] Error: new-milestone requires --context <file> or --context-text <text>\n') @@ -304,11 +304,11 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): process.exit(1) } - // Bootstrap .gsd/ if needed - const gsdDir = join(process.cwd(), '.gsd') + // Bootstrap .sf/ if needed + const gsdDir = join(process.cwd(), '.sf') if (!existsSync(gsdDir)) { if (!options.json) { - process.stderr.write('[headless] Bootstrapping .gsd/ project structure...\n') + process.stderr.write('[headless] Bootstrapping .sf/ project structure...\n') } bootstrapGsdProject(process.cwd()) } @@ -319,11 +319,11 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): writeFileSync(join(runtimeDir, 'headless-context.md'), contextContent, 'utf-8') } - // Validate .gsd/ directory (skip for new-milestone since we just bootstrapped it) - const gsdDir = join(process.cwd(), '.gsd') + // Validate .sf/ directory (skip for new-milestone since we just bootstrapped it) + const gsdDir = join(process.cwd(), '.sf') if (!isNewMilestone && !existsSync(gsdDir)) { - process.stderr.write('[headless] Error: No .gsd/ directory found in current directory.\n') - process.stderr.write("[headless] Run 'gsd' interactively first to initialize a project.\n") + process.stderr.write('[headless] Error: No .sf/ directory found in current directory.\n') + process.stderr.write("[headless] Run 'sf' interactively first to initialize a project.\n") process.exit(1) } 
@@ -337,7 +337,7 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): // Resolve CLI path for the child process const cliPath = process.env.SF_BIN_PATH || process.argv[1] if (!cliPath) { - process.stderr.write('[headless] Error: Cannot determine CLI path. Set SF_BIN_PATH or run via gsd.\n') + process.stderr.write('[headless] Error: Cannot determine CLI path. Set SF_BIN_PATH or run via sf.\n') process.exit(1) } @@ -759,7 +759,7 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): // v2 protocol negotiation — attempt init for structured completion events let v2Enabled = false try { - await client.init({ clientId: 'gsd-headless' }) + await client.init({ clientId: 'sf-headless' }) v2Enabled = true } catch { process.stderr.write('[headless] Warning: v2 init failed, falling back to v1 string-matching\n') @@ -829,11 +829,11 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): } if (!options.json) { - process.stderr.write(`[headless] Running /gsd ${options.command}${options.commandArgs.length > 0 ? ' ' + options.commandArgs.join(' ') : ''}...\n`) + process.stderr.write(`[headless] Running /sf ${options.command}${options.commandArgs.length > 0 ? ' ' + options.commandArgs.join(' ') : ''}...\n`) } // Send the command - const command = `/gsd ${options.command}${options.commandArgs.length > 0 ? ' ' + options.commandArgs.join(' ') : ''}` + const command = `/sf ${options.command}${options.commandArgs.length > 0 ? 
' ' + options.commandArgs.join(' ') : ''}` try { await client.prompt(command) } catch (err) { @@ -846,7 +846,7 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): await completionPromise } - // Auto-mode chaining: if --auto and milestone creation succeeded, send /gsd auto + // Auto-mode chaining: if --auto and milestone creation succeeded, send /sf auto if (isNewMilestone && options.auto && milestoneReady && !blocked && exitCode === EXIT_SUCCESS) { if (!options.json) { process.stderr.write('[headless] Milestone ready — chaining into auto-mode...\n') @@ -863,7 +863,7 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): }) try { - await client.prompt('/gsd auto') + await client.prompt('/sf auto') } catch (err) { process.stderr.write(`[headless] Error: Failed to start auto-mode: ${err instanceof Error ? err.message : String(err)}\n`) exitCode = EXIT_ERROR diff --git a/src/help-text.ts b/src/help-text.ts index 505b39415..ab6fad3e5 100644 --- a/src/help-text.ts +++ b/src/help-text.ts @@ -1,6 +1,6 @@ const SUBCOMMAND_HELP: Record<string, string> = { config: [ - 'Usage: gsd config', + 'Usage: sf config', '', 'Re-run the interactive setup wizard to configure:', ' - LLM provider (Anthropic, OpenAI, Google, OpenRouter, Ollama, LM Studio, etc.)', @@ -15,7 +15,7 @@ const SUBCOMMAND_HELP: Record<string, string> = { ].join('\n'), update: [ - 'Usage: gsd update', + 'Usage: sf update', '', 'Update SF to the latest version.', '', @@ -23,7 +23,7 @@ const SUBCOMMAND_HELP: Record<string, string> = { ].join('\n'), sessions: [ - 'Usage: gsd sessions', + 'Usage: sf sessions', '', 'List all saved sessions for the current directory and interactively', 'pick one to resume. 
Shows date, message count, and a preview of the', @@ -36,31 +36,31 @@ const SUBCOMMAND_HELP: Record<string, string> = { ].join('\n'), install: [ - 'Usage: gsd install <source> [-l, --local]', + 'Usage: sf install <source> [-l, --local]', '', 'Install a package/extension source and run post-install validation (dependency checks, setup).', '', 'Examples:', - ' gsd install npm:@foo/bar', - ' gsd install git:github.com/user/repo', - ' gsd install https://github.com/user/repo', - ' gsd install ./local/path', + ' sf install npm:@foo/bar', + ' sf install git:github.com/user/repo', + ' sf install https://github.com/user/repo', + ' sf install ./local/path', ].join('\n'), remove: [ - 'Usage: gsd remove <source> [-l, --local]', + 'Usage: sf remove <source> [-l, --local]', '', 'Remove an installed package source and its settings entry.', ].join('\n'), list: [ - 'Usage: gsd list', + 'Usage: sf list', '', 'List installed package sources from user and project settings.', ].join('\n'), worktree: [ - 'Usage: gsd worktree <command> [args]', + 'Usage: sf worktree <command> [args]', '', 'Manage isolated git worktrees for parallel work streams.', '', @@ -71,35 +71,35 @@ const SUBCOMMAND_HELP: Record<string, string> = { ' remove <name> Remove a worktree (--force to remove with unmerged changes)', '', 'The -w flag creates/resumes worktrees for interactive sessions:', - ' gsd -w Auto-name a new worktree, or resume the only active one', - ' gsd -w my-feature Create or resume a named worktree', + ' sf -w Auto-name a new worktree, or resume the only active one', + ' sf -w my-feature Create or resume a named worktree', '', 'Lifecycle:', - ' 1. gsd -w Create worktree, start session inside it', + ' 1. sf -w Create worktree, start session inside it', ' 2. (work normally) All changes happen on the worktree branch', ' 3. Ctrl+C Exit — dirty work is auto-committed', - ' 4. gsd -w Resume where you left off', - ' 5. gsd worktree merge Squash-merge into main when done', + ' 4. 
sf -w Resume where you left off', + ' 5. sf worktree merge Squash-merge into main when done', '', 'Examples:', - ' gsd -w Start in a new auto-named worktree', - ' gsd -w auth-refactor Create/resume "auth-refactor" worktree', - ' gsd worktree list See all worktrees and their status', - ' gsd worktree merge auth-refactor Merge and clean up', - ' gsd worktree clean Remove all merged/empty worktrees', - ' gsd worktree remove old-branch Remove a specific worktree', - ' gsd worktree remove old-branch --force Remove even with unmerged changes', + ' sf -w Start in a new auto-named worktree', + ' sf -w auth-refactor Create/resume "auth-refactor" worktree', + ' sf worktree list See all worktrees and their status', + ' sf worktree merge auth-refactor Merge and clean up', + ' sf worktree clean Remove all merged/empty worktrees', + ' sf worktree remove old-branch Remove a specific worktree', + ' sf worktree remove old-branch --force Remove even with unmerged changes', ].join('\n'), graph: [ - 'Usage: gsd graph <subcommand> [options]', + 'Usage: sf graph <subcommand> [options]', '', - 'Manage the SF project knowledge graph. Reads .gsd/ artifacts and builds', + 'Manage the SF project knowledge graph. 
Reads .sf/ artifacts and builds', 'a queryable graph of milestones, slices, tasks, rules, patterns, and lessons.', '', 'Subcommands:', - ' build Parse .gsd/ artifacts (STATE.md, milestone ROADMAPs, slice PLANs,', - ' KNOWLEDGE.md) and write .gsd/graphs/graph.json atomically.', + ' build Parse .sf/ artifacts (STATE.md, milestone ROADMAPs, slice PLANs,', + ' KNOWLEDGE.md) and write .sf/graphs/graph.json atomically.', ' query Search graph nodes by term (BFS from seed matches, budget-trimmed).', ' Returns matching nodes and reachable edges within the token budget.', ' status Show whether graph.json exists, its age, node/edge counts, and', @@ -108,16 +108,16 @@ const SUBCOMMAND_HELP: Record<string, string> = { ' Returns added, removed, and changed nodes and edges.', '', 'Examples:', - ' gsd graph build Build the graph from .gsd/ artifacts', - ' gsd graph status Check graph age and node/edge counts', - ' gsd graph query auth Find nodes related to "auth"', - ' gsd graph diff Show changes since last snapshot', + ' sf graph build Build the graph from .sf/ artifacts', + ' sf graph status Check graph age and node/edge counts', + ' sf graph query auth Find nodes related to "auth"', + ' sf graph diff Show changes since last snapshot', ].join('\n'), headless: [ - 'Usage: gsd headless [flags] [command] [args...]', + 'Usage: sf headless [flags] [command] [args...]', '', - 'Run /gsd commands without the TUI. Default command: auto', + 'Run /sf commands without the TUI. 
Default command: auto', '', 'Flags:', ' --timeout N Overall timeout in ms (default: 300000)', @@ -150,31 +150,31 @@ const SUBCOMMAND_HELP: Record<string, string> = { ' stream-json Stream JSONL events to stdout in real time (same as --json)', '', 'Examples:', - ' gsd headless Run /gsd auto', - ' gsd headless next Run one unit', - ' gsd headless --output-format json auto Structured JSON result on stdout', - ' gsd headless --json status Machine-readable JSONL stream', - ' gsd headless --timeout 60000 With 1-minute timeout', - ' gsd headless --bare auto Minimal context (CI/ecosystem use)', - ' gsd headless --resume abc123 auto Resume a prior session', - ' gsd headless new-milestone --context spec.md Create milestone from file', - ' cat spec.md | gsd headless new-milestone --context - From stdin', - ' gsd headless new-milestone --context spec.md --auto Create + auto-execute', - ' gsd headless --supervised auto Supervised orchestrator mode', - ' gsd headless --answers answers.json auto With pre-supplied answers', - ' gsd headless --events agent_end,extension_ui_request auto Filtered event stream', - ' gsd headless query Instant JSON state snapshot', + ' sf headless Run /sf auto', + ' sf headless next Run one unit', + ' sf headless --output-format json auto Structured JSON result on stdout', + ' sf headless --json status Machine-readable JSONL stream', + ' sf headless --timeout 60000 With 1-minute timeout', + ' sf headless --bare auto Minimal context (CI/ecosystem use)', + ' sf headless --resume abc123 auto Resume a prior session', + ' sf headless new-milestone --context spec.md Create milestone from file', + ' cat spec.md | sf headless new-milestone --context - From stdin', + ' sf headless new-milestone --context spec.md --auto Create + auto-execute', + ' sf headless --supervised auto Supervised orchestrator mode', + ' sf headless --answers answers.json auto With pre-supplied answers', + ' sf headless --events agent_end,extension_ui_request auto Filtered event stream', + 
' sf headless query Instant JSON state snapshot', '', 'Exit codes: 0 = success, 1 = error/timeout, 10 = blocked, 11 = cancelled', ].join('\n'), } -// Alias: `gsd wt --help` → same as `gsd worktree --help` +// Alias: `sf wt --help` → same as `sf worktree --help` SUBCOMMAND_HELP['wt'] = SUBCOMMAND_HELP['worktree'] export function printHelp(version: string): void { process.stdout.write(`SF v${version} — Singularity Forge\n\n`) - process.stdout.write('Usage: gsd [options] [message...]\n\n') + process.stdout.write('Usage: sf [options] [message...]\n\n') process.stdout.write('Options:\n') process.stdout.write(' --mode <text|json|rpc|mcp> Output mode (default: interactive)\n') process.stdout.write(' --print, -p Single-shot print mode\n') @@ -196,9 +196,9 @@ export function printHelp(version: string): void { process.stdout.write(' sessions List and resume a past session\n') process.stdout.write(' worktree <cmd> Manage worktrees (list, merge, clean, remove)\n') process.stdout.write(' auto [args] Run auto-mode without TUI (pipeable)\n') - process.stdout.write(' headless [cmd] [args] Run /gsd commands without TUI (default: auto)\n') + process.stdout.write(' headless [cmd] [args] Run /sf commands without TUI (default: auto)\n') process.stdout.write(' graph <subcommand> Manage knowledge graph (build, query, status, diff)\n') - process.stdout.write('\nRun gsd <subcommand> --help for subcommand-specific help.\n') + process.stdout.write('\nRun sf <subcommand> --help for subcommand-specific help.\n') } export function printSubcommandHelp(subcommand: string, version: string): boolean { diff --git a/src/loader.ts b/src/loader.ts index fac20d156..51068d272 100644 --- a/src/loader.ts +++ b/src/loader.ts @@ -77,7 +77,7 @@ import { discoverExtensionEntryPaths } from './extension-discovery.js' import { loadRegistry, readManifestFromEntryPath, isExtensionEnabled } from './extension-registry.js' import { renderLogo } from './logo.js' -// pkg/ is a shim directory: contains gsd's piConfig 
(package.json) and pi's +// pkg/ is a shim directory: contains sf's piConfig (package.json) and pi's // theme assets (dist/modes/interactive/theme/) without a src/ directory. // This allows config.js to: // 1. Read piConfig.name → "sf" (branding) @@ -90,7 +90,7 @@ process.env.PI_PACKAGE_DIR = pkgDir process.env.PI_SKIP_VERSION_CHECK = '1' // SF runs its own update check in cli.ts — suppress pi's process.title = 'sf' -// Print branded banner on first launch (before ~/.gsd/ exists). +// Print branded banner on first launch (before ~/.sf/ exists). // Set SF_FIRST_RUN_BANNER so cli.ts skips the duplicate welcome screen. if (!existsSync(appRoot)) { const cyan = '\x1b[36m' @@ -107,22 +107,22 @@ if (!existsSync(appRoot)) { process.env.SF_FIRST_RUN_BANNER = '1' } -// SF_CODING_AGENT_DIR — tells pi's getAgentDir() to return ~/.gsd/agent/ instead of ~/.gsd/agent/ +// SF_CODING_AGENT_DIR — tells pi's getAgentDir() to return ~/.sf/agent/ instead of ~/.sf/agent/ process.env.SF_CODING_AGENT_DIR = agentDir // SF_PKG_ROOT — absolute path to sf-run package root. Used by deployed extensions // (e.g. auto.ts resume path) to import modules like resource-loader.js that live -// in the package tree, not in the deployed ~/.gsd/agent/ tree. +// in the package tree, not in the deployed ~/.sf/agent/ tree. process.env.SF_PKG_ROOT = gsdRoot -// RTK environment — make ~/.gsd/agent/bin visible to all child-process paths, +// RTK environment — make ~/.sf/agent/bin visible to all child-process paths, // not just the bash tool, and force-disable RTK telemetry for SF-managed use. applyRtkProcessEnv(process.env) -// NODE_PATH — make gsd's own node_modules available to extensions loaded via jiti. +// NODE_PATH — make sf's own node_modules available to extensions loaded via jiti. // Without this, extensions (e.g. browser-tools) can't resolve dependencies like -// `playwright` because jiti resolves modules from pi-coding-agent's location, not gsd's. 
-// Prepending gsd's node_modules to NODE_PATH fixes this for all extensions. +// `playwright` because jiti resolves modules from pi-coding-agent's location, not sf's. +// Prepending sf's node_modules to NODE_PATH fixes this for all extensions. const gsdNodeModules = join(gsdRoot, 'node_modules') process.env.NODE_PATH = [gsdNodeModules, process.env.NODE_PATH] .filter(Boolean) @@ -137,12 +137,12 @@ const { Module } = await import('module'); process.env.SF_VERSION = gsdVersion // SF_BIN_PATH — absolute path to this loader (dist/loader.js), used by patched subagent -// to spawn gsd instead of pi when dispatching workflow tasks. -// Respect a pre-set value so a source-mode wrapper (e.g. bin/gsd-from-source) can +// to spawn sf instead of pi when dispatching workflow tasks. +// Respect a pre-set value so a source-mode wrapper (e.g. bin/sf-from-source) can // advertise the executable shim instead of the .ts loader path (which spawn() can't exec). process.env.SF_BIN_PATH = process.env.SF_BIN_PATH || process.argv[1] -// SF_WORKFLOW_PATH — absolute path to bundled SF-WORKFLOW.md, used by patched gsd extension +// SF_WORKFLOW_PATH — absolute path to bundled SF-WORKFLOW.md, used by patched sf extension // when dispatching workflow prompts. Prefers dist/resources/ (stable, set at build time) // over src/resources/ (live working tree) — see resource-loader.ts for rationale. const distRes = join(gsdRoot, 'dist', 'resources') @@ -152,7 +152,7 @@ process.env.SF_WORKFLOW_PATH = join(resourcesDir, 'SF-WORKFLOW.md') // SF_BUNDLED_EXTENSION_PATHS — dynamically discovered bundled extension entry points. // Uses the shared discoverExtensionEntryPaths() to scan the bundled resources -// directory, then remaps discovered paths to agentDir (~/.gsd/agent/extensions/) +// directory, then remaps discovered paths to agentDir (~/.sf/agent/extensions/) // where initResources() will sync them. 
const bundledExtDir = join(resourcesDir, 'extensions') const agentExtDir = join(agentDir, 'extensions') diff --git a/src/mcp-server.ts b/src/mcp-server.ts index 2abeaa23c..bf87185bf 100644 --- a/src/mcp-server.ts +++ b/src/mcp-server.ts @@ -65,7 +65,7 @@ export async function startMcpServer(options: { } const server = new Server( - { name: 'gsd', version }, + { name: 'sf', version }, { capabilities: { tools: {} } }, ) diff --git a/src/models-resolver.ts b/src/models-resolver.ts index ff1ea4eb4..efcfc8e64 100644 --- a/src/models-resolver.ts +++ b/src/models-resolver.ts @@ -1,10 +1,10 @@ /** * Models.json resolution with fallback to ~/.pi/agent/models.json * - * SF uses ~/.gsd/agent/models.json, but for a smooth migration/development + * SF uses ~/.sf/agent/models.json, but for a smooth migration/development * experience, this module provides resolution logic that: * - * 1. Reads ~/.gsd/agent/models.json if it exists + * 1. Reads ~/.sf/agent/models.json if it exists * 2. Falls back to ~/.pi/agent/models.json if SF file doesn't exist * 3. Merges both files if both exist (SF takes precedence) */ @@ -21,7 +21,7 @@ const PI_MODELS_PATH = join(homedir(), '.pi', 'agent', 'models.json') * Resolve the path to models.json with fallback logic. * * Priority: - * 1. ~/.gsd/agent/models.json (exists) → return this path + * 1. ~/.sf/agent/models.json (exists) → return this path * 2. ~/.pi/agent/models.json (exists) → return this path (fallback) * 3. 
Neither exists → return SF path (will be created) * diff --git a/src/onboarding.ts b/src/onboarding.ts index 006541c1e..ff97d2b66 100644 --- a/src/onboarding.ts +++ b/src/onboarding.ts @@ -278,7 +278,7 @@ export async function runOnboarding(authStorage: AuthStorage): Promise<void> { if (remoteConfigured) { summaryLines.push(`${pc.green('✓')} Remote questions: ${remoteConfigured}`) } else { - summaryLines.push(`${pc.dim('↷')} Remote questions: not configured — use /gsd remote inside SF`) + summaryLines.push(`${pc.dim('↷')} Remote questions: not configured — use /sf remote inside SF`) } if (toolKeyCount > 0) { @@ -795,7 +795,7 @@ async function runRemoteQuestionsStep( { value: 'discord', label: 'Discord', hint: 'receive questions in a Discord channel' }, { value: 'slack', label: 'Slack', hint: 'receive questions in a Slack channel' }, { value: 'telegram', label: 'Telegram', hint: 'receive questions via Telegram bot' }, - { value: 'skip', label: 'Skip for now', hint: 'use /gsd remote inside SF later' }, + { value: 'skip', label: 'Skip for now', hint: 'use /sf remote inside SF later' }, ) const choice = await p.select({ @@ -968,12 +968,12 @@ async function runDiscordChannelStep(p: ClackModule, pc: PicoModule, token: stri const data = await res.json() guilds = Array.isArray(data) ? data : [] } catch { - p.log.warn('Could not fetch Discord servers — configure channel later with /gsd remote discord') + p.log.warn('Could not fetch Discord servers — configure channel later with /sf remote discord') return null } if (guilds.length === 0) { - p.log.warn('Bot is not in any Discord servers — configure channel later with /gsd remote discord') + p.log.warn('Bot is not in any Discord servers — configure channel later with /sf remote discord') return null } @@ -1001,12 +1001,12 @@ async function runDiscordChannelStep(p: ClackModule, pc: PicoModule, token: stri const data = await res.json() channels = Array.isArray(data) ? 
data.filter((ch: any) => ch.type === 0 || ch.type === 5) : [] } catch { - p.log.warn('Could not fetch channels — configure later with /gsd remote discord') + p.log.warn('Could not fetch channels — configure later with /sf remote discord') return null } if (channels.length === 0) { - p.log.warn('No text channels found — configure later with /gsd remote discord') + p.log.warn('No text channels found — configure later with /sf remote discord') return null } diff --git a/src/resource-loader.ts b/src/resource-loader.ts index c01107704..854475f88 100644 --- a/src/resource-loader.ts +++ b/src/resource-loader.ts @@ -1,5 +1,5 @@ import { DefaultResourceLoader, sortExtensionPaths } from '@sf-run/pi-coding-agent' -if (process.env.SF_DEBUG_EXTENSIONS) process.stderr.write("[gsd-debug] resource-loader.ts loaded\n") +if (process.env.SF_DEBUG_EXTENSIONS) process.stderr.write("[sf-debug] resource-loader.ts loaded\n") import { createHash } from 'node:crypto' import { homedir } from 'node:os' import { chmodSync, copyFileSync, cpSync, existsSync, lstatSync, mkdirSync, openSync, closeSync, readFileSync, readlinkSync, readdirSync, rmSync, statSync, symlinkSync, unlinkSync, writeFileSync } from 'node:fs' @@ -12,9 +12,9 @@ import { loadRegistry, readManifestFromEntryPath, isExtensionEnabled, ensureRegi // Resolve resources directory — prefer dist/resources/ (stable, set at build time) // over src/resources/ (live working tree, changes with git branch). // -// Why this matters: with `npm link`, src/resources/ points into the gsd-2 repo's +// Why this matters: with `npm link`, src/resources/ points into the sf-2 repo's // working tree. Switching branches there changes src/resources/ for ALL projects -// that use gsd — causing stale/broken extensions to be synced to ~/.gsd/agent/. +// that use sf — causing stale/broken extensions to be synced to ~/.sf/agent/. 
// dist/resources/ is populated by the build step (`npm run copy-resources`) and // reflects the built state, not the currently checked-out branch. const packageRoot = resolve(dirname(fileURLToPath(import.meta.url)), '..') @@ -285,7 +285,7 @@ function copyDirRecursive(src: string, dest: string): void { * * Native ESM `import()` ignores NODE_PATH — it resolves packages by walking * up the directory tree from the importing file. Extension files synced to - * ~/.gsd/agent/extensions/ have no ancestor node_modules, so imports of + * ~/.sf/agent/extensions/ have no ancestor node_modules, so imports of * @sf-run/* packages fail. The symlink makes Node's standard resolution find * them without requiring every call site to use jiti. * @@ -368,7 +368,7 @@ function reconcileMergedNodeModules( ): void { // Fast path: if already merged for this packageRoot + same directory contents, skip. // The fingerprint includes entry names from both roots so `pnpm add/remove` triggers rebuild. - const marker = join(agentNodeModules, '.gsd-merged') + const marker = join(agentNodeModules, '.sf-merged') const fingerprint = mergedFingerprint(hoisted, internal) try { if (existsSync(marker) && readFileSync(marker, 'utf-8').trim() === fingerprint) return @@ -440,7 +440,7 @@ function mergedFingerprint(hoisted: string, internal: string): string { * 1. Manifest-based (preferred): the manifest records which root files were installed * last time; any that are no longer in the current bundle are deleted. * 2. Known-stale fallback: for upgrades from versions before manifest tracking, - * explicitly delete files known to have been moved (e.g. env-utils.js → gsd/). + * explicitly delete files known to have been moved (e.g. env-utils.js → sf/). */ function pruneRemovedBundledExtensions( manifest: ManagedResourceManifest | null, @@ -501,16 +501,16 @@ function pruneRemovedBundledExtensions( // Always remove known stale files regardless of manifest state. 
// These were installed by pre-manifest versions so they may not appear in // installedExtensionRootFiles even when a manifest exists. - // env-utils.js was moved from extensions/ root → gsd/ in v2.39.x (#1634) + // env-utils.js was moved from extensions/ root → sf/ in v2.39.x (#1634) removeFileIfStale('env-utils.js') } /** - * Syncs all bundled resources to agentDir (~/.gsd/agent/) on every launch. + * Syncs all bundled resources to agentDir (~/.sf/agent/) on every launch. * - * - extensions/ → ~/.gsd/agent/extensions/ (overwrite when version changes) - * - agents/ → ~/.gsd/agent/agents/ (overwrite when version changes) - * - SF-WORKFLOW.md → ~/.gsd/agent/SF-WORKFLOW.md (fallback for env var miss) + * - extensions/ → ~/.sf/agent/extensions/ (overwrite when version changes) + * - agents/ → ~/.sf/agent/agents/ (overwrite when version changes) + * - SF-WORKFLOW.md → ~/.sf/agent/SF-WORKFLOW.md (fallback for env var miss) * * Skills are NOT synced here. They are installed by the user via the * skills.sh CLI (`npx skills add <repo>`) into ~/.agents/skills/ — the @@ -518,10 +518,10 @@ function pruneRemovedBundledExtensions( * * Skips the copy when the managed-resources.json version matches the current * SF version, avoiding ~128ms of synchronous cpSync on every startup. - * After `npm update -g @glittercowboy/gsd`, versions will differ and the + * After `npm update -g @glittercowboy/sf`, versions will differ and the * copy runs once to land the new resources. 
* - * Inspectable: `ls ~/.gsd/agent/extensions/` + * Inspectable: `ls ~/.sf/agent/extensions/` */ export function initResources(agentDir: string): void { mkdirSync(agentDir, { recursive: true }) @@ -537,7 +537,7 @@ export function initResources(agentDir: string): void { pruneRemovedBundledExtensions(manifest, agentDir) pruneStaleSiblingFiles(bundledExtensionsDir, extensionsDir) - // Ensure ~/.gsd/agent/node_modules symlinks to SF's node_modules on EVERY + // Ensure ~/.sf/agent/node_modules symlinks to SF's node_modules on EVERY // launch, not just during resource syncs. A stale/broken symlink makes ALL // extensions fail to resolve @sf-run/* packages, rendering SF non-functional. ensureNodeModulesSymlink(agentDir) @@ -566,7 +566,7 @@ export function initResources(agentDir: string): void { // skills.sh CLI (`npx skills add <repo>`) into ~/.agents/skills/ which // is the industry-standard Agent Skills ecosystem directory. // - // Migration from the legacy ~/.gsd/agent/skills/ directory is handled + // Migration from the legacy ~/.sf/agent/skills/ directory is handled // above the manifest check so it runs on every launch (including retries // after partial copy failures). @@ -589,7 +589,7 @@ export function initResources(agentDir: string): void { /** * One-time migration: copy user-customized skills from the old - * ~/.gsd/agent/skills/ directory into ~/.agents/skills/. + * ~/.sf/agent/skills/ directory into ~/.agents/skills/. * * The migration is conservative: * - Only skill directories containing a SKILL.md are considered. @@ -653,7 +653,7 @@ function migrateSkillsToEcosystemDir(agentDir: string): void { if (isSymlink) { // Recreate the symlink in the ecosystem directory using an absolute // target. Relative symlinks would resolve from the new parent dir - // (~/.agents/skills/) instead of the original (~/.gsd/agent/skills/), + // (~/.agents/skills/) instead of the original (~/.sf/agent/skills/), // pointing to the wrong location. 
const rawTarget = readlinkSync(sourcePath) const absTarget = resolve(dirname(sourcePath), rawTarget) @@ -716,7 +716,7 @@ export function hasStaleCompiledExtensionSiblings(extensionsDir: string, sourceD /** * Constructs a DefaultResourceLoader that loads extensions from both - * ~/.gsd/agent/extensions/ (SF's default) and ~/.pi/agent/extensions/ (pi's default). + * ~/.sf/agent/extensions/ (SF's default) and ~/.pi/agent/extensions/ (pi's default). * This allows users to use extensions from either location. */ // Cache bundled extension keys at module load — avoids re-scanning the extensions diff --git a/src/resources/extensions/aws-auth/index.ts b/src/resources/extensions/aws-auth/index.ts index f649b1e49..113de7512 100644 --- a/src/resources/extensions/aws-auth/index.ts +++ b/src/resources/extensions/aws-auth/index.ts @@ -26,7 +26,7 @@ * * ## Setup * - * Add to ~/.gsd/agent/settings.json (or project-level .gsd/settings.json): + * Add to ~/.sf/agent/settings.json (or project-level .sf/settings.json): * * { "awsAuthRefresh": "aws sso login --profile my-profile" } * @@ -55,10 +55,10 @@ const AWS_AUTH_ERROR_RE = /** * Reads the `awsAuthRefresh` command from settings.json. - * Checks project-level first, then global (~/.gsd/agent/settings.json). + * Checks project-level first, then global (~/.sf/agent/settings.json). 
*/ function getAwsAuthRefreshCommand(): string | undefined { - const configDir = process.env.PI_CONFIG_DIR || ".gsd"; + const configDir = process.env.PI_CONFIG_DIR || ".sf"; const paths = [ join(process.cwd(), configDir, "settings.json"), join(homedir(), configDir, "agent", "settings.json"), diff --git a/src/resources/extensions/bg-shell/utilities.ts b/src/resources/extensions/bg-shell/utilities.ts index 05b8fe654..2f5c6684c 100644 --- a/src/resources/extensions/bg-shell/utilities.ts +++ b/src/resources/extensions/bg-shell/utilities.ts @@ -44,7 +44,7 @@ export function formatTimeAgo(timestamp: number): string { function deriveProjectRootFromAutoWorktree(cachedCwd?: string): string | undefined { if (!cachedCwd) return undefined; - const match = cachedCwd.match(/^(.*?)[\\/]\.gsd[\\/]worktrees[\\/][^\\/]+(?:[\\/].*)?$/); + const match = cachedCwd.match(/^(.*?)[\\/]\.sf[\\/]worktrees[\\/][^\\/]+(?:[\\/].*)?$/); return match?.[1]; } @@ -83,7 +83,7 @@ export function resolveBgShellPersistenceCwd( pathExists: (path: string) => boolean = existsSync, ): string { const resolvedLiveCwd = liveCwd ?? getBgShellLiveCwd(cachedCwd, pathExists); - const cachedIsAutoWorktree = /(?:^|[\\/])\.gsd[\\/]worktrees[\\/]/.test(cachedCwd); + const cachedIsAutoWorktree = /(?:^|[\\/])\.sf[\\/]worktrees[\\/]/.test(cachedCwd); if (!cachedIsAutoWorktree) return cachedCwd; if (cachedCwd === resolvedLiveCwd && pathExists(cachedCwd)) return cachedCwd; if (!pathExists(cachedCwd)) return resolvedLiveCwd; diff --git a/src/resources/extensions/browser-tools/tools/state-persistence.ts b/src/resources/extensions/browser-tools/tools/state-persistence.ts index 243be715e..84df1824c 100644 --- a/src/resources/extensions/browser-tools/tools/state-persistence.ts +++ b/src/resources/extensions/browser-tools/tools/state-persistence.ts @@ -6,7 +6,7 @@ import type { ToolDeps } from "../state.js"; * State persistence tools — save/restore cookies, localStorage, sessionStorage. 
*/ -const STATE_DIR = ".gsd/browser-state"; +const STATE_DIR = ".sf/browser-state"; export function registerStatePersistenceTools(pi: ExtensionAPI, deps: ToolDeps): void { // ------------------------------------------------------------------------- @@ -17,7 +17,7 @@ export function registerStatePersistenceTools(pi: ExtensionAPI, deps: ToolDeps): label: "Browser Save State", description: "Save cookies, localStorage, and sessionStorage to disk so authenticated sessions survive browser restarts. " + - "State files are written to .gsd/browser-state/ and should be gitignored (may contain auth tokens). " + + "State files are written to .sf/browser-state/ and should be gitignored (may contain auth tokens). " + "Never displays secret values in output.", parameters: Type.Object({ name: Type.Optional( diff --git a/src/resources/extensions/browser-tools/tools/visual-diff.ts b/src/resources/extensions/browser-tools/tools/visual-diff.ts index fa2453c0e..d8cfd5d9f 100644 --- a/src/resources/extensions/browser-tools/tools/visual-diff.ts +++ b/src/resources/extensions/browser-tools/tools/visual-diff.ts @@ -6,7 +6,7 @@ import type { ToolDeps } from "../state.js"; * Visual regression diffing — compare current page screenshot against a stored baseline. */ -const BASELINE_DIR = ".gsd/browser-baselines"; +const BASELINE_DIR = ".sf/browser-baselines"; export function registerVisualDiffTools(pi: ExtensionAPI, deps: ToolDeps): void { pi.registerTool({ @@ -16,7 +16,7 @@ export function registerVisualDiffTools(pi: ExtensionAPI, deps: ToolDeps): void "Compare current page screenshot against a stored baseline pixel-by-pixel. " + "Returns similarity score (0–1), diff pixel count, and optionally generates a diff image highlighting changes. " + "On first run with no baseline, saves the current screenshot as the baseline. 
" + - "Baselines are stored in .gsd/browser-baselines/ (gitignored, environment-specific).", + "Baselines are stored in .sf/browser-baselines/ (gitignored, environment-specific).", parameters: Type.Object({ name: Type.Optional( Type.String({ diff --git a/src/resources/extensions/claude-code-cli/partial-builder.ts b/src/resources/extensions/claude-code-cli/partial-builder.ts index e222a36d5..dd94f4fe7 100644 --- a/src/resources/extensions/claude-code-cli/partial-builder.ts +++ b/src/resources/extensions/claude-code-cli/partial-builder.ts @@ -27,7 +27,7 @@ import type { BetaContentBlock, BetaRawMessageStreamEvent, NonNullableUsage } fr * Split a Claude Code MCP tool name (`mcp__<server>__<tool>`) into its parts. * Returns null for non-prefixed names so callers can fall through unchanged. * - * Server names may contain hyphens (`gsd-workflow`); the SDK uses the literal + * Server names may contain hyphens (`sf-workflow`); the SDK uses the literal * `__` delimiter between the server name and the tool name. 
*/ export function parseMcpToolName(name: string): { server: string; tool: string } | null { diff --git a/src/resources/extensions/claude-code-cli/stream-adapter.ts b/src/resources/extensions/claude-code-cli/stream-adapter.ts index dacadd392..f282e2e04 100644 --- a/src/resources/extensions/claude-code-cli/stream-adapter.ts +++ b/src/resources/extensions/claude-code-cli/stream-adapter.ts @@ -21,7 +21,7 @@ import type { ExtensionUIContext } from "@sf-run/pi-coding-agent"; import { EventStream, mapThinkingLevelToEffort, supportsAdaptiveThinking } from "@sf-run/pi-ai"; import { execSync } from "node:child_process"; import { PartialMessageBuilder, ZERO_USAGE, mapUsage } from "./partial-builder.js"; -import { buildWorkflowMcpServers } from "../gsd/workflow-mcp.js"; +import { buildWorkflowMcpServers } from "../sf/workflow-mcp.js"; import { showInterviewRound, type Question, type RoundResult } from "../shared/tui.js"; import type { SDKAssistantMessage, diff --git a/src/resources/extensions/claude-code-cli/tests/partial-builder.test.ts b/src/resources/extensions/claude-code-cli/tests/partial-builder.test.ts index cff2a6830..3086fbd71 100644 --- a/src/resources/extensions/claude-code-cli/tests/partial-builder.test.ts +++ b/src/resources/extensions/claude-code-cli/tests/partial-builder.test.ts @@ -153,7 +153,7 @@ describe("parseMcpToolName", () => { test("splits mcp__<server>__<tool> into parts", () => { assert.deepEqual( - parseMcpToolName("mcp__gsd-workflow__gsd_plan_milestone"), + parseMcpToolName("mcp__sf-workflow__gsd_plan_milestone"), - { server: "gsd-workflow", tool: "gsd_plan_milestone" }, + { server: "sf-workflow", tool: "gsd_plan_milestone" }, ); }); @@ -203,7 +203,7 @@ describe("PartialMessageBuilder — MCP tool name normalization", () => { if (event!.type === "toolcall_start") { const toolCall = (event.partial.content[event.contentIndex] as any); assert.equal(toolCall.name, "gsd_plan_milestone"); - assert.equal(toolCall.mcpServer, "gsd-workflow"); + assert.equal(toolCall.mcpServer, "sf-workflow"); } }); @@ -233,7
describe("PartialMessageBuilder — MCP tool name normalization", () => { const mapped = mapContentBlock(block) as any; assert.equal(mapped.type, "toolCall"); assert.equal(mapped.name, "gsd_task_complete"); - assert.equal(mapped.mcpServer, "gsd-workflow"); + assert.equal(mapped.mcpServer, "sf-workflow"); assert.deepEqual(mapped.arguments, { taskId: "T001" }); }); }); diff --git a/src/resources/extensions/claude-code-cli/tests/stream-adapter.test.ts b/src/resources/extensions/claude-code-cli/tests/stream-adapter.test.ts index 0d2938506..bac08497a 100644 --- a/src/resources/extensions/claude-code-cli/tests/stream-adapter.test.ts +++ b/src/resources/extensions/claude-code-cli/tests/stream-adapter.test.ts @@ -461,19 +461,19 @@ describe("stream-adapter — session persistence (#2859)", () => { }; try { process.env.SF_WORKFLOW_MCP_COMMAND = "node"; - process.env.SF_WORKFLOW_MCP_NAME = "gsd-workflow"; + process.env.SF_WORKFLOW_MCP_NAME = "sf-workflow"; process.env.SF_WORKFLOW_MCP_ARGS = JSON.stringify(["packages/mcp-server/dist/cli.js"]); - process.env.SF_WORKFLOW_MCP_ENV = JSON.stringify({ SF_CLI_PATH: "/tmp/gsd" }); + process.env.SF_WORKFLOW_MCP_ENV = JSON.stringify({ SF_CLI_PATH: "/tmp/sf" }); process.env.SF_WORKFLOW_MCP_CWD = "/tmp/project"; const options = buildSdkOptions("claude-sonnet-4-20250514", "test"); const mcpServers = options.mcpServers as Record<string, any>; - assert.ok(mcpServers?.["gsd-workflow"], "expected gsd-workflow server config"); - const srv = mcpServers["gsd-workflow"]; + assert.ok(mcpServers?.["sf-workflow"], "expected sf-workflow server config"); + const srv = mcpServers["sf-workflow"]; assert.equal(srv.command, "node"); assert.deepEqual(srv.args, ["packages/mcp-server/dist/cli.js"]); assert.equal(srv.cwd, "/tmp/project"); - assert.equal(srv.env.SF_CLI_PATH, "/tmp/gsd"); + assert.equal(srv.env.SF_CLI_PATH, "/tmp/sf"); assert.equal(srv.env.SF_PERSIST_WRITE_GATE_STATE, "1"); assert.equal(srv.env.SF_WORKFLOW_PROJECT_ROOT, "/tmp/project"); 
assert.deepEqual(options.disallowedTools, ["AskUserQuestion"]); @@ -508,7 +508,7 @@ describe("stream-adapter — session persistence (#2859)", () => { process.env.SF_WORKFLOW_MCP_COMMAND = "node"; process.env.SF_WORKFLOW_MCP_NAME = "custom-workflow"; process.env.SF_WORKFLOW_MCP_ARGS = JSON.stringify(["packages/mcp-server/dist/cli.js"]); - process.env.SF_WORKFLOW_MCP_ENV = JSON.stringify({ SF_CLI_PATH: "/tmp/gsd" }); + process.env.SF_WORKFLOW_MCP_ENV = JSON.stringify({ SF_CLI_PATH: "/tmp/sf" }); process.env.SF_WORKFLOW_MCP_CWD = "/tmp/project"; const options = buildSdkOptions("claude-sonnet-4-20250514", "test"); @@ -559,7 +559,7 @@ describe("stream-adapter — session persistence (#2859)", () => { // Either outcome is valid — the key invariant is no crash. const mcpServers = (options as any).mcpServers; if (mcpServers) { - assert.ok(mcpServers["gsd-workflow"], "if present, must be gsd-workflow"); + assert.ok(mcpServers["sf-workflow"], "if present, must be sf-workflow"); assert.deepEqual((options as any).disallowedTools, ["AskUserQuestion"]); } else { assert.deepEqual((options as any).disallowedTools, ["AskUserQuestion"]); @@ -591,7 +591,7 @@ describe("stream-adapter — session persistence (#2859)", () => { delete process.env.SF_WORKFLOW_MCP_ARGS; delete process.env.SF_WORKFLOW_MCP_ENV; delete process.env.SF_WORKFLOW_MCP_CWD; - process.env.SF_CLI_PATH = "/tmp/gsd"; + process.env.SF_CLI_PATH = "/tmp/sf"; const distDir = join(repoDir, "packages", "mcp-server", "dist"); mkdirSync(distDir, { recursive: true }); @@ -601,12 +601,12 @@ describe("stream-adapter — session persistence (#2859)", () => { const options = buildSdkOptions("claude-sonnet-4-20250514", "test"); const mcpServers = options.mcpServers as Record<string, any>; - assert.ok(mcpServers?.["gsd-workflow"], "expected gsd-workflow server config"); - const srv = mcpServers["gsd-workflow"]; + assert.ok(mcpServers?.["sf-workflow"], "expected sf-workflow server config"); + const srv = mcpServers["sf-workflow"]; 
assert.equal(srv.command, process.execPath); assert.deepEqual(srv.args, [realpathSync(resolve(repoDir, "packages", "mcp-server", "dist", "cli.js"))]); assert.equal(srv.cwd, resolvedRepoDir); - assert.equal(srv.env.SF_CLI_PATH, "/tmp/gsd"); + assert.equal(srv.env.SF_CLI_PATH, "/tmp/sf"); assert.equal(srv.env.SF_PERSIST_WRITE_GATE_STATE, "1"); assert.equal(srv.env.SF_WORKFLOW_PROJECT_ROOT, resolvedRepoDir); assert.deepEqual(options.disallowedTools, ["AskUserQuestion"]); @@ -651,7 +651,7 @@ describe("stream-adapter — session persistence (#2859)", () => { describe("stream-adapter — MCP elicitation bridge", () => { const askUserQuestionsRequest = { - serverName: "gsd-workflow", + serverName: "sf-workflow", message: "Please answer the following question(s).", mode: "form" as const, requestedSchema: { @@ -793,7 +793,7 @@ describe("stream-adapter — MCP elicitation bridge", () => { test("parseTextInputElicitation recognizes secure free-text MCP forms", () => { const request = { - serverName: "gsd-workflow", + serverName: "sf-workflow", message: "Enter values for environment variables.", mode: "form" as const, requestedSchema: { @@ -834,7 +834,7 @@ describe("stream-adapter — MCP elicitation bridge", () => { test("parseTextInputElicitation accepts legacy keys schema and skips unsupported fields", () => { const request = { - serverName: "gsd-workflow", + serverName: "sf-workflow", message: "Enter secure values", mode: "form" as const, requestedSchema: { @@ -867,7 +867,7 @@ describe("stream-adapter — MCP elicitation bridge", () => { test("createClaudeCodeElicitationHandler collects secure_env_collect fields through input dialogs", async () => { const secureRequest = { - serverName: "gsd-workflow", + serverName: "sf-workflow", message: "Enter values for environment variables.", mode: "form" as const, requestedSchema: { diff --git a/src/resources/extensions/cmux/index.ts b/src/resources/extensions/cmux/index.ts index 9c6d01819..c335a4419 100644 --- 
a/src/resources/extensions/cmux/index.ts +++ b/src/resources/extensions/cmux/index.ts @@ -2,10 +2,10 @@ import { execFileSync, spawn } from "node:child_process"; import { existsSync } from "node:fs"; import { tmpdir } from "node:os"; import { join } from "node:path"; -import type { GSDPreferences } from "../gsd/preferences.js"; -import type { GSDState, Phase } from "../gsd/types.js"; +import type { SFPreferences } from "../sf/preferences.js"; +import type { SFState, Phase } from "../sf/types.js"; const DEFAULT_SOCKET_PATH = "/tmp/cmux.sock"; -const STATUS_KEY = "gsd"; +const STATUS_KEY = "sf"; const lastSidebarSnapshots = new Map<string, string>(); let cmuxPromptedThisSession = false; let cachedCliAvailability: boolean | null = null; @@ -52,7 +52,7 @@ export function detectCmuxEnvironment( } export function resolveCmuxConfig( - preferences: GSDPreferences | undefined, + preferences: SFPreferences | undefined, env: NodeJS.ProcessEnv = process.env, socketExists: (path: string) => boolean = existsSync, cliAvailable: () => boolean = isCmuxCliAvailable, @@ -71,7 +71,7 @@ export function resolveCmuxConfig( } export function shouldPromptToEnableCmux( - preferences: GSDPreferences | undefined, + preferences: SFPreferences | undefined, env: NodeJS.ProcessEnv = process.env, socketExists: (path: string) => boolean = existsSync, cliAvailable: () => boolean = isCmuxCliAvailable, @@ -113,7 +113,7 @@ export function emitOsc777Notification(title: string, body: string): void { process.stdout.write(`\x1b]777;notify;${safeTitle};${safeBody}\x07`); } -export function buildCmuxStatusLabel(state: GSDState): string { +export function buildCmuxStatusLabel(state: SFState): string { const parts: string[] = []; if (state.activeMilestone) parts.push(state.activeMilestone.id); if (state.activeSlice) parts.push(state.activeSlice.id); @@ -125,7 +125,7 @@ export function buildCmuxStatusLabel(state: GSDState): string { return `${parts.join(" ")} · ${state.phase}`; } -export function 
buildCmuxProgress(state: GSDState): CmuxSidebarProgress | null { +export function buildCmuxProgress(state: SFState): CmuxSidebarProgress | null { const progress = state.progress; if (!progress) return null; @@ -171,7 +171,7 @@ export class CmuxClient { this.config = config; } - static fromPreferences(preferences: GSDPreferences | undefined): CmuxClient { + static fromPreferences(preferences: SFPreferences | undefined): CmuxClient { return new CmuxClient(resolveCmuxConfig(preferences)); } @@ -270,7 +270,7 @@ export class CmuxClient { ])); } - log(message: string, level: CmuxLogLevel = "info", source = "gsd"): void { + log(message: string, level: CmuxLogLevel = "info", source = "sf"): void { if (!this.config.sidebar) return; this.runSync(this.appendWorkspace([ "log", @@ -319,13 +319,13 @@ export class CmuxClient { /** * Create a grid of surfaces for parallel agent execution. * - * Layout strategy (gsd stays in the original surface): - * 1 agent: [gsd | A] - * 2 agents: [gsd | A] + * Layout strategy (sf stays in the original surface): + * 1 agent: [sf | A] + * 2 agents: [sf | A] * [ | B] - * 3 agents: [gsd | A] + * 3 agents: [sf | A] * [ C | B] - * 4 agents: [gsd | A] + * 4 agents: [sf | A] * [ C | B] (D splits from B downward) * [ | D] * @@ -335,7 +335,7 @@ export class CmuxClient { if (!this.config.splits || count <= 0) return []; const surfaces: string[] = []; - // First split: create right column from the gsd surface + // First split: create right column from the sf surface const rightCol = await this.createSplitFrom(this.config.surfaceId, "right"); if (!rightCol) return []; surfaces.push(rightCol); @@ -347,7 +347,7 @@ export class CmuxClient { surfaces.push(bottomRight); if (count === 2) return surfaces; - // Third split: split gsd surface down → bottom-left + // Third split: split sf surface down → bottom-left const bottomLeft = await this.createSplitFrom(this.config.surfaceId, "down"); if (!bottomLeft) return surfaces; surfaces.push(bottomLeft); @@ -372,7 
+372,7 @@ export class CmuxClient { } } -export function syncCmuxSidebar(preferences: GSDPreferences | undefined, state: GSDState): void { +export function syncCmuxSidebar(preferences: SFPreferences | undefined, state: SFState): void { const client = CmuxClient.fromPreferences(preferences); const config = client.getConfig(); if (!config.sidebar) return; @@ -388,7 +388,7 @@ export function syncCmuxSidebar(preferences: GSDPreferences | undefined, state: lastSidebarSnapshots.set(key, snapshot); } -export function clearCmuxSidebar(preferences: GSDPreferences | undefined): void { +export function clearCmuxSidebar(preferences: SFPreferences | undefined): void { const config = resolveCmuxConfig(preferences); if (!config.available || !config.cliAvailable) return; const client = new CmuxClient({ ...config, enabled: true, sidebar: true }); @@ -399,7 +399,7 @@ export function clearCmuxSidebar(preferences: GSDPreferences | undefined): void } export function logCmuxEvent( - preferences: GSDPreferences | undefined, + preferences: SFPreferences | undefined, message: string, level: CmuxLogLevel = "info", ): void { diff --git a/src/resources/extensions/genai-proxy/extension-manifest.json b/src/resources/extensions/genai-proxy/extension-manifest.json new file mode 100644 index 000000000..bb207e6b6 --- /dev/null +++ b/src/resources/extensions/genai-proxy/extension-manifest.json @@ -0,0 +1,13 @@ +{ + "id": "genai-proxy", + "name": "GenAI Proxy", + "version": "1.0.0", + "description": "Exposes SF's AI engine as a standard Google GenAI / OpenAI compatible endpoint.", + "tier": "community", + "requires": { + "platform": "all" + }, + "provides": { + "commands": ["/genai-proxy"] + } +} diff --git a/src/resources/extensions/genai-proxy/index.ts b/src/resources/extensions/genai-proxy/index.ts new file mode 100644 index 000000000..4eea34403 --- /dev/null +++ b/src/resources/extensions/genai-proxy/index.ts @@ -0,0 +1,14 @@ +import type { ExtensionAPI } from "@sf-run/pi-coding-agent"; +import { 
registerProxyCommands } from "./proxy-command.js"; + +/** + * GenAI Proxy Extension + * + * Exposes Singularity Forge's AI engine (pi-ai) as a standard Google GenAI + * compatible endpoint. This allows you to use your OAuth-authenticated + * Google models with any tool or SDK. + */ +export default function genaiProxy(pi: ExtensionAPI) { + // Register /genai-proxy commands + registerProxyCommands(pi); +} diff --git a/src/resources/extensions/genai-proxy/proxy-command.ts b/src/resources/extensions/genai-proxy/proxy-command.ts new file mode 100644 index 000000000..15e382bbc --- /dev/null +++ b/src/resources/extensions/genai-proxy/proxy-command.ts @@ -0,0 +1,50 @@ +import type { ExtensionAPI } from "@sf-run/pi-coding-agent"; +import * as server from "./proxy-server.js"; + +export function registerProxyCommands(pi: ExtensionAPI): void { + pi.registerCommand("genai-proxy", { + description: "Manage GenAI Proxy server — start | stop | status", + async handler(args, ctx) { + const parts = (args ?? 
"").trim().split(/\s+/); + const subcommand = parts[0] || "status"; + + switch (subcommand) { + case "start": + const port = parseInt(parts[1], 10) || 3000; + if (server.isRunning()) { + ctx.ui.notify("GenAI Proxy is already running.", "info"); + return; + } + await server.startProxy(port, (msg) => { + if (ctx.hasUI) { + ctx.ui.notify(msg, "info"); + } else { + process.stderr.write(`[genai-proxy] ${msg}\n`); + } + }); + ctx.ui.notify(`GenAI Proxy started on port ${port}`, "success"); + break; + + case "stop": + if (!server.isRunning()) { + ctx.ui.notify("GenAI Proxy is not running.", "warning"); + return; + } + server.stopProxy(); + ctx.ui.notify("GenAI Proxy stopped.", "success"); + break; + + case "status": + if (server.isRunning()) { + ctx.ui.notify("GenAI Proxy is running.", "info"); + } else { + ctx.ui.notify("GenAI Proxy is not running.", "info"); + } + break; + + default: + ctx.ui.notify("Usage: /genai-proxy start [port] | stop | status", "warning"); + } + }, + }); +} diff --git a/src/resources/extensions/genai-proxy/proxy-server.ts b/src/resources/extensions/genai-proxy/proxy-server.ts new file mode 100644 index 000000000..eb323d2ac --- /dev/null +++ b/src/resources/extensions/genai-proxy/proxy-server.ts @@ -0,0 +1,282 @@ +import express from "express"; +import type { Server } from "http"; +import { + streamGoogleGeminiCli, + type Context, + type GoogleGeminiCliOptions, + type Message, + type Model, + getModels, +} from "@sf-run/pi-ai"; + +let server: Server | null = null; +let oauth: { token: string; projectId: string } | null = null; + +type GoogleGeminiCliModel = Model<"google-gemini-cli">; +type JsonRecord = Record<string, unknown>; +type GooglePart = { text?: string }; +type GoogleContent = { role?: string; parts?: GooglePart[] }; +type OpenAiMessage = { + role?: string; + content?: string | Array<{ type?: string; text?: string }>; +}; + +function buildGeminiCliModel(modelId: string): GoogleGeminiCliModel { + return { + id: modelId, + api: 
"google-gemini-cli", + provider: "google", + name: modelId, + baseUrl: "", + envVar: "", + input: "text", + reasoning: false, + promptCache: false, + maxOutputTokens: 0, + } as unknown as GoogleGeminiCliModel; +} + +function normalizeGoogleContents(contents: unknown): Message[] { + if (!Array.isArray(contents)) return []; + return contents.map((content) => { + const entry = content as GoogleContent; + const role = entry.role === "user" ? "user" : "assistant"; + const text = Array.isArray(entry.parts) + ? entry.parts.map((part) => part.text ?? "").join("") + : ""; + return { + role, + content: [{ type: "text", text }], + } as Message; + }); +} + +function normalizeOpenAiMessages(messages: unknown): { + systemPrompt: string | undefined; + messages: Message[]; +} { + if (!Array.isArray(messages)) return { systemPrompt: undefined, messages: [] }; + + const typedMessages = messages as OpenAiMessage[]; + const systemMessage = typedMessages.find((message) => message.role === "system"); + const nonSystemMessages = typedMessages.filter((message) => message.role !== "system"); + + return { + systemPrompt: typeof systemMessage?.content === "string" ? systemMessage.content : undefined, + messages: nonSystemMessages.map((message) => { + const text = typeof message.content === "string" + ? message.content + : Array.isArray(message.content) + ? message.content.map((part) => part.text ?? "").join("") + : ""; + return { + role: message.role === "user" ? "user" : "assistant", + content: [{ type: "text", text }], + } as Message; + }), + }; +} + +function buildOptions( + generationConfig: JsonRecord | undefined, + oauthState: { token: string; projectId: string }, +): GoogleGeminiCliOptions { + return { + apiKey: JSON.stringify(oauthState), + temperature: typeof generationConfig?.temperature === "number" ? generationConfig.temperature : undefined, + maxTokens: typeof generationConfig?.maxOutputTokens === "number" ? 
generationConfig.maxOutputTokens : undefined, + }; +} + +export function isRunning(): boolean { + return server !== null; +} + +export async function startProxy(port: number, onLog: (msg: string) => void): Promise<void> { + if (server) return; + + const app = express(); + app.use(express.json()); + + app.get("/login", async (_req, res) => { + try { + const message = + "OAuth login is not available from the extension package boundary yet. " + + "Provide cached credentials through the hosting environment instead."; + onLog(message); + res.status(501).send(message); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + onLog(`Login failed: ${message}`); + res.status(500).send(message); + } + }); + + // 2. Models listing endpoints + app.get(["/v1/models", "/v1beta/models"], (req, res) => { + const providers = ["google", "google-gemini-cli", "google-vertex"] as const; + const allModels = providers.flatMap((p) => getModels(p as any)); + + const formatted = allModels.map((m) => ({ + id: m.id, + object: "model", + created: 1677610602, + owned_by: "google", + name: m.name, + capabilities: m.capabilities, + })); + + if (req.path.startsWith("/v1beta")) { + res.json({ models: formatted }); + } else { + res.json({ data: formatted, object: "list" }); + } + }); + + app.post("/v1beta/models/:modelPath", async (req, res) => { + if (!oauth) { + return res.status(401).json({ error: "Not authenticated. Visit /login first." }); + } + + const params = req.params as Record<string, string | undefined>; + const modelPath = params.modelPath ?? 
""; + const modelId = modelPath.replace(/:streamGenerateContent$/, ""); + const body = req.body as JsonRecord; + const contents = body.contents; + const systemInstruction = body.systemInstruction as JsonRecord | undefined; + const generationConfig = body.generationConfig as JsonRecord | undefined; + + try { + const model = buildGeminiCliModel(modelId); + const context: Context = { + messages: normalizeGoogleContents(contents), + systemPrompt: typeof systemInstruction?.parts === "object" + ? ((systemInstruction.parts as GooglePart[] | undefined)?.[0]?.text) + : undefined, + }; + const options = buildOptions(generationConfig, oauth); + const stream = streamGoogleGeminiCli(model, context, options); + + res.setHeader("Content-Type", "application/json"); + for await (const event of stream) { + if (event.type === "text_delta") { + res.write(JSON.stringify({ + candidates: [{ content: { parts: [{ text: event.delta }] } }], + }) + "\n"); + } else if (event.type === "error") { + onLog(`Stream error: ${event.error.errorMessage}`); + if (!res.headersSent) { + res.status(500).json({ error: event.error.errorMessage }); + } + return; + } + } + res.end(); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + onLog(`Proxy error: ${message}`); + res.status(500).json({ error: message }); + } + }); + + app.post("/v1/chat/completions", async (req, res) => { + if (!oauth) { + return res.status(401).json({ error: "Not authenticated. Visit /login first." }); + } + + const body = req.body as JsonRecord; + const modelId = typeof body.model === "string" ? body.model : "gemini-2.5-flash"; + const isStreaming = body.stream === true; + const temperature = typeof body.temperature === "number" ? body.temperature : undefined; + const maxTokens = typeof body.max_tokens === "number" ? 
body.max_tokens : undefined; + const normalized = normalizeOpenAiMessages(body.messages); + + try { + const model = buildGeminiCliModel(modelId); + const context: Context = { + messages: normalized.messages, + systemPrompt: normalized.systemPrompt, + }; + const options: GoogleGeminiCliOptions = { + apiKey: JSON.stringify(oauth), + temperature, + maxTokens, + }; + const stream = streamGoogleGeminiCli(model, context, options); + + if (isStreaming) { + res.setHeader("Content-Type", "text/event-stream"); + res.setHeader("Cache-Control", "no-cache"); + res.setHeader("Connection", "keep-alive"); + + for await (const event of stream) { + if (event.type === "text_delta") { + const chunk = { + id: `chatcmpl-${Date.now()}`, + object: "chat.completion.chunk", + created: Math.floor(Date.now() / 1000), + model: modelId, + choices: [{ + index: 0, + delta: { content: event.delta }, + finish_reason: null, + }], + }; + res.write(`data: ${JSON.stringify(chunk)}\n\n`); + } else if (event.type === "error") { + onLog(`OpenAI stream error: ${event.error.errorMessage}`); + if (!res.headersSent) { + res.status(500).json({ error: event.error.errorMessage }); + } + return; + } + } + + res.write("data: [DONE]\n\n"); + res.end(); + return; + } + + let fullContent = ""; + for await (const event of stream) { + if (event.type === "text_delta") { + fullContent += event.delta; + } else if (event.type === "error") { + onLog(`OpenAI stream error: ${event.error.errorMessage}`); + res.status(500).json({ error: event.error.errorMessage }); + return; + } + } + + res.json({ + id: `chatcmpl-${Date.now()}`, + object: "chat.completion", + created: Math.floor(Date.now() / 1000), + model: modelId, + choices: [{ + index: 0, + message: { role: "assistant", content: fullContent }, + finish_reason: "stop", + }], + }); + } catch (err) { + const message = err instanceof Error ? 
err.message : String(err); + onLog(`OpenAI proxy error: ${message}`); + res.status(500).json({ error: message }); + } + }); + + await new Promise<void>((resolve) => { + server = app.listen(port, () => { + onLog(`GenAI Proxy Server running on http://localhost:${port}`); + resolve(); + }); + }); +} + +export function stopProxy(): void { + if (server) { + server.close(); + server = null; + } +} diff --git a/src/resources/extensions/get-secrets-from-user.ts b/src/resources/extensions/get-secrets-from-user.ts index dd7085d04..9687e9212 100644 --- a/src/resources/extensions/get-secrets-from-user.ts +++ b/src/resources/extensions/get-secrets-from-user.ts @@ -15,9 +15,9 @@ import { Editor, type EditorTheme, Key, matchesKey, Text, truncateToWidth, wrapT import { Type } from "@sinclair/typebox"; import { makeUI } from "./shared/tui.js"; import { maskEditorLine, type ProgressStatus } from "./shared/mod.js"; -import { parseSecretsManifest, formatSecretsManifest } from "./gsd/files.js"; -import { resolveMilestoneFile } from "./gsd/paths.js"; -import type { SecretsManifestEntry } from "./gsd/types.js"; +import { parseSecretsManifest, formatSecretsManifest } from "./sf/files.js"; +import { resolveMilestoneFile } from "./sf/paths.js"; +import type { SecretsManifestEntry } from "./sf/types.js"; // ─── Types ──────────────────────────────────────────────────────────────────── @@ -88,7 +88,7 @@ async function writeEnvKey(filePath: string, key: string, value: string): Promis // Re-export from env-utils.ts so existing consumers still work. // The implementation lives in env-utils.ts to avoid pulling @sf-run/pi-tui // into modules that only need env-checking (e.g. files.ts during reports). 
-import { checkExistingEnvKeys } from "./gsd/env-utils.js"; +import { checkExistingEnvKeys } from "./sf/env-utils.js"; export { checkExistingEnvKeys }; /** diff --git a/src/resources/extensions/github-sync/mapping.ts b/src/resources/extensions/github-sync/mapping.ts index 594785f87..647192412 100644 --- a/src/resources/extensions/github-sync/mapping.ts +++ b/src/resources/extensions/github-sync/mapping.ts @@ -1,20 +1,20 @@ /** * Persistence layer for the GitHub sync mapping. * - * The mapping lives at `.gsd/github-sync.json` and tracks which SF + * The mapping lives at `.sf/github-sync.json` and tracks which SF * entities have been synced to which GitHub entities (issues, PRs, * milestones) along with their numbers and sync timestamps. */ import { existsSync, readFileSync } from "node:fs"; import { join } from "node:path"; -import { atomicWriteSync } from "../gsd/atomic-write.js"; +import { atomicWriteSync } from "../sf/atomic-write.js"; import type { SyncMapping, MilestoneSyncRecord, SliceSyncRecord, SyncEntityRecord } from "./types.js"; const MAPPING_FILENAME = "github-sync.json"; function mappingPath(basePath: string): string { - return join(basePath, ".gsd", MAPPING_FILENAME); + return join(basePath, ".sf", MAPPING_FILENAME); } // ─── Load / Save ──────────────────────────────────────────────────────────── diff --git a/src/resources/extensions/github-sync/sync.ts b/src/resources/extensions/github-sync/sync.ts index ecf676215..3384a91b2 100644 --- a/src/resources/extensions/github-sync/sync.ts +++ b/src/resources/extensions/github-sync/sync.ts @@ -10,15 +10,15 @@ import { existsSync, readdirSync } from "node:fs"; import { join } from "node:path"; -import { loadFile, parseSummary } from "../gsd/files.js"; -import { parseRoadmap, parsePlan } from "../gsd/parsers-legacy.js"; +import { loadFile, parseSummary } from "../sf/files.js"; +import { parseRoadmap, parsePlan } from "../sf/parsers-legacy.js"; import { resolveMilestoneFile, resolveSliceFile, resolveTaskFile, 
-} from "../gsd/paths.js"; -import { debugLog } from "../gsd/debug-logger.js"; -import { loadEffectiveGSDPreferences } from "../gsd/preferences.js"; +} from "../sf/paths.js"; +import { debugLog } from "../sf/debug-logger.js"; +import { loadEffectiveSFPreferences } from "../sf/preferences.js"; import type { GitHubSyncConfig, SyncMapping } from "./types.js"; import { @@ -442,7 +442,7 @@ async function syncMilestoneComplete( // ─── Bootstrap ────────────────────────────────────────────────────────────── /** - * Walk the `.gsd/milestones/` tree and create GitHub entities for any + * Walk the `.sf/milestones/` tree and create GitHub entities for any * that are missing from the sync mapping. Safe to run multiple times. */ export async function bootstrapSync(basePath: string): Promise<{ @@ -462,7 +462,7 @@ export async function bootstrapSync(basePath: string): Promise<{ const taskCountBefore = Object.keys(mapping.tasks).length; const counts = { milestones: 0, slices: 0, tasks: 0 }; - const milestonesDir = join(basePath, ".gsd", "milestones"); + const milestonesDir = join(basePath, ".sf", "milestones"); if (!existsSync(milestonesDir)) return counts; const milestoneIds = readdirSync(milestonesDir, { withFileTypes: true }) @@ -505,7 +505,7 @@ let _cachedConfig: GitHubSyncConfig | null | undefined; function loadGitHubSyncConfig(_basePath: string): GitHubSyncConfig | null { if (_cachedConfig !== undefined) return _cachedConfig; try { - const prefs = loadEffectiveGSDPreferences(); + const prefs = loadEffectiveSFPreferences(); const github = (prefs?.preferences as Record<string, unknown>)?.github; if (!github || typeof github !== "object") { _cachedConfig = null; diff --git a/src/resources/extensions/github-sync/tests/commit-linking.test.ts b/src/resources/extensions/github-sync/tests/commit-linking.test.ts index 14401d1ae..1c8b75927 100644 --- a/src/resources/extensions/github-sync/tests/commit-linking.test.ts +++ 
b/src/resources/extensions/github-sync/tests/commit-linking.test.ts @@ -1,6 +1,6 @@ import { describe, it } from "node:test"; import assert from "node:assert/strict"; -import { buildTaskCommitMessage } from "../../gsd/git-service.ts"; +import { buildTaskCommitMessage } from "../../sf/git-service.ts"; describe("commit linking", () => { it("appends Resolves #N when issueNumber is set", () => { diff --git a/src/resources/extensions/github-sync/tests/mapping.test.ts b/src/resources/extensions/github-sync/tests/mapping.test.ts index cb467aeaa..9d5325173 100644 --- a/src/resources/extensions/github-sync/tests/mapping.test.ts +++ b/src/resources/extensions/github-sync/tests/mapping.test.ts @@ -21,8 +21,8 @@ describe("mapping", () => { let tmpDir: string; beforeEach(() => { - tmpDir = mkdtempSync(join(tmpdir(), "gsd-sync-test-")); - mkdirSync(join(tmpDir, ".gsd"), { recursive: true }); + tmpDir = mkdtempSync(join(tmpdir(), "sf-sync-test-")); + mkdirSync(join(tmpDir, ".sf"), { recursive: true }); }); afterEach(() => { diff --git a/src/resources/extensions/github-sync/types.ts b/src/resources/extensions/github-sync/types.ts index fe541daa1..eaaf00ee6 100644 --- a/src/resources/extensions/github-sync/types.ts +++ b/src/resources/extensions/github-sync/types.ts @@ -2,7 +2,7 @@ * Type definitions for the GitHub Sync extension. * * Config shape (stored in SF preferences under `github` key) and - * sync mapping records (stored in `.gsd/github-sync.json`). + * sync mapping records (stored in `.sf/github-sync.json`). 
*/ // ─── Configuration ────────────────────────────────────────────────────────── diff --git a/src/resources/extensions/mcp-client/auth.ts b/src/resources/extensions/mcp-client/auth.ts index 52a3f86c8..dee585d77 100644 --- a/src/resources/extensions/mcp-client/auth.ts +++ b/src/resources/extensions/mcp-client/auth.ts @@ -75,7 +75,7 @@ function createCliOAuthProvider(config: NonNullable<McpHttpOAuthConfig["oauth"]> get clientMetadata() { return { redirect_uris: [config.redirectUrl ?? "http://localhost:0/callback"], - client_name: "gsd", + client_name: "sf", ...(config.scopes ? { scope: config.scopes.join(" ") } : {}), }; }, diff --git a/src/resources/extensions/mcp-client/index.ts b/src/resources/extensions/mcp-client/index.ts index dce11cc8d..f995e7563 100644 --- a/src/resources/extensions/mcp-client/index.ts +++ b/src/resources/extensions/mcp-client/index.ts @@ -2,7 +2,7 @@ * MCP Client Extension — Native MCP server integration for pi * * Provides on-demand access to MCP servers configured in project files - * (.mcp.json, .gsd/mcp.json) using the @modelcontextprotocol/sdk Client + * (.mcp.json, .sf/mcp.json) using the @modelcontextprotocol/sdk Client * directly — no external CLI dependency required. 
* * Three tools: @@ -68,7 +68,7 @@ function readConfigs(): McpServerConfig[] { const seen = new Set<string>(); const configPaths = [ join(process.cwd(), ".mcp.json"), - join(process.cwd(), ".gsd", "mcp.json"), + join(process.cwd(), ".sf", "mcp.json"), ]; for (const configPath of configPaths) { @@ -154,7 +154,7 @@ async function getOrConnect(name: string, signal?: AbortSignal): Promise<Client> const existing = connections.get(config.name); if (existing) return existing.client; - const client = new Client({ name: "gsd", version: "1.0.0" }); + const client = new Client({ name: "sf", version: "1.0.0" }); let transport: StdioClientTransport | StreamableHTTPClientTransport; if (config.transport === "stdio" && config.command) { @@ -200,7 +200,7 @@ async function closeAll(): Promise<void> { // ─── Formatters ─────────────────────────────────────────────────────────────── function formatServerList(servers: McpServerConfig[]): string { - if (servers.length === 0) return "No MCP servers configured. Add servers to .mcp.json or .gsd/mcp.json."; + if (servers.length === 0) return "No MCP servers configured. Add servers to .mcp.json or .sf/mcp.json."; const lines: string[] = [`${servers.length} MCP servers configured:\n`]; @@ -234,7 +234,7 @@ function formatToolList(serverName: string, tools: McpToolSchema[]): string { return lines.join("\n"); } -// ─── Status helper (consumed by /gsd mcp) ───────────────────────────────────── +// ─── Status helper (consumed by /sf mcp) ───────────────────────────────────── /** * Return the live connection status for a named MCP server. @@ -263,7 +263,7 @@ export default function (pi: ExtensionAPI) { name: "mcp_servers", label: "MCP Servers", description: - "List all available MCP servers configured in project files (.mcp.json, .gsd/mcp.json). " + + "List all available MCP servers configured in project files (.mcp.json, .sf/mcp.json). " + "Shows server names, transport type, and connection status. 
Use mcp_discover to get full tool schemas for a server.", promptSnippet: "List available MCP servers from project configuration", diff --git a/src/resources/extensions/remote-questions/config.ts b/src/resources/extensions/remote-questions/config.ts index 404cdc868..55cd16fb9 100644 --- a/src/resources/extensions/remote-questions/config.ts +++ b/src/resources/extensions/remote-questions/config.ts @@ -3,7 +3,7 @@ */ import { AuthStorage } from "@sf-run/pi-coding-agent"; -import { loadEffectiveGSDPreferences, type RemoteQuestionsConfig } from "../gsd/preferences.js"; +import { loadEffectiveSFPreferences, type RemoteQuestionsConfig } from "../sf/preferences.js"; import type { RemoteChannel } from "./types.js"; export interface ResolvedConfig { @@ -44,7 +44,7 @@ const AUTH_PROVIDER_ENV_MAP: Record<string, string> = { /** * Populate remote channel env vars from auth.json when they are not already * set in the environment. Called before every config resolution so that tokens - * saved via `/gsd remote discord` (or `/gsd keys add discord_bot`) survive + * saved via `/sf remote discord` (or `/sf keys add discord_bot`) survive * process restarts without requiring the user to export env vars manually. * * Silently no-ops if auth.json is absent, unreadable, or malformed. 
@@ -76,7 +76,7 @@ function hydrateRemoteTokensFromAuth(): void { export function resolveRemoteConfig(): ResolvedConfig | null { hydrateRemoteTokensFromAuth(); - const prefs = loadEffectiveGSDPreferences(); + const prefs = loadEffectiveSFPreferences(); const rq: RemoteQuestionsConfig | undefined = prefs?.preferences.remote_questions; if (!rq || !rq.channel || !rq.channel_id) return null; if (rq.channel !== "slack" && rq.channel !== "discord" && rq.channel !== "telegram") return null; @@ -101,7 +101,7 @@ export function resolveRemoteConfig(): ResolvedConfig | null { export function getRemoteConfigStatus(): string { hydrateRemoteTokensFromAuth(); - const prefs = loadEffectiveGSDPreferences(); + const prefs = loadEffectiveSFPreferences(); const rq: RemoteQuestionsConfig | undefined = prefs?.preferences.remote_questions; if (!rq || !rq.channel || !rq.channel_id) return "Remote questions: not configured"; if (rq.channel !== "slack" && rq.channel !== "discord" && rq.channel !== "telegram") return `Remote questions: unknown channel type \"${rq.channel}\"`; diff --git a/src/resources/extensions/remote-questions/remote-command.ts b/src/resources/extensions/remote-questions/remote-command.ts index 6f8a0415b..827fc4e3e 100644 --- a/src/resources/extensions/remote-questions/remote-command.ts +++ b/src/resources/extensions/remote-questions/remote-command.ts @@ -1,5 +1,5 @@ /** - * Remote Questions — /gsd remote command + * Remote Questions — /sf remote command */ import type { ExtensionAPI, ExtensionCommandContext } from "@sf-run/pi-coding-agent"; @@ -7,7 +7,7 @@ import { AuthStorage } from "@sf-run/pi-coding-agent"; import { Editor, type EditorTheme, Key, matchesKey, truncateToWidth } from "@sf-run/pi-tui"; import { existsSync, readFileSync, writeFileSync, mkdirSync } from "node:fs"; import { dirname, join } from "node:path"; -import { getGlobalGSDPreferencesPath, loadEffectiveGSDPreferences } from "../gsd/preferences.js"; +import { getGlobalSFPreferencesPath, 
loadEffectiveSFPreferences } from "../sf/preferences.js"; import { getRemoteConfigStatus, isValidChannelId, resolveRemoteConfig } from "./config.js"; import { maskEditorLine, sanitizeError } from "../shared/mod.js"; import { getLatestPromptSummary } from "./status.js"; @@ -202,7 +202,7 @@ async function handleRemoteStatus(ctx: ExtensionCommandContext): Promise<void> { } async function handleDisconnect(ctx: ExtensionCommandContext): Promise<void> { - const prefs = loadEffectiveGSDPreferences(); + const prefs = loadEffectiveSFPreferences(); const channel = prefs?.preferences.remote_questions?.channel; if (!channel) return void ctx.ui.notify("No remote channel configured — nothing to disconnect.", "info"); @@ -225,20 +225,20 @@ async function handleRemoteMenu(ctx: ExtensionCommandContext): Promise<void> { latestPrompt ? ` Last prompt: ${latestPrompt.id} (${latestPrompt.status})` : " No remote prompts recorded yet", "", "Commands:", - " /gsd remote status", - " /gsd remote disconnect", - " /gsd remote slack", - " /gsd remote discord", - " /gsd remote telegram", + " /sf remote status", + " /sf remote disconnect", + " /sf remote slack", + " /sf remote discord", + " /sf remote telegram", ] : [ "No remote question channel configured.", "", "Commands:", - " /gsd remote slack", - " /gsd remote discord", - " /gsd remote telegram", - " /gsd remote status", + " /sf remote slack", + " /sf remote discord", + " /sf remote telegram", + " /sf remote status", ]; ctx.ui.notify(lines.join("\n"), "info"); @@ -300,7 +300,7 @@ async function promptSlackChannelId(ctx: ExtensionCommandContext): Promise<strin } function getAuthStorage(): AuthStorage { - const authPath = join(process.env.HOME ?? "", ".gsd", "agent", "auth.json"); + const authPath = join(process.env.HOME ?? 
"", ".sf", "agent", "auth.json"); mkdirSync(dirname(authPath), { recursive: true }); return AuthStorage.create(authPath); } @@ -316,7 +316,7 @@ function removeProviderToken(provider: string): void { } export function saveRemoteQuestionsConfig(channel: "slack" | "discord" | "telegram", channelId: string): void { - const prefsPath = getGlobalGSDPreferencesPath(); + const prefsPath = getGlobalSFPreferencesPath(); const block = [ "remote_questions:", ` channel: ${channel}`, @@ -343,7 +343,7 @@ export function saveRemoteQuestionsConfig(channel: "slack" | "discord" | "telegr } function removeRemoteQuestionsConfig(): void { - const prefsPath = getGlobalGSDPreferencesPath(); + const prefsPath = getGlobalSFPreferencesPath(); if (!existsSync(prefsPath)) return; const content = readFileSync(prefsPath, "utf-8"); const fmMatch = content.match(/^---\n([\s\S]*?)\n---/); diff --git a/src/resources/extensions/remote-questions/status.ts b/src/resources/extensions/remote-questions/status.ts index d9437cfad..704a2ec88 100644 --- a/src/resources/extensions/remote-questions/status.ts +++ b/src/resources/extensions/remote-questions/status.ts @@ -8,7 +8,7 @@ import { homedir } from "node:os"; import { readPromptRecord } from "./store.js"; function getGsdHome(): string { - return process.env.SF_HOME || join(homedir(), ".gsd"); + return process.env.SF_HOME || join(homedir(), ".sf"); } export interface LatestPromptSummary { diff --git a/src/resources/extensions/remote-questions/store.ts b/src/resources/extensions/remote-questions/store.ts index f71af1b2d..ac401528d 100644 --- a/src/resources/extensions/remote-questions/store.ts +++ b/src/resources/extensions/remote-questions/store.ts @@ -8,7 +8,7 @@ import { homedir } from "node:os"; import type { RemotePrompt, RemotePromptRecord, RemotePromptRef, RemoteAnswer, RemotePromptStatus } from "./types.js"; function getGsdHome(): string { - return process.env.SF_HOME || join(homedir(), ".gsd"); + return process.env.SF_HOME || join(homedir(), 
".sf"); } function runtimeDir(): string { diff --git a/src/resources/extensions/search-the-web/native-search.ts b/src/resources/extensions/search-the-web/native-search.ts index 81bf5169a..709065306 100644 --- a/src/resources/extensions/search-the-web/native-search.ts +++ b/src/resources/extensions/search-the-web/native-search.ts @@ -5,7 +5,7 @@ * the heavy tool-registration modules. */ -import { resolveSearchProviderFromPreferences } from "../gsd/preferences.js"; +import { resolveSearchProviderFromPreferences } from "../sf/preferences.js"; /** Tool names for the Brave-backed custom search tools */ export const BRAVE_TOOL_NAMES = ["search-the-web", "search_and_read"]; diff --git a/src/resources/extensions/search-the-web/provider.ts b/src/resources/extensions/search-the-web/provider.ts index 18251da1f..f1d0416a3 100644 --- a/src/resources/extensions/search-the-web/provider.ts +++ b/src/resources/extensions/search-the-web/provider.ts @@ -13,12 +13,12 @@ import { AuthStorage } from '@sf-run/pi-coding-agent' import { homedir } from 'os' import { join } from 'path' -import { resolveSearchProviderFromPreferences } from '../gsd/preferences.js' +import { resolveSearchProviderFromPreferences } from '../sf/preferences.js' // Compute authFilePath locally instead of importing from app-paths.ts, -// because extensions are copied to ~/.gsd/agent/extensions/ at runtime +// because extensions are copied to ~/.sf/agent/extensions/ at runtime // where the relative import '../../../app-paths.ts' doesn't resolve. 
-const gsdHome = process.env.SF_HOME || join(homedir(), '.gsd') +const gsdHome = process.env.SF_HOME || join(homedir(), '.sf') const authFilePath = join(gsdHome, 'agent', 'auth.json') export type SearchProvider = 'tavily' | 'brave' | 'ollama' | 'combosearch' diff --git a/src/resources/extensions/sf/activity-log.ts b/src/resources/extensions/sf/activity-log.ts index efcbcf0c4..a2f7615ff 100644 --- a/src/resources/extensions/sf/activity-log.ts +++ b/src/resources/extensions/sf/activity-log.ts @@ -1,5 +1,5 @@ /** - * SF Activity Log — Save raw chat sessions to .gsd/activity/ + * SF Activity Log — Save raw chat sessions to .sf/activity/ * * Before each context wipe in auto-mode, dumps the full session * as JSONL. No formatting, no truncation, no information loss. diff --git a/src/resources/extensions/sf/auto-artifact-paths.ts b/src/resources/extensions/sf/auto-artifact-paths.ts index bfc61940f..0d96bc968 100644 --- a/src/resources/extensions/sf/auto-artifact-paths.ts +++ b/src/resources/extensions/sf/auto-artifact-paths.ts @@ -120,7 +120,7 @@ export function diagnoseExpectedArtifact( case "replan-slice": return `${relSliceFile(base, mid, sid!, "REPLAN")} + updated ${relSliceFile(base, mid, sid!, "PLAN")}`; case "rewrite-docs": - return "Active overrides resolved in .gsd/OVERRIDES.md + plan documents updated"; + return "Active overrides resolved in .sf/OVERRIDES.md + plan documents updated"; case "reassess-roadmap": return `${relSliceFile(base, mid, sid!, "ASSESSMENT")} (roadmap reassessment)`; case "run-uat": diff --git a/src/resources/extensions/sf/auto-dispatch.ts b/src/resources/extensions/sf/auto-dispatch.ts index 433f4ea5c..075aaa25b 100644 --- a/src/resources/extensions/sf/auto-dispatch.ts +++ b/src/resources/extensions/sf/auto-dispatch.ts @@ -785,13 +785,13 @@ export const DISPATCH_RULES: DispatchRule[] = [ } // Safety guard (#1703): verify the milestone produced implementation - // artifacts (non-.gsd/ files). 
A milestone with only plan files and + // artifacts (non-.sf/ files). A milestone with only plan files and // zero implementation code should not be marked complete. const artifactCheck = hasImplementationArtifacts(basePath); if (artifactCheck === "absent") { return { action: "stop", - reason: `Cannot complete milestone ${mid}: no implementation files found outside .gsd/. The milestone has only plan files — actual code changes are required.`, + reason: `Cannot complete milestone ${mid}: no implementation files found outside .sf/. The milestone has only plan files — actual code changes are required.`, level: "error", }; } diff --git a/src/resources/extensions/sf/auto-post-unit.ts b/src/resources/extensions/sf/auto-post-unit.ts index 19c7aef0a..f2de6e145 100644 --- a/src/resources/extensions/sf/auto-post-unit.ts +++ b/src/resources/extensions/sf/auto-post-unit.ts @@ -94,7 +94,7 @@ function enqueueSidecar( if (notification) ctx.ui.notify(notification, "info"); return "continue"; } -/** Unit types that only touch `.gsd/` internal state files (no code changes). +/** Unit types that only touch `.sf/` internal state files (no code changes). * Auto-commit is skipped for these — their state files are picked up by the * next actual task commit via `smartStage()`. */ const LIFECYCLE_ONLY_UNITS = new Set([ diff --git a/src/resources/extensions/sf/auto-prompts.ts b/src/resources/extensions/sf/auto-prompts.ts index ecc2f3c11..96333911c 100644 --- a/src/resources/extensions/sf/auto-prompts.ts +++ b/src/resources/extensions/sf/auto-prompts.ts @@ -252,7 +252,7 @@ export async function inlineDependencySummaries( } /** - * Load a well-known .gsd/ root file for optional inlining. + * Load a well-known .sf/ root file for optional inlining. * Handles the existsSync check internally. */ export async function inlineGsdRootFile( @@ -297,7 +297,7 @@ export async function inlineDecisionsFromDb( const formatted = inlineLevel !== "full" ? 
formatDecisionsCompact(decisions) : formatDecisionsForPrompt(decisions); - return `### Decisions\nSource: \`.gsd/DECISIONS.md\`\n\n${formatted}`; + return `### Decisions\nSource: \`.sf/DECISIONS.md\`\n\n${formatted}`; } // DB available but cascade returned empty — intentional per D020, don't fall back to file return null; @@ -327,7 +327,7 @@ export async function inlineRequirementsFromDb( const formatted = inlineLevel !== "full" ? formatRequirementsCompact(requirements) : formatRequirementsForPrompt(requirements); - return `### Requirements\nSource: \`.gsd/REQUIREMENTS.md\`\n\n${formatted}`; + return `### Requirements\nSource: \`.sf/REQUIREMENTS.md\`\n\n${formatted}`; } } } catch (err) { @@ -349,7 +349,7 @@ export async function inlineProjectFromDb( const { queryProject } = await import("./context-store.js"); const content = queryProject(); if (content) { - return `### Project\nSource: \`.gsd/PROJECT.md\`\n\n${content}`; + return `### Project\nSource: \`.sf/PROJECT.md\`\n\n${content}`; } } } catch (err) { @@ -1005,7 +1005,7 @@ export async function buildDiscussMilestonePrompt(mid: string, midTitle: string, milestoneTitle: midTitle, inlinedTemplates: discussTemplates, structuredQuestionsAvailable: "false", - commitInstruction: "Do not commit planning artifacts — .gsd/ is managed externally.", + commitInstruction: "Do not commit planning artifacts — .sf/ is managed externally.", fastPathInstruction: "", }); @@ -1275,7 +1275,7 @@ export async function buildPlanSlicePrompt( const executorContextConstraints = formatExecutorConstraints(); const outputRelPath = relSliceFile(base, mid, sid, "PLAN"); - const commitInstruction = "Do not commit — .gsd/ planning docs are managed externally and not tracked in git."; + const commitInstruction = "Do not commit — .sf/ planning docs are managed externally and not tracked in git."; return loadPrompt("plan-slice", { workingDirectory: base, milestoneId: mid, sliceId: sid, sliceTitle: sTitle, @@ -1409,7 +1409,7 @@ export async function 
buildExecuteTaskPrompt( const runtimePath = resolveRuntimeFile(base); const runtimeContent = existsSync(runtimePath) ? await loadFile(runtimePath) : null; const runtimeContext = runtimeContent - ? `### Runtime Context\nSource: \`.gsd/RUNTIME.md\`\n\n${runtimeContent.trim()}` + ? `### Runtime Context\nSource: \`.sf/RUNTIME.md\`\n\n${runtimeContent.trim()}` : ""; const phaseAnchorSection = planAnchor ? formatAnchorForPrompt(planAnchor) : ""; @@ -1913,7 +1913,7 @@ export async function buildReassessRoadmapPrompt( logWarning("prompt", `loadDeferredCaptures failed: ${err instanceof Error ? err.message : String(err)}`); } - const reassessCommitInstruction = "Do not commit — .gsd/ planning docs are managed externally and not tracked in git."; + const reassessCommitInstruction = "Do not commit — .sf/ planning docs are managed externally and not tracked in git."; return loadPrompt("reassess-roadmap", { workingDirectory: base, diff --git a/src/resources/extensions/sf/auto-recovery.ts b/src/resources/extensions/sf/auto-recovery.ts index e9b09b7eb..6879782fd 100644 --- a/src/resources/extensions/sf/auto-recovery.ts +++ b/src/resources/extensions/sf/auto-recovery.ts @@ -57,11 +57,11 @@ export { resolveExpectedArtifactPath, diagnoseExpectedArtifact }; // ─── Artifact Resolution & Verification ─────────────────────────────────────── /** - * Check whether a milestone produced implementation artifacts (non-`.gsd/` files) + * Check whether a milestone produced implementation artifacts (non-`.sf/` files) * in the git history. Uses `git log --name-only` to inspect all commits on the - * current branch that touch files outside `.gsd/`. + * current branch that touch files outside `.sf/`. * - * Returns "present" if implementation files found, "absent" if only .gsd/ files, + * Returns "present" if implementation files found, "absent" if only .sf/ files, * "unknown" if git is unavailable or check failed (callers decide how to handle). 
*/ export function hasImplementationArtifacts(basePath: string): "present" | "absent" | "unknown" { @@ -89,10 +89,10 @@ export function hasImplementationArtifacts(basePath: string): "present" | "absen // commit repo, or other edge case where git diff returns nothing). if (changedFiles.length === 0) return "unknown"; - // Filter out .gsd/ files — only implementation files count. - // If every changed file is under .gsd/, the milestone produced no + // Filter out .sf/ files — only implementation files count. + // If every changed file is under .sf/, the milestone produced no // implementation code (#1703). - const implFiles = changedFiles.filter(f => !f.startsWith(".gsd/") && !f.startsWith(".gsd\\")); + const implFiles = changedFiles.filter(f => !f.startsWith(".sf/") && !f.startsWith(".sf\\")); return implFiles.length > 0 ? "present" : "absent"; } catch (e) { // Non-fatal — if git operations fail, return unknown so callers can decide @@ -400,7 +400,7 @@ export function verifyExpectedArtifact( } // complete-milestone must have produced implementation artifacts (#1703). - // A milestone with only .gsd/ plan files and zero implementation code is + // A milestone with only .sf/ plan files and zero implementation code is // not genuinely complete — the LLM wrote plan files but skipped actual work. if (unitType === "complete-milestone") { if (hasImplementationArtifacts(base) === "absent") return false; @@ -494,7 +494,7 @@ export type MergeReconcileResult = "clean" | "reconciled" | "blocked"; /** * Detect leftover merge state from a prior session and reconcile it. * If MERGE_HEAD or SQUASH_MSG exists, check whether conflicts are resolved. - * If resolved: finalize the commit. If only .gsd conflicts remain: auto-resolve. + * If resolved: finalize the commit. If only .sf conflicts remain: auto-resolve. * If code conflicts remain: fail safe without modifying the worktree. 
*/ export function reconcileMergeState( @@ -524,32 +524,32 @@ export function reconcileMergeState( return "blocked"; } } else { - // Still conflicted — try auto-resolving .gsd/ state file conflicts (#530) - const sfConflicts = conflictedFiles.filter((f) => f.startsWith(".gsd/")); - const codeConflicts = conflictedFiles.filter((f) => !f.startsWith(".gsd/")); + // Still conflicted — try auto-resolving .sf/ state file conflicts (#530) + const sfConflicts = conflictedFiles.filter((f) => f.startsWith(".sf/")); + const codeConflicts = conflictedFiles.filter((f) => !f.startsWith(".sf/")); if (sfConflicts.length > 0 && codeConflicts.length === 0) { - // All conflicts are in .gsd/ state files — auto-resolve by accepting theirs + // All conflicts are in .sf/ state files — auto-resolve by accepting theirs let resolved = true; try { nativeCheckoutTheirs(basePath, sfConflicts); nativeAddPaths(basePath, sfConflicts); } catch (e) { - logError("recovery", `auto-resolve .gsd/ conflicts failed: ${(e as Error).message}`); + logError("recovery", `auto-resolve .sf/ conflicts failed: ${(e as Error).message}`); resolved = false; } if (resolved) { try { nativeCommit( basePath, - "chore: auto-resolve .gsd/ state file conflicts", + "chore: auto-resolve .sf/ state file conflicts", ); ctx.ui.notify( - `Auto-resolved ${sfConflicts.length} .gsd/ state file conflict(s) from prior merge.`, + `Auto-resolved ${sfConflicts.length} .sf/ state file conflict(s) from prior merge.`, "info", ); } catch (e) { - logError("recovery", `auto-commit .gsd/ conflict resolution failed: ${(e as Error).message}`); + logError("recovery", `auto-commit .sf/ conflict resolution failed: ${(e as Error).message}`); resolved = false; } } diff --git a/src/resources/extensions/sf/auto-start.ts b/src/resources/extensions/sf/auto-start.ts index 4f3298965..8ef35b6fc 100644 --- a/src/resources/extensions/sf/auto-start.ts +++ b/src/resources/extensions/sf/auto-start.ts @@ -212,7 +212,7 @@ export function 
auditOrphanedMilestoneBranches( // If the directory still exists after git worktree remove (either it // wasn't registered or the remove was a noop), fall back to direct - // filesystem removal — but only inside .gsd/worktrees/ for safety (#2365). + // filesystem removal — but only inside .sf/worktrees/ for safety (#2365). if (existsSync(wtDir)) { if (isInsideWorktreesDir(basePath, wtDir)) { try { @@ -222,7 +222,7 @@ export function auditOrphanedMilestoneBranches( warnings.push(`Failed to remove worktree directory for ${milestoneId}: ${err2 instanceof Error ? err2.message : String(err2)}`); } } else { - warnings.push(`Orphaned worktree directory for ${milestoneId} is outside .gsd/worktrees/ — skipping removal for safety.`); + warnings.push(`Orphaned worktree directory for ${milestoneId} is outside .sf/worktrees/ — skipping removal for safety.`); } } else { recovered.push(`Removed orphaned worktree directory for ${milestoneId}.`); @@ -281,7 +281,7 @@ export async function bootstrapAutoSession( // selection for subsequent /sf runs in the same session. // // Exception (#4122): when the session provider is a custom provider declared - // in ~/.gsd/agent/models.json (Ollama, vLLM, OpenAI-compatible proxy, etc.), + // in ~/.sf/agent/models.json (Ollama, vLLM, OpenAI-compatible proxy, etc.), // PREFERENCES.md is skipped entirely. PREFERENCES.md cannot reference custom // providers, so honoring it would silently reroute auto-mode to a built-in // provider the user is not logged into and surface as "Not logged in · Please @@ -336,7 +336,7 @@ export async function bootstrapAutoSession( // nativeIsRepo() uses `git rev-parse` which traverses up to parent dirs, // so a parent repo can make it return true even when base has no .git of // its own. Check for a local .git instead (defense-in-depth for the case - // where isInheritedRepo() returns a false negative, e.g. stale .gsd at + // where isInheritedRepo() returns a false negative, e.g. stale .sf at // the parent git root). 
See #2393 and related issue. const hasLocalGit = existsSync(join(base, ".git")); if (!hasLocalGit || isInheritedRepo(base)) { @@ -345,9 +345,9 @@ export async function bootstrapAutoSession( nativeInit(base, mainBranch); } - // Migrate legacy in-project .gsd/ to external state directory. - // Migration MUST run before ensureGitignore to avoid adding ".gsd" to - // .gitignore when .gsd/ is git-tracked (data-loss bug #1364). + // Migrate legacy in-project .sf/ to external state directory. + // Migration MUST run before ensureGitignore to avoid adding ".sf" to + // .gitignore when .sf/ is git-tracked (data-loss bug #1364). recoverFailedMigration(base); const migration = migrateToExternalState(base); if (migration.error) { @@ -357,17 +357,17 @@ export async function bootstrapAutoSession( ensureGsdSymlink(base); // Ensure .gitignore has baseline patterns. - // ensureGitignore checks for git-tracked .gsd/ files and skips the - // ".gsd" pattern if the project intentionally tracks .gsd/ in git. + // ensureGitignore checks for git-tracked .sf/ files and skips the + // ".sf" pattern if the project intentionally tracks .sf/ in git. const gitPrefs = loadEffectiveSFPreferences()?.preferences?.git; const manageGitignore = gitPrefs?.manage_gitignore; ensureGitignore(base, { manageGitignore }); if (manageGitignore !== false) untrackRuntimeFiles(base); // Bootstrap milestones/ if it doesn't exist. - // Check milestones/ directly — ensureGsdSymlink above already created .gsd/, - // so checking .gsd/ existence would be dead code (#2942). - const sfDir = join(base, ".gsd"); + // Check milestones/ directly — ensureGsdSymlink above already created .sf/, + // so checking .sf/ existence would be dead code (#2942). 
+ const sfDir = join(base, ".sf"); const milestonesPath = join(sfDir, "milestones"); if (!existsSync(milestonesPath)) { mkdirSync(milestonesPath, { recursive: true }); @@ -471,7 +471,7 @@ export async function bootstrapAutoSession( (state.phase === "pre-planning" || state.phase === "complete") && shouldUseWorktreeIsolation() && !detectWorktreeName(base) && - !base.includes(`${pathSep}.gsd${pathSep}worktrees${pathSep}`) + !base.includes(`${pathSep}.sf${pathSep}worktrees${pathSep}`) ) { const milestoneBranch = `milestone/${state.activeMilestone.id}`; const { nativeBranchExists } = await import("./native-git-bridge.js"); @@ -698,14 +698,14 @@ export async function bootstrapAutoSession( s.originalBasePath = base; const isUnderGsdWorktrees = (p: string): boolean => { - // Direct layout: /.gsd/worktrees/ - const marker = `${pathSep}.gsd${pathSep}worktrees${pathSep}`; + // Direct layout: /.sf/worktrees/ + const marker = `${pathSep}.sf${pathSep}worktrees${pathSep}`; if (p.includes(marker)) return true; - const worktreesSuffix = `${pathSep}.gsd${pathSep}worktrees`; + const worktreesSuffix = `${pathSep}.sf${pathSep}worktrees`; if (p.endsWith(worktreesSuffix)) return true; - // Symlink-resolved layout: /.gsd/projects/<hash>/worktrees/ + // Symlink-resolved layout: /.sf/projects/<hash>/worktrees/ const symlinkRe = new RegExp( - `\\${pathSep}\\.gsd\\${pathSep}projects\\${pathSep}[a-f0-9]+\\${pathSep}worktrees(?:\\${pathSep}|$)`, + `\\${pathSep}\\.sf\\${pathSep}projects\\${pathSep}[a-f0-9]+\\${pathSep}worktrees(?:\\${pathSep}|$)`, ); return symlinkRe.test(p); }; @@ -727,7 +727,7 @@ export async function bootstrapAutoSession( // ── DB lifecycle ── const sfDbPath = resolveProjectRootDbPath(s.basePath); - const sfDirPath = join(s.basePath, ".gsd"); + const sfDirPath = join(s.basePath, ".sf"); if (existsSync(sfDirPath) && !existsSync(sfDbPath)) { const hasDecisions = existsSync(join(sfDirPath, "DECISIONS.md")); const hasRequirements = existsSync(join(sfDirPath, "REQUIREMENTS.md")); 
@@ -915,7 +915,7 @@ export async function bootstrapAutoSession( // Pre-flight: validate milestone queue try { - const msDir = join(base, ".gsd", "milestones"); + const msDir = join(base, ".sf", "milestones"); if (existsSync(msDir)) { const milestoneIds = readdirSync(msDir, { withFileTypes: true }) .filter((d) => d.isDirectory() && /^M\d{3}/.test(d.name)) diff --git a/src/resources/extensions/sf/auto-worktree.ts b/src/resources/extensions/sf/auto-worktree.ts index 84c125aa8..9dad84558 100644 --- a/src/resources/extensions/sf/auto-worktree.ts +++ b/src/resources/extensions/sf/auto-worktree.ts @@ -67,14 +67,14 @@ import { nativeMergeAbort, } from "./native-git-bridge.js"; -const sfHome = process.env.SF_HOME || join(homedir(), ".gsd"); +const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); const PROJECT_PREFERENCES_FILE = "PREFERENCES.md"; const LEGACY_PROJECT_PREFERENCES_FILE = "preferences.md"; // ─── Shared Constants & Helpers ───────────────────────────────────────────── /** - * Root-level .gsd/ state files synced between worktree and project root. + * Root-level .sf/ state files synced between worktree and project root. * Single source of truth — used by syncSfStateToWorktree, syncWorktreeStateBack, * and the dispatch-level sync functions. */ @@ -250,9 +250,9 @@ export const SAFE_AUTO_RESOLVE_PATTERNS: RegExp[] = [ ]; /** Returns true if the file path is safe to auto-resolve during merge. - * Covers `.gsd/` state files and common build artifacts. */ + * Covers `.sf/` state files and common build artifacts. 
*/ export const isSafeToAutoResolve = (filePath: string): boolean => - filePath.startsWith(".gsd/") || + filePath.startsWith(".sf/") || SAFE_AUTO_RESOLVE_PATTERNS.some((re) => re.test(filePath)); // ─── Dispatch-Level Sync (project root ↔ worktree) ────────────────────────── @@ -272,10 +272,10 @@ export function syncProjectRootToWorktree( if (!worktreePath_ || !projectRoot || worktreePath_ === projectRoot) return; if (!milestoneId) return; - const prGsd = join(projectRoot, ".gsd"); - const wtGsd = join(worktreePath_, ".gsd"); + const prGsd = join(projectRoot, ".sf"); + const wtGsd = join(worktreePath_, ".sf"); - // When .gsd is a symlink to the same external directory in both locations, + // When .sf is a symlink to the same external directory in both locations, // cpSync rejects the copy because source === destination (ERR_FS_CP_EINVAL). // Compare realpaths and skip when they resolve to the same physical path (#2184). if (isSamePath(prGsd, wtGsd)) return; @@ -351,7 +351,7 @@ export function syncProjectRootToWorktree( } /** - * Sync dispatch-critical .gsd/ state files from worktree to project root. + * Sync dispatch-critical .sf/ state files from worktree to project root. * Only runs when inside an auto-worktree (worktreePath differs from projectRoot). * Copies: STATE.md + active milestone directory (roadmap, slice plans, task summaries). * Non-fatal — sync failure should never block dispatch. @@ -364,10 +364,10 @@ export function syncStateToProjectRoot( if (!worktreePath_ || !projectRoot || worktreePath_ === projectRoot) return; if (!milestoneId) return; - const wtGsd = join(worktreePath_, ".gsd"); - const prGsd = join(projectRoot, ".gsd"); + const wtGsd = join(worktreePath_, ".sf"); + const prGsd = join(projectRoot, ".sf"); - // When .gsd is a symlink to the same external directory in both locations, + // When .sf is a symlink to the same external directory in both locations, // cpSync rejects the copy because source === destination (ERR_FS_CP_EINVAL). 
// Compare realpaths and skip when they resolve to the same physical path (#2184). if (isSamePath(wtGsd, prGsd)) return; @@ -376,7 +376,7 @@ export function syncStateToProjectRoot( safeCopy(join(wtGsd, "STATE.md"), join(prGsd, "STATE.md"), { force: true }); // 2. Milestone directory — ROADMAP, slice PLANs, task summaries - // Copy the entire milestone .gsd subtree so deriveState reads current checkboxes + // Copy the entire milestone .sf subtree so deriveState reads current checkboxes safeCopyRecursive( join(wtGsd, "milestones", milestoneId), join(prGsd, "milestones", milestoneId), @@ -443,7 +443,7 @@ export function checkResourcesStale( * Detect and escape a stale worktree cwd (#608). * * After milestone completion + merge, the worktree directory is removed but - * the process cwd may still point inside `.gsd/worktrees/<MID>/`. + * the process cwd may still point inside `.sf/worktrees/<MID>/`. * When a new session starts, `process.cwd()` is passed as `base` to startAuto * and all subsequent writes land in the wrong directory. This function detects * that scenario and chdir back to the project root. @@ -451,27 +451,27 @@ export function checkResourcesStale( * Returns the corrected base path. 
*/ export function escapeStaleWorktree(base: string): string { - // Direct layout: /.gsd/worktrees/ - const directMarker = `${pathSep}.gsd${pathSep}worktrees${pathSep}`; + // Direct layout: /.sf/worktrees/ + const directMarker = `${pathSep}.sf${pathSep}worktrees${pathSep}`; let idx = base.indexOf(directMarker); if (idx === -1) { - // Symlink-resolved layout: /.gsd/projects/<hash>/worktrees/ + // Symlink-resolved layout: /.sf/projects/<hash>/worktrees/ const symlinkRe = new RegExp( - `\\${pathSep}\\.gsd\\${pathSep}projects\\${pathSep}[a-f0-9]+\\${pathSep}worktrees\\${pathSep}`, + `\\${pathSep}\\.sf\\${pathSep}projects\\${pathSep}[a-f0-9]+\\${pathSep}worktrees\\${pathSep}`, ); const match = base.match(symlinkRe); if (!match || match.index === undefined) return base; idx = match.index; } - // base is inside .gsd/worktrees/<something> — extract the project root + // base is inside .sf/worktrees/<something> — extract the project root const projectRoot = base.slice(0, idx); - // Guard: If the candidate project root's .gsd IS the user-level ~/.gsd, - // the string-slice heuristic matched the wrong /.gsd/ boundary. This happens - // when .gsd is a symlink into ~/.gsd/projects/<hash> and process.cwd() + // Guard: If the candidate project root's .sf IS the user-level ~/.sf, + // the string-slice heuristic matched the wrong /.sf/ boundary. This happens + // when .sf is a symlink into ~/.sf/projects/<hash> and process.cwd() // resolved through the symlink. Returning ~ would be catastrophic (#1676). - const candidateGsd = join(projectRoot, ".gsd").replaceAll("\\", "/"); + const candidateGsd = join(projectRoot, ".sf").replaceAll("\\", "/"); const sfHomePath = sfHome.replaceAll("\\", "/"); if (candidateGsd === sfHomePath || candidateGsd.startsWith(sfHomePath + "/")) { // Don't chdir to home — return base unchanged. 
@@ -530,15 +530,15 @@ export function cleanStaleRuntimeUnits( // ─── Worktree ↔ Main Repo Sync (#1311) ────────────────────────────────────── /** - * Sync .gsd/ state from the main repo into the worktree. + * Sync .sf/ state from the main repo into the worktree. * - * When .gsd/ is a symlink to the external state directory, both the main + * When .sf/ is a symlink to the external state directory, both the main * repo and worktree share the same directory — no sync needed. * - * When .gsd/ is a real directory (e.g., git-tracked or manage_gitignore:false), + * When .sf/ is a real directory (e.g., git-tracked or manage_gitignore:false), * the worktree has its own copy that may be stale. This function copies * missing milestones, CONTEXT, ROADMAP, DECISIONS, REQUIREMENTS, and - * PROJECT files from the main repo's .gsd/ into the worktree's .gsd/. + * PROJECT files from the main repo's .sf/ into the worktree's .sf/. * * Only adds missing content — never overwrites existing files in the worktree * (the worktree's execution state is authoritative for in-progress work). @@ -556,7 +556,7 @@ export function syncSfStateToWorktree( if (!existsSync(mainGsd) || !existsSync(wtGsd)) return { synced }; - // Sync root-level .gsd/ files (DECISIONS, REQUIREMENTS, PROJECT, KNOWLEDGE, etc.) + // Sync root-level .sf/ files (DECISIONS, REQUIREMENTS, PROJECT, KNOWLEDGE, etc.) for (const f of ROOT_STATE_FILES) { const src = join(mainGsd, f); const dst = join(wtGsd, f); @@ -697,7 +697,7 @@ export function syncSfStateToWorktree( * updated ROADMAP) are visible from the project root (#1412). * * Syncs: - * 1. Root-level .gsd/ files (REQUIREMENTS, PROJECT, DECISIONS, KNOWLEDGE, + * 1. Root-level .sf/ files (REQUIREMENTS, PROJECT, DECISIONS, KNOWLEDGE, * OVERRIDES) — the worktree's versions overwrite main's because the * worktree is the authoritative execution context. * 2. 
ALL milestone directories found in the worktree — not just the @@ -707,7 +707,7 @@ export function syncSfStateToWorktree( * * History: Originally only synced milestones/<milestoneId>/ and assumed * root-level files would be carried by the squash merge. In practice, - * .gsd/ files are often untracked (gitignored or never committed), so the + * .sf/ files are often untracked (gitignored or never committed), so the * squash merge carries nothing. This caused next-milestone artifacts and * updated REQUIREMENTS/PROJECT to be silently lost on teardown. */ @@ -742,7 +742,7 @@ export function syncWorktreeStateBack( } } - // ── 1. Sync root-level .gsd/ files back ────────────────────────────── + // ── 1. Sync root-level .sf/ files back ────────────────────────────── // The worktree is authoritative — complete-milestone updates REQUIREMENTS, // PROJECT, etc. These must overwrite main's copies so they survive teardown. // Also includes QUEUE.md, completed-units.json, and metrics.json which are @@ -960,8 +960,8 @@ function reconcilePlanCheckboxes( wtPath: string, milestoneId: string, ): void { - const srcMilestone = join(projectRoot, ".gsd", "milestones", milestoneId); - const dstMilestone = join(wtPath, ".gsd", "milestones", milestoneId); + const srcMilestone = join(projectRoot, ".sf", "milestones", milestoneId); + const dstMilestone = join(wtPath, ".sf", "milestones", milestoneId); if (!existsSync(srcMilestone) || !existsSync(dstMilestone)) return; // Walk all markdown files in the milestone directory (plans, summaries, etc.) @@ -1074,10 +1074,10 @@ export function createAutoWorktree( }); } - // Copy .gsd/ planning artifacts from the source repo into the new worktree. + // Copy .sf/ planning artifacts from the source repo into the new worktree. // Worktrees are fresh git checkouts — untracked files don't carry over. // Planning artifacts may be untracked if the project's .gitignore had a - // blanket .gsd/ rule (pre-v2.14.0). 
Without this copy, auto-mode loops + // blanket .sf/ rule (pre-v2.14.0). Without this copy, auto-mode loops // on plan-slice because the plan file doesn't exist in the worktree. // // IMPORTANT: Skip when re-attaching to an existing branch (#759). @@ -1127,15 +1127,15 @@ export function createAutoWorktree( } /** - * Copy .gsd/ planning artifacts from source repo to a new worktree. + * Copy .sf/ planning artifacts from source repo to a new worktree. * Copies milestones/, DECISIONS.md, REQUIREMENTS.md, PROJECT.md, QUEUE.md, * STATE.md, KNOWLEDGE.md, and OVERRIDES.md. * Skips runtime files (auto.lock, metrics.json, etc.) and the worktrees/ dir. * Best-effort — failures are non-fatal since auto-mode can recreate artifacts. */ function copyPlanningArtifacts(srcBase: string, wtPath: string): void { - const srcGsd = join(srcBase, ".gsd"); - const dstGsd = join(wtPath, ".gsd"); + const srcGsd = join(srcBase, ".sf"); + const dstGsd = join(wtPath, ".sf"); if (!existsSync(srcGsd)) return; if (isSamePath(srcGsd, dstGsd)) return; @@ -1223,7 +1223,7 @@ export function teardownAutoWorktree( { worktree: milestoneId }, ); // Attempt a direct filesystem removal as a fallback — but ONLY if the - // path is safely inside .gsd/worktrees/ to prevent #2365 data loss. + // path is safely inside .sf/worktrees/ to prevent #2365 data loss. if (isInsideWorktreesDir(originalBasePath, wtDir)) { try { rmSync(wtDir, { recursive: true, force: true }); @@ -1233,7 +1233,7 @@ export function teardownAutoWorktree( } } else { console.error( - `[SF] REFUSING fallback rmSync — path is outside .gsd/worktrees/: ${wtDir}`, + `[SF] REFUSING fallback rmSync — path is outside .sf/worktrees/: ${wtDir}`, ); } } @@ -1247,7 +1247,7 @@ export function isInAutoWorktree(basePath: string): boolean { if (!originalBase) return false; const cwd = process.cwd(); const resolvedBase = existsSync(basePath) ? 
realpathSync(basePath) : basePath; - const wtDir = join(resolvedBase, ".gsd", "worktrees"); + const wtDir = join(resolvedBase, ".sf", "worktrees"); if (!cwd.startsWith(wtDir)) return false; const branch = nativeGetCurrentBranch(cwd); return branch.startsWith("milestone/"); @@ -1359,7 +1359,7 @@ export function getActiveAutoWorktreeContext(): { const resolvedBase = existsSync(originalBase) ? realpathSync(originalBase) : originalBase; - const wtDir = join(resolvedBase, ".gsd", "worktrees"); + const wtDir = join(resolvedBase, ".sf", "worktrees"); if (!cwd.startsWith(wtDir)) return null; const worktreeName = detectWorktreeName(cwd); if (!worktreeName) return null; @@ -1453,8 +1453,8 @@ export function mergeMilestoneToMain( // database (#2823). if (isDbAvailable()) { try { - const worktreeDbPath = join(worktreeCwd, ".gsd", "sf.db"); - const mainDbPath = join(originalBasePath_, ".gsd", "sf.db"); + const worktreeDbPath = join(worktreeCwd, ".sf", "sf.db"); + const mainDbPath = join(originalBasePath_, ".sf", "sf.db"); if (!isSamePath(worktreeDbPath, mainDbPath)) { reconcileWorktreeDb(mainDbPath, worktreeDbPath); } @@ -1592,7 +1592,7 @@ export function mergeMilestoneToMain( // 7. Stash any pre-existing dirty files so the squash merge is not // blocked by unrelated local changes (#2151). clearProjectRootStateFiles - // only removes untracked .gsd/ files; tracked dirty files elsewhere (e.g. + // only removes untracked .sf/ files; tracked dirty files elsewhere (e.g. // .planning/work-state.json with stash conflict markers) are invisible to // that cleanup but will cause `git merge --squash` to reject. let stashed = false; @@ -1604,7 +1604,7 @@ export function mergeMilestoneToMain( }).trim(); if (status) { // Use --include-untracked to stash untracked files that would block - // the squash merge, but EXCLUDE .gsd/milestones/ (#2505). + // the squash merge, but EXCLUDE .sf/milestones/ (#2505). 
// --include-untracked without exclusion sweeps queued milestone // CONTEXT files into the stash. If stash pop later fails, those files // are permanently trapped in the stash entry and lost on the next @@ -1614,7 +1614,7 @@ export function mergeMilestoneToMain( [ "stash", "push", "--include-untracked", "-m", `sf: pre-merge stash for ${milestoneId}`, - "--", ":(exclude).gsd/milestones", + "--", ":(exclude).sf/milestones", ], { cwd: originalBasePath_, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }, ); @@ -1693,12 +1693,12 @@ export function mergeMilestoneToMain( logError("worktree", `merge state cleanup failed: ${err instanceof Error ? err.message : String(err)}`); } - // 8. Squash merge — auto-resolve .gsd/ state file conflicts (#530) + // 8. Squash merge — auto-resolve .sf/ state file conflicts (#530) const mergeResult = nativeMergeSquash(originalBasePath_, milestoneBranch); if (!mergeResult.success) { // Dirty working tree — the merge was rejected before it started (e.g. - // untracked .gsd/ files left by syncStateToProjectRoot). Preserve the + // untracked .sf/ files left by syncStateToProjectRoot). Preserve the // milestone branch so commits are not lost. if (mergeResult.conflicts.includes("__dirty_working_tree__")) { // Defensively clean merge state — the native path may leave MERGE_HEAD @@ -1729,7 +1729,7 @@ export function mergeMilestoneToMain( // Restore cwd so the caller is not stranded on the integration branch process.chdir(previousCwd); // Surface the actual dirty filenames from git stderr instead of - // generically blaming .gsd/ (#2151). + // generically blaming .sf/ (#2151). const fileList = mergeResult.dirtyFiles?.length ? 
`Dirty files:\n${mergeResult.dirtyFiles.map((f) => ` ${f}`).join("\n")}` : `Check \`git status\` in the project root for details.`; @@ -1852,14 +1852,14 @@ export function mergeMilestoneToMain( }); } catch (e) { logWarning("worktree", `git stash pop failed, attempting conflict resolution: ${(e as Error).message}`); - // Stash pop after squash merge can conflict on .gsd/ state files that + // Stash pop after squash merge can conflict on .sf/ state files that // diverged between branches. Left unresolved, these UU entries block // every subsequent merge. Auto-resolve them the same way we handle - // .gsd/ conflicts during the merge itself: accept HEAD (the just-committed + // .sf/ conflicts during the merge itself: accept HEAD (the just-committed // version) and drop the now-applied stash. const uu = nativeConflictFiles(originalBasePath_); - const sfUU = uu.filter((f) => f.startsWith(".gsd/")); - const nonGsdUU = uu.filter((f) => !f.startsWith(".gsd/")); + const sfUU = uu.filter((f) => f.startsWith(".sf/")); + const nonGsdUU = uu.filter((f) => !f.startsWith(".sf/")); if (sfUU.length > 0) { for (const f of sfUU) { @@ -1880,7 +1880,7 @@ export function mergeMilestoneToMain( } if (nonGsdUU.length === 0) { - // All conflicts were .gsd/ files — safe to drop the stash + // All conflicts were .sf/ files — safe to drop the stash try { execFileSync("git", ["stash", "drop"], { cwd: originalBasePath_, @@ -1891,8 +1891,8 @@ export function mergeMilestoneToMain( logWarning("worktree", `git stash drop failed: ${err instanceof Error ? err.message : String(err)}`); } } else { - // Non-.gsd conflicts remain — leave stash for manual resolution - logWarning("reconcile", "Stash pop conflict on non-.gsd files after merge", { + // Non-.sf conflicts remain — leave stash for manual resolution + logWarning("reconcile", "Stash pop conflict on non-.sf files after merge", { files: nonGsdUU.join(", "), }); } @@ -1904,7 +1904,7 @@ export function mergeMilestoneToMain( // 9b. 
Safety check (#1792): if nothing was committed, verify the milestone // work is already on the integration branch before allowing teardown. - // Compare only non-.gsd/ paths — .gsd/ state files diverge normally and + // Compare only non-.sf/ paths — .sf/ state files diverge normally and // are auto-resolved during the squash merge. if (nothingToCommit) { const numstat = nativeDiffNumstat( @@ -1913,7 +1913,7 @@ export function mergeMilestoneToMain( milestoneBranch, ); const codeChanges = numstat.filter( - (entry) => !entry.path.startsWith(".gsd/"), + (entry) => !entry.path.startsWith(".sf/"), ); if (codeChanges.length > 0) { // Milestone has unanchored code changes — abort teardown. @@ -1927,8 +1927,8 @@ export function mergeMilestoneToMain( } } - // 9c. Detect whether any non-.gsd/ code files were actually merged (#1906). - // When a milestone only produced .gsd/ metadata (summaries, roadmaps) but no + // 9c. Detect whether any non-.sf/ code files were actually merged (#1906). + // When a milestone only produced .sf/ metadata (summaries, roadmaps) but no // real code, the user sees "milestone complete" but nothing changed in their // codebase. Surface this so the caller can warn the user. let codeFilesChanged = false; @@ -1940,7 +1940,7 @@ export function mergeMilestoneToMain( "HEAD", ); codeFilesChanged = mergedFiles.some( - (entry) => !entry.path.startsWith(".gsd/"), + (entry) => !entry.path.startsWith(".sf/"), ); } catch (e) { // If HEAD~1 doesn't exist (first commit), assume code was changed diff --git a/src/resources/extensions/sf/auto.ts b/src/resources/extensions/sf/auto.ts index cfae8b300..41ccd815f 100644 --- a/src/resources/extensions/sf/auto.ts +++ b/src/resources/extensions/sf/auto.ts @@ -1,7 +1,7 @@ /** * SF Auto Mode — Fresh Session Per Unit * - * State machine driven by .gsd/ files on disk. Each "unit" of work + * State machine driven by .sf/ files on disk. 
Each "unit" of work * (plan slice, execute task, complete slice) gets a fresh session via * the stashed ctx.newSession() pattern. * @@ -1465,10 +1465,10 @@ export async function startAuto( // bundled extension updates before resume-time verification/state logic runs. // SF_PKG_ROOT is set by loader.ts and points to the sf-run package root. // The relative import ("../../../resource-loader.js") only works from the source - // tree; deployed extensions live at ~/.gsd/agent/extensions/sf/ where the - // relative path resolves to ~/.gsd/agent/resource-loader.js which doesn't exist. + // tree; deployed extensions live at ~/.sf/agent/extensions/sf/ where the + // relative path resolves to ~/.sf/agent/resource-loader.js which doesn't exist. // Using SF_PKG_ROOT constructs a correct absolute path in both contexts (#3949). - const agentDir = process.env.SF_CODING_AGENT_DIR || join(process.env.SF_HOME || homedir(), ".gsd", "agent"); + const agentDir = process.env.SF_CODING_AGENT_DIR || join(process.env.SF_HOME || homedir(), ".sf", "agent"); const pkgRoot = process.env.SF_PKG_ROOT; const resourceLoaderPath = pkgRoot ? 
pathToFileURL(join(pkgRoot, "dist", "resource-loader.js")).href diff --git a/src/resources/extensions/sf/auto/loop-deps.ts b/src/resources/extensions/sf/auto/loop-deps.ts index c66a06812..aa8d77dde 100644 --- a/src/resources/extensions/sf/auto/loop-deps.ts +++ b/src/resources/extensions/sf/auto/loop-deps.ts @@ -60,9 +60,6 @@ export interface LoopDeps { loadEffectiveSFPreferences: () => | { preferences?: SFPreferences } | undefined; - loadEffectiveSFPreferences?: () => - | { preferences?: SFPreferences } - | undefined; // Pre-dispatch health gate preDispatchHealthGate: ( diff --git a/src/resources/extensions/sf/auto/phases.ts b/src/resources/extensions/sf/auto/phases.ts index 03defb82c..9088b5f20 100644 --- a/src/resources/extensions/sf/auto/phases.ts +++ b/src/resources/extensions/sf/auto/phases.ts @@ -218,7 +218,7 @@ async function generateMilestoneReport( phase: snapData.phase, }); ctx.ui.notify( - `Report saved: .gsd/reports/${basename(outPath)} — open index.html to browse progression.`, + `Report saved: .sf/reports/${basename(outPath)} — open index.html to browse progression.`, "info", ); } diff --git a/src/resources/extensions/sf/bootstrap/crash-log.ts b/src/resources/extensions/sf/bootstrap/crash-log.ts index 919d1fcfa..f309eccea 100644 --- a/src/resources/extensions/sf/bootstrap/crash-log.ts +++ b/src/resources/extensions/sf/bootstrap/crash-log.ts @@ -1,5 +1,5 @@ /** - * crash-log.ts — Write crash diagnostics to ~/.gsd/crash/<timestamp>.log + * crash-log.ts — Write crash diagnostics to ~/.sf/crash/<timestamp>.log * * Zero cross-dependencies: only uses Node.js built-ins so it can be imported * safely from uncaughtException / unhandledRejection handlers and from tests @@ -11,12 +11,12 @@ import { homedir } from "node:os"; import { join } from "node:path"; /** - * Write a crash log to ~/.gsd/crash/<timestamp>.log (or $SF_HOME/crash/). + * Write a crash log to ~/.sf/crash/<timestamp>.log (or $SF_HOME/crash/). 
* Never throws — must be safe to call from any error handler. */ export function writeCrashLog(err: Error, source: string): void { try { - const crashDir = join(process.env.SF_HOME ?? join(homedir(), ".gsd"), "crash"); + const crashDir = join(process.env.SF_HOME ?? join(homedir(), ".sf"), "crash"); mkdirSync(crashDir, { recursive: true }); const ts = new Date().toISOString().replace(/[:.]/g, "-"); const logPath = join(crashDir, `${ts}.log`); diff --git a/src/resources/extensions/sf/bootstrap/db-tools.ts b/src/resources/extensions/sf/bootstrap/db-tools.ts index 51db5c8ef..f64eb268c 100644 --- a/src/resources/extensions/sf/bootstrap/db-tools.ts +++ b/src/resources/extensions/sf/bootstrap/db-tools.ts @@ -85,7 +85,7 @@ export function registerDbTools(pi: ExtensionAPI): void { "Use sf_decision_save when recording an architectural, pattern, library, or observability decision.", "Decision IDs are auto-assigned (D001, D002, ...) — never guess or provide an ID.", "All fields except revisable, when_context, and made_by are required.", - "The tool writes to the DB and regenerates .gsd/DECISIONS.md automatically.", + "The tool writes to the DB and regenerates .sf/DECISIONS.md automatically.", "Set made_by to 'human' when the user explicitly directed the decision, 'agent' when the LLM chose autonomously (default), or 'collaborative' when it was discussed and agreed together.", ], parameters: Type.Object({ @@ -252,7 +252,7 @@ export function registerDbTools(pi: ExtensionAPI): void { "Use sf_requirement_save when recording a new functional, non-functional, or operational requirement.", "Requirement IDs are auto-assigned (R001, R002, ...) — never guess or provide an ID.", "class, description, why, and source are required. 
All other fields are optional.", - "The tool writes to the DB and regenerates .gsd/REQUIREMENTS.md automatically.", + "The tool writes to the DB and regenerates .sf/REQUIREMENTS.md automatically.", ], parameters: Type.Object({ class: Type.String({ description: "Requirement class (e.g. 'functional', 'non-functional', 'operational')" }), diff --git a/src/resources/extensions/sf/bootstrap/dynamic-tools.ts b/src/resources/extensions/sf/bootstrap/dynamic-tools.ts index 50555ed73..e6753abce 100644 --- a/src/resources/extensions/sf/bootstrap/dynamic-tools.ts +++ b/src/resources/extensions/sf/bootstrap/dynamic-tools.ts @@ -9,34 +9,34 @@ import { setLogBasePath, logWarning } from "../workflow-logger.js"; /** * Resolve the correct DB path for the current working directory. - * If `basePath` is inside a `.gsd/worktrees/<MID>/` directory, returns - * the project root's `.gsd/sf.db` (shared WAL — R012). Otherwise - * returns `<basePath>/.gsd/sf.db`. + * If `basePath` is inside a `.sf/worktrees/<MID>/` directory, returns + * the project root's `.sf/sf.db` (shared WAL — R012). Otherwise + * returns `<basePath>/.sf/sf.db`. */ export function resolveProjectRootDbPath(basePath: string): string { - // Detect worktree: look for `.gsd/worktrees/` in the path segments. - // A worktree path looks like: /project/root/.gsd/worktrees/M001/... - // We need to resolve back to /project/root/.gsd/sf.db - const marker = `${sep}.gsd${sep}worktrees${sep}`; + // Detect worktree: look for `.sf/worktrees/` in the path segments. + // A worktree path looks like: /project/root/.sf/worktrees/M001/... 
+ // We need to resolve back to /project/root/.sf/sf.db + const marker = `${sep}.sf${sep}worktrees${sep}`; const idx = basePath.indexOf(marker); if (idx !== -1) { const projectRoot = basePath.slice(0, idx); - return join(projectRoot, ".gsd", "sf.db"); + return join(projectRoot, ".sf", "sf.db"); } // Also handle forward-slash paths on all platforms - const fwdMarker = "/.gsd/worktrees/"; + const fwdMarker = "/.sf/worktrees/"; const fwdIdx = basePath.indexOf(fwdMarker); if (fwdIdx !== -1) { const projectRoot = basePath.slice(0, fwdIdx); - return join(projectRoot, ".gsd", "sf.db"); + return join(projectRoot, ".sf", "sf.db"); } - // External-state layout: ~/.gsd/projects/<hash>/worktrees/<MID>/... - // Resolve to ~/.gsd/projects/<hash>/sf.db (the canonical project DB) (#2952). + // External-state layout: ~/.sf/projects/<hash>/worktrees/<MID>/... + // Resolve to ~/.sf/projects/<hash>/sf.db (the canonical project DB) (#2952). // Must be checked before the generic symlink-resolved handler: both match - // /.gsd/projects/<hash>/worktrees/ but require different resolution targets. - const extRe = /[/\\]\.gsd[/\\]projects[/\\][a-f0-9]+[/\\]worktrees(?:[/\\]|$)/; + // /.sf/projects/<hash>/worktrees/ but require different resolution targets. + const extRe = /[/\\]\.sf[/\\]projects[/\\][a-f0-9]+[/\\]worktrees(?:[/\\]|$)/; const extMatch = extRe.exec(basePath); if (extMatch) { const matchStr = extMatch[0]; @@ -46,9 +46,9 @@ export function resolveProjectRootDbPath(basePath: string): string { return join(projectStateRoot, "sf.db"); } - // Symlink-resolved layout: /.gsd/projects/<hash>/worktrees/M001/... - // The project root is everything before /.gsd/projects/ (#2517) - const symlinkMarker = `${sep}.gsd${sep}projects${sep}`; + // Symlink-resolved layout: /.sf/projects/<hash>/worktrees/M001/... 
+ // The project root is everything before /.sf/projects/ (#2517) + const symlinkMarker = `${sep}.sf${sep}projects${sep}`; const symlinkIdx = basePath.indexOf(symlinkMarker); if (symlinkIdx !== -1) { const afterProjects = basePath.slice(symlinkIdx + symlinkMarker.length); @@ -56,32 +56,32 @@ export function resolveProjectRootDbPath(basePath: string): string { const worktreeSeg = `${sep}worktrees${sep}`; if (afterProjects.includes(worktreeSeg)) { const projectRoot = basePath.slice(0, symlinkIdx); - return join(projectRoot, ".gsd", "sf.db"); + return join(projectRoot, ".sf", "sf.db"); } } // Forward-slash variant for symlink-resolved layout - const fwdSymlinkMarker = "/.gsd/projects/"; + const fwdSymlinkMarker = "/.sf/projects/"; const fwdSymlinkIdx = basePath.indexOf(fwdSymlinkMarker); if (fwdSymlinkIdx !== -1) { const afterProjects = basePath.slice(fwdSymlinkIdx + fwdSymlinkMarker.length); if (afterProjects.includes("/worktrees/")) { const projectRoot = basePath.slice(0, fwdSymlinkIdx); - return join(projectRoot, ".gsd", "sf.db"); + return join(projectRoot, ".sf", "sf.db"); } } - return join(basePath, ".gsd", "sf.db"); + return join(basePath, ".sf", "sf.db"); } export async function ensureDbOpen(basePath: string = process.cwd()): Promise<boolean> { try { const db = await import("../sf-db.js"); const dbPath = resolveProjectRootDbPath(basePath); - const sfDir = join(basePath, ".gsd"); + const sfDir = join(basePath, ".sf"); - // Derive the project root from the DB path (strip .gsd/sf.db) + // Derive the project root from the DB path (strip .sf/sf.db) const projectRoot = join(dbPath, "..", ".."); // Open existing DB file (may be at project root for worktrees) @@ -91,7 +91,7 @@ export async function ensureDbOpen(basePath: string = process.cwd()): Promise<bo return opened; } - // No DB file — create + migrate from Markdown if .gsd/ has content + // No DB file — create + migrate from Markdown if .sf/ has content if (existsSync(sfDir)) { const hasDecisions = 
existsSync(join(sfDir, "DECISIONS.md")); const hasRequirements = existsSync(join(sfDir, "REQUIREMENTS.md")); @@ -110,13 +110,13 @@ export async function ensureDbOpen(basePath: string = process.cwd()): Promise<bo return opened; } - // .gsd/ exists but has no Markdown content (fresh project) — create empty DB + // .sf/ exists but has no Markdown content (fresh project) — create empty DB const opened = db.openDatabase(dbPath); if (opened) setLogBasePath(projectRoot); return opened; } - logWarning("bootstrap", "ensureDbOpen failed — no .gsd directory found"); + logWarning("bootstrap", "ensureDbOpen failed — no .sf directory found"); return false; } catch (err) { logWarning("bootstrap", `ensureDbOpen failed: ${(err as Error).message ?? String(err)}`); diff --git a/src/resources/extensions/sf/bootstrap/query-tools.ts b/src/resources/extensions/sf/bootstrap/query-tools.ts index a7ee83ebe..5c86ea06e 100644 --- a/src/resources/extensions/sf/bootstrap/query-tools.ts +++ b/src/resources/extensions/sf/bootstrap/query-tools.ts @@ -12,7 +12,7 @@ export function registerQueryTools(pi: ExtensionAPI): void { description: "Read the current status of a milestone and all its slices from the SF database. " + "Returns milestone metadata, per-slice status, and task counts per slice. 
" + - "Use this instead of querying .gsd/sf.db directly via sqlite3 or better-sqlite3.", + "Use this instead of querying .sf/sf.db directly via sqlite3 or better-sqlite3.", promptSnippet: "Get milestone status, slice statuses, and task counts for a given milestoneId", promptGuidelines: [ "Use this tool — not sqlite3 or better-sqlite3 — to inspect milestone or slice state from the DB.", diff --git a/src/resources/extensions/sf/bootstrap/register-hooks.ts b/src/resources/extensions/sf/bootstrap/register-hooks.ts index 822e5fb62..d7caab8f4 100644 --- a/src/resources/extensions/sf/bootstrap/register-hooks.ts +++ b/src/resources/extensions/sf/bootstrap/register-hooks.ts @@ -228,7 +228,7 @@ export function registerHooks(pi: ExtensionAPI): void { // ── Queue-mode execution guard (#2545): block source-code mutations ── // When /sf queue is active, the agent should only create milestones, - // not execute work. Block write/edit to non-.gsd/ paths and bash commands + // not execute work. Block write/edit to non-.sf/ paths and bash commands // that would modify files. if (isQueuePhaseActive()) { let queueInput = ""; diff --git a/src/resources/extensions/sf/bootstrap/register-shortcuts.ts b/src/resources/extensions/sf/bootstrap/register-shortcuts.ts index e29c7fc17..a711c46c4 100644 --- a/src/resources/extensions/sf/bootstrap/register-shortcuts.ts +++ b/src/resources/extensions/sf/bootstrap/register-shortcuts.ts @@ -21,8 +21,8 @@ export function registerShortcuts(pi: ExtensionAPI): void { const openDashboardOverlay = async (ctx: ExtensionContext) => { const basePath = projectRoot(); - if (!existsSync(join(basePath, ".gsd"))) { - ctx.ui.notify("No .gsd/ directory found. Run /sf to start.", "info"); + if (!existsSync(join(basePath, ".sf"))) { + ctx.ui.notify("No .sf/ directory found. 
Run /sf to start.", "info"); return; } await ctx.ui.custom<boolean>( @@ -52,7 +52,7 @@ export function registerShortcuts(pi: ExtensionAPI): void { const openParallelOverlay = async (ctx: ExtensionContext) => { const basePath = projectRoot(); - const parallelDir = join(basePath, ".gsd", "parallel"); + const parallelDir = join(basePath, ".sf", "parallel"); if (!existsSync(parallelDir)) { ctx.ui.notify("No parallel workers found. Run /sf parallel start first.", "info"); return; diff --git a/src/resources/extensions/sf/bootstrap/system-context.ts b/src/resources/extensions/sf/bootstrap/system-context.ts index d1b661c4b..daf6d4026 100644 --- a/src/resources/extensions/sf/bootstrap/system-context.ts +++ b/src/resources/extensions/sf/bootstrap/system-context.ts @@ -22,7 +22,7 @@ import { toPosixPath } from "../../shared/mod.js"; import { markCmuxPromptShown, shouldPromptToEnableCmux } from "../../cmux/index.js"; import { autoEnableCmuxPreferences } from "../commands-cmux.js"; -const sfHome = process.env.SF_HOME || join(homedir(), ".gsd"); +const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); /** * Bundled skill triggers — resolved dynamically at runtime instead of @@ -52,7 +52,7 @@ function buildBundledSkillsTable(): string { function warnDeprecatedAgentInstructions(): void { const paths = [ join(sfHome, "agent-instructions.md"), - join(process.cwd(), ".gsd", "agent-instructions.md"), + join(process.cwd(), ".sf", "agent-instructions.md"), ]; for (const path of paths) { if (existsSync(path)) { @@ -69,7 +69,7 @@ export async function buildBeforeAgentStartResult( event: { prompt: string; systemPrompt: string }, ctx: ExtensionContext, ): Promise<{ systemPrompt: string; message?: { customType: string; content: string; display: false } } | undefined> { - if (!existsSync(join(process.cwd(), ".gsd"))) return undefined; + if (!existsSync(join(process.cwd(), ".sf"))) return undefined; const stopContextTimer = debugTime("context-inject"); const systemContent = 
loadPrompt("system", { @@ -106,7 +106,7 @@ export async function buildBeforeAgentStartResult( const { block: knowledgeBlock, globalSizeKb } = loadKnowledgeBlock(sfHome, process.cwd()); if (globalSizeKb > 4) { ctx.ui.notify( - `SF: ~/.gsd/agent/KNOWLEDGE.md is ${globalSizeKb.toFixed(1)}KB — consider trimming to keep system prompt lean.`, + `SF: ~/.sf/agent/KNOWLEDGE.md is ${globalSizeKb.toFixed(1)}KB — consider trimming to keep system prompt lean.`, "warning", ); } @@ -154,12 +154,12 @@ export async function buildBeforeAgentStartResult( const rawContent = rawCodebase.trim(); if (rawContent) { // Cap injection size to ~2 000 tokens to avoid bloating every request. - // Full map is always available at .gsd/CODEBASE.md. + // Full map is always available at .sf/CODEBASE.md. const MAX_CODEBASE_CHARS = 8_000; const generatedMatch = rawContent.match(/Generated: (\S+)/); const generatedAt = generatedMatch?.[1] ?? "unknown"; const content = rawContent.length > MAX_CODEBASE_CHARS - ? rawContent.slice(0, MAX_CODEBASE_CHARS) + "\n\n*(truncated — see .gsd/CODEBASE.md for full map)*" + ? rawContent.slice(0, MAX_CODEBASE_CHARS) + "\n\n*(truncated — see .sf/CODEBASE.md for full map)*" : rawContent; codebaseBlock = `\n\n[PROJECT CODEBASE — File structure and descriptions (generated ${generatedAt}, auto-refreshed when SF detects tracked file changes; use /sf codebase stats for status)]\n\n${content}`; } @@ -205,7 +205,7 @@ export async function buildBeforeAgentStartResult( } export function loadKnowledgeBlock(sfHomeDir: string, cwd: string): { block: string; globalSizeKb: number } { - // 1. Global knowledge (~/.gsd/agent/KNOWLEDGE.md) — cross-project, user-maintained + // 1. Global knowledge (~/.sf/agent/KNOWLEDGE.md) — cross-project, user-maintained let globalKnowledge = ""; let globalSizeKb = 0; const globalKnowledgePath = join(sfHomeDir, "agent", "KNOWLEDGE.md"); @@ -221,7 +221,7 @@ export function loadKnowledgeBlock(sfHomeDir: string, cwd: string): { block: str } } - // 2. 
Project knowledge (.gsd/KNOWLEDGE.md) — project-specific + // 2. Project knowledge (.sf/KNOWLEDGE.md) — project-specific let projectKnowledge = ""; const knowledgePath = resolveSfRootFile(cwd, "KNOWLEDGE"); if (existsSync(knowledgePath)) { @@ -285,7 +285,7 @@ function buildWorktreeContextBlock(): string { `- Branch: ${autoWorktree.branch}`, "", "All file operations, bash commands, and SF state resolve against the worktree path above.", - "Write every .gsd artifact in the worktree path above, never in the main project tree.", + "Write every .sf artifact in the worktree path above, never in the main project tree.", ].join("\n"); } @@ -524,7 +524,7 @@ export function buildForensicsContextInjection(basePath: string, prompt: string) * is complete or the session expires. */ export function clearForensicsMarker(basePath: string): void { - const markerPath = join(basePath, ".gsd", "runtime", "active-forensics.json"); + const markerPath = join(basePath, ".sf", "runtime", "active-forensics.json"); if (existsSync(markerPath)) { try { unlinkSync(markerPath); diff --git a/src/resources/extensions/sf/bootstrap/write-gate.ts b/src/resources/extensions/sf/bootstrap/write-gate.ts index 1a912c3d3..c3eea2aa3 100644 --- a/src/resources/extensions/sf/bootstrap/write-gate.ts +++ b/src/resources/extensions/sf/bootstrap/write-gate.ts @@ -6,10 +6,10 @@ const CONTEXT_MILESTONE_RE = /(?:^|[/\\])(M\d+(?:-[a-z0-9]{6})?)-CONTEXT\.md$/i; const DEPTH_VERIFICATION_MILESTONE_RE = /depth_verification[_-](M\d+(?:-[a-z0-9]{6})?)/i; /** - * Path segment that identifies .gsd/ planning artifacts. + * Path segment that identifies .sf/ planning artifacts. * Writes to these paths are allowed during queue mode. */ -const SF_DIR_RE = /(^|[/\\])\.gsd([/\\]|$)/; +const SF_DIR_RE = /(^|[/\\])\.sf([/\\]|$)/; /** * Read-only tool names that are always safe during queue mode. @@ -29,7 +29,7 @@ const QUEUE_SAFE_TOOLS = new Set([ * Bash commands that are read-only / investigative — safe during queue mode. 
* Matches the leading command in a bash invocation. */ -const BASH_READ_ONLY_RE = /^\s*(cat|head|tail|less|more|wc|file|stat|du|df|which|type|echo|printf|ls|find|grep|rg|awk|sed\b(?!.*-i)|sort|uniq|diff|comm|tr|cut|tee\s+-a\s+\/dev\/null|git\s+(log|show|diff|status|branch|tag|remote|rev-parse|ls-files|blame|shortlog|describe|stash\s+list|config\s+--get|cat-file)|gh\s+(issue|pr|api|repo|release)\s+(view|list|diff|status|checks)|mkdir\s+-p\s+\.gsd|rtk\s)/; +const BASH_READ_ONLY_RE = /^\s*(cat|head|tail|less|more|wc|file|stat|du|df|which|type|echo|printf|ls|find|grep|rg|awk|sed\b(?!.*-i)|sort|uniq|diff|comm|tr|cut|tee\s+-a\s+\/dev\/null|git\s+(log|show|diff|status|branch|tag|remote|rev-parse|ls-files|blame|shortlog|describe|stash\s+list|config\s+--get|cat-file)|gh\s+(issue|pr|api|repo|release)\s+(view|list|diff|status|checks)|mkdir\s+-p\s+\.sf|rtk\s)/; const verifiedDepthMilestones = new Set<string>(); let activeQueuePhase = false; @@ -75,7 +75,7 @@ function shouldPersistWriteGateSnapshot(env: NodeJS.ProcessEnv = process.env): b } function writeGateSnapshotPath(basePath: string = process.cwd()): string { - return join(basePath, ".gsd", "runtime", "write-gate-state.json"); + return join(basePath, ".sf", "runtime", "write-gate-state.json"); } function currentWriteGateSnapshot(): WriteGateSnapshot { @@ -89,7 +89,7 @@ function currentWriteGateSnapshot(): WriteGateSnapshot { function persistWriteGateSnapshot(basePath: string = process.cwd()): void { if (!shouldPersistWriteGateSnapshot()) return; const path = writeGateSnapshotPath(basePath); - mkdirSync(join(basePath, ".gsd", "runtime"), { recursive: true }); + mkdirSync(join(basePath, ".sf", "runtime"), { recursive: true }); const tempPath = `${path}.tmp`; writeFileSync(tempPath, JSON.stringify(currentWriteGateSnapshot(), null, 2), "utf-8"); renameSync(tempPath, path); @@ -409,7 +409,7 @@ export function shouldBlockContextArtifactSaveInSnapshot( * When the queue phase is active, the agent should only create planning * 
artifacts (milestones, CONTEXT.md, QUEUE.md, etc.) — never execute work. * This function blocks write/edit/bash tool calls that would modify source - * code outside of .gsd/. + * code outside of .sf/. * * @param toolName The tool being called (write, edit, bash, etc.) * @param input For write/edit: the file path. For bash: the command string. @@ -435,7 +435,7 @@ export function shouldBlockQueueExecutionInSnapshot( // Always-safe tools (read-only, discussion, planning) if (QUEUE_SAFE_TOOLS.has(toolName)) return { block: false }; - // write/edit — allow if targeting .gsd/ planning artifacts + // write/edit — allow if targeting .sf/ planning artifacts if (toolName === "write" || toolName === "edit") { if (SF_DIR_RE.test(input)) return { block: false }; return { diff --git a/src/resources/extensions/sf/cache.ts b/src/resources/extensions/sf/cache.ts index 40c52c664..15efedea1 100644 --- a/src/resources/extensions/sf/cache.ts +++ b/src/resources/extensions/sf/cache.ts @@ -5,7 +5,7 @@ // 2. Path cache (paths.ts) — directory listing results (readdirSync) // 3. Parse cache (files.ts) — parsed markdown file results // -// After any file write that changes .gsd/ contents, all three must be +// After any file write that changes .sf/ contents, all three must be // invalidated together to prevent stale reads. This module provides a // single function that clears all three atomically. @@ -18,7 +18,7 @@ import { clearArtifacts } from './sf-db.js'; * Invalidate all SF runtime caches in one call. * * Call this after file writes, milestone transitions, merge reconciliation, - * or any operation that changes .gsd/ contents on disk. Forgetting to clear + * or any operation that changes .sf/ contents on disk. Forgetting to clear * any single cache causes stale reads (see #431, #793). 
*/ export function invalidateAllCaches(): void { diff --git a/src/resources/extensions/sf/captures.ts b/src/resources/extensions/sf/captures.ts index bb1f17c9b..3ffc19af3 100644 --- a/src/resources/extensions/sf/captures.ts +++ b/src/resources/extensions/sf/captures.ts @@ -1,11 +1,11 @@ /** * SF Captures — Fire-and-forget thought capture with triage classification * - * Append-only capture file at `.gsd/CAPTURES.md`. Each capture is an H3 section + * Append-only capture file at `.sf/CAPTURES.md`. Each capture is an H3 section * with bold metadata fields, parseable by the same patterns used in files.ts. * * Worktree-aware: captures always resolve to the original project root's - * `.gsd/CAPTURES.md`, not the worktree's local `.gsd/`. + * `.sf/CAPTURES.md`, not the worktree's local `.sf/`. */ import { existsSync, readFileSync, writeFileSync, mkdirSync } from "node:fs"; @@ -50,23 +50,23 @@ const VALID_CLASSIFICATIONS: readonly string[] = [ /** * Resolve the path to CAPTURES.md, aware of worktree context. * - * In worktree-isolated mode, basePath is `.gsd/worktrees/<MID>/`. - * Captures must resolve to the *original* project root's `.gsd/CAPTURES.md`, - * not the worktree-local `.gsd/`. This ensures all captures go to one file + * In worktree-isolated mode, basePath is `.sf/worktrees/<MID>/`. + * Captures must resolve to the *original* project root's `.sf/CAPTURES.md`, + * not the worktree-local `.sf/`. This ensures all captures go to one file * regardless of which worktree the agent is running in. * - * Detection: if basePath contains `/.gsd/worktrees/`, walk up to the - * directory that contains `.gsd/worktrees/` — that's the project root. + * Detection: if basePath contains `/.sf/worktrees/`, walk up to the + * directory that contains `.sf/worktrees/` — that's the project root. 
*/ export function resolveCapturesPath(basePath: string): string { const resolved = resolve(basePath); - // Direct layout: /.gsd/worktrees/ - const worktreeMarker = `${sep}.gsd${sep}worktrees${sep}`; + // Direct layout: /.sf/worktrees/ + const worktreeMarker = `${sep}.sf${sep}worktrees${sep}`; let idx = resolved.indexOf(worktreeMarker); if (idx === -1) { - // Symlink-resolved layout: /.gsd/projects/<hash>/worktrees/ + // Symlink-resolved layout: /.sf/projects/<hash>/worktrees/ const symlinkRe = new RegExp( - `\\${sep}\\.gsd\\${sep}projects\\${sep}[a-f0-9]+\\${sep}worktrees\\${sep}`, + `\\${sep}\\.sf\\${sep}projects\\${sep}[a-f0-9]+\\${sep}worktrees\\${sep}`, ); const match = resolved.match(symlinkRe); if (match && match.index !== undefined) idx = match.index; @@ -74,7 +74,7 @@ export function resolveCapturesPath(basePath: string): string { if (idx !== -1) { // basePath is inside a worktree — resolve to project root const projectRoot = resolved.slice(0, idx); - return join(projectRoot, ".gsd", CAPTURES_FILENAME); + return join(projectRoot, ".sf", CAPTURES_FILENAME); } return join(sfRoot(basePath), CAPTURES_FILENAME); } @@ -83,7 +83,7 @@ export function resolveCapturesPath(basePath: string): string { /** * Append a new capture entry to CAPTURES.md. - * Creates `.gsd/` and the file if they don't exist. + * Creates `.sf/` and the file if they don't exist. * Returns the generated capture ID. */ export function appendCapture(basePath: string, text: string): string { diff --git a/src/resources/extensions/sf/codebase-generator.ts b/src/resources/extensions/sf/codebase-generator.ts index 9716ee89d..820cc9b2d 100644 --- a/src/resources/extensions/sf/codebase-generator.ts +++ b/src/resources/extensions/sf/codebase-generator.ts @@ -1,7 +1,7 @@ /** * SF Codebase Map Generator * - * Produces .gsd/CODEBASE.md — a structural table of contents for the project. + * Produces .sf/CODEBASE.md — a structural table of contents for the project. 
* Gives fresh agent contexts instant orientation without filesystem exploration. * * Generation: walk `git ls-files`, group by directory, output with descriptions. @@ -73,7 +73,7 @@ interface EnumeratedFiles { const DEFAULT_EXCLUDES = [ // ── AI / tooling meta ── ".agents/", - ".gsd/", + ".sf/", ".planning/", ".plans/", ".claude/", @@ -567,7 +567,7 @@ export function ensureCodebaseMapFresh( } /** - * Write CODEBASE.md to .gsd/ directory. + * Write CODEBASE.md to .sf/ directory. */ export function writeCodebaseMap(basePath: string, content: string): string { const root = sfRoot(basePath); diff --git a/src/resources/extensions/sf/commands-backlog.ts b/src/resources/extensions/sf/commands-backlog.ts index 338a3fbd8..e46edf25c 100644 --- a/src/resources/extensions/sf/commands-backlog.ts +++ b/src/resources/extensions/sf/commands-backlog.ts @@ -2,7 +2,7 @@ * SF Command — /sf backlog * * Structured backlog management with 999.x numbering. - * Items stored in .gsd/BACKLOG.md as markdown checklist. + * Items stored in .sf/BACKLOG.md as markdown checklist. * Items can be promoted to active slices via add-slice. 
*/ diff --git a/src/resources/extensions/sf/commands-bootstrap.ts b/src/resources/extensions/sf/commands-bootstrap.ts index afe8b8e99..1240294d9 100644 --- a/src/resources/extensions/sf/commands-bootstrap.ts +++ b/src/resources/extensions/sf/commands-bootstrap.ts @@ -32,7 +32,7 @@ const TOP_LEVEL_SUBCOMMANDS = [ { cmd: "forensics", desc: "Examine execution logs" }, { cmd: "init", desc: "Project init wizard" }, { cmd: "setup", desc: "Global setup status and configuration" }, - { cmd: "migrate", desc: "Migrate a v1 .planning directory to .gsd format" }, + { cmd: "migrate", desc: "Migrate a v1 .planning directory to .sf format" }, { cmd: "remote", desc: "Control remote auto-mode" }, { cmd: "steer", desc: "Hard-steer plan documents during execution" }, { cmd: "inspect", desc: "Show SQLite DB diagnostics" }, diff --git a/src/resources/extensions/sf/commands-cmux.ts b/src/resources/extensions/sf/commands-cmux.ts index 52edc08ef..d392d677d 100644 --- a/src/resources/extensions/sf/commands-cmux.ts +++ b/src/resources/extensions/sf/commands-cmux.ts @@ -32,7 +32,7 @@ export function autoEnableCmuxPreferences(): boolean { prefs.version = prefs.version || 1; const frontmatter = serializePreferencesToFrontmatter(prefs); - let body = "\n# SF Skill Preferences\n\nSee `~/.gsd/agent/extensions/sf/docs/preferences-reference.md` for full field documentation and examples.\n"; + let body = "\n# SF Skill Preferences\n\nSee `~/.sf/agent/extensions/sf/docs/preferences-reference.md` for full field documentation and examples.\n"; const preserved = extractBodyAfterFrontmatter(readFileSync(path, "utf-8")); if (preserved) body = preserved; @@ -62,7 +62,7 @@ async function writeProjectCmuxPreferences( prefs.version = prefs.version || 1; const frontmatter = serializePreferencesToFrontmatter(prefs); - let body = "\n# SF Skill Preferences\n\nSee `~/.gsd/agent/extensions/sf/docs/preferences-reference.md` for full field documentation and examples.\n"; + let body = "\n# SF Skill Preferences\n\nSee 
`~/.sf/agent/extensions/sf/docs/preferences-reference.md` for full field documentation and examples.\n"; if (existsSync(path)) { const preserved = extractBodyAfterFrontmatter(readFileSync(path, "utf-8")); if (preserved) body = preserved; diff --git a/src/resources/extensions/sf/commands-codebase.ts b/src/resources/extensions/sf/commands-codebase.ts index b90286d0a..e31e178a0 100644 --- a/src/resources/extensions/sf/commands-codebase.ts +++ b/src/resources/extensions/sf/commands-codebase.ts @@ -1,7 +1,7 @@ /** * SF Command — /sf codebase * - * Generate and manage the codebase map (.gsd/CODEBASE.md). + * Generate and manage the codebase map (.sf/CODEBASE.md). * Subcommands: generate, update, stats, help */ diff --git a/src/resources/extensions/sf/commands-config.ts b/src/resources/extensions/sf/commands-config.ts index c0e35edd1..c6695df95 100644 --- a/src/resources/extensions/sf/commands-config.ts +++ b/src/resources/extensions/sf/commands-config.ts @@ -34,7 +34,7 @@ function getStoredToolKey(auth: AuthStorage, providerId: string): string | undef */ export function loadToolApiKeys(): void { try { - const authPath = join(process.env.HOME ?? "", ".gsd", "agent", "auth.json"); + const authPath = join(process.env.HOME ?? "", ".sf", "agent", "auth.json"); if (!existsSync(authPath)) return; const auth = AuthStorage.create(authPath); @@ -50,7 +50,7 @@ export function loadToolApiKeys(): void { } export function getConfigAuthStorage(): AuthStorage { - const authPath = join(process.env.HOME ?? "", ".gsd", "agent", "auth.json"); + const authPath = join(process.env.HOME ?? 
"", ".sf", "agent", "auth.json"); mkdirSync(dirname(authPath), { recursive: true }); return AuthStorage.create(authPath); } diff --git a/src/resources/extensions/sf/commands-extensions.ts b/src/resources/extensions/sf/commands-extensions.ts index ab848daf1..eb56d00c4 100644 --- a/src/resources/extensions/sf/commands-extensions.ts +++ b/src/resources/extensions/sf/commands-extensions.ts @@ -3,7 +3,7 @@ * * Manage the extension registry: list, enable, disable, info. * Self-contained — no imports outside the extensions tree (extensions are loaded - * via jiti at runtime from ~/.gsd/agent/, not compiled by tsc). + * via jiti at runtime from ~/.sf/agent/, not compiled by tsc). */ import type { ExtensionCommandContext } from "@sf-run/pi-coding-agent"; @@ -11,7 +11,7 @@ import { existsSync, mkdirSync, readFileSync, readdirSync, renameSync, writeFile import { dirname, join } from "node:path"; import { homedir } from "node:os"; -const sfHome = process.env.SF_HOME || join(homedir(), ".gsd"); +const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); // ─── Types (mirrored from extension-registry.ts) ──────────────────────────── diff --git a/src/resources/extensions/sf/commands-handlers.ts b/src/resources/extensions/sf/commands-handlers.ts index 554884e3d..69e2ae5ec 100644 --- a/src/resources/extensions/sf/commands-handlers.ts +++ b/src/resources/extensions/sf/commands-handlers.ts @@ -51,7 +51,7 @@ async function fetchLatestVersionForCommand(): Promise<string | null> { } export function dispatchDoctorHeal(pi: ExtensionAPI, scope: string | undefined, reportText: string, structuredIssues: string): void { - const workflowPath = process.env.SF_WORKFLOW_PATH ?? join(process.env.HOME ?? "~", ".gsd", "agent", "SF-WORKFLOW.md"); + const workflowPath = process.env.SF_WORKFLOW_PATH ?? join(process.env.HOME ?? 
"~", ".sf", "agent", "SF-WORKFLOW.md"); const workflow = readFileSync(workflowPath, "utf-8"); const prompt = loadPrompt("doctor-heal", { doctorSummary: reportText, @@ -187,7 +187,7 @@ export async function handleCapture(args: string, ctx: ExtensionCommandContext): const basePath = process.cwd(); - // Ensure .gsd/ exists — capture should work even without a milestone + // Ensure .sf/ exists — capture should work even without a milestone const sfDir = sfRoot(basePath); if (!existsSync(sfDir)) { mkdirSync(sfDir, { recursive: true }); @@ -238,7 +238,7 @@ export async function handleTriage(ctx: ExtensionCommandContext, pi: ExtensionAP roadmapContext: roadmapContext || "(no active roadmap)", }); - const workflowPath = process.env.SF_WORKFLOW_PATH ?? join(process.env.HOME ?? "~", ".gsd", "agent", "SF-WORKFLOW.md"); + const workflowPath = process.env.SF_WORKFLOW_PATH ?? join(process.env.HOME ?? "~", ".sf", "agent", "SF-WORKFLOW.md"); const workflow = readFileSync(workflowPath, "utf-8"); pi.sendMessage( @@ -270,7 +270,7 @@ export async function handleSteer(change: string, ctx: ExtensionCommandContext, const targetPath = wtPath ?? basePath; await appendOverride(targetPath, change, appliedAt); - const overrideLoc = wtPath ? "worktree `.gsd/OVERRIDES.md`" : "`.gsd/OVERRIDES.md`"; + const overrideLoc = wtPath ? 
"worktree `.sf/OVERRIDES.md`" : "`.sf/OVERRIDES.md`"; if (isAutoActive()) { pi.sendMessage({ diff --git a/src/resources/extensions/sf/commands-logs.ts b/src/resources/extensions/sf/commands-logs.ts index 09bc650c7..d8c10170e 100644 --- a/src/resources/extensions/sf/commands-logs.ts +++ b/src/resources/extensions/sf/commands-logs.ts @@ -295,7 +295,7 @@ async function handleLogsList(basePath: string, ctx: ExtensionCommandContext): P const lines: string[] = []; if (activities.length > 0) { - lines.push("Activity Logs (.gsd/activity/):"); + lines.push("Activity Logs (.sf/activity/):"); lines.push(" # Unit Type Unit ID Size Age"); lines.push(" " + "─".repeat(70)); @@ -319,7 +319,7 @@ async function handleLogsList(basePath: string, ctx: ExtensionCommandContext): P if (debugLogs.length > 0) { lines.push(""); - lines.push("Debug Logs (.gsd/debug/):"); + lines.push("Debug Logs (.sf/debug/):"); for (let i = 0; i < debugLogs.length; i++) { const d = debugLogs[i]; const size = formatSize(d.size).padStart(7, " "); @@ -424,7 +424,7 @@ async function handleLogsDebug(basePath: string, ctx: ExtensionCommandContext, i if (idx === undefined) { // List debug logs - const lines: string[] = ["Debug Logs (.gsd/debug/):", ""]; + const lines: string[] = ["Debug Logs (.sf/debug/):", ""]; for (let i = 0; i < debugLogs.length; i++) { const d = debugLogs[i]; lines.push(` ${i + 1}. 
${d.filename} ${formatSize(d.size)} ${formatAge(d.mtime)}`); diff --git a/src/resources/extensions/sf/commands-maintenance.ts b/src/resources/extensions/sf/commands-maintenance.ts index 8a46a6d4b..3a41b5ebc 100644 --- a/src/resources/extensions/sf/commands-maintenance.ts +++ b/src/resources/extensions/sf/commands-maintenance.ts @@ -247,7 +247,7 @@ export async function handleSkip(unitArg: string, ctx: ExtensionCommandContext, const { existsSync: fileExists, writeFileSync: writeFile, mkdirSync: mkDir, readFileSync: readFile } = await import("node:fs"); const { join: pathJoin } = await import("node:path"); - const completedKeysFile = pathJoin(basePath, ".gsd", "completed-units.json"); + const completedKeysFile = pathJoin(basePath, ".sf", "completed-units.json"); let keys: string[] = []; try { if (fileExists(completedKeysFile)) { @@ -278,7 +278,7 @@ export async function handleSkip(unitArg: string, ctx: ExtensionCommandContext, } keys.push(skipKey); - mkDir(pathJoin(basePath, ".gsd"), { recursive: true }); + mkDir(pathJoin(basePath, ".sf"), { recursive: true }); writeFile(completedKeysFile, JSON.stringify(keys), "utf-8"); ctx.ui.notify(`Skipped: ${skipKey}. 
Will not be dispatched in auto-mode.`, "success"); diff --git a/src/resources/extensions/sf/commands-mcp-status.ts b/src/resources/extensions/sf/commands-mcp-status.ts index 07b3d81fb..01383eb95 100644 --- a/src/resources/extensions/sf/commands-mcp-status.ts +++ b/src/resources/extensions/sf/commands-mcp-status.ts @@ -68,7 +68,7 @@ function readMcpConfigs(): McpServerRawConfig[] { const seen = new Set<string>(); const configPaths = [ join(process.cwd(), ".mcp.json"), - join(process.cwd(), ".gsd", "mcp.json"), + join(process.cwd(), ".sf", "mcp.json"), ]; for (const configPath of configPaths) { @@ -118,7 +118,7 @@ export function formatMcpStatusReport(servers: McpServerStatus[]): string { return [ "No MCP servers configured.", "", - "Add servers to .mcp.json or .gsd/mcp.json to enable MCP integrations.", + "Add servers to .mcp.json or .sf/mcp.json to enable MCP integrations.", "Tip: run /sf mcp init . to write the local SF workflow MCP config.", "See: https://modelcontextprotocol.io/quickstart", ].join("\n"); diff --git a/src/resources/extensions/sf/commands-pr-branch.ts b/src/resources/extensions/sf/commands-pr-branch.ts index bbb84843a..a11e12e89 100644 --- a/src/resources/extensions/sf/commands-pr-branch.ts +++ b/src/resources/extensions/sf/commands-pr-branch.ts @@ -2,7 +2,7 @@ * SF Command — /sf pr-branch * * Creates a clean PR branch by cherry-picking commits while stripping - * any changes to .gsd/, .planning/, and PLAN.md paths. Useful for + * any changes to .sf/, .planning/, and PLAN.md paths. Useful for * upstream PRs where planning artifacts should not be included. 
*/ @@ -16,7 +16,7 @@ import { nativeBranchExists, } from "./native-git-bridge.js"; -const EXCLUDED_PATHS = [".gsd", ".planning", "PLAN.md"] as const; +const EXCLUDED_PATHS = [".sf", ".planning", "PLAN.md"] as const; function git(basePath: string, args: readonly string[]): string { return execFileSync("git", args, { cwd: basePath, encoding: "utf-8" }).trim(); @@ -63,7 +63,7 @@ function getCodeOnlyCommits(basePath: string, base: string, head: string): strin .split("\n") .filter(Boolean); const hasCodeChanges = files.some( - (f) => !f.startsWith(".gsd/") && !f.startsWith(".planning/") && f !== "PLAN.md", + (f) => !f.startsWith(".sf/") && !f.startsWith(".planning/") && f !== "PLAN.md", ); if (hasCodeChanges) { codeCommits.push(sha); @@ -111,7 +111,7 @@ function assertNoExcludedPaths(basePath: string, base: string): void { .split("\n") .filter(Boolean); const leaked = files.filter( - (f) => f.startsWith(".gsd/") || f.startsWith(".planning/") || f === "PLAN.md", + (f) => f.startsWith(".sf/") || f.startsWith(".planning/") || f === "PLAN.md", ); if (leaked.length > 0) { throw new Error( @@ -146,12 +146,12 @@ export async function handlePrBranch( const commits = getCodeOnlyCommits(basePath, baseRef, "HEAD"); if (commits.length === 0) { - ctx.ui.notify("No code-only commits found (all commits only touch .gsd/ files).", "info"); + ctx.ui.notify("No code-only commits found (all commits only touch .sf/ files).", "info"); return; } if (dryRun) { - const lines = [`Would create PR branch with ${commits.length} commits (filtering .gsd/ paths):\n`]; + const lines = [`Would create PR branch with ${commits.length} commits (filtering .sf/ paths):\n`]; for (const sha of commits) { const msg = git(basePath, ["log", "--format=%s", "-1", sha]); lines.push(` ${sha.slice(0, 8)} ${msg}`); @@ -220,7 +220,7 @@ export async function handlePrBranch( const skippedMsg = skipped > 0 ? 
` (${skipped} skipped — contained only planning artifacts)` : ""; ctx.ui.notify( - `Created ${prBranch} with ${picked} commits${skippedMsg} (no .gsd/ artifacts).\nSwitch back: git checkout ${currentBranch}`, + `Created ${prBranch} with ${picked} commits${skippedMsg} (no .sf/ artifacts).\nSwitch back: git checkout ${currentBranch}`, "success", ); } catch (err) { diff --git a/src/resources/extensions/sf/commands-prefs-wizard.ts b/src/resources/extensions/sf/commands-prefs-wizard.ts index 545dec120..ee1a5f436 100644 --- a/src/resources/extensions/sf/commands-prefs-wizard.ts +++ b/src/resources/extensions/sf/commands-prefs-wizard.ts @@ -123,7 +123,7 @@ export async function handleImportClaude(ctx: ExtensionCommandContext, scope: "g const writePrefs = async (prefs: Record<string, unknown>): Promise<void> => { prefs.version = prefs.version || 1; const frontmatter = serializePreferencesToFrontmatter(prefs); - let body = "\n# SF Skill Preferences\n\nSee `~/.gsd/agent/extensions/sf/docs/preferences-reference.md` for full field documentation and examples.\n"; + let body = "\n# SF Skill Preferences\n\nSee `~/.sf/agent/extensions/sf/docs/preferences-reference.md` for full field documentation and examples.\n"; if (existsSync(path)) { const preserved = extractBodyAfterFrontmatter(readFileSync(path, "utf-8")); if (preserved) body = preserved; @@ -145,7 +145,7 @@ export async function handlePrefsMode(ctx: ExtensionCommandContext, scope: "glob prefs.version = prefs.version || 1; const frontmatter = serializePreferencesToFrontmatter(prefs); - let body = "\n# SF Skill Preferences\n\nSee `~/.gsd/agent/extensions/sf/docs/preferences-reference.md` for full field documentation and examples.\n"; + let body = "\n# SF Skill Preferences\n\nSee `~/.sf/agent/extensions/sf/docs/preferences-reference.md` for full field documentation and examples.\n"; if (existsSync(path)) { const preserved = extractBodyAfterFrontmatter(readFileSync(path, "utf-8")); if (preserved) body = preserved; @@ -739,7 
+739,7 @@ export async function handlePrefsWizard( const frontmatter = serializePreferencesToFrontmatter(prefs); // Preserve existing body content (everything after closing ---) - let body = "\n# SF Skill Preferences\n\nSee `~/.gsd/agent/extensions/sf/docs/preferences-reference.md` for full field documentation and examples.\n"; + let body = "\n# SF Skill Preferences\n\nSee `~/.sf/agent/extensions/sf/docs/preferences-reference.md` for full field documentation and examples.\n"; if (existsSync(path)) { const preserved = extractBodyAfterFrontmatter(readFileSync(path, "utf-8")); if (preserved) body = preserved; diff --git a/src/resources/extensions/sf/commands/catalog.ts b/src/resources/extensions/sf/commands/catalog.ts index 62805361d..7229bb33a 100644 --- a/src/resources/extensions/sf/commands/catalog.ts +++ b/src/resources/extensions/sf/commands/catalog.ts @@ -5,7 +5,7 @@ import { join } from "node:path"; import { loadRegistry } from "../workflow-templates.js"; import { resolveProjectRoot } from "../worktree.js"; -const sfHome = process.env.SF_HOME || join(homedir(), ".gsd"); +const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); export interface GsdCommandDefinition { cmd: string; @@ -53,9 +53,9 @@ export const TOP_LEVEL_SUBCOMMANDS: readonly GsdCommandDefinition[] = [ { cmd: "doctor", desc: "Runtime health checks with auto-fix" }, { cmd: "logs", desc: "Browse activity logs, debug logs, and metrics" }, { cmd: "forensics", desc: "Examine execution logs" }, - { cmd: "init", desc: "Project init wizard — detect, configure, bootstrap .gsd/" }, + { cmd: "init", desc: "Project init wizard — detect, configure, bootstrap .sf/" }, { cmd: "setup", desc: "Global setup status and configuration" }, - { cmd: "migrate", desc: "Migrate a v1 .planning directory to .gsd format" }, + { cmd: "migrate", desc: "Migrate a v1 .planning directory to .sf format" }, { cmd: "remote", desc: "Control remote auto-mode" }, { cmd: "steer", desc: "Hard-steer plan documents during execution" 
}, { cmd: "inspect", desc: "Show SQLite DB diagnostics" }, @@ -73,12 +73,12 @@ export const TOP_LEVEL_SUBCOMMANDS: readonly GsdCommandDefinition[] = [ { cmd: "mcp", desc: "MCP server status, connectivity, and local config bootstrap (status, check, init)" }, { cmd: "rethink", desc: "Conversational project reorganization — reorder, park, discard, add milestones" }, { cmd: "workflow", desc: "Custom workflow lifecycle (new, run, list, validate, pause, resume)" }, - { cmd: "codebase", desc: "Generate, refresh, and inspect the codebase map cache (.gsd/CODEBASE.md)" }, + { cmd: "codebase", desc: "Generate, refresh, and inspect the codebase map cache (.sf/CODEBASE.md)" }, { cmd: "ship", desc: "Create PR from milestone artifacts and open for review" }, { cmd: "do", desc: "Route freeform text to the right SF command" }, { cmd: "session-report", desc: "Session cost, tokens, and work summary" }, { cmd: "backlog", desc: "Manage backlog items (add, promote, remove, list)" }, - { cmd: "pr-branch", desc: "Create clean PR branch filtering .gsd/ commits" }, + { cmd: "pr-branch", desc: "Create clean PR branch filtering .sf/ commits" }, { cmd: "add-tests", desc: "Generate tests for completed slices" }, ]; @@ -168,7 +168,7 @@ const NESTED_COMPLETIONS: CompletionMap = { { cmd: "branches", desc: "Remove merged milestone and legacy branches" }, { cmd: "snapshots", desc: "Remove old execution snapshots" }, { cmd: "worktrees", desc: "Remove merged/safe-to-delete worktrees" }, - { cmd: "projects", desc: "Audit orphaned ~/.gsd/projects/ state directories" }, + { cmd: "projects", desc: "Audit orphaned ~/.sf/projects/ state directories" }, { cmd: "projects --fix", desc: "Delete orphaned project state directories (cannot be undone)" }, ], knowledge: [ @@ -258,7 +258,7 @@ const NESTED_COMPLETIONS: CompletionMap = { ], "session-report": [ { cmd: "--json", desc: "Machine-readable JSON output" }, - { cmd: "--save", desc: "Save report to .gsd/reports/" }, + { cmd: "--save", desc: "Save report to 
.sf/reports/" }, ], backlog: [ { cmd: "add", desc: "Add item to backlog" }, @@ -375,7 +375,7 @@ export function getGsdArgumentCompletions(prefix: string) { // Workflow definition-name completion for `workflow run <name>` and `workflow validate <name>` if (command === "workflow" && (subcommand === "run" || subcommand === "validate") && parts.length <= 3) { try { - const defsDir = join(resolveProjectRoot(process.cwd()), ".gsd", "workflow-defs"); + const defsDir = join(resolveProjectRoot(process.cwd()), ".sf", "workflow-defs"); if (existsSync(defsDir)) { return readdirSync(defsDir) .filter((f) => f.endsWith(".yaml") && f.startsWith(third)) diff --git a/src/resources/extensions/sf/commands/handlers/core.ts b/src/resources/extensions/sf/commands/handlers/core.ts index 124a67350..e09486cd5 100644 --- a/src/resources/extensions/sf/commands/handlers/core.ts +++ b/src/resources/extensions/sf/commands/handlers/core.ts @@ -41,7 +41,7 @@ export function showHelp(ctx: ExtensionCommandContext, args = ""): void { " /sf setup Global setup status [llm|search|remote|keys|prefs]", " /sf model Switch active session model", " /sf prefs Manage preferences", - " /sf doctor Diagnose and repair .gsd/ state", + " /sf doctor Diagnose and repair .sf/ state", "", "Use /sf help full for the complete command reference.", ]; @@ -83,7 +83,7 @@ export function showHelp(ctx: ExtensionCommandContext, args = ""): void { " /sf codebase [generate|update|stats] Manage the CODEBASE.md cache used in prompt context", "", "SETUP & CONFIGURATION", - " /sf init Project init wizard — detect, configure, bootstrap .gsd/", + " /sf init Project init wizard — detect, configure, bootstrap .sf/", " /sf setup Global setup status [llm|search|remote|keys|prefs]", " /sf model Switch active session model [provider/model|model-id]", " /sf mode Set workflow mode (solo/team) [global|project]", @@ -98,10 +98,10 @@ export function showHelp(ctx: ExtensionCommandContext, args = ""): void { " /sf mcp MCP server status and 
connectivity [status|check <server>|init [dir]]", "", "MAINTENANCE", - " /sf doctor Diagnose and repair .gsd/ state [audit|fix|heal] [scope]", + " /sf doctor Diagnose and repair .sf/ state [audit|fix|heal] [scope]", " /sf export Export milestone/slice results [--json|--markdown|--html] [--all]", " /sf cleanup Remove merged branches or snapshots [branches|snapshots]", - " /sf migrate Migrate .planning/ (v1) to .gsd/ (v2) format", + " /sf migrate Migrate .planning/ (v1) to .sf/ (v2) format", " /sf remote Control remote auto-mode [slack|discord|status|disconnect]", " /sf inspect Show SQLite DB diagnostics (schema, row counts, recent entries)", " /sf update Update SF to the latest version via npm", diff --git a/src/resources/extensions/sf/commands/handlers/workflow.ts b/src/resources/extensions/sf/commands/handlers/workflow.ts index b244f59dd..19342a943 100644 --- a/src/resources/extensions/sf/commands/handlers/workflow.ts +++ b/src/resources/extensions/sf/commands/handlers/workflow.ts @@ -166,7 +166,7 @@ async function handleCustomWorkflow( return true; } const base = projectRoot(); - const defPath = join(base, ".gsd", "workflow-defs", `${defName}.yaml`); + const defPath = join(base, ".sf", "workflow-defs", `${defName}.yaml`); if (!existsSync(defPath)) { ctx.ui.notify(`Definition not found: ${defPath}`, "error"); return true; diff --git a/src/resources/extensions/sf/db-writer.ts b/src/resources/extensions/sf/db-writer.ts index 67c96278f..1ecdb9348 100644 --- a/src/resources/extensions/sf/db-writer.ts +++ b/src/resources/extensions/sf/db-writer.ts @@ -666,8 +666,8 @@ export interface SaveArtifactOpts { /** * Save an artifact to DB and write the corresponding markdown file to disk. - * The path is relative to .gsd/ (e.g. "milestones/M001/slices/S06/tasks/T01-SUMMARY.md"). - * The full file path is computed as basePath + '.gsd/' + path. + * The path is relative to .sf/ (e.g. "milestones/M001/slices/S06/tasks/T01-SUMMARY.md"). 
+ * The full file path is computed as basePath + '.sf/' + path. */ export async function saveArtifactToDb( opts: SaveArtifactOpts, @@ -677,10 +677,10 @@ export async function saveArtifactToDb( const db = await import('./sf-db.js'); // Guard against path traversal before any reads/writes - const sfDir = resolve(basePath, '.gsd'); - const fullPath = resolve(basePath, '.gsd', opts.path); + const sfDir = resolve(basePath, '.sf'); + const fullPath = resolve(basePath, '.sf', opts.path); if (!fullPath.startsWith(sfDir)) { - throw new SFError(SF_IO_ERROR, `saveArtifactToDb: path escapes .gsd/ directory: ${opts.path}`); + throw new SFError(SF_IO_ERROR, `saveArtifactToDb: path escapes .sf/ directory: ${opts.path}`); } // Shrinkage guard: if the file already exists and the new content is diff --git a/src/resources/extensions/sf/definition-loader.ts b/src/resources/extensions/sf/definition-loader.ts index a3cce2528..936eb5d7d 100644 --- a/src/resources/extensions/sf/definition-loader.ts +++ b/src/resources/extensions/sf/definition-loader.ts @@ -1,7 +1,7 @@ /** * definition-loader.ts — Parse and validate V1 YAML workflow definitions. * - * Loads definition YAML files from `.gsd/workflow-defs/`, validates the + * Loads definition YAML files from `.sf/workflow-defs/`, validates the * V1 schema shape, and returns typed TypeScript objects. Pure functions * with no engine or runtime dependencies — just `yaml` and `node:fs`. 
* diff --git a/src/resources/extensions/sf/detection.ts b/src/resources/extensions/sf/detection.ts index 8a0e20709..8c24d6243 100644 --- a/src/resources/extensions/sf/detection.ts +++ b/src/resources/extensions/sf/detection.ts @@ -11,7 +11,7 @@ import { join } from "node:path"; import { homedir } from "node:os"; import { sfRoot } from "./paths.js"; -const sfHome = process.env.SF_HOME || join(homedir(), ".gsd"); +const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); // ─── Types ────────────────────────────────────────────────────────────────────── @@ -22,7 +22,7 @@ export interface ProjectDetection { /** Is this the first time SF has been used on this machine? */ isFirstEverLaunch: boolean; - /** Does ~/.gsd/ exist with preferences? */ + /** Does ~/.sf/ exist with preferences? */ hasGlobalSetup: boolean; /** v1 details (only when state === 'v1-planning') */ @@ -242,7 +242,7 @@ const TEST_MARKERS = [ /** Directories skipped during bounded recursive project scans. */ const RECURSIVE_SCAN_IGNORED_DIRS = new Set([ ".git", - ".gsd", + ".sf", ".planning", ".plans", ".claude", @@ -716,7 +716,7 @@ function detectVerificationCommands( // ─── Global Setup Detection ───────────────────────────────────────────────────── /** - * Check if global SF setup exists (has ~/.gsd/ with preferences). + * Check if global SF setup exists (has ~/.sf/ with preferences). */ export function hasGlobalSetup(): boolean { return ( @@ -727,7 +727,7 @@ export function hasGlobalSetup(): boolean { /** * Check if this is the very first time SF has been used on this machine. - * Returns true if ~/.gsd/ doesn't exist or has no preferences or auth. + * Returns true if ~/.sf/ doesn't exist or has no preferences or auth. 
*/ export function isFirstEverLaunch(): boolean { if (!existsSync(sfHome)) return true; diff --git a/src/resources/extensions/sf/doctor-engine-checks.ts b/src/resources/extensions/sf/doctor-engine-checks.ts index 5eb2701b3..a32314223 100644 --- a/src/resources/extensions/sf/doctor-engine-checks.ts +++ b/src/resources/extensions/sf/doctor-engine-checks.ts @@ -13,7 +13,7 @@ export async function checkEngineHealth( issues: DoctorIssue[], fixesApplied: string[], ): Promise<void> { - const dbPath = join(basePath, ".gsd", "sf.db"); + const dbPath = join(basePath, ".sf", "sf.db"); if (!isDbAvailable() && existsSync(dbPath)) { issues.push({ @@ -22,7 +22,7 @@ export async function checkEngineHealth( scope: "project", unitId: "project", message: "Database unavailable — using filesystem state derivation (degraded mode). State queries may be slower and less reliable.", - file: ".gsd/sf.db", + file: ".sf/sf.db", fixable: false, }); } @@ -161,7 +161,7 @@ export async function checkEngineHealth( // relative to the event log and re-render them. try { if (isDbAvailable()) { - const eventLogPath = join(basePath, ".gsd", "event-log.jsonl"); + const eventLogPath = join(basePath, ".sf", "event-log.jsonl"); const events = readEvents(eventLogPath); if (events.length > 0) { const lastEventTs = new Date(events[events.length - 1]!.ts).getTime(); diff --git a/src/resources/extensions/sf/doctor-environment.ts b/src/resources/extensions/sf/doctor-environment.ts index c3d126d0d..c0183d0ef 100644 --- a/src/resources/extensions/sf/doctor-environment.ts +++ b/src/resources/extensions/sf/doctor-environment.ts @@ -38,15 +38,15 @@ const CMD_TIMEOUT = 5_000; // ── Helpers ──────────────────────────────────────────────────────────────── /** Worktree sentinel — path segment that marks an auto-worktree directory. 
*/ -const WORKTREE_PATH_SEGMENT = `${join(".gsd", "worktrees")}/`; +const WORKTREE_PATH_SEGMENT = `${join(".sf", "worktrees")}/`; /** - * Resolve the project root when running inside a `.gsd/worktrees/<name>/` + * Resolve the project root when running inside a `.sf/worktrees/<name>/` * auto-worktree. Returns `null` if not in a worktree. * * Detection order: * 1. `SF_WORKTREE` env var (set by the worktree launcher) - * 2. `.gsd/worktrees/` segment in basePath + * 2. `.sf/worktrees/` segment in basePath */ function resolveWorktreeProjectRoot(basePath: string): string | null { const envRoot = process.env.SF_WORKTREE; @@ -56,7 +56,7 @@ function resolveWorktreeProjectRoot(basePath: string): string | null { const idx = normalised.indexOf(WORKTREE_PATH_SEGMENT.replace(/\\/g, "/")); if (idx === -1) return null; - // Everything before `.gsd/worktrees/` is the project root + // Everything before `.sf/worktrees/` is the project root return basePath.slice(0, idx); } diff --git a/src/resources/extensions/sf/doctor-git-checks.ts b/src/resources/extensions/sf/doctor-git-checks.ts index 5eba5131f..a56dcf559 100644 --- a/src/resources/extensions/sf/doctor-git-checks.ts +++ b/src/resources/extensions/sf/doctor-git-checks.ts @@ -16,7 +16,7 @@ import { loadEffectiveSFPreferences } from "./preferences.js"; /** * Returns true if the directory contains only doctor artifacts - * (e.g. `.gsd/doctor-history.jsonl`). These dirs are created by + * (e.g. `.sf/doctor-history.jsonl`). These dirs are created by * appendDoctorHistory() writing to worktree-scoped paths during the audit * and should not be flagged as orphaned worktrees (#3105). 
*/ @@ -25,9 +25,9 @@ function isDoctorArtifactOnly(dirPath: string): boolean { const entries = readdirSync(dirPath); // Empty dir — not a doctor artifact, still orphaned if (entries.length === 0) return false; - // Only a .gsd subdirectory - if (entries.length === 1 && entries[0] === ".gsd") { - const sfEntries = readdirSync(join(dirPath, ".gsd")); + // Only a .sf subdirectory + if (entries.length === 1 && entries[0] === ".sf") { + const sfEntries = readdirSync(join(dirPath, ".sf")); return sfEntries.length <= 1 && sfEntries.every(e => e === "doctor-history.jsonl"); } return false; @@ -319,8 +319,8 @@ export async function checkGitHealth( try { const wtDir = worktreesDir(basePath); if (existsSync(wtDir)) { - // Resolve symlinks and normalize separators so that symlinked .gsd - // paths (e.g. ~/.gsd/projects/<hash>/worktrees/…) match the paths + // Resolve symlinks and normalize separators so that symlinked .sf + // paths (e.g. ~/.sf/projects/<hash>/worktrees/…) match the paths // returned by `git worktree list`. const normalizePath = (p: string): string => { try { p = realpathSync(p); } catch { /* path may not exist */ } @@ -336,7 +336,7 @@ export async function checkGitHealth( } catch { continue; } const normalizedFullPath = normalizePath(fullPath); if (!registeredPaths.has(normalizedFullPath)) { - // Skip directories that only contain doctor artifacts (.gsd/doctor-history.jsonl). + // Skip directories that only contain doctor artifacts (.sf/doctor-history.jsonl). // appendDoctorHistory() can recreate these dirs during the audit itself, // causing a circular false positive (#3105 Bug 1). if (isDoctorArtifactOnly(fullPath)) continue; @@ -413,7 +413,7 @@ export async function checkGitHealth( // ── Worktree lifecycle checks ────────────────────────────────────────── // Check SF-managed worktrees for: merged branches, stale work, dirty - // state, and unpushed commits. Only worktrees under .gsd/worktrees/. + // state, and unpushed commits. 
Only worktrees under .sf/worktrees/. try { const healthStatuses = getAllWorktreeHealth(basePath); const cwd = process.cwd(); diff --git a/src/resources/extensions/sf/doctor-global-checks.ts b/src/resources/extensions/sf/doctor-global-checks.ts index 2efd79cae..25d13e162 100644 --- a/src/resources/extensions/sf/doctor-global-checks.ts +++ b/src/resources/extensions/sf/doctor-global-checks.ts @@ -5,7 +5,7 @@ import type { DoctorIssue, DoctorIssueCode } from "./doctor-types.js"; import { readRepoMeta, externalProjectsRoot } from "./repo-identity.js"; /** - * Check for orphaned project state directories in ~/.gsd/projects/. + * Check for orphaned project state directories in ~/.sf/projects/. * * A project directory is orphaned when its recorded gitRoot no longer exists * on disk — the repo was deleted, moved, or the external drive was unmounted. diff --git a/src/resources/extensions/sf/doctor-runtime-checks.ts b/src/resources/extensions/sf/doctor-runtime-checks.ts index 4ca045695..b9b9df53d 100644 --- a/src/resources/extensions/sf/doctor-runtime-checks.ts +++ b/src/resources/extensions/sf/doctor-runtime-checks.ts @@ -32,7 +32,7 @@ export async function checkRuntimeHealth( scope: "project", unitId: "project", message: `Stale auto.lock from PID ${lock.pid} (started ${lock.startedAt}, was executing ${lock.unitType} ${lock.unitId}) — process is no longer running`, - file: ".gsd/auto.lock", + file: ".sf/auto.lock", fixable: true, }); @@ -47,7 +47,7 @@ export async function checkRuntimeHealth( } // ── Stranded lock directory ──────────────────────────────────────────── - // proper-lockfile creates a `.gsd.lock/` directory as the OS-level lock + // proper-lockfile creates a `.sf.lock/` directory as the OS-level lock // mechanism. If the process was SIGKILLed or crashed hard, this directory // can remain on disk without any live process holding it. The next session // fails to acquire the lock until the directory is removed (#1245). 
@@ -95,7 +95,7 @@ export async function checkRuntimeHealth( scope: "project", unitId: status.milestoneId, message: `Stale parallel session for ${status.milestoneId} (PID ${status.pid}, started ${new Date(status.startedAt).toISOString()}, last heartbeat ${new Date(status.lastHeartbeat).toISOString()}) — process is no longer running`, - file: `.gsd/parallel/${status.milestoneId}.status.json`, + file: `.sf/parallel/${status.milestoneId}.status.json`, fixable: true, }); @@ -139,7 +139,7 @@ export async function checkRuntimeHealth( scope: "project", unitId: "project", message: `${orphaned.length} completed-unit key(s) reference missing artifacts: ${orphaned.slice(0, 3).join(", ")}${orphaned.length > 3 ? "..." : ""}`, - file: ".gsd/completed-units.json", + file: ".sf/completed-units.json", fixable: true, }); @@ -176,7 +176,7 @@ export async function checkRuntimeHealth( scope: "project", unitId: "project", message: `hook-state.json has ${Object.keys(state.cycleCounts).length} residual cycle count(s) from a previous session`, - file: ".gsd/hook-state.json", + file: ".sf/hook-state.json", fixable: true, }); @@ -217,7 +217,7 @@ export async function checkRuntimeHealth( scope: "project", unitId: "project", message: `Activity logs: ${files.length} files, ${totalMB.toFixed(1)}MB (thresholds: ${BLOAT_FILE_THRESHOLD} files / ${BLOAT_SIZE_MB}MB)`, - file: ".gsd/activity/", + file: ".sf/activity/", fixable: true, }); @@ -245,7 +245,7 @@ export async function checkRuntimeHealth( scope: "project", unitId: "project", message: "STATE.md is missing — state display will not work", - file: ".gsd/STATE.md", + file: ".sf/STATE.md", fixable: true, }); @@ -279,7 +279,7 @@ export async function checkRuntimeHealth( scope: "project", unitId: "project", message: `STATE.md is stale — shows "${current.phase}" but derived state is "${fresh.phase}"`, - file: ".gsd/STATE.md", + file: ".sf/STATE.md", fixable: true, }); @@ -307,16 +307,16 @@ export async function checkRuntimeHealth( // NOTE: 
SF_RUNTIME_PATTERNS in gitignore.ts is the canonical source of truth. // This is a minimal subset for the doctor check. const criticalPatterns = [ - ".gsd/activity/", - ".gsd/runtime/", - ".gsd/auto.lock", - ".gsd/sf.db*", - ".gsd/completed-units*.json", - ".gsd/event-log.jsonl", + ".sf/activity/", + ".sf/runtime/", + ".sf/auto.lock", + ".sf/sf.db*", + ".sf/completed-units*.json", + ".sf/event-log.jsonl", ]; - // If blanket .gsd/ or .gsd is present, all patterns are covered - const hasBlanketIgnore = existingLines.has(".gsd/") || existingLines.has(".gsd"); + // If blanket .sf/ or .sf is present, all patterns are covered + const hasBlanketIgnore = existingLines.has(".sf/") || existingLines.has(".sf"); if (!hasBlanketIgnore) { const missing = criticalPatterns.filter(p => !existingLines.has(p)); @@ -344,26 +344,26 @@ export async function checkRuntimeHealth( // ── External state symlink health ────────────────────────────────────── try { - const localSf = join(basePath, ".gsd"); + const localSf = join(basePath, ".sf"); if (existsSync(localSf)) { const stat = lstatSync(localSf); - // Check for .gsd.migrating (failed migration) - const migratingPath = join(basePath, ".gsd.migrating"); + // Check for .sf.migrating (failed migration) + const migratingPath = join(basePath, ".sf.migrating"); if (existsSync(migratingPath)) { issues.push({ severity: "error", code: "failed_migration", scope: "project", unitId: "project", - message: "Found .gsd.migrating — a previous external state migration failed. State may be incomplete.", - file: ".gsd.migrating", + message: "Found .sf.migrating — a previous external state migration failed. 
State may be incomplete.", + file: ".sf.migrating", fixable: true, }); if (shouldFix("failed_migration")) { if (recoverFailedMigration(basePath)) { - fixesApplied.push("recovered failed migration (.gsd.migrating → .gsd)"); + fixesApplied.push("recovered failed migration (.sf.migrating → .sf)"); } } } @@ -378,8 +378,8 @@ export async function checkRuntimeHealth( code: "broken_symlink", scope: "project", unitId: "project", - message: ".gsd symlink target does not exist. External state directory may have been deleted.", - file: ".gsd", + message: ".sf symlink target does not exist. External state directory may have been deleted.", + file: ".sf", fixable: false, }); } @@ -389,11 +389,11 @@ export async function checkRuntimeHealth( // Non-fatal — external state check failed } - // ── Numbered .gsd collision variants (#2205) ─────────────────────────── - // macOS APFS can create ".gsd 2", ".gsd 3" etc. when a directory blocks - // symlink creation. These must be removed so the canonical .gsd is used. + // ── Numbered .sf collision variants (#2205) ─────────────────────────── + // macOS APFS can create ".sf 2", ".sf 3" etc. when a directory blocks + // symlink creation. These must be removed so the canonical .sf is used. 
try { - const variantPattern = /^\.gsd \d+$/; + const variantPattern = /^\.sf \d+$/; const entries = readdirSync(basePath); const variants = entries.filter(e => variantPattern.test(e)); if (variants.length > 0) { @@ -412,7 +412,7 @@ export async function checkRuntimeHealth( if (shouldFix("numbered_sf_variant")) { const removed = cleanNumberedGsdVariants(basePath); for (const name of removed) { - fixesApplied.push(`removed numbered .gsd variant: ${name}`); + fixesApplied.push(`removed numbered .sf variant: ${name}`); } } } @@ -434,7 +434,7 @@ export async function checkRuntimeHealth( scope: "project", unitId: "project", message: "metrics.json has an unexpected structure (version !== 1 or units is not an array) — metrics data may be unreliable", - file: ".gsd/metrics.json", + file: ".sf/metrics.json", fixable: false, }); } @@ -445,7 +445,7 @@ export async function checkRuntimeHealth( scope: "project", unitId: "project", message: "metrics.json is not valid JSON — metrics data may be corrupt", - file: ".gsd/metrics.json", + file: ".sf/metrics.json", fixable: false, }); } @@ -473,7 +473,7 @@ export async function checkRuntimeHealth( scope: "project", unitId: "project", message: `metrics.json has ${parsed.units.length} unit entries (${fileSizeMB}MB) — threshold is ${BLOAT_UNITS_THRESHOLD}. 
Run /sf doctor --fix to prune to the newest 1500 entries.`, - file: ".gsd/metrics.json", + file: ".sf/metrics.json", fixable: true, }); if (shouldFix("metrics_ledger_bloat")) { diff --git a/src/resources/extensions/sf/export.ts b/src/resources/extensions/sf/export.ts index 83b8b267d..053ac5ad7 100644 --- a/src/resources/extensions/sf/export.ts +++ b/src/resources/extensions/sf/export.ts @@ -213,7 +213,7 @@ export async function handleExport(args: string, ctx: ExtensionCommandContext, b phase: data.phase, }); ctx.ui.notify( - `HTML report saved: .gsd/reports/${bn(outPath)}\nOpening in browser...`, + `HTML report saved: .sf/reports/${bn(outPath)}\nOpening in browser...`, "success", ); openInBrowser(outPath); diff --git a/src/resources/extensions/sf/forensics.ts b/src/resources/extensions/sf/forensics.ts index a9565aad3..200daf967 100644 --- a/src/resources/extensions/sf/forensics.ts +++ b/src/resources/extensions/sf/forensics.ts @@ -56,7 +56,7 @@ interface UnitTrace { mtime: number; } -/** Summary of .gsd/activity/ directory metadata. */ +/** Summary of .sf/activity/ directory metadata. */ interface ActivityLogMeta { fileCount: number; totalSizeBytes: number; @@ -65,7 +65,7 @@ interface ActivityLogMeta { } /** - * Summary of .gsd/journal/ data for forensic investigation. + * Summary of .sf/journal/ data for forensic investigation. * * To avoid loading huge journal histories into memory, only the most recent * daily files are fully parsed. Older files are line-counted for totals. @@ -165,7 +165,7 @@ async function writeForensicsDedupPref(ctx: ExtensionCommandContext, enabled: bo const frontmatter = serializePreferencesToFrontmatter(prefs); const raw = existsSync(prefsPath) ? 
readFileSync(prefsPath, "utf-8") : ""; - let body = "\n# SF Skill Preferences\n\nSee `~/.gsd/agent/extensions/sf/docs/preferences-reference.md` for full field documentation and examples.\n"; + let body = "\n# SF Skill Preferences\n\nSee `~/.sf/agent/extensions/sf/docs/preferences-reference.md` for full field documentation and examples.\n"; const start = raw.startsWith("---\n") ? 4 : raw.startsWith("---\r\n") ? 5 : -1; if (start !== -1) { const closingIdx = raw.indexOf("\n---", start); @@ -237,11 +237,11 @@ export async function handleForensics( const report = await buildForensicReport(basePath); const savedPath = saveForensicReport(basePath, report, problemDescription); - // Derive SF source dir for prompt — fall back to ~/.gsd/agent/extensions/sf/ + // Derive SF source dir for prompt — fall back to ~/.sf/agent/extensions/sf/ // when import.meta.url resolves to the npm-global install path (Windows). let sfSourceDir = dirname(fileURLToPath(import.meta.url)); if (!existsSync(join(sfSourceDir, "prompts"))) { - const sfHome = process.env.SF_HOME || join(homedir(), ".gsd"); + const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); const fallback = join(sfHome, "agent", "extensions", "sf"); if (existsSync(join(fallback, "prompts"))) sfSourceDir = fallback; } @@ -319,7 +319,7 @@ export async function buildForensicReport(basePath: string): Promise<ForensicRep } // 8. SF version — use SF_VERSION env var set by the loader at startup. - // Extensions run from ~/.gsd/agent/extensions/sf/ at runtime, so path-traversal + // Extensions run from ~/.sf/agent/extensions/sf/ at runtime, so path-traversal // from import.meta.url would resolve to ~/package.json (wrong on every system). 
const sfVersion = process.env.SF_VERSION || "unknown"; diff --git a/src/resources/extensions/sf/git-self-heal.ts b/src/resources/extensions/sf/git-self-heal.ts index 13500dd25..b3abbc87e 100644 --- a/src/resources/extensions/sf/git-self-heal.ts +++ b/src/resources/extensions/sf/git-self-heal.ts @@ -3,7 +3,7 @@ * * Four synchronous functions for recovering from broken git state * during auto-mode operations. Uses only `git reset --hard HEAD` — - * never `git clean` (which would delete untracked .gsd/ dirs). + * never `git clean` (which would delete untracked .sf/ dirs). * * Observability: Each function returns structured results describing * what actions were taken. `formatGitError` maps raw git errors to diff --git a/src/resources/extensions/sf/git-service.ts b/src/resources/extensions/sf/git-service.ts index 098b19cb0..3d0e6bffd 100644 --- a/src/resources/extensions/sf/git-service.ts +++ b/src/resources/extensions/sf/git-service.ts @@ -46,7 +46,7 @@ export interface GitPreferences { push_branches?: boolean; remote?: string; snapshots?: boolean; - /** Deprecated. .gsd/ is managed externally; retained for compatibility. */ + /** Deprecated. .sf/ is managed externally; retained for compatibility. */ commit_docs?: boolean; pre_merge_check?: boolean | string; commit_type?: string; @@ -163,7 +163,7 @@ export function buildTaskCommitMessage(ctx: TaskCommitContext): string { } /** - * Thrown when a slice merge hits code conflicts in non-.gsd files. + * Thrown when a slice merge hits code conflicts in non-.sf files. * The working tree is left in a conflicted state (no reset) so the * caller can dispatch a fix-merge session to resolve it. */ @@ -182,7 +182,7 @@ export class MergeConflictError extends SFError { super( SF_MERGE_CONFLICT, `${strategy === "merge" ? 
"Merge" : "Squash-merge"} of "${branch}" into "${mainBranch}" ` + - `failed with conflicts in ${conflictedFiles.length} non-.gsd file(s): ${conflictedFiles.join(", ")}`, + `failed with conflicts in ${conflictedFiles.length} non-.sf file(s): ${conflictedFiles.join(", ")}`, ); this.name = "MergeConflictError"; this.conflictedFiles = conflictedFiles; @@ -209,28 +209,28 @@ export interface PreMergeCheckResult { * This array must stay synchronized with it. */ export const RUNTIME_EXCLUSION_PATHS: readonly string[] = [ - ".gsd/activity/", - ".gsd/forensics/", - ".gsd/runtime/", - ".gsd/worktrees/", - ".gsd/parallel/", - ".gsd/auto.lock", - ".gsd/metrics.json", - ".gsd/completed-units*.json", // covers completed-units.json and archived completed-units-{MID}.json - ".gsd/state-manifest.json", - ".gsd/STATE.md", - ".gsd/sf.db*", - ".gsd/journal/", - ".gsd/doctor-history.jsonl", - ".gsd/event-log.jsonl", - ".gsd/DISCUSSION-MANIFEST.json", + ".sf/activity/", + ".sf/forensics/", + ".sf/runtime/", + ".sf/worktrees/", + ".sf/parallel/", + ".sf/auto.lock", + ".sf/metrics.json", + ".sf/completed-units*.json", // covers completed-units.json and archived completed-units-{MID}.json + ".sf/state-manifest.json", + ".sf/STATE.md", + ".sf/sf.db*", + ".sf/journal/", + ".sf/doctor-history.jsonl", + ".sf/event-log.jsonl", + ".sf/DISCUSSION-MANIFEST.json", ]; // ─── Integration Branch Metadata ─────────────────────────────────────────── /** * Path to the milestone metadata file that stores the integration branch. 
- * Format: .gsd/milestones/<MID>/<MID>-META.json + * Format: .sf/milestones/<MID>/<MID>-META.json */ function milestoneMetaPath(basePath: string, milestoneId: string): string { return join(sfRoot(basePath), "milestones", milestoneId, `${milestoneId}-META.json`); @@ -304,7 +304,7 @@ export function writeIntegrationBranch( existing.integrationBranch = branch; writeFileSync(metaFile, JSON.stringify(existing, null, 2) + "\n", "utf-8"); - // .gsd/ is managed externally (symlinked) — metadata is not committed to git. + // .sf/ is managed externally (symlinked) — metadata is not committed to git. } export type IntegrationBranchResolutionStatus = "recorded" | "fallback" | "missing"; @@ -479,10 +479,10 @@ export class GitServiceImpl { // the git reset HEAD step below would otherwise undo the rm --cached. // // SAFETY: Only untrack the specific RUNTIME paths (activity/, runtime/, - // auto.lock, etc.) — NOT all of .gsd/. If .gsd/milestones/ files were + // auto.lock, etc.) — NOT all of .sf/. If .sf/milestones/ files were // previously tracked, they stay tracked until the milestone completes // and the worktree is torn down. This prevents a mid-execution behavioral - // discontinuity where the first half of a milestone has .gsd/ artifacts + // discontinuity where the first half of a milestone has .sf/ artifacts // committed but the second half doesn't (#1326). if (!this._runtimeFilesCleanedUp) { let cleaned = false; @@ -491,7 +491,7 @@ export class GitServiceImpl { if (removed.length > 0) cleaned = true; } if (cleaned) { - nativeCommit(this.basePath, "chore: untrack .gsd/ runtime files from git index", { allowEmpty: false }); + nativeCommit(this.basePath, "chore: untrack .sf/ runtime files from git index", { allowEmpty: false }); } this._runtimeFilesCleanedUp = true; } @@ -500,14 +500,14 @@ export class GitServiceImpl { // hashed by git. The old approach of `git add -A` followed by unstaging // hangs indefinitely on repos with large untracked artifact trees (#1605). 
// - // Exclude only RUNTIME paths from staging — not the entire .gsd/ directory. - // When .gsd/milestones/ files are already tracked in the index (projects - // where .gsd/ is not gitignored, or Windows junctions that git sees as + // Exclude only RUNTIME paths from staging — not the entire .sf/ directory. + // When .sf/milestones/ files are already tracked in the index (projects + // where .sf/ is not gitignored, or Windows junctions that git sees as // real directories), they should continue to be committed. Excluding the - // entire .gsd/ directory mid-milestone causes silent commit failure where + // entire .sf/ directory mid-milestone causes silent commit failure where // the second half of a milestone's artifacts are never committed (#1326). // - // If .gsd/ IS in .gitignore (the default for external state projects), + // If .sf/ IS in .gitignore (the default for external state projects), // git add -A already skips it and the exclusions are harmless no-ops. const allExclusions = [...RUNTIME_EXCLUSION_PATHS, ...extraExclusions]; @@ -524,7 +524,7 @@ export class GitServiceImpl { const entries = readdirSync(msDir, { withFileTypes: true }); for (const entry of entries) { if (entry.isDirectory() && entry.name !== milestoneLock) { - allExclusions.push(`.gsd/milestones/${entry.name}/`); + allExclusions.push(`.sf/milestones/${entry.name}/`); } } } catch { @@ -563,7 +563,7 @@ export class GitServiceImpl { * (e.g. pre-switch commits, stop commits, state rebuild commits). * * Returns the commit message on success, or null if nothing to commit. - * @param extraExclusions Additional paths to exclude from staging (e.g. [".gsd/"] for pre-switch commits). + * @param extraExclusions Additional paths to exclude from staging (e.g. [".sf/"] for pre-switch commits). */ autoCommit( unitType: string, @@ -658,8 +658,8 @@ export class GitServiceImpl { // Re-run smartStage so the same RUNTIME_EXCLUSION_PATHS apply. 
// Snapshot commits used nativeAddTracked (git add -u) which stages - // ALL tracked modifications including .gsd/ state files. Without - // re-staging, those .gsd/ changes leak into the absorbed commit. + // ALL tracked modifications including .sf/ state files. Without + // re-staging, those .sf/ changes leak into the absorbed commit. this.smartStage(); try { diff --git a/src/resources/extensions/sf/gitignore.ts b/src/resources/extensions/sf/gitignore.ts index cb2a6798b..eef35940f 100644 --- a/src/resources/extensions/sf/gitignore.ts +++ b/src/resources/extensions/sf/gitignore.ts @@ -25,29 +25,29 @@ import { GIT_NO_PROMPT_ENV } from "./git-constants.js"; * but retained for backwards compatibility during migration. */ const SF_RUNTIME_PATTERNS = [ - ".gsd/activity/", - ".gsd/forensics/", - ".gsd/runtime/", - ".gsd/worktrees/", - ".gsd/parallel/", - ".gsd/auto.lock", - ".gsd/metrics.json", - ".gsd/completed-units*.json", // covers completed-units.json and archived completed-units-{MID}.json - ".gsd/state-manifest.json", - ".gsd/STATE.md", - ".gsd/sf.db*", - ".gsd/journal/", - ".gsd/doctor-history.jsonl", - ".gsd/event-log.jsonl", - ".gsd/DISCUSSION-MANIFEST.json", - ".gsd/milestones/**/*-CONTINUE.md", - ".gsd/milestones/**/continue.md", + ".sf/activity/", + ".sf/forensics/", + ".sf/runtime/", + ".sf/worktrees/", + ".sf/parallel/", + ".sf/auto.lock", + ".sf/metrics.json", + ".sf/completed-units*.json", // covers completed-units.json and archived completed-units-{MID}.json + ".sf/state-manifest.json", + ".sf/STATE.md", + ".sf/sf.db*", + ".sf/journal/", + ".sf/doctor-history.jsonl", + ".sf/event-log.jsonl", + ".sf/DISCUSSION-MANIFEST.json", + ".sf/milestones/**/*-CONTINUE.md", + ".sf/milestones/**/continue.md", ] as const; const BASELINE_PATTERNS = [ // ── SF state directory (symlink to external storage) ── - ".gsd", - ".gsd-id", + ".sf", + ".sf-id", ".bg-shell/", // ── OS junk ── @@ -93,22 +93,22 @@ const BASELINE_PATTERNS = [ ]; /** - * Check whether `.gsd` is 
covered by the project's `.gitignore`. + * Check whether `.sf` is covered by the project's `.gitignore`. * * Uses `git check-ignore` for accurate evaluation — this respects nested * .gitignore files, global gitignore, and negation patterns. Returns true - * only when git would actually ignore `.gsd/`. + * only when git would actually ignore `.sf/`. * * Returns false (not ignored) if: * - No `.gitignore` exists - * - `.gsd` is not listed in any active ignore rule + * - `.sf` is not listed in any active ignore rule * - Not a git repo or git is unavailable */ export function isGsdGitignored(basePath: string): boolean { - // Check both `.gsd` and `.gsd/` because `.gsd/` in .gitignore (trailing + // Check both `.sf` and `.sf/` because `.sf/` in .gitignore (trailing // slash = directory-only pattern) only matches the directory form. Using // both paths covers all gitignore pattern variants. - for (const path of [".gsd", ".gsd/"]) { + for (const path of [".sf", ".sf/"]) { try { // git check-ignore exits 0 when the path IS ignored, 1 when it is NOT. execFileSync("git", ["check-ignore", "-q", path], { @@ -116,7 +116,7 @@ export function isGsdGitignored(basePath: string): boolean { stdio: "pipe", env: GIT_NO_PROMPT_ENV, }); - return true; // exit 0 → .gsd is ignored + return true; // exit 0 → .sf is ignored } catch { // exit 1 → this form is NOT ignored, try the other } @@ -125,21 +125,21 @@ export function isGsdGitignored(basePath: string): boolean { } /** - * Check whether `.gsd/` contains files tracked by git. - * If so, the project intentionally keeps `.gsd/` in version control - * and we must NOT add `.gsd` to `.gitignore` or attempt migration. + * Check whether `.sf/` contains files tracked by git. + * If so, the project intentionally keeps `.sf/` in version control + * and we must NOT add `.sf` to `.gitignore` or attempt migration. * - * Returns true if git tracks at least one file under `.gsd/`. + * Returns true if git tracks at least one file under `.sf/`. 
* Returns false (safe to ignore) if: * - Not a git repo - * - `.gsd/` is a symlink (external state, should be ignored) - * - `.gsd/` doesn't exist - * - No tracked files found under `.gsd/` + * - `.sf/` is a symlink (external state, should be ignored) + * - `.sf/` doesn't exist + * - No tracked files found under `.sf/` */ export function hasGitTrackedGsdFiles(basePath: string): boolean { - const localSf = join(basePath, ".gsd"); + const localSf = join(basePath, ".sf"); - // If .gsd doesn't exist or is already a symlink, no tracked files concern + // If .sf doesn't exist or is already a symlink, no tracked files concern if (!existsSync(localSf)) return false; try { if (lstatSync(localSf).isSymbolicLink()) return false; @@ -147,9 +147,9 @@ export function hasGitTrackedGsdFiles(basePath: string): boolean { return false; } - // Check if git tracks any files under .gsd/ + // Check if git tracks any files under .sf/ try { - const tracked = nativeLsFiles(basePath, ".gsd"); + const tracked = nativeLsFiles(basePath, ".sf"); if (tracked.length > 0) return true; // nativeLsFiles swallows git failures and returns []. An empty result @@ -174,9 +174,9 @@ export function hasGitTrackedGsdFiles(basePath: string): boolean { * Creates the file if missing; appends missing patterns. * Returns true if the file was created or modified, false if already complete. * - * **Safety check:** If `.gsd/` contains git-tracked files (i.e., the project - * intentionally keeps `.gsd/` in version control), the `.gsd` ignore pattern - * is excluded to prevent data loss. Only the `.gsd` pattern is affected — + * **Safety check:** If `.sf/` contains git-tracked files (i.e., the project + * intentionally keeps `.sf/` in version control), the `.sf` ignore pattern + * is excluded to prevent data loss. Only the `.sf` pattern is affected — * all other baseline patterns are still applied normally. 
*/ export function ensureGitignore( @@ -201,11 +201,11 @@ export function ensureGitignore( .filter((l) => l && !l.startsWith("#")), ); - // Determine which patterns to apply. If .gsd/ has tracked files, - // exclude the ".gsd" pattern to prevent deleting tracked state. + // Determine which patterns to apply. If .sf/ has tracked files, + // exclude the ".sf" pattern to prevent deleting tracked state. const sfIsTracked = hasGitTrackedGsdFiles(basePath); const patternsToApply = sfIsTracked - ? BASELINE_PATTERNS.filter((p) => p !== ".gsd") + ? BASELINE_PATTERNS.filter((p) => p !== ".sf") : BASELINE_PATTERNS; // Find patterns not yet present @@ -238,7 +238,7 @@ export function ensureGitignore( * * Note: These are strictly runtime/ephemeral paths (activity logs, lock files, * metrics, STATE.md). They are always safe to untrack, even when the project - * intentionally keeps other `.gsd/` files (like PROJECT.md, milestones/) in + * intentionally keeps other `.sf/` files (like PROJECT.md, milestones/) in * version control. */ export function untrackRuntimeFiles(basePath: string): void { @@ -256,7 +256,7 @@ export function untrackRuntimeFiles(basePath: string): void { } /** - * Ensure basePath/.gsd/PREFERENCES.md exists as an empty template. + * Ensure basePath/.sf/PREFERENCES.md exists as an empty template. * Creates the file with frontmatter only if it doesn't exist. * Returns true if created, false if already exists. * @@ -287,7 +287,7 @@ auto_supervisor: {} Project-specific guidance for skill selection and execution preferences. -See \`~/.gsd/agent/extensions/sf/docs/preferences-reference.md\` for full field documentation and examples. +See \`~/.sf/agent/extensions/sf/docs/preferences-reference.md\` for full field documentation and examples. 
## Fields diff --git a/src/resources/extensions/sf/graph-context.ts b/src/resources/extensions/sf/graph-context.ts index 46adac16c..c3b51ca7b 100644 --- a/src/resources/extensions/sf/graph-context.ts +++ b/src/resources/extensions/sf/graph-context.ts @@ -56,7 +56,7 @@ export interface GraphSubgraphOptions { function readGraphFile(projectDir: string): GraphFileShape | null { try { - const graphPath = join(projectDir, ".gsd", "graphs", "graph.json"); + const graphPath = join(projectDir, ".sf", "graphs", "graph.json"); const raw = readFileSync(graphPath, "utf-8"); const parsed = JSON.parse(raw) as Partial<GraphFileShape>; const nodes = Array.isArray(parsed.nodes) ? parsed.nodes : []; @@ -193,7 +193,7 @@ export async function inlineGraphSubgraph( const sections: string[] = [ `### Knowledge Graph Context (term: "${term}")`, - `Source: \`.gsd/graphs/graph.json\``, + `Source: \`.sf/graphs/graph.json\``, staleAnnotation, "", `**Nodes (${result.nodes.length}):**`, diff --git a/src/resources/extensions/sf/guided-flow-queue.ts b/src/resources/extensions/sf/guided-flow-queue.ts index 5055b2914..35aae2e5f 100644 --- a/src/resources/extensions/sf/guided-flow-queue.ts +++ b/src/resources/extensions/sf/guided-flow-queue.ts @@ -45,7 +45,7 @@ export async function showQueue( pi: ExtensionAPI, basePath: string, ): Promise<void> { - // ── Ensure .gsd/ exists ───────────────────────────────────────────── + // ── Ensure .sf/ exists ───────────────────────────────────────────── const sf = sfRoot(basePath); if (!existsSync(sf)) { ctx.ui.notify("No SF project found. 
Run /sf to start one first.", "warning"); @@ -139,9 +139,9 @@ export async function handleQueueReorder( syncProjectMdSequence(basePath, state.registry, result.order); // Commit the change - const filesToAdd = [".gsd/QUEUE-ORDER.json", ".gsd/PROJECT.md"]; + const filesToAdd = [".sf/QUEUE-ORDER.json", ".sf/PROJECT.md"]; for (const r of result.depsToRemove) { - filesToAdd.push(`.gsd/milestones/${r.milestone}/${r.milestone}-CONTEXT.md`); + filesToAdd.push(`.sf/milestones/${r.milestone}/${r.milestone}-CONTEXT.md`); } try { nativeAddPaths(basePath, filesToAdd); @@ -200,7 +200,7 @@ export async function showQueueAdd( preamble, existingMilestonesContext: existingContext, inlinedTemplates: queueInlinedTemplates, - commitInstruction: "Do not commit planning artifacts — .gsd/ is managed externally.", + commitInstruction: "Do not commit planning artifacts — .sf/ is managed externally.", }); pi.sendMessage( diff --git a/src/resources/extensions/sf/guided-flow.ts b/src/resources/extensions/sf/guided-flow.ts index 12ef80e4f..c5565fc3b 100644 --- a/src/resources/extensions/sf/guided-flow.ts +++ b/src/resources/extensions/sf/guided-flow.ts @@ -116,9 +116,9 @@ function runPlanningFlowGate( // ─── Commit Instruction Helpers ────────────────────────────────────────────── -/** Build commit instruction for planning prompts. .gsd/ is managed externally and always gitignored. */ +/** Build commit instruction for planning prompts. .sf/ is managed externally and always gitignored. */ function buildDocsCommitInstruction(_message: string): string { - return "Do not commit planning artifacts — .gsd/ is managed externally."; + return "Do not commit planning artifacts — .sf/ is managed externally."; } // ─── Auto-start after discuss ───────────────────────────────────────────────── @@ -375,7 +375,7 @@ async function dispatchWorkflow( }); } - const workflowPath = process.env.SF_WORKFLOW_PATH ?? join(process.env.HOME ?? 
"~", ".gsd", "agent", "SF-WORKFLOW.md"); + const workflowPath = process.env.SF_WORKFLOW_PATH ?? join(process.env.HOME ?? "~", ".sf", "agent", "SF-WORKFLOW.md"); const workflow = readFileSync(workflowPath, "utf-8"); pi.sendMessage( @@ -453,7 +453,7 @@ function resolveAvailableModel<T extends { id: string; provider: string }>( * Used by all three "new milestone" paths (first ever, no active, all complete). */ function buildDiscussPrompt(nextId: string, preamble: string, _basePath: string, pi: ExtensionAPI, ctx: ExtensionCommandContext, preparationContext?: string): string { - const milestoneRel = `.gsd/milestones/${nextId}`; + const milestoneRel = `.sf/milestones/${nextId}`; const structuredQuestionsAvailable = getStructuredQuestionsAvailability(pi, ctx); const inlinedTemplates = [ inlineTemplate("project", "Project"), @@ -480,7 +480,7 @@ function buildDiscussPrompt(nextId: string, preamble: string, _basePath: string, * Uses the discuss-headless prompt template with seed context injected. */ function buildHeadlessDiscussPrompt(nextId: string, seedContext: string, _basePath: string): string { - const milestoneRel = `.gsd/milestones/${nextId}`; + const milestoneRel = `.sf/milestones/${nextId}`; const inlinedTemplates = [ inlineTemplate("project", "Project"), inlineTemplate("requirements", "Requirements"), @@ -553,8 +553,8 @@ async function prepareAndBuildDiscussPrompt( } /** - * Bootstrap a .gsd/ project from scratch for headless use. - * Ensures git repo, .gsd/ structure, gitignore, and preferences all exist. + * Bootstrap a .sf/ project from scratch for headless use. + * Ensures git repo, .sf/ structure, gitignore, and preferences all exist. 
*/ function bootstrapGsdProject(basePath: string): void { if (!nativeIsRepo(basePath) || isInheritedRepo(basePath)) { @@ -585,7 +585,7 @@ export async function showHeadlessMilestoneCreation( // Clear stale reservations from previous cancelled sessions (#2488) clearReservedMilestoneIds(); - // Ensure .gsd/ is bootstrapped + // Ensure .sf/ is bootstrapped bootstrapGsdProject(basePath); // Generate next milestone ID @@ -683,7 +683,7 @@ async function buildDiscussSlicePrompt( ? `## Inlined Context (preloaded — do not re-read these files)\n\n${inlined.join("\n\n---\n\n")}` : `## Inlined Context\n\n_(no context files found yet — go in blind and ask broad questions)_`; - const sliceDirPath = `.gsd/milestones/${mid}/slices/${sid}`; + const sliceDirPath = `.sf/milestones/${mid}/slices/${sid}`; const sliceContextPath = `${sliceDirPath}/${sid}-CONTEXT.md`; // When re-discussing, inject a preamble so the agent treats this as an update interview @@ -716,7 +716,7 @@ export async function showDiscuss( pi: ExtensionAPI, basePath: string, ): Promise<void> { - // Guard: no .gsd/ project + // Guard: no .sf/ project if (!existsSync(sfRoot(basePath))) { ctx.ui.notify("No SF project found. Run /sf to start one first.", "warning"); return; @@ -1255,8 +1255,8 @@ export async function showWorkflowEntry( } // ── Detection preamble — run before any bootstrap ──────────────────── - // Check bootstrap completeness, not just .gsd/ directory existence. - // A zombie .gsd/ state (symlink exists but missing PREFERENCES.md and + // Check bootstrap completeness, not just .sf/ directory existence. + // A zombie .sf/ state (symlink exists but missing PREFERENCES.md and // milestones/) must trigger the init wizard, not skip it (#2942). 
const sfPath = sfRoot(basePath); const hasBootstrapArtifacts = existsSync(sfPath) @@ -1278,17 +1278,17 @@ export async function showWorkflowEntry( // "fresh" — fall through to init wizard } - // No .gsd/ or zombie .gsd/ — run the project init wizard + // No .sf/ or zombie .sf/ — run the project init wizard const result = await showProjectInit(ctx, pi, basePath, detection); if (!result.completed) return; // User cancelled - // Init wizard bootstrapped .gsd/ — fall through to the normal flow below + // Init wizard bootstrapped .sf/ — fall through to the normal flow below // which will detect "no milestones" and start the discuss prompt } // ── Ensure git repo exists — SF needs it for worktree isolation ────── // Also handle inherited repos: if basePath is a subdirectory of another - // git repo that has no .gsd, create a fresh repo to prevent cross-project + // git repo that has no .sf, create a fresh repo to prevent cross-project // state leaks (#1639). if (!nativeIsRepo(basePath) || isInheritedRepo(basePath)) { const mainBranch = loadEffectiveSFPreferences()?.preferences?.git?.main_branch || "main"; @@ -1407,7 +1407,7 @@ export async function showWorkflowEntry( // First ever — skip wizard, just ask directly pendingAutoStartMap.set(basePath, { ctx, pi, basePath, milestoneId: nextId, step: stepMode, createdAt: Date.now() }); await dispatchWorkflow(pi, await prepareAndBuildDiscussPrompt(ctx, pi, nextId, - `New project, milestone ${nextId}. Do NOT read or explore .gsd/ — it's empty scaffolding.`, + `New project, milestone ${nextId}. Do NOT read or explore .sf/ — it's empty scaffolding.`, basePath ), "sf-run", ctx, "discuss-milestone"); } else { diff --git a/src/resources/extensions/sf/init-wizard.ts b/src/resources/extensions/sf/init-wizard.ts index 66f2cf937..5d06d718a 100644 --- a/src/resources/extensions/sf/init-wizard.ts +++ b/src/resources/extensions/sf/init-wizard.ts @@ -1,9 +1,9 @@ /** * SF Init Wizard — Per-project onboarding. 
* - * Guides users through project setup when entering a directory without .gsd/. + * Guides users through project setup when entering a directory without .sf/. * Detects project ecosystem, offers v1 migration, configures project preferences, - * bootstraps .gsd/ structure, and transitions to the first milestone discussion. + * bootstraps .sf/ structure, and transitions to the first milestone discussion. */ import type { ExtensionAPI, ExtensionCommandContext } from "@sf-run/pi-coding-agent"; @@ -23,7 +23,7 @@ import { generateCodebaseMap, writeCodebaseMap } from "./codebase-generator.js"; interface InitWizardResult { /** Whether the wizard completed (vs cancelled) */ completed: boolean; - /** Whether .gsd/ was created */ + /** Whether .sf/ was created */ bootstrapped: boolean; } @@ -55,7 +55,7 @@ const DEFAULT_PREFS: ProjectPreferences = { /** * Run the project init wizard. - * Called when entering a directory without .gsd/ (or via /sf init). + * Called when entering a directory without .sf/ (or via /sf init). */ export async function showProjectInit( ctx: ExtensionCommandContext, @@ -232,7 +232,7 @@ export async function showProjectInit( // Non-fatal — skill installation failure should never block project init } - // ── Step 9: Bootstrap .gsd/ ──────────────────────────────────────────────── + // ── Step 9: Bootstrap .sf/ ──────────────────────────────────────────────── bootstrapGsdDirectory(basePath, prefs, signals); // Initialize SQLite database so SF starts in full-capability mode (#3880). 
@@ -311,13 +311,13 @@ export async function offerMigration( { id: "migrate", label: "Migrate to SF v2", - description: "Convert .planning/ to .gsd/ format", + description: "Convert .planning/ to .sf/ format", recommended: true, }, { id: "fresh", label: "Start fresh", - description: "Ignore .planning/ and create new .gsd/", + description: "Ignore .planning/ and create new .sf/", }, ], notYetMessage: "Run /sf init when ready.", @@ -330,7 +330,7 @@ export async function offerMigration( // ─── Re-init Handler ──────────────────────────────────────────────────────────── /** - * Handle /sf init when .gsd/ already exists. + * Handle /sf init when .sf/ already exists. * Offers preference reset without destructive milestone deletion. */ export async function handleReinit( @@ -526,7 +526,7 @@ function buildPreferencesFile(prefs: ProjectPreferences): string { lines.push(""); lines.push("Generated by `/sf init`. Edit directly or use `/sf prefs project` to modify."); lines.push(""); - lines.push("See `~/.gsd/agent/extensions/sf/docs/preferences-reference.md` for full field documentation."); + lines.push("See `~/.sf/agent/extensions/sf/docs/preferences-reference.md` for full field documentation."); lines.push(""); return lines.join("\n"); diff --git a/src/resources/extensions/sf/journal.ts b/src/resources/extensions/sf/journal.ts index 8e46d1075..50fb4136a 100644 --- a/src/resources/extensions/sf/journal.ts +++ b/src/resources/extensions/sf/journal.ts @@ -1,7 +1,7 @@ /** * SF Event Journal — structured JSONL event log for auto-mode iterations. * - * Writes daily-rotated JSONL files to `.gsd/journal/YYYY-MM-DD.jsonl`. + * Writes daily-rotated JSONL files to `.sf/journal/YYYY-MM-DD.jsonl`. * Zero imports from `auto/` — depends only on node:fs, node:path, and paths.ts. 
* * Observability: diff --git a/src/resources/extensions/sf/key-manager.ts b/src/resources/extensions/sf/key-manager.ts index ad806f9f2..a354ef1cb 100644 --- a/src/resources/extensions/sf/key-manager.ts +++ b/src/resources/extensions/sf/key-manager.ts @@ -114,7 +114,7 @@ export function describeCredential(cred: AuthCredential): string { * Get the auth.json path. */ export function getAuthPath(): string { - return join(process.env.HOME ?? "~", ".gsd", "agent", "auth.json"); + return join(process.env.HOME ?? "~", ".sf", "agent", "auth.json"); } /** diff --git a/src/resources/extensions/sf/learning/fallback-chain-writer.mjs b/src/resources/extensions/sf/learning/fallback-chain-writer.mjs index e2f03be02..cb85b1d72 100644 --- a/src/resources/extensions/sf/learning/fallback-chain-writer.mjs +++ b/src/resources/extensions/sf/learning/fallback-chain-writer.mjs @@ -1,7 +1,7 @@ /** * sf-learning: fallback-chain writer * - * Writes per-unit-type runtime fallback chains into `~/.gsd/agent/settings.json` + * Writes per-unit-type runtime fallback chains into `~/.sf/agent/settings.json` * under `fallback.chains.*`, so pi-ai's `FallbackResolver` has ONE entry per * active unit type to walk when a dispatch hits a 429 or other retryable * failure. Without this, the resolver reads an empty `chains` object and @@ -10,10 +10,10 @@ * * ## Why this lives in the plugin, not in preferences.md * - * `~/.gsd/preferences.md` tells sf which model to START a unit with — it + * `~/.sf/preferences.md` tells sf which model to START a unit with — it * feeds `before_model_select`, which this plugin already intercepts. But * once dispatch begins and the LLM call 429s, pi-ai's retry path reads - * `~/.gsd/agent/settings.json` → `fallback.chains` directly via + * `~/.sf/agent/settings.json` → `fallback.chains` directly via * `SettingsManager.getFallbackSettings()`. Those two configs are separate * pipelines. preferences.md never reaches the retry walker. 
* @@ -76,7 +76,7 @@ const NEUTRAL_PRIOR_SCORE = 50; const PRIORITY_STEP = 10; const DEFAULT_CHAIN_NAME = "default"; const MAIN_CHAIN_NAME = "main"; -const PROJECT_SETTINGS_SUBPATH = ".gsd/agent/settings.json"; +const PROJECT_SETTINGS_SUBPATH = ".sf/agent/settings.json"; /** * Compute blended ranking for a single unit type across every model we @@ -254,7 +254,7 @@ function rankedToEntries(ranked, bareIdIndex) { /** * Read settings.json, merge in new fallback chains, and atomically replace. * - * @param {string} settingsPath - absolute path to ~/.gsd/agent/settings.json + * @param {string} settingsPath - absolute path to ~/.sf/agent/settings.json * @param {Record<string, Array>} chainsByName - map of chain name → entries */ function writeSettingsWithChains(settingsPath, chainsByName) { @@ -342,12 +342,12 @@ function resolveCanonicalPath(pathValue) { } /** - * Check for a project-level `.gsd/agent/settings.json` in `cwd`. + * Check for a project-level `.sf/agent/settings.json` in `cwd`. * pi-ai's settings manager deep-merges project settings over global, * so a project-level `fallback` block silently neutralizes the chains * this plugin writes globally (combatant finding #4). * - * Bails out early when `cwd/.gsd/agent/settings.json` resolves to the same + * Bails out early when `cwd/.sf/agent/settings.json` resolves to the same * canonical path as the global settings file — i.e. when sf is invoked * from `$HOME` and the "project-level" probe aliases the global file. * Without this guard, the plugin warns about its own writes shadowing @@ -391,7 +391,7 @@ function detectProjectSettingsShadow(cwd, globalSettingsPath, log) { * unit types (used when the current model isn't in any unit-specific * chain — e.g. the user overrode the model via `/sf model`). 
* - * Also checks for a project-level `.gsd/agent/settings.json` that might + * Also checks for a project-level `.sf/agent/settings.json` that might * silently shadow the global chains via pi-ai's deep-merge, and warns * via `deps.opts.log` when one is found. * diff --git a/src/resources/extensions/sf/learning/fallback-chain-writer.test.mjs b/src/resources/extensions/sf/learning/fallback-chain-writer.test.mjs index d66ad4044..a2e95a0cf 100644 --- a/src/resources/extensions/sf/learning/fallback-chain-writer.test.mjs +++ b/src/resources/extensions/sf/learning/fallback-chain-writer.test.mjs @@ -228,10 +228,10 @@ test("writeFallbackChains logs a warning when enabledModels is missing or empty" } }); -test("writeFallbackChains warns via log when project-level .gsd/agent/settings.json shadows fallback", () => { - // Create a fake project cwd with a .gsd/agent/settings.json containing a fallback block. +test("writeFallbackChains warns via log when project-level .sf/agent/settings.json shadows fallback", () => { + // Create a fake project cwd with a .sf/agent/settings.json containing a fallback block. const projectDir = mkdtempSync(join(tmpdir(), "sf-proj-")); - const projectSettingsDir = join(projectDir, ".gsd", "agent"); + const projectSettingsDir = join(projectDir, ".sf", "agent"); mkdirSync(projectSettingsDir, { recursive: true }); const projectSettingsPath = join(projectSettingsDir, "settings.json"); writeFileSync(projectSettingsPath, JSON.stringify({ fallback: { enabled: true, chains: {} } })); @@ -335,15 +335,15 @@ test("hardcoded main chain coexists with blender-computed per-unit-type chains", test("writeFallbackChains does NOT warn when cwd is the parent of the global settings file (false-positive guard)", () => { // Regression: when sf is invoked from $HOME, detectProjectSettingsShadow - // used to probe `$HOME/.gsd/agent/settings.json` — which IS the global + // used to probe `$HOME/.sf/agent/settings.json` — which IS the global // settings file itself. 
It then warned that the global file was shadowing // its own write. Surfaced 2026-04-15 in notifications.jsonl as - // "WARNING: project-level settings.json at /home/mhugo/.gsd/agent/settings.json". + // "WARNING: project-level settings.json at /home/mhugo/.sf/agent/settings.json". // // Fix: detectProjectSettingsShadow compares the resolved project path to // the global settingsPath and bails early when they match. const fakeHome = mkdtempSync(join(tmpdir(), "sf-fakehome-")); - const globalSettingsDir = join(fakeHome, ".gsd", "agent"); + const globalSettingsDir = join(fakeHome, ".sf", "agent"); mkdirSync(globalSettingsDir, { recursive: true }); const globalSettingsPath = join(globalSettingsDir, "settings.json"); writeFileSync( @@ -378,7 +378,7 @@ test("writeFallbackChains does NOT warn when cwd is the parent of the global set test("writeFallbackChains does NOT warn when project settings has no fallback block", () => { const projectDir = mkdtempSync(join(tmpdir(), "sf-proj-")); - const projectSettingsDir = join(projectDir, ".gsd", "agent"); + const projectSettingsDir = join(projectDir, ".sf", "agent"); mkdirSync(projectSettingsDir, { recursive: true }); writeFileSync(join(projectSettingsDir, "settings.json"), JSON.stringify({ defaultProvider: "kimi-coding" })); diff --git a/src/resources/extensions/sf/learning/index.mjs b/src/resources/extensions/sf/learning/index.mjs index 3bdd074a5..774d6a7d9 100644 --- a/src/resources/extensions/sf/learning/index.mjs +++ b/src/resources/extensions/sf/learning/index.mjs @@ -13,7 +13,7 @@ * * import { init } from "./index.mjs"; * const plugin = await init(pi, { - * dbPath: "~/.gsd/sf-learning.db", + * dbPath: "~/.sf/sf-learning.db", * priorsPath: "./src/data/model-benchmarks.json", * weightsPath: "./src/data/unit-weights.json", * nPrior: 10, @@ -55,7 +55,7 @@ import { writeFallbackChains } from "./fallback-chain-writer.mjs"; const MODULE_DIRECTORY = dirname(fileURLToPath(import.meta.url)); const SCHEMA_PATH = 
resolve(MODULE_DIRECTORY, "outcome-schema.sql"); -const DEFAULT_DB_PATH = "~/.gsd/sf-learning.db"; +const DEFAULT_DB_PATH = "~/.sf/sf-learning.db"; const DEFAULT_N_PRIOR = 10; const DEFAULT_ROLLING_DAYS = 30; const DEFAULT_EXPLORATION_C = 1.4; @@ -63,7 +63,7 @@ const HOME_REGEX = /^~(?=$|\/)/; /** * @typedef {Object} PluginConfig - * @property {string} [dbPath] - default: ~/.gsd/sf-learning.db + * @property {string} [dbPath] - default: ~/.sf/sf-learning.db * @property {string} [priorsPath] - default: <plugin>/data/model-benchmarks.json * @property {string} [weightsPath] - default: <plugin>/data/unit-weights.json * @property {number} [nPrior=10] @@ -266,7 +266,7 @@ export async function init(pi, config = {}) { } // Regenerate pi-ai runtime fallback chains (read by FallbackResolver). - // Writes ~/.gsd/agent/settings.json → fallback.chains.* atomically. + // Writes ~/.sf/agent/settings.json → fallback.chains.* atomically. // Failure is logged but never blocks plugin init — stale chains are // still better than a broken plugin. let fallbackWriteSummary = null; diff --git a/src/resources/extensions/sf/markdown-renderer.ts b/src/resources/extensions/sf/markdown-renderer.ts index 136c8c910..e96c1c967 100644 --- a/src/resources/extensions/sf/markdown-renderer.ts +++ b/src/resources/extensions/sf/markdown-renderer.ts @@ -42,8 +42,8 @@ import { clearPathCache } from "./paths.js"; // ─── Helpers ────────────────────────────────────────────────────────────── /** - * Convert an absolute file path to a .gsd-relative artifact path. - * E.g. "/project/.gsd/milestones/M001/M001-ROADMAP.md" → "milestones/M001/M001-ROADMAP.md" + * Convert an absolute file path to a .sf-relative artifact path. + * E.g. 
"/project/.sf/milestones/M001/M001-ROADMAP.md" → "milestones/M001/M001-ROADMAP.md" */ function toArtifactPath(absPath: string, basePath: string): string { const root = sfRoot(basePath); diff --git a/src/resources/extensions/sf/md-importer.ts b/src/resources/extensions/sf/md-importer.ts index d3ee50325..b669e91f1 100644 --- a/src/resources/extensions/sf/md-importer.ts +++ b/src/resources/extensions/sf/md-importer.ts @@ -1,5 +1,5 @@ // SF Markdown Importer -// Parses DECISIONS.md, REQUIREMENTS.md, and hierarchy artifacts from a .gsd/ tree, +// Parses DECISIONS.md, REQUIREMENTS.md, and hierarchy artifacts from a .sf/ tree, // then upserts everything into the SQLite database. // // Exports: parseDecisionsTable, parseRequirementsSections, migrateFromMarkdown @@ -311,7 +311,7 @@ const SLICE_SUFFIXES = ['PLAN', 'SUMMARY', 'RESEARCH', 'CONTEXT', 'ASSESSMENT', const TASK_SUFFIXES = ['PLAN', 'SUMMARY', 'CONTINUE', 'CONTEXT', 'RESEARCH']; /** - * Import hierarchy artifacts (roadmaps, plans, summaries, etc.) from the .gsd/ tree. + * Import hierarchy artifacts (roadmaps, plans, summaries, etc.) from the .sf/ tree. * Walks milestones → slices → tasks directories. */ function importHierarchyArtifacts(sfDir: string): number { @@ -494,7 +494,7 @@ function findFileByPrefixAndSuffix(dir: string, idPrefix: string, suffix: string // ─── Hierarchy Migration (milestones/slices/tasks from roadmaps+plans) ──── /** - * Walk .gsd/milestones/ dirs, parse roadmaps and plans, and populate + * Walk .sf/milestones/ dirs, parse roadmaps and plans, and populate * the milestones/slices/tasks DB tables. * * - Milestone title: from roadmap H1 (e.g. "# M001: Title") or CONTEXT.md @@ -690,7 +690,7 @@ export function migrateHierarchyToDb(basePath: string): { // ─── Orchestrator ────────────────────────────────────────────────────────── /** - * Import all markdown artifacts from a .gsd/ directory into the database. + * Import all markdown artifacts from a .sf/ directory into the database. 
* Opens the DB if not already open. Wraps all imports in a single transaction. * Returns counts of imported items for logging. * diff --git a/src/resources/extensions/sf/metrics.ts b/src/resources/extensions/sf/metrics.ts index 31cfd39de..d3b044bf6 100644 --- a/src/resources/extensions/sf/metrics.ts +++ b/src/resources/extensions/sf/metrics.ts @@ -3,7 +3,7 @@ * * Accumulates per-unit usage data across auto-mode sessions. * Data is extracted from session entries before each context wipe, - * written to .gsd/metrics.json, and surfaced in the dashboard. + * written to .sf/metrics.json, and surfaced in the dashboard. * * Data flow: * 1. Before newSession() wipes context, snapshotUnitMetrics() scans diff --git a/src/resources/extensions/sf/migrate-external.ts b/src/resources/extensions/sf/migrate-external.ts index 22f214c73..d8ff67f32 100644 --- a/src/resources/extensions/sf/migrate-external.ts +++ b/src/resources/extensions/sf/migrate-external.ts @@ -1,8 +1,8 @@ /** * SF External State Migration * - * Migrates legacy in-project `.gsd/` directories to the external - * `~/.gsd/projects/<hash>/` state directory. After migration, a + * Migrates legacy in-project `.sf/` directories to the external + * `~/.sf/projects/<hash>/` state directory. After migration, a * symlink replaces the original directory so all paths remain valid. */ @@ -20,29 +20,29 @@ export interface MigrationResult { } /** - * Migrate a legacy in-project `.gsd/` directory to external storage. + * Migrate a legacy in-project `.sf/` directory to external storage. * * Algorithm: - * 1. If `<project>/.gsd` is a symlink or doesn't exist -> skip - * 2. If `<project>/.gsd` is a real directory: + * 1. If `<project>/.sf` is a symlink or doesn't exist -> skip + * 2. If `<project>/.sf` is a real directory: * a. Compute external path from repoIdentity * b. mkdir -p external dir - * c. Rename `.gsd` -> `.gsd.migrating` (atomic on same FS, acts as lock) + * c. 
Rename `.sf` -> `.sf.migrating` (atomic on same FS, acts as lock) * d. Copy contents to external dir (skip `worktrees/` subdirectory) - * e. Create symlink `.gsd -> external path` - * f. Remove `.gsd.migrating` - * 3. On failure: rename `.gsd.migrating` back to `.gsd` (rollback) + * e. Create symlink `.sf -> external path` + * f. Remove `.sf.migrating` + * 3. On failure: rename `.sf.migrating` back to `.sf` (rollback) */ export function migrateToExternalState(basePath: string): MigrationResult { - // Worktrees get their .gsd via syncSfStateToWorktree(), not migration. + // Worktrees get their .sf via syncSfStateToWorktree(), not migration. // Migration inside a worktree would compute the same external hash as the // main repo (externalGsdRoot hashes remoteUrl + gitRoot), creating a broken - // junction and orphaning .gsd.migrating (#2970). + // junction and orphaning .sf.migrating (#2970). if (isInsideWorktree(basePath)) { return { migrated: false }; } - const localSf = join(basePath, ".gsd"); + const localSf = join(basePath, ".sf"); // Skip if doesn't exist if (!existsSync(localSf)) { @@ -56,19 +56,19 @@ export function migrateToExternalState(basePath: string): MigrationResult { return { migrated: false }; } if (!stat.isDirectory()) { - return { migrated: false, error: ".gsd exists but is not a directory or symlink" }; + return { migrated: false, error: ".sf exists but is not a directory or symlink" }; } } catch (err) { - return { migrated: false, error: `Cannot stat .gsd: ${getErrorMessage(err)}` }; + return { migrated: false, error: `Cannot stat .sf: ${getErrorMessage(err)}` }; } - // Skip if .gsd/ contains git-tracked files — the project intentionally - // keeps .gsd/ in version control and migration would destroy that. + // Skip if .sf/ contains git-tracked files — the project intentionally + // keeps .sf/ in version control and migration would destroy that. 
if (hasGitTrackedGsdFiles(basePath)) { return { migrated: false }; } - // Skip if .gsd/worktrees/ has active worktree directories (#1337). + // Skip if .sf/worktrees/ has active worktree directories (#1337). // On Windows, active git worktrees hold OS-level directory handles that // prevent rename/delete. Attempting migration causes EBUSY and data loss. const worktreesDir = join(localSf, "worktrees"); @@ -85,13 +85,13 @@ export function migrateToExternalState(basePath: string): MigrationResult { } const externalPath = externalGsdRoot(basePath); - const migratingPath = join(basePath, ".gsd.migrating"); + const migratingPath = join(basePath, ".sf.migrating"); try { // mkdir -p the external dir mkdirSync(externalPath, { recursive: true }); - // Rename .gsd -> .gsd.migrating (atomic lock). + // Rename .sf -> .sf.migrating (atomic lock). // On Windows, NTFS may reject rename with EPERM if file descriptors are // open (VS Code watchers, antivirus on-access scan). Fall back to // copy+delete (#1292). @@ -129,7 +129,7 @@ export function migrateToExternalState(basePath: string): MigrationResult { } } - // Create symlink .gsd -> external path + // Create symlink .sf -> external path symlinkSync(externalPath, localSf, "junction"); // Verify the symlink resolves correctly before removing the backup (#1377). @@ -154,12 +154,12 @@ export function migrateToExternalState(basePath: string): MigrationResult { return { migrated: false, error: `Migration verification failed: ${getErrorMessage(verifyErr)}` }; } - // Clean the git index — any .gsd/* files tracked before migration now + // Clean the git index — any .sf/* files tracked before migration now // sit behind the symlink and git can't follow it, causing them to show // as deleted. Remove them from the index so the working tree stays clean. - // --ignore-unmatch makes this a no-op on fresh projects with no tracked .gsd/. + // --ignore-unmatch makes this a no-op on fresh projects with no tracked .sf/. 
try { - execFileSync("git", ["rm", "-r", "--cached", "--ignore-unmatch", ".gsd"], { + execFileSync("git", ["rm", "-r", "--cached", "--ignore-unmatch", ".sf"], { cwd: basePath, stdio: ["ignore", "pipe", "ignore"], env: GIT_NO_PROMPT_ENV, @@ -169,18 +169,18 @@ export function migrateToExternalState(basePath: string): MigrationResult { // Non-fatal — git may be unavailable or nothing was tracked } - // Remove .gsd.migrating only after symlink is verified and index is clean + // Remove .sf.migrating only after symlink is verified and index is clean rmSync(migratingPath, { recursive: true, force: true }); return { migrated: true }; } catch (err) { - // Rollback: rename .gsd.migrating back to .gsd + // Rollback: rename .sf.migrating back to .sf try { if (existsSync(migratingPath) && !existsSync(localSf)) { renameSync(migratingPath, localSf); } } catch { - // Rollback failed -- leave .gsd.migrating for doctor to detect + // Rollback failed -- leave .sf.migrating for doctor to detect } return { @@ -191,12 +191,12 @@ export function migrateToExternalState(basePath: string): MigrationResult { } /** - * Recover from a failed migration (`.gsd.migrating` exists). - * Moves `.gsd.migrating` back to `.gsd` if `.gsd` doesn't exist. + * Recover from a failed migration (`.sf.migrating` exists). + * Moves `.sf.migrating` back to `.sf` if `.sf` doesn't exist. 
*/ export function recoverFailedMigration(basePath: string): boolean { - const localSf = join(basePath, ".gsd"); - const migratingPath = join(basePath, ".gsd.migrating"); + const localSf = join(basePath, ".sf"); + const migratingPath = join(basePath, ".sf.migrating"); if (!existsSync(migratingPath)) return false; if (existsSync(localSf)) return false; // both exist -- ambiguous, don't touch diff --git a/src/resources/extensions/sf/migrate/command.ts b/src/resources/extensions/sf/migrate/command.ts index 4e742c3be..aba7ff422 100644 --- a/src/resources/extensions/sf/migrate/command.ts +++ b/src/resources/extensions/sf/migrate/command.ts @@ -1,5 +1,5 @@ /** - * /sf migrate — one-shot migration from .planning to .gsd + * /sf migrate — one-shot migration from .planning to .sf * * Thin UX orchestrator: resolves paths, runs the validate → parse → transform → * preview → write pipeline, and shows confirmation UI via showNextAction. @@ -98,7 +98,7 @@ export async function handleMigrate( if (!existsSync(sourcePath)) { ctx.ui.notify( `Directory not found: ${sourcePath}\n\n` + - 'Migration converts a .planning/ directory (from older SF versions) into .gsd/ format.\n' + + 'Migration converts a .planning/ directory (from older SF versions) into .sf/ format.\n' + 'If you are starting a new project, use /sf:new-project instead.\n' + 'If migrating, ensure the path contains a .planning/ directory.', "error", @@ -148,7 +148,7 @@ export async function handleMigrate( const targetGsdExists = existsSync(sfRoot(process.cwd())); if (targetGsdExists) { lines.push(""); - lines.push("⚠ A .gsd directory already exists in the current working directory — it will be overwritten."); + lines.push("⚠ A .sf directory already exists in the current working directory — it will be overwritten."); } // ── Confirmation via showNextAction ──────────────────────────────────────── @@ -158,8 +158,8 @@ export async function handleMigrate( actions: [ { id: "confirm", - label: "Write .gsd directory", - 
description: `Migrate ${preview.milestoneCount} milestone(s) to ${process.cwd()}/.gsd`, + label: "Write .sf directory", + description: `Migrate ${preview.milestoneCount} milestone(s) to ${process.cwd()}/.sf`, recommended: true, }, { @@ -177,13 +177,13 @@ export async function handleMigrate( } // ── Write ────────────────────────────────────────────────────────────────── - ctx.ui.notify("Writing .gsd directory…", "info"); + ctx.ui.notify("Writing .sf directory…", "info"); const result = await writeSFDirectory(project, process.cwd()); const sfPath = sfRoot(process.cwd()); ctx.ui.notify( - `✓ Migration complete — ${result.paths.length} file(s) written to .gsd/`, + `✓ Migration complete — ${result.paths.length} file(s) written to .sf/`, "info", ); @@ -191,7 +191,7 @@ export async function handleMigrate( const reviewChoice = await showNextAction(ctx, { title: "Migration written", summary: [ - `${result.paths.length} files written to .gsd/`, + `${result.paths.length} files written to .sf/`, "", "The agent can now review the migrated output against SF-2 standards —", "checking structure, content quality, deriveState() round-trip, and", @@ -201,7 +201,7 @@ export async function handleMigrate( { id: "review", label: "Review migration", - description: "Agent audits the .gsd output and reports PASS/FAIL per category", + description: "Agent audits the .sf output and reports PASS/FAIL per category", recommended: true, }, { @@ -210,7 +210,7 @@ export async function handleMigrate( description: "Trust the migration output as-is", }, ], - notYetMessage: "Run /sf migrate again to re-migrate, or review .gsd manually.", + notYetMessage: "Run /sf migrate again to re-migrate, or review .sf manually.", }); if (reviewChoice === "review") { diff --git a/src/resources/extensions/sf/migrate/writer.ts b/src/resources/extensions/sf/migrate/writer.ts index 8a0fc7c6a..cc8bcd769 100644 --- a/src/resources/extensions/sf/migrate/writer.ts +++ b/src/resources/extensions/sf/migrate/writer.ts @@ -1,7 
+1,7 @@ // SF Directory Writer — Format Functions & Directory Orchestrator // Format functions: pure string-returning functions that serialize SF types into the exact markdown // format that SF-2's parsers expect (parseRoadmap, parsePlan, parseSummary, parseRequirementCounts). -// writeSFDirectory: orchestrator that writes a complete .gsd directory tree from a SFProject. +// writeSFDirectory: orchestrator that writes a complete .sf directory tree from a SFProject. import { join } from 'node:path'; import { saveFile } from '../files.js'; @@ -412,7 +412,7 @@ export function formatState(milestones: SFMilestone[]): string { // ─── Directory Writer Orchestrator ───────────────────────────────────────── /** - * Write a complete .gsd directory tree from a SFProject. + * Write a complete .sf directory tree from a SFProject. * Iterates milestones → slices → tasks, calls format functions, * and writes each file via saveFile(). Returns a manifest of written paths. * diff --git a/src/resources/extensions/sf/milestone-ids.ts b/src/resources/extensions/sf/milestone-ids.ts index 41d0173d3..beb15cd48 100644 --- a/src/resources/extensions/sf/milestone-ids.ts +++ b/src/resources/extensions/sf/milestone-ids.ts @@ -129,7 +129,7 @@ export function findMilestoneIds(basePath: string): string[] { } catch (err) { // Log why milestone scanning failed — silent [] here causes infinite loops (#456) if (existsSync(dir)) { - logWarning("engine", `findMilestoneIds: .gsd/milestones/ exists but readdirSync failed — ${getErrorMessage(err)}`); + logWarning("engine", `findMilestoneIds: .sf/milestones/ exists but readdirSync failed — ${getErrorMessage(err)}`); } return []; } diff --git a/src/resources/extensions/sf/native-git-bridge.ts b/src/resources/extensions/sf/native-git-bridge.ts index c218c27c9..a33b7e209 100644 --- a/src/resources/extensions/sf/native-git-bridge.ts +++ b/src/resources/extensions/sf/native-git-bridge.ts @@ -723,7 +723,7 @@ export function nativeAddAllWithExclusions(basePath: 
string, exclusions: readonl if (stderr.includes("ignored by one of your .gitignore files")) { return; } - // When .gsd is a symlink, git rejects `:!.gsd/...` pathspecs with + // When .sf is a symlink, git rejects `:!.sf/...` pathspecs with // "beyond a symbolic link". Fall back to `git add -u` which only // stages changes to already-tracked files — O(tracked) not O(filesystem). // Using `git add -A` here would traverse the entire working tree, @@ -877,7 +877,7 @@ export function nativeMergeSquash(basePath: string, branch: string): GitMergeRes stderr.includes("overwritten by merge") ) { // Extract filenames from git stderr so callers can report which files - // are dirty instead of generically blaming .gsd/ (#2151). + // are dirty instead of generically blaming .sf/ (#2151). // Git lists them as tab-indented lines between the "would be overwritten" // header and the "Please commit" footer. const dirtyFiles = stderr diff --git a/src/resources/extensions/sf/native-parser-bridge.ts b/src/resources/extensions/sf/native-parser-bridge.ts index 1f15ab277..e44b7c8d4 100644 --- a/src/resources/extensions/sf/native-parser-bridge.ts +++ b/src/resources/extensions/sf/native-parser-bridge.ts @@ -121,7 +121,7 @@ export interface BatchParsedFile { } /** - * Batch-parse all .md files in a .gsd/ directory tree using the native parser. + * Batch-parse all .md files in a .sf/ directory tree using the native parser. * Returns null if native module unavailable. */ export function nativeBatchParseGsdFiles(directory: string): BatchParsedFile[] | null { @@ -154,7 +154,7 @@ export interface GsdTreeEntry { } /** - * Native-backed directory tree scan of a .gsd/ directory. + * Native-backed directory tree scan of a .sf/ directory. * Returns a flat list of all entries, or null if native module unavailable. 
*/ export function nativeScanGsdTree(directory: string): GsdTreeEntry[] | null { diff --git a/src/resources/extensions/sf/notification-store.ts b/src/resources/extensions/sf/notification-store.ts index ef803357e..941e385ad 100644 --- a/src/resources/extensions/sf/notification-store.ts +++ b/src/resources/extensions/sf/notification-store.ts @@ -1,6 +1,6 @@ // SF Extension — Persistent Notification Store // Captures all ctx.ui.notify() calls and workflow-logger warnings to -// .gsd/notifications.jsonl so they survive context resets and session restarts. +// .sf/notifications.jsonl so they survive context resets and session restarts. // Rotates at MAX_ENTRIES to prevent unbounded growth. import { appendFileSync, existsSync, mkdirSync, openSync, closeSync, readFileSync, renameSync, unlinkSync, writeFileSync } from "node:fs"; @@ -85,7 +85,7 @@ export function appendNotification( }; try { - const dir = join(_basePath, ".gsd"); + const dir = join(_basePath, ".sf"); mkdirSync(dir, { recursive: true }); appendFileSync(join(dir, FILENAME), JSON.stringify(entry) + "\n", "utf-8"); _lineCount++; @@ -223,7 +223,7 @@ export function _resetNotificationStore(): void { // ─── Internal ─────────────────────────────────────────────────────────── function _readEntriesFromDisk(basePath: string): NotificationEntry[] { - const filePath = join(basePath, ".gsd", FILENAME); + const filePath = join(basePath, ".sf", FILENAME); if (!existsSync(filePath)) return []; try { const content = readFileSync(filePath, "utf-8"); @@ -276,7 +276,7 @@ function _emitChange(): void { * Must be called inside _withLock for cross-process safety. */ function _atomicWrite(basePath: string, content: string): void { - const dir = join(basePath, ".gsd"); + const dir = join(basePath, ".sf"); mkdirSync(dir, { recursive: true }); const target = join(dir, FILENAME); const tmp = target + ".tmp." 
+ process.pid; @@ -291,14 +291,14 @@ function _atomicWrite(basePath: string, content: string): void { * to avoid deadlocking the UI on a stale lock. */ function _withLock<T>(basePath: string, fn: () => T): T { - const lockPath = join(basePath, ".gsd", LOCKFILE); + const lockPath = join(basePath, ".sf", LOCKFILE); let fd: number | null = null; const maxAttempts = 5; const retryMs = 20; for (let i = 0; i < maxAttempts; i++) { try { - mkdirSync(join(basePath, ".gsd"), { recursive: true }); + mkdirSync(join(basePath, ".sf"), { recursive: true }); fd = openSync(lockPath, "wx"); break; } catch (err: any) { diff --git a/src/resources/extensions/sf/parallel-merge.ts b/src/resources/extensions/sf/parallel-merge.ts index 97454f73a..9ac3a874a 100644 --- a/src/resources/extensions/sf/parallel-merge.ts +++ b/src/resources/extensions/sf/parallel-merge.ts @@ -38,7 +38,7 @@ export type MergeOrder = "sequential" | "by-completion"; * Returns true when milestones.status = 'complete' in the worktree's sf.db. */ export function isMilestoneCompleteInWorktreeDb(basePath: string, mid: string): boolean { - const dbPath = join(basePath, ".gsd", "worktrees", mid, ".gsd", "sf.db"); + const dbPath = join(basePath, ".sf", "worktrees", mid, ".sf", "sf.db"); if (!existsSync(dbPath)) return false; try { @@ -56,11 +56,11 @@ export function isMilestoneCompleteInWorktreeDb(basePath: string, mid: string): /** * Discover milestone IDs with status='complete' in their worktree DB, - * scanning .gsd/worktrees/<MID>/.gsd/sf.db for each worktree directory. + * scanning .sf/worktrees/<MID>/.sf/sf.db for each worktree directory. 
*/ function discoverDbCompletedMilestones(basePath: string): Set<string> { const completed = new Set<string>(); - const worktreeDir = join(basePath, ".gsd", "worktrees"); + const worktreeDir = join(basePath, ".sf", "worktrees"); try { for (const entry of readdirSync(worktreeDir)) { if (entry.startsWith("M") && isMilestoneCompleteInWorktreeDb(basePath, entry)) { @@ -115,7 +115,7 @@ export function determineMergeOrder( title: mid, pid: 0, process: null, - worktreePath: basePath ? join(basePath, ".gsd", "worktrees", mid) : "", + worktreePath: basePath ? join(basePath, ".sf", "worktrees", mid) : "", startedAt: 0, state: "stopped", cost: 0, diff --git a/src/resources/extensions/sf/parallel-monitor-overlay.ts b/src/resources/extensions/sf/parallel-monitor-overlay.ts index 9ba4c747d..a130a07a4 100644 --- a/src/resources/extensions/sf/parallel-monitor-overlay.ts +++ b/src/resources/extensions/sf/parallel-monitor-overlay.ts @@ -98,8 +98,8 @@ function tailRead(filePath: string, maxBytes: number): string { } function discoverWorkers(basePath: string): string[] { - const parallelDir = join(basePath, ".gsd", "parallel"); - const worktreeDir = join(basePath, ".gsd", "worktrees"); + const parallelDir = join(basePath, ".sf", "parallel"); + const worktreeDir = join(basePath, ".sf", "worktrees"); const mids = new Set<string>(); if (existsSync(parallelDir)) { @@ -115,7 +115,7 @@ function discoverWorkers(basePath: string): string[] { if (existsSync(worktreeDir)) { try { for (const d of readdirSync(worktreeDir)) { - if (d.startsWith("M") && existsSync(join(worktreeDir, d, ".gsd", "auto.lock"))) { + if (d.startsWith("M") && existsSync(join(worktreeDir, d, ".sf", "auto.lock"))) { mids.add(d); } } @@ -126,7 +126,7 @@ function discoverWorkers(basePath: string): string[] { } function querySliceProgress(basePath: string, mid: string): SliceProgress[] { - const dbPath = join(basePath, ".gsd", "worktrees", mid, ".gsd", "sf.db"); + const dbPath = join(basePath, ".sf", "worktrees", mid, ".sf", 
"sf.db"); if (!existsSync(dbPath)) return []; try { @@ -144,7 +144,7 @@ function querySliceProgress(basePath: string, mid: string): SliceProgress[] { } function extractCostFromNdjson(basePath: string, mid: string): number { - const stdoutPath = join(basePath, ".gsd", "parallel", `${mid}.stdout.log`); + const stdoutPath = join(basePath, ".sf", "parallel", `${mid}.stdout.log`); if (!existsSync(stdoutPath)) return 0; try { const content = readFileSync(stdoutPath, "utf-8"); @@ -166,7 +166,7 @@ function extractCostFromNdjson(basePath: string, mid: string): number { } function queryRecentCompletions(basePath: string, mid: string): string[] { - const dbPath = join(basePath, ".gsd", "worktrees", mid, ".gsd", "sf.db"); + const dbPath = join(basePath, ".sf", "worktrees", mid, ".sf", "sf.db"); if (!existsSync(dbPath)) return []; try { const sql = `SELECT id, slice_id, one_liner FROM tasks WHERE milestone_id='${mid}' AND status='complete' AND completed_at IS NOT NULL ORDER BY completed_at DESC LIMIT 5`; @@ -184,12 +184,12 @@ function queryRecentCompletions(basePath: string, mid: string): string[] { function collectWorkerData(basePath: string): WorkerView[] { const mids = discoverWorkers(basePath); - const parallelDir = join(basePath, ".gsd", "parallel"); + const parallelDir = join(basePath, ".sf", "parallel"); const workers: WorkerView[] = []; for (const mid of mids) { const status = readJsonSafe<StatusJson>(join(parallelDir, `${mid}.status.json`)); - const lock = readJsonSafe<AutoLock>(join(basePath, ".gsd", "worktrees", mid, ".gsd", "auto.lock")); + const lock = readJsonSafe<AutoLock>(join(basePath, ".sf", "worktrees", mid, ".sf", "auto.lock")); const slices = querySliceProgress(basePath, mid); const pid = lock?.pid || status?.pid || 0; diff --git a/src/resources/extensions/sf/parallel-orchestrator.ts b/src/resources/extensions/sf/parallel-orchestrator.ts index 0769c3c52..da66422ae 100644 --- a/src/resources/extensions/sf/parallel-orchestrator.ts +++ 
b/src/resources/extensions/sf/parallel-orchestrator.ts @@ -101,7 +101,7 @@ function stateFilePath(basePath: string): string { } /** - * Persist the current orchestrator state to .gsd/orchestrator.json. + * Persist the current orchestrator state to .sf/orchestrator.json. * Uses atomic write (tmp + rename) to prevent partial reads. */ export function persistState(basePath: string): void { @@ -158,7 +158,7 @@ function isPidAlive(pid: number): boolean { } /** - * Restore orchestrator state from .gsd/orchestrator.json. + * Restore orchestrator state from .sf/orchestrator.json. * Checks PID liveness for each worker: * - Living PID → state "running", process stays null (no handle) * - Dead PID → removed from restored state @@ -251,7 +251,7 @@ function restoreRuntimeState(basePath: string): boolean { // Fallback: rebuild coordinator state from live session status files. // This covers cases where orchestrator.json is missing/corrupt but workers are - // still running and writing heartbeats under .gsd/parallel/. + // still running and writing heartbeats under .sf/parallel/. cleanupStaleSessions(basePath); const statuses = readAllSessionStatuses(basePath); if (statuses.length === 0) { @@ -548,7 +548,7 @@ function createMilestoneWorktree(basePath: string, milestoneId: string): string // Run post-create hook if configured runWorktreePostCreateHook(basePath, info.path); - // Copy .gsd/ planning artifacts (milestones, CONTEXT, ROADMAP, etc.) from the + // Copy .sf/ planning artifacts (milestones, CONTEXT, ROADMAP, etc.) from the // project root into the worktree. Without this, workers for newly-planned // milestones can't find their roadmap and exit immediately (#2184 Bug 4). syncSfStateToWorktree(basePath, info.path); @@ -594,8 +594,8 @@ export function spawnWorker( SF_MILESTONE_LOCK: milestoneId, // Pass the real project root so workers don't need to re-derive it. 
// Without this, process.cwd() resolves symlinks and the worktree - // path heuristic can match the user-level ~/.gsd instead of the - // project .gsd, causing writes to ~ and corrupting user config. + // path heuristic can match the user-level ~/.sf instead of the + // project .sf, causing writes to ~ and corrupting user config. SF_PROJECT_ROOT: basePath, // Prevent workers from spawning their own parallel sessions SF_PARALLEL_WORKER: "1", diff --git a/src/resources/extensions/sf/paths.ts b/src/resources/extensions/sf/paths.ts index 755924e0d..370844d26 100644 --- a/src/resources/extensions/sf/paths.ts +++ b/src/resources/extensions/sf/paths.ts @@ -21,7 +21,7 @@ const dirEntryCache = new Map<string, Dirent[]>(); const dirListCache = new Map<string, string[]>(); // ─── Native Tree Cache ──────────────────────────────────────────────────────── -// When the native module is available, scan the entire .gsd/ tree in one call +// When the native module is available, scan the entire .sf/ tree in one call // and serve directory listings from memory instead of individual readdirSync calls. let nativeTreeCache: Map<string, GsdTreeEntry[]> | null = null; @@ -62,7 +62,7 @@ function cachedReaddirWithTypes(dirPath: string): Dirent[] { const cached = dirEntryCache.get(dirPath); if (cached) return cached; - // Try native tree cache for paths under .gsd/ + // Try native tree cache for paths under .sf/ if (nativeTreeBase) { const key = nativeTreeKey(dirPath, nativeTreeBase); if (key && nativeTreeCache) { @@ -104,7 +104,7 @@ function cachedReaddir(dirPath: string): string[] { const cached = dirListCache.get(dirPath); if (cached) return cached; - // Try native tree cache for paths under .gsd/ + // Try native tree cache for paths under .sf/ if (nativeTreeBase) { const key = nativeTreeKey(dirPath, nativeTreeBase); if (key && nativeTreeCache) { @@ -292,13 +292,13 @@ export function _clearGsdRootCache(): void { } /** - * Resolve the `.gsd` directory for a given project base path. 
+ * Resolve the `.sf` directory for a given project base path. * * Probe order: - * 1. basePath/.gsd — fast path (common case) + * 1. basePath/.sf — fast path (common case) * 2. git rev-parse root — handles cwd-is-a-subdirectory - * 3. Walk up from basePath — handles moved .gsd in an ancestor (bounded by git root) - * 4. basePath/.gsd — creation fallback (init scenario) + * 3. Walk up from basePath — handles moved .sf in an ancestor (bounded by git root) + * 4. basePath/.sf — creation fallback (init scenario) * * Result is cached per basePath for the process lifetime. */ @@ -314,23 +314,23 @@ export function sfRoot(basePath: string): string { export const gsdRoot = sfRoot; /** - * Detect if a path is inside a .gsd/worktrees/<name>/ structure. + * Detect if a path is inside a .sf/worktrees/<name>/ structure. * - * SF auto-worktrees live at <project>/.gsd/worktrees/<milestoneId>/. + * SF auto-worktrees live at <project>/.sf/worktrees/<milestoneId>/. * When sfRoot() is called with such a path, we must NOT walk up to the - * project root's .gsd — each worktree manages its own .gsd state (#2594). + * project root's .sf — each worktree manages its own .sf state (#2594). * * Matches both forward-slash and platform-native separators to handle * Windows paths (path.sep = '\\') and normalized Unix paths. */ function isInsideGsdWorktree(p: string): boolean { - // Match /.gsd/worktrees/<name> where <name> is the final segment or + // Match /.sf/worktrees/<name> where <name> is the final segment or // followed by a separator. The <name> segment must be non-empty. 
const sepFwd = "/"; const sepNative = "\\"; const markers = [ - `${sepFwd}.gsd${sepFwd}worktrees${sepFwd}`, - `${sepNative}.gsd${sepNative}worktrees${sepNative}`, + `${sepFwd}.sf${sepFwd}worktrees${sepFwd}`, + `${sepNative}.sf${sepNative}worktrees${sepNative}`, ]; for (const marker of markers) { const idx = p.indexOf(marker); @@ -347,13 +347,13 @@ function isInsideGsdWorktree(p: string): boolean { function probeGsdRoot(rawBasePath: string): string { // 1. Fast path — check the input path directly - const local = join(rawBasePath, ".gsd"); + const local = join(rawBasePath, ".sf"); if (existsSync(local)) return local; - // 1b. Worktree guard (#2594) — if basePath is inside a .gsd/worktrees/<name>/ - // structure, return the worktree-local .gsd path immediately. Without this, + // 1b. Worktree guard (#2594) — if basePath is inside a .sf/worktrees/<name>/ + // structure, return the worktree-local .sf path immediately. Without this, // the git-root probe (step 2) or walk-up (step 3) escapes to the project - // root's .gsd, causing ensurePreconditions() and deriveState() to read/write + // root's .sf, causing ensurePreconditions() and deriveState() to read/write // state in the wrong location. 
if (isInsideGsdWorktree(rawBasePath)) return local; @@ -381,7 +381,7 @@ function probeGsdRoot(rawBasePath: string): string { } catch { /* git not available */ } if (gitRoot) { - const candidate = join(gitRoot, ".gsd"); + const candidate = join(gitRoot, ".sf"); if (existsSync(candidate)) return candidate; } @@ -389,7 +389,7 @@ function probeGsdRoot(rawBasePath: string): string { if (gitRoot && basePath !== gitRoot) { let cur = dirname(basePath); while (cur !== basePath) { - const candidate = join(cur, ".gsd"); + const candidate = join(cur, ".sf"); if (existsSync(candidate)) return candidate; if (cur === gitRoot) break; basePath = cur; @@ -420,7 +420,7 @@ export function resolveSfRootFile(basePath: string, key: SFRootFileKey): string export const resolveGsdRootFile = resolveSfRootFile; export function relSfRootFile(key: SFRootFileKey): string { - return `.gsd/${SF_ROOT_FILES[key]}`; + return `.sf/${SF_ROOT_FILES[key]}`; } export const relGsdRootFile = relSfRootFile; @@ -496,20 +496,20 @@ export function resolveTaskFile( return file ? join(tDir, file) : null; } -// ─── Relative Path Builders (for prompts — .gsd/milestones/...) ──────────── +// ─── Relative Path Builders (for prompts — .sf/milestones/...) ──────────── /** - * Build relative .gsd/ path to a milestone directory. + * Build relative .sf/ path to a milestone directory. * Uses the actual directory name on disk if it exists, otherwise bare ID. */ export function relMilestonePath(basePath: string, milestoneId: string): string { const dir = resolveDir(milestonesDir(basePath), milestoneId); - if (dir) return `.gsd/milestones/${dir}`; - return `.gsd/milestones/${milestoneId}`; + if (dir) return `.sf/milestones/${dir}`; + return `.sf/milestones/${milestoneId}`; } /** - * Build relative .gsd/ path to a milestone file. + * Build relative .sf/ path to a milestone file. 
*/ export function relMilestoneFile( basePath: string, milestoneId: string, suffix: string @@ -524,7 +524,7 @@ export function relMilestoneFile( } /** - * Build relative .gsd/ path to a slice directory. + * Build relative .sf/ path to a slice directory. */ export function relSlicePath( basePath: string, milestoneId: string, sliceId: string @@ -540,7 +540,7 @@ export function relSlicePath( } /** - * Build relative .gsd/ path to a slice file. + * Build relative .sf/ path to a slice file. */ export function relSliceFile( basePath: string, milestoneId: string, sliceId: string, suffix: string @@ -555,7 +555,7 @@ export function relSliceFile( } /** - * Build relative .gsd/ path to a task file. + * Build relative .sf/ path to a task file. */ export function relTaskFile( basePath: string, milestoneId: string, sliceId: string, diff --git a/src/resources/extensions/sf/preferences-models.ts b/src/resources/extensions/sf/preferences-models.ts index f1751c0d8..2df4cb078 100644 --- a/src/resources/extensions/sf/preferences-models.ts +++ b/src/resources/extensions/sf/preferences-models.ts @@ -189,7 +189,7 @@ export function resolveDefaultSessionModel( /** * Returns true if `provider` is defined as a custom provider in the user's - * `~/.gsd/agent/models.json` (Ollama, vLLM, LM Studio, OpenAI-compatible + * `~/.sf/agent/models.json` (Ollama, vLLM, LM Studio, OpenAI-compatible * proxies, etc.). 
* * Used by auto-mode bootstrap to decide whether the session model @@ -208,7 +208,7 @@ export function resolveDefaultSessionModel( export function isCustomProvider(provider: string | undefined): boolean { if (!provider) return false; const candidates = [ - join(homedir(), ".gsd", "agent", "models.json"), + join(homedir(), ".sf", "agent", "models.json"), join(homedir(), ".pi", "agent", "models.json"), ]; for (const path of candidates) { diff --git a/src/resources/extensions/sf/preferences-skills.ts b/src/resources/extensions/sf/preferences-skills.ts index b28ace973..65882d50b 100644 --- a/src/resources/extensions/sf/preferences-skills.ts +++ b/src/resources/extensions/sf/preferences-skills.ts @@ -26,7 +26,7 @@ export type { SFSkillRule, SkillDiscoveryMode, SkillResolution, SkillResolutionR * Searches both the skills.sh ecosystem directory (~/.agents/skills/) and * Claude Code's official directory (~/.claude/skills/). Project-level * directories for both conventions are included as well. - * Legacy ~/.gsd/agent/skills/ is included as a fallback for pre-migration installs. + * Legacy ~/.sf/agent/skills/ is included as a fallback for pre-migration installs. 
*/ export function getSkillSearchDirs(cwd: string): Array<{ dir: string; method: SkillResolution["method"] }> { const dirs: Array<{ dir: string; method: SkillResolution["method"] }> = [ @@ -37,7 +37,7 @@ export function getSkillSearchDirs(cwd: string): Array<{ dir: string; method: Sk { dir: join(cwd, ".claude", "skills"), method: "project-skill" }, ]; // Legacy fallback — read skills from old SF directory only if migration hasn't completed - const legacyDir = join(homedir(), ".gsd", "agent", "skills"); + const legacyDir = join(homedir(), ".sf", "agent", "skills"); if (existsSync(legacyDir) && !existsSync(join(legacyDir, ".migrated-to-agents"))) { dirs.push({ dir: legacyDir, method: "user-skill" }); } diff --git a/src/resources/extensions/sf/preferences-validation.ts b/src/resources/extensions/sf/preferences-validation.ts index 745f6583a..f093fadc0 100644 --- a/src/resources/extensions/sf/preferences-validation.ts +++ b/src/resources/extensions/sf/preferences-validation.ts @@ -874,7 +874,7 @@ export function validatePreferences(preferences: SFPreferences): { } } if (g.commit_docs !== undefined) { - warnings.push("git.commit_docs is deprecated — .gsd/ is managed externally and always gitignored. Remove this setting."); + warnings.push("git.commit_docs is deprecated — .sf/ is managed externally and always gitignored. 
Remove this setting."); } if (g.manage_gitignore !== undefined) { if (typeof g.manage_gitignore === "boolean") git.manage_gitignore = g.manage_gitignore; diff --git a/src/resources/extensions/sf/preferences.ts b/src/resources/extensions/sf/preferences.ts index 2e87c4d2f..61317111a 100644 --- a/src/resources/extensions/sf/preferences.ts +++ b/src/resources/extensions/sf/preferences.ts @@ -99,7 +99,7 @@ export { // ─── Path Constants & Getters ─────────────────────────────────────────────── function sfHome(): string { - return process.env.SF_HOME || join(homedir(), ".gsd"); + return process.env.SF_HOME || join(homedir(), ".sf"); } function globalPreferencesPath(): string { diff --git a/src/resources/extensions/sf/preparation.ts b/src/resources/extensions/sf/preparation.ts index 6d6eb271a..fd8a77d56 100644 --- a/src/resources/extensions/sf/preparation.ts +++ b/src/resources/extensions/sf/preparation.ts @@ -868,14 +868,14 @@ const MAX_PRIOR_CONTEXT_CHARS = 6000; /** * Aggregate prior context from SF artifacts. * - * Reads DECISIONS.md, REQUIREMENTS.md, KNOWLEDGE.md from the .gsd directory + * Reads DECISIONS.md, REQUIREMENTS.md, KNOWLEDGE.md from the .sf directory * and milestone summaries from each milestone's MILESTONE-SUMMARY.md file. 
* - * @param basePath - Root directory of the project (contains .gsd/) + * @param basePath - Root directory of the project (contains .sf/) * @returns PriorContextBrief with aggregated context */ export async function aggregatePriorContext(basePath: string): Promise<PriorContextBrief> { - const sfPath = join(basePath, ".gsd"); + const sfPath = join(basePath, ".sf"); // Load decisions const decisionsContent = await loadFile(join(sfPath, "DECISIONS.md")); diff --git a/src/resources/extensions/sf/prompt-loader.ts b/src/resources/extensions/sf/prompt-loader.ts index ecc67f225..d35ce9334 100644 --- a/src/resources/extensions/sf/prompt-loader.ts +++ b/src/resources/extensions/sf/prompt-loader.ts @@ -9,7 +9,7 @@ * * All templates are eagerly loaded into cache at module init via warmCache(). * This prevents a running session from being invalidated when another `sf` - * launch overwrites ~/.gsd/agent/ with newer templates via initResources(). + * launch overwrites ~/.sf/agent/ with newer templates via initResources(). * Without eager caching, the in-memory extension code (which knows variable * set A) can read a newer template from disk (which expects variable set B), * causing a "template declares {{X}} but no value was provided" crash @@ -31,7 +31,7 @@ import { logWarning } from "./workflow-logger.js"; * On Windows (npm global install via MSYS2 / Git Bash) this can resolve to * the npm-global `AppData/Roaming/npm/…` path, which does NOT contain the * prompts/ and templates/ subtrees that initResources() copies to - * `~/.gsd/agent/extensions/sf/`. Detect the mismatch and fall back to + * `~/.sf/agent/extensions/sf/`. Detect the mismatch and fall back to * the user-local agent directory. 
*/ function resolveExtensionDir(): string { @@ -39,7 +39,7 @@ function resolveExtensionDir(): string { if (existsSync(join(moduleDir, "prompts"))) return moduleDir; // Fallback: user-local agent directory - const sfHome = process.env.SF_HOME || join(homedir(), ".gsd"); + const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); const agentGsdDir = join(sfHome, "agent", "extensions", "sf"); if (existsSync(join(agentGsdDir, "prompts"))) return agentGsdDir; @@ -53,7 +53,7 @@ const templatesDir = join(__extensionDir, "templates"); /** * Return the resolved templates directory path for use in prompts. - * Avoids hardcoding `~/.gsd/agent/extensions/sf/templates/` in templates. (#3575) + * Avoids hardcoding `~/.sf/agent/extensions/sf/templates/` in templates. (#3575) */ export function getTemplatesDir(): string { return templatesDir; diff --git a/src/resources/extensions/sf/queue-order.ts b/src/resources/extensions/sf/queue-order.ts index 26266aace..d630fe010 100644 --- a/src/resources/extensions/sf/queue-order.ts +++ b/src/resources/extensions/sf/queue-order.ts @@ -1,7 +1,7 @@ /** * SF Queue Order — Custom milestone execution ordering. * - * Stores an explicit execution order in `.gsd/QUEUE-ORDER.json`. + * Stores an explicit execution order in `.sf/QUEUE-ORDER.json`. * When present, `findMilestoneIds()` uses this order instead of * the default numeric sort (milestoneIdSort). * diff --git a/src/resources/extensions/sf/quick.ts b/src/resources/extensions/sf/quick.ts index 68252469c..dc8e30976 100644 --- a/src/resources/extensions/sf/quick.ts +++ b/src/resources/extensions/sf/quick.ts @@ -5,7 +5,7 @@ * Lightweight task execution with SF guarantees (atomic commits, state * tracking) but without the full milestone/slice ceremony. * - * Quick tasks live in `.gsd/quick/` and are tracked in STATE.md's + * Quick tasks live in `.sf/quick/` and are tracked in STATE.md's * "Quick Tasks Completed" table. 
*/ @@ -165,10 +165,10 @@ export async function handleQuick( const basePath = process.cwd(); const root = sfRoot(basePath); - // Validate: .gsd/ must exist + // Validate: .sf/ must exist if (!existsSync(root)) { ctx.ui.notify( - "No .gsd/ directory found. Run /sf to initialize a project first.", + "No .sf/ directory found. Run /sf to initialize a project first.", "error", ); return; @@ -189,7 +189,7 @@ export async function handleQuick( const taskNum = getNextTaskNum(quickDir); const slug = slugify(description); const taskDir = ensureQuickDir(basePath, taskNum, slug); - const taskDirRel = `.gsd/quick/${taskNum}-${slug}`; + const taskDirRel = `.sf/quick/${taskNum}-${slug}`; const date = new Date().toISOString().split("T")[0]; // Create git branch for the quick task (unless isolation:none — #3337) diff --git a/src/resources/extensions/sf/reactive-graph.ts b/src/resources/extensions/sf/reactive-graph.ts index da0f4873a..8763e59d2 100644 --- a/src/resources/extensions/sf/reactive-graph.ts +++ b/src/resources/extensions/sf/reactive-graph.ts @@ -287,7 +287,7 @@ export async function loadSliceTaskIO( // ─── State Persistence ──────────────────────────────────────────────────── function reactiveStatePath(basePath: string, mid: string, sid: string): string { - return join(basePath, ".gsd", "runtime", `${mid}-${sid}-reactive.json`); + return join(basePath, ".sf", "runtime", `${mid}-${sid}-reactive.json`); } function isReactiveState(data: unknown): data is ReactiveExecutionState { diff --git a/src/resources/extensions/sf/repo-identity.ts b/src/resources/extensions/sf/repo-identity.ts index 80d9681db..655f6dab0 100644 --- a/src/resources/extensions/sf/repo-identity.ts +++ b/src/resources/extensions/sf/repo-identity.ts @@ -2,8 +2,8 @@ * SF Repo Identity — external state directory primitives. * * Computes a stable per-repo identity hash, resolves the external - * `~/.gsd/projects/<hash>/` state directory, and manages the - * `<project>/.gsd → external` symlink. 
+ * `~/.sf/projects/<hash>/` state directory, and manages the + * `<project>/.sf → external` symlink. */ import { createHash } from "node:crypto"; @@ -12,7 +12,7 @@ import { cpSync, existsSync, lstatSync, mkdirSync, readdirSync, readFileSync, re import { homedir } from "node:os"; import { basename, dirname, join, resolve } from "node:path"; -const sfHome = process.env.SF_HOME || join(homedir(), ".gsd"); +const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); // ─── Repo Metadata ─────────────────────────────────────────────────────────── @@ -104,7 +104,7 @@ export function readRepoMeta(externalPath: string): RepoMeta | null { * Returns true when ALL of: * 1. basePath is inside a git repo (git rev-parse succeeds) * 2. The resolved git root is a proper ancestor of basePath - * 3. There is no *project* `.gsd` directory at the git root or any + * 3. There is no *project* `.sf` directory at the git root or any * intermediate ancestor (the parent project has not been * initialised with SF) * @@ -112,7 +112,7 @@ export function readRepoMeta(externalPath: string): RepoMeta | null { * `repoIdentity()` produces a hash unique to this directory, preventing * cross-project state leaks (#1639). * - * When the git root already has a project `.gsd`, the directory is a + * When the git root already has a project `.sf`, the directory is a * legitimate subdirectory of an existing SF project — `cd src/ && /sf` * should still load the parent project's milestones. */ @@ -123,17 +123,17 @@ export function isInheritedRepo(basePath: string): boolean { const normalizedRoot = canonicalizeExistingPath(root); if (normalizedBase === normalizedRoot) return false; // basePath IS the root - // The git root is a proper ancestor. Check whether it already has .gsd + // The git root is a proper ancestor. Check whether it already has .sf // (i.e. the parent project was initialised with SF). 
- if (isProjectGsd(join(root, ".gsd"))) return false; + if (isProjectGsd(join(root, ".sf"))) return false; - // Walk up from basePath's parent to the git root checking for .gsd. + // Walk up from basePath's parent to the git root checking for .sf. // Start at dirname(normalizedBase), NOT normalizedBase itself — finding - // .gsd at basePath means SF state is set up for THIS project, which + // .sf at basePath means SF state is set up for THIS project, which // says nothing about whether the git repo is inherited from an ancestor. let dir = dirname(normalizedBase); while (dir !== normalizedRoot && dir !== dirname(dir)) { - if (isProjectGsd(join(dir, ".gsd"))) return false; + if (isProjectGsd(join(dir, ".sf"))) return false; dir = dirname(dir); } @@ -144,15 +144,15 @@ export function isInheritedRepo(basePath: string): boolean { } /** - * Distinguish a *project* `.gsd` from the global `~/.gsd` state directory. + * Distinguish a *project* `.sf` from the global `~/.sf` state directory. * - * A project `.gsd` is either: + * A project `.sf` is either: * - A symlink to an external state directory (normal post-migration layout) * - A legacy real directory that is NOT the global SF home * * When the user's home directory is itself a git repo (e.g. dotfile managers), - * `~/.gsd` exists but is the global state directory — not a project `.gsd`. - * Treating it as a project `.gsd` would cause isInheritedRepo() to wrongly + * `~/.sf` exists but is the global state directory — not a project `.sf`. + * Treating it as a project `.sf` would cause isInheritedRepo() to wrongly * conclude that subdirectories are part of the home "project" (#2393). */ function isProjectGsd(sfPath: string): boolean { @@ -161,21 +161,21 @@ function isProjectGsd(sfPath: string): boolean { try { const stat = lstatSync(sfPath); - // Symlinks are always project .gsd (created by ensureGsdSymlink). + // Symlinks are always project .sf (created by ensureGsdSymlink). 
if (stat.isSymbolicLink()) return true; // For real directories, check that this isn't the global SF home. // Recompute sfHome dynamically so env overrides (SF_HOME) are // picked up at call time, not just at module load time. if (stat.isDirectory()) { - const currentGsdHome = process.env.SF_HOME || join(homedir(), ".gsd"); + const currentGsdHome = process.env.SF_HOME || join(homedir(), ".sf"); const normalizedGsdPath = canonicalizeExistingPath(sfPath); const normalizedGsdHome = canonicalizeExistingPath(currentGsdHome); if (normalizedGsdPath === normalizedGsdHome) return false; return true; } } catch { - // lstat failed — treat as no .gsd present + // lstat failed — treat as no .sf present } return false; @@ -280,7 +280,7 @@ export function validateProjectId(id: string): boolean { * this makes the identity stable across directory moves/renames (#2750). * * For local-only repos (no remote), includes the git root in the hash. - * Local repos use a `.gsd-id` marker file for recovery after moves. + * Local repos use a `.sf-id` marker file for recovery after moves. * * Deterministic: same repo always produces the same hash regardless of * which worktree the caller is inside. @@ -308,7 +308,7 @@ export function repoIdentity(basePath: string): string { * Compute the external SF state directory for a repository. * * Returns `$SF_STATE_DIR/projects/<hash>` if `SF_STATE_DIR` is set, - * otherwise `~/.gsd/projects/<hash>`. + * otherwise `~/.sf/projects/<hash>`. */ export function externalGsdRoot(basePath: string): string { const base = process.env.SF_STATE_DIR || sfHome; @@ -327,19 +327,19 @@ export function externalProjectsRoot(): string { // ─── Numbered Variant Cleanup ──────────────────────────────────────────────── /** - * macOS collision pattern: `.gsd 2`, `.gsd 3`, `.gsd 4`, etc. + * macOS collision pattern: `.sf 2`, `.sf 3`, `.sf 4`, etc. 
* - * When `symlinkSync` (or Finder) tries to create `.gsd` but a real directory + * When `symlinkSync` (or Finder) tries to create `.sf` but a real directory * already exists at that path, macOS APFS silently renames the new entry to - * `.gsd 2`, then `.gsd 3`, and so on. These numbered variants confuse SF - * because the canonical `.gsd` path no longer resolves to the external state + * `.sf 2`, then `.sf 3`, and so on. These numbered variants confuse SF + * because the canonical `.sf` path no longer resolves to the external state * directory, making tracked planning files appear deleted. * - * This helper scans the project root for entries matching `.gsd <digits>` and + * This helper scans the project root for entries matching `.sf <digits>` and * removes them. It is called early in `ensureGsdSymlink()` so that the - * canonical `.gsd` path is always the one in use. + * canonical `.sf` path is always the one in use. */ -const SF_NUMBERED_VARIANT_RE = /^\.gsd \d+$/; +const SF_NUMBERED_VARIANT_RE = /^\.sf \d+$/; export function cleanNumberedGsdVariants(projectPath: string): string[] { const removed: string[] = []; @@ -362,10 +362,10 @@ export function cleanNumberedGsdVariants(projectPath: string): string[] { return removed; } -// ─── .gsd-id Marker ───────────────────────────────────────────────────────── +// ─── .sf-id Marker ───────────────────────────────────────────────────────── /** - * Write a `.gsd-id` marker file in the project root. + * Write a `.sf-id` marker file in the project root. * * This file records the identity hash used for the external state directory. 
* For local-only repos (no remote), this marker survives directory moves and @@ -376,7 +376,7 @@ export function cleanNumberedGsdVariants(projectPath: string): string[] { */ function writeGsdIdMarker(projectPath: string, identity: string): void { try { - const markerPath = join(projectPath, ".gsd-id"); + const markerPath = join(projectPath, ".sf-id"); // Only write if content differs to avoid unnecessary disk writes. if (existsSync(markerPath)) { try { @@ -390,12 +390,12 @@ function writeGsdIdMarker(projectPath: string, identity: string): void { } /** - * Read the `.gsd-id` marker from the project root. + * Read the `.sf-id` marker from the project root. * Returns the identity hash, or null if the marker doesn't exist or is unreadable. */ function readGsdIdMarker(projectPath: string): string | null { try { - const markerPath = join(projectPath, ".gsd-id"); + const markerPath = join(projectPath, ".sf-id"); if (!existsSync(markerPath)) return null; const content = readFileSync(markerPath, "utf-8").trim(); return /^[a-zA-Z0-9_-]+$/.test(content) ? content : null; @@ -423,7 +423,7 @@ function hasProjectState(externalPath: string): boolean { * Resolve the external state directory, with recovery for relocated projects. * * For local-only repos where the computed identity produces an empty state dir, - * checks the `.gsd-id` marker for the original identity hash and recovers + * checks the `.sf-id` marker for the original identity hash and recovers * the old state directory if it still exists and contains data (#2750). * * Returns the resolved external path (may differ from the computed identity). @@ -437,7 +437,7 @@ function resolveExternalPathWithRecovery(projectPath: string): string { return computedPath; } - // Check for .gsd-id marker from a previous location. + // Check for .sf-id marker from a previous location. 
const markerId = readGsdIdMarker(projectPath); if (markerId && markerId !== computedId) { // The marker points to a different identity — the repo was likely moved. @@ -477,24 +477,24 @@ function resolveExternalPathWithRecovery(projectPath: string): string { // ─── Symlink Management ───────────────────────────────────────────────────── /** - * Ensure the `<project>/.gsd` symlink points to the external state directory. + * Ensure the `<project>/.sf` symlink points to the external state directory. * - * 1. Clean up any macOS numbered collision variants (`.gsd 2`, `.gsd 3`, etc.) - * 2. Resolve external dir (with relocation recovery via `.gsd-id` marker) + * 1. Clean up any macOS numbered collision variants (`.sf 2`, `.sf 3`, etc.) + * 2. Resolve external dir (with relocation recovery via `.sf-id` marker) * 3. mkdir -p the external dir - * 4. If `<project>/.gsd` doesn't exist → create symlink - * 5. If `<project>/.gsd` is already the correct symlink → no-op - * 6. If `<project>/.gsd` is a real directory → return as-is (migration handles later) - * 7. Write `.gsd-id` marker for future relocation recovery + * 4. If `<project>/.sf` doesn't exist → create symlink + * 5. If `<project>/.sf` is already the correct symlink → no-op + * 6. If `<project>/.sf` is a real directory → return as-is (migration handles later) + * 7. Write `.sf-id` marker for future relocation recovery * * Returns the resolved external path. */ export function ensureGsdSymlink(projectPath: string): string { const result = ensureGsdSymlinkCore(projectPath); - // Write .gsd-id marker so future relocations can recover this state (#2750). + // Write .sf-id marker so future relocations can recover this state (#2750). // Only write for the project root (not subdirectories or worktrees that - // delegate to a parent .gsd). + // delegate to a parent .sf). 
if (!isInsideWorktree(projectPath)) { writeGsdIdMarker(projectPath, repoIdentity(projectPath)); } @@ -504,11 +504,11 @@ export function ensureGsdSymlink(projectPath: string): string { function ensureGsdSymlinkCore(projectPath: string): string { const externalPath = resolveExternalPathWithRecovery(projectPath); - const localSf = join(projectPath, ".gsd"); + const localSf = join(projectPath, ".sf"); const inWorktree = isInsideWorktree(projectPath); - // Guard: Never create a symlink at ~/.gsd — that's the user-level SF home, - // not a project .gsd. This can happen if resolveProjectRoot() or + // Guard: Never create a symlink at ~/.sf — that's the user-level SF home, + // not a project .sf. This can happen if resolveProjectRoot() or // escapeStaleWorktree() returned ~ as the project root (#1676). const localSfNormalized = localSf.replaceAll("\\", "/"); const sfHomePath = sfHome.replaceAll("\\", "/"); @@ -517,17 +517,17 @@ function ensureGsdSymlinkCore(projectPath: string): string { } // Guard: If projectPath is a plain subdirectory (not a worktree) of a git - // repo that already has a .gsd at the git root, do not create a duplicate - // symlink in the subdirectory — that causes `.gsd 2` collision variants on + // repo that already has a .sf at the git root, do not create a duplicate + // symlink in the subdirectory — that causes `.sf 2` collision variants on // macOS (#2380). Worktrees are excluded because they legitimately need their - // own .gsd symlink pointing at the shared external state dir. + // own .sf symlink pointing at the shared external state dir. 
if (!inWorktree) { try { const gitRoot = resolveGitRoot(projectPath); const normalizedProject = canonicalizeExistingPath(projectPath); const normalizedRoot = canonicalizeExistingPath(gitRoot); if (normalizedProject !== normalizedRoot) { - const rootGsd = join(gitRoot, ".gsd"); + const rootGsd = join(gitRoot, ".sf"); if (existsSync(rootGsd)) { try { const rootStat = lstatSync(rootGsd); @@ -535,7 +535,7 @@ function ensureGsdSymlinkCore(projectPath: string): string { return rootStat.isSymbolicLink() ? realpathSync(rootGsd) : rootGsd; } } catch { - // Fall through to normal logic if we can't stat root .gsd + // Fall through to normal logic if we can't stat root .sf } } } @@ -544,7 +544,7 @@ function ensureGsdSymlinkCore(projectPath: string): string { } } - // Clean up macOS numbered collision variants (.gsd 2, .gsd 3, etc.) before + // Clean up macOS numbered collision variants (.sf 2, .sf 3, etc.) before // any existence checks — otherwise they accumulate and confuse state (#2205). cleanNumberedGsdVariants(projectPath); @@ -624,7 +624,7 @@ function ensureGsdSymlinkCore(projectPath: string): string { if (stat.isDirectory()) { // Real directory in the main repo — migration will handle this later. // In worktrees, keep the directory in place and let syncSfStateToWorktree - // refresh its contents. Replacing a git-tracked .gsd directory with a + // refresh its contents. Replacing a git-tracked .sf directory with a // symlink makes git think tracked planning files were deleted. return localSf; } diff --git a/src/resources/extensions/sf/reports.ts b/src/resources/extensions/sf/reports.ts index 168021e54..3b97b8d47 100644 --- a/src/resources/extensions/sf/reports.ts +++ b/src/resources/extensions/sf/reports.ts @@ -1,10 +1,10 @@ /** * SF Reports Registry * - * Manages .gsd/reports/ — the persistent progression log of HTML snapshots. + * Manages .sf/reports/ — the persistent progression log of HTML snapshots. 
* * Layout: - * .gsd/reports/ + * .sf/reports/ * reports.json lightweight metadata index (never re-parses HTML) * index.html auto-regenerated on every new snapshot * M001-20260101T120000.html per-milestone snapshot @@ -109,7 +109,7 @@ export interface WriteReportSnapshotArgs { } /** - * Write a report snapshot to .gsd/reports/, update reports.json, regenerate index.html. + * Write a report snapshot to .sf/reports/, update reports.json, regenerate index.html. * Returns the path of the written report file. */ export function writeReportSnapshot(args: WriteReportSnapshotArgs): string { diff --git a/src/resources/extensions/sf/rethink.ts b/src/resources/extensions/sf/rethink.ts index 2b166158f..1d667e0b7 100644 --- a/src/resources/extensions/sf/rethink.ts +++ b/src/resources/extensions/sf/rethink.ts @@ -55,8 +55,8 @@ export async function handleRethink( const existingMilestonesContext = await buildExistingMilestonesContext(basePath, milestoneIds, state); const commitInstruction = isGsdGitignored(basePath) - ? "Do not commit planning artifacts — .gsd/ is gitignored in this project." - : 'After changes, run `git add .gsd/ && git commit -m "docs(sf): rethink milestone plan"` to persist (rethink runs interactively outside auto-mode, so no system auto-commit)'; + ? "Do not commit planning artifacts — .sf/ is gitignored in this project." 
+ : 'After changes, run `git add .sf/ && git commit -m "docs(sf): rethink milestone plan"` to persist (rethink runs interactively outside auto-mode, so no system auto-commit)'; const content = loadPrompt("rethink", { rethinkData, diff --git a/src/resources/extensions/sf/rule-registry.ts b/src/resources/extensions/sf/rule-registry.ts index 8f964c0e9..74af0ad3d 100644 --- a/src/resources/extensions/sf/rule-registry.ts +++ b/src/resources/extensions/sf/rule-registry.ts @@ -28,12 +28,12 @@ import { parseUnitId } from "./unit-id.js"; export function resolveHookArtifactPath(basePath: string, unitId: string, artifactName: string): string { const { milestone, slice, task } = parseUnitId(unitId); if (task !== undefined && slice !== undefined) { - return join(basePath, ".gsd", "milestones", milestone, "slices", slice, "tasks", `${task}-${artifactName}`); + return join(basePath, ".sf", "milestones", milestone, "slices", slice, "tasks", `${task}-${artifactName}`); } if (slice !== undefined) { - return join(basePath, ".gsd", "milestones", milestone, "slices", slice, artifactName); + return join(basePath, ".sf", "milestones", milestone, "slices", slice, artifactName); } - return join(basePath, ".gsd", "milestones", milestone, artifactName); + return join(basePath, ".sf", "milestones", milestone, artifactName); } // ─── Dispatch Rule Conversion ────────────────────────────────────────────── @@ -375,7 +375,7 @@ export class RuleRegistry { // ── Persistence ───────────────────────────────────────────────────── private _hookStatePath(basePath: string): string { - return join(basePath, ".gsd", HOOK_STATE_FILE); + return join(basePath, ".sf", HOOK_STATE_FILE); } /** Persist current hook cycle counts to disk. 
*/ @@ -385,7 +385,7 @@ export class RuleRegistry { savedAt: new Date().toISOString(), }; try { - const dir = join(basePath, ".gsd"); + const dir = join(basePath, ".sf"); if (!existsSync(dir)) mkdirSync(dir, { recursive: true }); writeFileSync(this._hookStatePath(basePath), JSON.stringify(state, null, 2), "utf-8"); } catch (e) { @@ -525,7 +525,7 @@ export class RuleRegistry { formatHookStatus(): string { const entries = this.getHookStatus(); if (entries.length === 0) { - return "No hooks configured. Add post_unit_hooks or pre_dispatch_hooks to .gsd/PREFERENCES.md"; + return "No hooks configured. Add post_unit_hooks or pre_dispatch_hooks to .sf/PREFERENCES.md"; } const lines: string[] = ["Configured Hooks:", ""]; diff --git a/src/resources/extensions/sf/run-manager.ts b/src/resources/extensions/sf/run-manager.ts index f11f41d9a..8fb8f4cd9 100644 --- a/src/resources/extensions/sf/run-manager.ts +++ b/src/resources/extensions/sf/run-manager.ts @@ -1,7 +1,7 @@ /** * run-manager.ts — Create and list isolated workflow run directories. * - * Each run lives under `.gsd/workflow-runs/<name>/<timestamp>/` and contains: + * Each run lives under `.sf/workflow-runs/<name>/<timestamp>/` and contains: * - DEFINITION.yaml — frozen snapshot of the workflow definition at run-creation time * - GRAPH.yaml — initialized step graph with all steps pending * - PARAMS.json — (optional) parameter overrides used for this run @@ -70,9 +70,9 @@ function deriveStatus(graph: WorkflowGraph): "pending" | "running" | "complete" /** * Create a new isolated run directory for a workflow definition. * - * 1. Loads the definition from `<basePath>/.gsd/workflow-defs/<defName>.yaml` + * 1. Loads the definition from `<basePath>/.sf/workflow-defs/<defName>.yaml` * 2. Applies parameter substitution if overrides are provided - * 3. Creates `<basePath>/.gsd/workflow-runs/<defName>/<timestamp>/` + * 3. Creates `<basePath>/.sf/workflow-runs/<defName>/<timestamp>/` * 4. 
Writes frozen DEFINITION.yaml, initialized GRAPH.yaml, and optional PARAMS.json * * @param basePath — project root directory @@ -86,7 +86,7 @@ export function createRun( defName: string, overrides?: Record<string, string>, ): string { - const defsDir = join(basePath, ".gsd", DEFS_DIR); + const defsDir = join(basePath, ".sf", DEFS_DIR); // Load and validate the definition const rawDef = loadDefinition(defsDir, defName); @@ -98,7 +98,7 @@ export function createRun( // Create the run directory const timestamp = makeTimestamp(); - const runDir = join(basePath, ".gsd", RUNS_DIR, defName, timestamp); + const runDir = join(basePath, ".sf", RUNS_DIR, defName, timestamp); mkdirSync(runDir, { recursive: true }); // Freeze the definition as DEFINITION.yaml @@ -123,7 +123,7 @@ export function createRun( /** * List existing workflow runs with metadata. * - * Scans `<basePath>/.gsd/workflow-runs/` for run directories. Each run's + * Scans `<basePath>/.sf/workflow-runs/` for run directories. Each run's * GRAPH.yaml is read to derive step counts and overall status. 
* * @param basePath — project root directory @@ -131,7 +131,7 @@ export function createRun( * @returns Array of run metadata, sorted newest-first within each definition */ export function listRuns(basePath: string, defName?: string): RunMetadata[] { - const runsRoot = join(basePath, ".gsd", RUNS_DIR); + const runsRoot = join(basePath, ".sf", RUNS_DIR); if (!existsSync(runsRoot)) return []; const results: RunMetadata[] = []; diff --git a/src/resources/extensions/sf/safety/file-change-validator.ts b/src/resources/extensions/sf/safety/file-change-validator.ts index acc0dc927..710141003 100644 --- a/src/resources/extensions/sf/safety/file-change-validator.ts +++ b/src/resources/extensions/sf/safety/file-change-validator.ts @@ -53,8 +53,8 @@ export function validateFileChanges( const actualFiles = getChangedFilesFromLastCommit(basePath); if (!actualFiles) return null; - // Filter out .gsd/ internal files — only validate project source files - const projectFiles = actualFiles.filter(f => !f.startsWith(".gsd/") && !f.startsWith(".gsd\\")); + // Filter out .sf/ internal files — only validate project source files + const projectFiles = actualFiles.filter(f => !f.startsWith(".sf/") && !f.startsWith(".sf\\")); // Normalize expected paths (strip leading ./ or /) const normalizedExpected = new Set( diff --git a/src/resources/extensions/sf/safety/gemini-permissions.ts b/src/resources/extensions/sf/safety/gemini-permissions.ts new file mode 100644 index 000000000..a5470d36d --- /dev/null +++ b/src/resources/extensions/sf/safety/gemini-permissions.ts @@ -0,0 +1,32 @@ +import type { AgentToolCall } from "@sf-run/pi-agent-core"; + +/** + * Gemini Permissions Addon + * + * Mimics Claude Code's permission gate (bypassPermissions, etc.) for Gemini models. + * In SF, this is implemented as a beforeToolCall hook that prompts for approval. 
+ */ +export async function handleGeminiPermissions({ + toolCall, + args, + model, + config +}: { + toolCall: AgentToolCall; + args: any; + model: any; + config: any; +}): Promise<{ block: boolean; reason?: string } | undefined> { + // Only apply to Gemini models + if (!model?.id?.toLowerCase().includes("gemini")) return undefined; + + // Check for bypass flag in config or environment + const bypass = process.env.SF_GEMINI_PERMISSION_MODE === "bypassPermissions"; + if (bypass) return undefined; + + // For now, this is a placeholder that simulates the permission logic. + // In a real TUI environment, you'd trigger a UI confirmation here. + + // return { block: true, reason: "Permission denied for tool: " + toolCall.name }; + return undefined; +} diff --git a/src/resources/extensions/sf/service-tier.ts b/src/resources/extensions/sf/service-tier.ts index 5c0a8f134..5bd22c47c 100644 --- a/src/resources/extensions/sf/service-tier.ts +++ b/src/resources/extensions/sf/service-tier.ts @@ -143,7 +143,7 @@ async function writeGlobalServiceTier( } const frontmatter = serializePreferencesToFrontmatter(prefs); - let body = "\n# SF Skill Preferences\n\nSee `~/.gsd/agent/extensions/sf/docs/preferences-reference.md` for full field documentation and examples.\n"; + let body = "\n# SF Skill Preferences\n\nSee `~/.sf/agent/extensions/sf/docs/preferences-reference.md` for full field documentation and examples.\n"; if (existsSync(path)) { const preserved = extractBodyAfterFrontmatter(readFileSync(path, "utf-8")); if (preserved) body = preserved; diff --git a/src/resources/extensions/sf/session-lock.ts b/src/resources/extensions/sf/session-lock.ts index 4ad9b0d8e..2d0df63e6 100644 --- a/src/resources/extensions/sf/session-lock.ts +++ b/src/resources/extensions/sf/session-lock.ts @@ -6,7 +6,7 @@ * lockfile) which eliminates the TOCTOU race condition that existed with * the old advisory JSON lock approach. 
* - * The lock file (.gsd/auto.lock) contains JSON metadata (PID, start time, + * The lock file (.sf/auto.lock) contains JSON metadata (PID, start time, * unit info) for diagnostics, but the actual exclusion is enforced by the * OS-level lock held via proper-lockfile. * @@ -99,7 +99,7 @@ const LOCK_FILE = "auto.lock"; * Derive the effective lock file name for the current process. * In parallel worker mode (SF_PARALLEL_WORKER + SF_MILESTONE_LOCK), * each worker uses a per-milestone lock file (`auto-<milestoneId>.lock`) - * to avoid contending on the shared `.gsd/auto.lock` (#2184). + * to avoid contending on the shared `.sf/auto.lock` (#2184). */ export function effectiveLockFile(): string { const mid = process.env.SF_PARALLEL_WORKER ? process.env.SF_MILESTONE_LOCK : null; @@ -108,8 +108,8 @@ export function effectiveLockFile(): string { /** * Derive the OS-level lock target directory for the current process. - * In parallel worker mode, uses `.gsd/parallel/<milestoneId>/` instead of - * `.gsd/` so workers don't contend on the same proper-lockfile directory (#2184). + * In parallel worker mode, uses `.sf/parallel/<milestoneId>/` instead of + * `.sf/` so workers don't contend on the same proper-lockfile directory (#2184). */ export function effectiveLockTarget(sfDir: string): string { const mid = process.env.SF_PARALLEL_WORKER ? process.env.SF_MILESTONE_LOCK : null; @@ -129,12 +129,12 @@ function lockPath(basePath: string): string { * that accumulate from macOS file conflict resolution (iCloud/Dropbox/OneDrive) * or other filesystem-level copy-on-conflict behavior (#1315). * - * Also removes stray proper-lockfile directories beyond the canonical `.gsd.lock/`. + * Also removes stray proper-lockfile directories beyond the canonical `.sf.lock/`. 
*/ export function cleanupStrayLockFiles(basePath: string): void { const sfDir = sfRoot(basePath); - // Clean numbered auto lock files inside .gsd/ + // Clean numbered auto lock files inside .sf/ try { if (existsSync(sfDir)) { for (const entry of readdirSync(sfDir)) { @@ -146,14 +146,14 @@ export function cleanupStrayLockFiles(basePath: string): void { } } catch { /* non-fatal: directory read failure */ } - // Clean stray proper-lockfile directories (e.g. ".gsd 2.lock/") - // The canonical one is ".gsd.lock/" — anything else is stray. + // Clean stray proper-lockfile directories (e.g. ".sf 2.lock/") + // The canonical one is ".sf.lock/" — anything else is stray. try { const parentDir = dirname(sfDir); - const sfDirName = sfDir.split("/").pop() || ".gsd"; + const sfDirName = sfDir.split("/").pop() || ".sf"; if (existsSync(parentDir)) { for (const entry of readdirSync(parentDir)) { - // Match ".gsd <N>.lock" or ".gsd (<N>).lock" directories but NOT ".gsd.lock" + // Match ".sf <N>.lock" or ".sf (<N>).lock" directories but NOT ".sf.lock" if (entry !== `${sfDirName}.lock` && entry.startsWith(sfDirName) && entry.endsWith(".lock")) { const fullPath = join(parentDir, entry); try { @@ -185,7 +185,7 @@ function ensureExitHandler(_sfDir: string): void { if (_releaseFunction) { _releaseFunction(); _releaseFunction = null; } } catch { /* best-effort */ } // Clean ALL registered lock paths, not just the current one (#1578). - // Lock files accumulate across main project .gsd/, worktree .gsd/, + // Lock files accumulate across main project .sf/, worktree .sf/, // and projects registry paths — cleanup must cover all of them. for (const dir of _lockDirRegistry) { try { @@ -318,7 +318,7 @@ export function acquireSessionLock(basePath: string): SessionLockResult { // Try to acquire an exclusive OS-level lock on the lock target. // We lock a directory since proper-lockfile works best on directories, // and the lock file itself may not exist yet. 
- // In parallel worker mode, lockTarget is .gsd/parallel/<MID>/ (#2184). + // In parallel worker mode, lockTarget is .sf/parallel/<MID>/ (#2184). mkdirSync(lockTarget, { recursive: true }); const release = lockfile.lockSync(lockTarget, { @@ -339,7 +339,7 @@ export function acquireSessionLock(basePath: string): SessionLockResult { return { acquired: true }; } catch (err) { - // Lock is held by another process — or the .gsd.lock/ directory is stranded. + // Lock is held by another process — or the .sf.lock/ directory is stranded. // Check: if auto.lock is gone and no process is alive, the lock dir is stale. const existingData = readExistingLockData(lp); const existingPid = existingData?.pid; @@ -535,7 +535,7 @@ export function releaseSessionLock(basePath: string): void { } // Remove the proper-lockfile directory for the current lock target. - // In parallel worker mode, this is .gsd/parallel/<MID>.lock/ (#2184). + // In parallel worker mode, this is .sf/parallel/<MID>.lock/ (#2184). const sfDir = sfRoot(basePath); const lockTarget = effectiveLockTarget(sfDir); try { @@ -554,7 +554,7 @@ export function releaseSessionLock(basePath: string): void { } // Clean ALL registered lock paths (#1578) — lock files accumulate across - // main project .gsd/, worktree .gsd/, and projects registry paths. + // main project .sf/, worktree .sf/, and projects registry paths. for (const dir of _lockDirRegistry) { try { const lockFile = join(dir, LOCK_FILE); diff --git a/src/resources/extensions/sf/session-status-io.ts b/src/resources/extensions/sf/session-status-io.ts index bb3287a79..b5dd30eb6 100644 --- a/src/resources/extensions/sf/session-status-io.ts +++ b/src/resources/extensions/sf/session-status-io.ts @@ -100,7 +100,7 @@ export function readSessionStatus(basePath: string, milestoneId: string): Sessio return loadJsonFileOrNull(statusPath(basePath, milestoneId), isSessionStatus); } -/** Read all session status files from .gsd/parallel/. 
*/ +/** Read all session status files from .sf/parallel/. */ export function readAllSessionStatuses(basePath: string): SessionStatus[] { const dir = parallelDir(basePath); if (!existsSync(dir)) return []; diff --git a/src/resources/extensions/sf/sf-db.ts b/src/resources/extensions/sf/sf-db.ts index b09211b5b..2ce22b269 100644 --- a/src/resources/extensions/sf/sf-db.ts +++ b/src/resources/extensions/sf/sf-db.ts @@ -8,7 +8,7 @@ // ─── Single-writer invariant ───────────────────────────────────────────── // This file is the ONLY place in the codebase that issues write SQL // (INSERT / UPDATE / DELETE / REPLACE / BEGIN-COMMIT transactions) against -// the engine database at `.gsd/sf.db`. All other modules must call the +// the engine database at `.sf/sf.db`. All other modules must call the // typed wrappers exported here. The structural test // `tests/single-writer-invariant.test.ts` fails CI if a new bypass appears. // @@ -16,7 +16,7 @@ // (context-store, memory-store queries, doctor checks, projections). // Do NOT use it for writes — add a wrapper here instead. // -// The separate `.gsd/unit-claims.db` managed by `unit-ownership.ts` is an +// The separate `.sf/unit-claims.db` managed by `unit-ownership.ts` is an // intentionally independent store for cross-worktree claim races and is // excluded from this invariant. diff --git a/src/resources/extensions/sf/skill-health.ts b/src/resources/extensions/sf/skill-health.ts index f419ad2e4..b486fce8e 100644 --- a/src/resources/extensions/sf/skill-health.ts +++ b/src/resources/extensions/sf/skill-health.ts @@ -257,7 +257,7 @@ Analyze the just-completed unit (${unitId}) for skill drift. - **Minor**: Agent found a better approach but skill isn't wrong → note in KNOWLEDGE.md - **Significant**: Skill has outdated or incorrect guidance → propose fix -5. **If significant drift found**, write a heal suggestion to \`.gsd/skill-review-queue.md\`: +5. 
**If significant drift found**, write a heal suggestion to \`.sf/skill-review-queue.md\`: \`\`\`markdown ### {skill-name} (flagged {date}) diff --git a/src/resources/extensions/sf/skill-telemetry.ts b/src/resources/extensions/sf/skill-telemetry.ts index 2f5ea1edb..1b9b8d1e6 100644 --- a/src/resources/extensions/sf/skill-telemetry.ts +++ b/src/resources/extensions/sf/skill-telemetry.ts @@ -32,7 +32,7 @@ const activelyLoadedSkills = new Set<string>(); export function captureAvailableSkills(): void { const skillsDir = join(homedir(), ".agents", "skills"); const claudeSkillsDir = join(homedir(), ".claude", "skills"); - const legacyDir = join(homedir(), ".gsd", "agent", "skills"); + const legacyDir = join(homedir(), ".sf", "agent", "skills"); const names = listSkillNames(skillsDir); const claudeNames = listSkillNames(claudeSkillsDir); // Include skills still in the legacy directory only if migration hasn't completed @@ -109,7 +109,7 @@ export function detectStaleSkills( // Check all installed skills, not just those with usage data const skillsDir = join(homedir(), ".agents", "skills"); const claudeSkillsDir = join(homedir(), ".claude", "skills"); - const legacyDir = join(homedir(), ".gsd", "agent", "skills"); + const legacyDir = join(homedir(), ".sf", "agent", "skills"); const legacyMigrated = existsSync(join(legacyDir, ".migrated-to-agents")); const legacyNames = legacyMigrated ? 
[] : listSkillNames(legacyDir); const installedSet = new Set([...listSkillNames(skillsDir), ...listSkillNames(claudeSkillsDir), ...legacyNames]); diff --git a/src/resources/extensions/sf/slice-parallel-conflict.ts b/src/resources/extensions/sf/slice-parallel-conflict.ts index 4bb50448a..c7df8ca01 100644 --- a/src/resources/extensions/sf/slice-parallel-conflict.ts +++ b/src/resources/extensions/sf/slice-parallel-conflict.ts @@ -55,8 +55,8 @@ export function hasFileConflict( sliceA: string, sliceB: string, ): boolean { - const planPathA = join(basePath, ".gsd", "milestones", mid, sliceA, "PLAN.md"); - const planPathB = join(basePath, ".gsd", "milestones", mid, sliceB, "PLAN.md"); + const planPathA = join(basePath, ".sf", "milestones", mid, sliceA, "PLAN.md"); + const planPathB = join(basePath, ".sf", "milestones", mid, sliceB, "PLAN.md"); // Conservative: missing PLAN = block if (!existsSync(planPathA) || !existsSync(planPathB)) { diff --git a/src/resources/extensions/sf/state.ts b/src/resources/extensions/sf/state.ts index 4e613b9a7..49d88810f 100644 --- a/src/resources/extensions/sf/state.ts +++ b/src/resources/extensions/sf/state.ts @@ -141,7 +141,7 @@ export function isValidationTerminal(validationContent: string): boolean { // ── deriveState memoization ───────────────────────────────────────────────── // Cache the most recent deriveState() result keyed by basePath. Within a single // dispatch cycle (~100ms window), repeated calls return the cached value instead -// of re-reading the entire .gsd/ tree from disk. +// of re-reading the entire .sf/ tree from disk. 
interface StateCache { basePath: string; @@ -1018,7 +1018,7 @@ export async function _deriveStateImpl(basePath: string): Promise<SFState> { } // ── Batch-parse file cache ────────────────────────────────────────────── - // When the native Rust parser is available, read every .md file under .gsd/ + // When the native Rust parser is available, read every .md file under .sf/ // in one call and build an in-memory content map keyed by absolute path. // This eliminates O(N) individual fs.readFile calls during traversal. const fileContentCache = new Map<string, string>(); diff --git a/src/resources/extensions/sf/sync-lock.ts b/src/resources/extensions/sf/sync-lock.ts index ec57bed4a..58dbbac2e 100644 --- a/src/resources/extensions/sf/sync-lock.ts +++ b/src/resources/extensions/sf/sync-lock.ts @@ -16,7 +16,7 @@ const SLEEP_BUFFER = new SharedArrayBuffer(4); const SLEEP_VIEW = new Int32Array(SLEEP_BUFFER); function lockFilePath(basePath: string): string { - return join(basePath, ".gsd", "sync.lock"); + return join(basePath, ".sf", "sync.lock"); } function sleepSync(ms: number): void { @@ -27,7 +27,7 @@ function sleepSync(ms: number): void { * Acquire an advisory sync lock for the given basePath. * Returns { acquired: true } on success, { acquired: false } after timeout. 
* - * - Creates lock file at {basePath}/.gsd/sync.lock with JSON { pid, acquired_at } + * - Creates lock file at {basePath}/.sf/sync.lock with JSON { pid, acquired_at } * - If lock exists and mtime > 60s (stale), overrides it * - If lock exists and not stale, spins up to timeoutMs before giving up */ diff --git a/src/resources/extensions/sf/tests/activity-log.test.ts b/src/resources/extensions/sf/tests/activity-log.test.ts index 478ba6af0..33197abec 100644 --- a/src/resources/extensions/sf/tests/activity-log.test.ts +++ b/src/resources/extensions/sf/tests/activity-log.test.ts @@ -39,7 +39,7 @@ function listFiles(dir: string): string[] { } function activityDir(baseDir: string): string { - return join(baseDir, ".gsd", "activity"); + return join(baseDir, ".sf", "activity"); } function createCtx(entries: unknown[]) { diff --git a/src/resources/extensions/sf/tests/auto-dashboard.test.ts b/src/resources/extensions/sf/tests/auto-dashboard.test.ts index 8650fd473..7a81b446a 100644 --- a/src/resources/extensions/sf/tests/auto-dashboard.test.ts +++ b/src/resources/extensions/sf/tests/auto-dashboard.test.ts @@ -232,11 +232,11 @@ test("extractUatSliceId returns null for invalid formats", () => { test("widget mode respects project preference precedence and persists there", (t) => { const homeDir = makeTempDir("home"); const projectDir = makeTempDir("project"); - const globalPrefsPath = join(homeDir, ".gsd", "preferences.md"); - const projectPrefsPath = join(projectDir, ".gsd", "preferences.md"); + const globalPrefsPath = join(homeDir, ".sf", "preferences.md"); + const projectPrefsPath = join(projectDir, ".sf", "preferences.md"); - mkdirSync(join(homeDir, ".gsd"), { recursive: true }); - mkdirSync(join(projectDir, ".gsd"), { recursive: true }); + mkdirSync(join(homeDir, ".sf"), { recursive: true }); + mkdirSync(join(projectDir, ".sf"), { recursive: true }); writeFileSync(globalPrefsPath, "---\nversion: 1\nwidget_mode: off\n---\n", "utf-8"); writeFileSync(projectPrefsPath, 
"---\nversion: 1\nwidget_mode: small\n---\n", "utf-8"); diff --git a/src/resources/extensions/sf/tests/auto-lock-creation.test.ts b/src/resources/extensions/sf/tests/auto-lock-creation.test.ts index 04b85f970..70f6e22f0 100644 --- a/src/resources/extensions/sf/tests/auto-lock-creation.test.ts +++ b/src/resources/extensions/sf/tests/auto-lock-creation.test.ts @@ -21,15 +21,15 @@ function hasProperLockfile(): boolean { const properLockfileAvailable = hasProperLockfile(); -// ─── writeLock creates auto.lock in .gsd/ ──────────────────────────────── +// ─── writeLock creates auto.lock in .sf/ ──────────────────────────────── test("writeLock creates auto.lock with correct structure", () => { const dir = mkdtempSync(join(tmpdir(), "sf-lock-test-")); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); writeLock(dir, "starting", "M001"); - const lockPath = join(dir, ".gsd", "auto.lock"); + const lockPath = join(dir, ".sf", "auto.lock"); assert.ok(existsSync(lockPath), "auto.lock should exist after writeLock"); const data = JSON.parse(readFileSync(lockPath, "utf-8")); @@ -43,12 +43,12 @@ test("writeLock creates auto.lock with correct structure", () => { test("writeLock updates existing lock with new unit info", () => { const dir = mkdtempSync(join(tmpdir(), "sf-lock-test-")); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); writeLock(dir, "starting", "M001"); writeLock(dir, "execute-task", "M001/S01/T01", "/tmp/session.jsonl"); - const data = JSON.parse(readFileSync(join(dir, ".gsd", "auto.lock"), "utf-8")); + const data = JSON.parse(readFileSync(join(dir, ".sf", "auto.lock"), "utf-8")); assert.equal(data.unitType, "execute-task", "lock should be updated to new unit type"); assert.equal(data.unitId, "M001/S01/T01", "lock should be updated to new unit ID"); assert.equal(data.sessionFile, "/tmp/session.jsonl", "session file should be recorded"); @@ -60,7 +60,7 @@ 
test("writeLock updates existing lock with new unit info", () => { test("readCrashLock returns null when no lock file exists", () => { const dir = mkdtempSync(join(tmpdir(), "sf-lock-test-")); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); const lock = readCrashLock(dir); assert.equal(lock, null, "should return null when no lock file"); @@ -70,7 +70,7 @@ test("readCrashLock returns null when no lock file exists", () => { test("readCrashLock returns lock data when file exists", () => { const dir = mkdtempSync(join(tmpdir(), "sf-lock-test-")); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); writeLock(dir, "plan-milestone", "M002"); const lock = readCrashLock(dir); @@ -86,20 +86,20 @@ test("readCrashLock returns lock data when file exists", () => { test("clearLock removes the lock file", () => { const dir = mkdtempSync(join(tmpdir(), "sf-lock-test-")); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); writeLock(dir, "starting", "M001"); - assert.ok(existsSync(join(dir, ".gsd", "auto.lock")), "lock should exist before clear"); + assert.ok(existsSync(join(dir, ".sf", "auto.lock")), "lock should exist before clear"); clearLock(dir); - assert.ok(!existsSync(join(dir, ".gsd", "auto.lock")), "lock should be removed after clear"); + assert.ok(!existsSync(join(dir, ".sf", "auto.lock")), "lock should be removed after clear"); rmSync(dir, { recursive: true, force: true }); }); test("clearLock is safe when no lock file exists", () => { const dir = mkdtempSync(join(tmpdir(), "sf-lock-test-")); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); // Should not throw clearLock(dir); @@ -109,22 +109,22 @@ test("clearLock is safe when no lock file exists", () => { test("bootstrap cleanup releases session lock artifacts", (t) => { const dir = 
mkdtempSync(join(tmpdir(), "sf-lock-test-")); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); t.after(() => rmSync(dir, { recursive: true, force: true })); const result = acquireSessionLock(dir); assert.equal(result.acquired, true, "session lock should be acquired"); - assert.ok(existsSync(join(dir, ".gsd", "auto.lock")), "auto.lock should exist while lock is held"); + assert.ok(existsSync(join(dir, ".sf", "auto.lock")), "auto.lock should exist while lock is held"); if (properLockfileAvailable) { - assert.ok(existsSync(join(dir, ".gsd.lock")), ".gsd.lock should exist while lock is held"); + assert.ok(existsSync(join(dir, ".sf.lock")), ".sf.lock should exist while lock is held"); } releaseSessionLock(dir); clearLock(dir); - assert.ok(!existsSync(join(dir, ".gsd", "auto.lock")), "auto.lock should be removed by bootstrap cleanup"); - assert.ok(!existsSync(join(dir, ".gsd.lock")), ".gsd.lock should be removed by bootstrap cleanup"); + assert.ok(!existsSync(join(dir, ".sf", "auto.lock")), "auto.lock should be removed by bootstrap cleanup"); + assert.ok(!existsSync(join(dir, ".sf.lock")), ".sf.lock should be removed by bootstrap cleanup"); }); // ─── isLockProcessAlive detects live vs dead PIDs ──────────────────────── @@ -166,7 +166,7 @@ test("isLockProcessAlive returns false for invalid PID", () => { test("lock file enables cross-process auto-mode detection", () => { const dir = mkdtempSync(join(tmpdir(), "sf-lock-test-")); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); // Use the parent process PID — guaranteed alive on all platforms (Unix and Windows). // PID 1 (init) only works on Unix; on Windows it doesn't exist. 
@@ -178,7 +178,7 @@ test("lock file enables cross-process auto-mode detection", () => { unitId: "M001/S01/T02", unitStartedAt: new Date().toISOString(), }; - writeFileSync(join(dir, ".gsd", "auto.lock"), JSON.stringify(lockData, null, 2)); + writeFileSync(join(dir, ".sf", "auto.lock"), JSON.stringify(lockData, null, 2)); const lock = readCrashLock(dir); assert.ok(lock, "should read the lock"); @@ -193,7 +193,7 @@ test("lock file enables cross-process auto-mode detection", () => { test("stale lock from dead process is detected as not alive", () => { const dir = mkdtempSync(join(tmpdir(), "sf-lock-test-")); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); // Simulate a stale lock from a process that no longer exists const lockData = { @@ -203,7 +203,7 @@ test("stale lock from dead process is detected as not alive", () => { unitId: "M001/S02", unitStartedAt: "2026-03-01T00:05:00Z", }; - writeFileSync(join(dir, ".gsd", "auto.lock"), JSON.stringify(lockData, null, 2)); + writeFileSync(join(dir, ".sf", "auto.lock"), JSON.stringify(lockData, null, 2)); const lock = readCrashLock(dir); assert.ok(lock, "should read the stale lock"); diff --git a/src/resources/extensions/sf/tests/auto-model-selection.test.ts b/src/resources/extensions/sf/tests/auto-model-selection.test.ts index 4a11b2dbf..27e5a7a5f 100644 --- a/src/resources/extensions/sf/tests/auto-model-selection.test.ts +++ b/src/resources/extensions/sf/tests/auto-model-selection.test.ts @@ -20,9 +20,9 @@ test("resolvePreferredModelConfig synthesizes heavy routing ceiling when models const tempGsdHome = makeTempDir("sf-routing-home-"); try { - mkdirSync(join(tempProject, ".gsd"), { recursive: true }); + mkdirSync(join(tempProject, ".sf"), { recursive: true }); writeFileSync( - join(tempProject, ".gsd", "PREFERENCES.md"), + join(tempProject, ".sf", "PREFERENCES.md"), [ "---", "dynamic_routing:", @@ -63,9 +63,9 @@ test("resolvePreferredModelConfig falls back to auto 
start model when heavy tier const tempGsdHome = makeTempDir("sf-routing-home-"); try { - mkdirSync(join(tempProject, ".gsd"), { recursive: true }); + mkdirSync(join(tempProject, ".sf"), { recursive: true }); writeFileSync( - join(tempProject, ".gsd", "PREFERENCES.md"), + join(tempProject, ".sf", "PREFERENCES.md"), [ "---", "dynamic_routing:", @@ -105,9 +105,9 @@ test("resolvePreferredModelConfig keeps explicit phase models as the ceiling", ( const tempGsdHome = makeTempDir("sf-routing-home-"); try { - mkdirSync(join(tempProject, ".gsd"), { recursive: true }); + mkdirSync(join(tempProject, ".sf"), { recursive: true }); writeFileSync( - join(tempProject, ".gsd", "PREFERENCES.md"), + join(tempProject, ".sf", "PREFERENCES.md"), [ "---", "models:", diff --git a/src/resources/extensions/sf/tests/auto-paused-session-validation.test.ts b/src/resources/extensions/sf/tests/auto-paused-session-validation.test.ts index 83203f246..31f420abe 100644 --- a/src/resources/extensions/sf/tests/auto-paused-session-validation.test.ts +++ b/src/resources/extensions/sf/tests/auto-paused-session-validation.test.ts @@ -53,7 +53,7 @@ function cleanup(base: string): void { test("resolveMilestonePath returns null for missing milestone", (t) => { const base = makeTmpBase(); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); t.after(() => cleanup(base)); const result = resolveMilestonePath(base, "M999"); @@ -62,7 +62,7 @@ test("resolveMilestonePath returns null for missing milestone", (t) => { test("resolveMilestonePath returns path for existing milestone", (t) => { const base = makeTmpBase(); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); t.after(() => cleanup(base)); const result = resolveMilestonePath(base, "M001"); @@ -72,7 +72,7 @@ test("resolveMilestonePath returns path for existing milestone", (t) => { 
test("resolveMilestoneFile returns null when no SUMMARY exists", (t) => { const base = makeTmpBase(); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); t.after(() => cleanup(base)); const result = resolveMilestoneFile(base, "M001", "SUMMARY"); @@ -81,7 +81,7 @@ test("resolveMilestoneFile returns null when no SUMMARY exists", (t) => { test("resolveMilestoneFile returns path when SUMMARY exists (completed)", (t) => { const base = makeTmpBase(); - const mDir = join(base, ".gsd", "milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); mkdirSync(mDir, { recursive: true }); writeFileSync(join(mDir, "M001-SUMMARY.md"), "# Summary\nDone."); t.after(() => cleanup(base)); @@ -95,7 +95,7 @@ test("resolveMilestoneFile returns path when SUMMARY exists (completed)", (t) => test("stale milestone: missing dir means paused session should be discarded", (t) => { const base = makeTmpBase(); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); t.after(() => cleanup(base)); const mDir = resolveMilestonePath(base, "M999"); @@ -106,7 +106,7 @@ test("stale milestone: missing dir means paused session should be discarded", (t test("stale milestone: completed (has SUMMARY) means paused session should be discarded", (t) => { const base = makeTmpBase(); - const mDir = join(base, ".gsd", "milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); mkdirSync(mDir, { recursive: true }); writeFileSync(join(mDir, "M001-SUMMARY.md"), "# Summary\nDone."); t.after(() => cleanup(base)); @@ -119,7 +119,7 @@ test("stale milestone: completed (has SUMMARY) means paused session should be di test("valid milestone: exists and has no SUMMARY means paused session is valid", (t) => { const base = makeTmpBase(); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + 
mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); t.after(() => cleanup(base)); const dir = resolveMilestonePath(base, "M001"); diff --git a/src/resources/extensions/sf/tests/auto-recovery.test.ts b/src/resources/extensions/sf/tests/auto-recovery.test.ts index a741683c5..dc017f270 100644 --- a/src/resources/extensions/sf/tests/auto-recovery.test.ts +++ b/src/resources/extensions/sf/tests/auto-recovery.test.ts @@ -16,8 +16,8 @@ const tmpDirs: string[] = []; function makeTmpBase(): string { const base = join(tmpdir(), `sf-test-${randomUUID()}`); - // Create .gsd/milestones/M001/slices/S01/tasks/ structure - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); + // Create .sf/milestones/M001/slices/S01/tasks/ structure + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); return base; } @@ -27,8 +27,8 @@ function cleanup(base: string): void { function makeTmpProject(): string { const dir = mkdtempSync(join(tmpdir(), "auto-recovery-")); - mkdirSync(join(dir, ".gsd"), { recursive: true }); - openDatabase(join(dir, ".gsd", "sf.db")); + mkdirSync(join(dir, ".sf"), { recursive: true }); + openDatabase(join(dir, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Test Milestone", status: "active" }); insertSlice({ milestoneId: "M001", @@ -241,11 +241,11 @@ test("verifyExpectedArtifact detects roadmap [x] change despite parse cache", () assert.equal(sliceBefore!.done, false); // Now write the post-edit roadmap to disk and create required artifacts - const roadmapPath = join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"); + const roadmapPath = join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"); writeFileSync(roadmapPath, roadmapAfter); - const summaryPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"); + const summaryPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"); 
writeFileSync(summaryPath, "# Summary\nDone."); - const uatPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-UAT.md"); + const uatPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-UAT.md"); writeFileSync(uatPath, "# UAT\nPassed."); // verifyExpectedArtifact should see the [x] despite the parse cache @@ -263,7 +263,7 @@ test("verifyExpectedArtifact detects roadmap [x] change despite parse cache", () test("verifyExpectedArtifact rejects plan-slice with empty scaffold", () => { const base = makeTmpBase(); try { - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); mkdirSync(sliceDir, { recursive: true }); writeFileSync(join(sliceDir, "S01-PLAN.md"), "# S01: Test Slice\n\n## Tasks\n\n"); assert.strictEqual( @@ -279,7 +279,7 @@ test("verifyExpectedArtifact rejects plan-slice with empty scaffold", () => { test("verifyExpectedArtifact accepts plan-slice with actual tasks", () => { const base = makeTmpBase(); try { - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(sliceDir, "S01-PLAN.md"), [ @@ -305,7 +305,7 @@ test("verifyExpectedArtifact accepts plan-slice with actual tasks", () => { test("verifyExpectedArtifact accepts plan-slice with completed tasks", () => { const base = makeTmpBase(); try { - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(sliceDir, "S01-PLAN.md"), [ @@ -331,7 +331,7 @@ test("verifyExpectedArtifact accepts plan-slice with completed tasks", () => { test("verifyExpectedArtifact treats complete-slice as 
satisfied when summary, UAT, and roadmap checkbox exist", () => { const base = makeTmpBase(); try { - const milestoneDir = join(base, ".gsd", "milestones", "M001"); + const milestoneDir = join(base, ".sf", "milestones", "M001"); const sliceDir = join(milestoneDir, "slices", "S01"); mkdirSync(sliceDir, { recursive: true }); writeFileSync(join(milestoneDir, "M001-ROADMAP.md"), [ @@ -363,7 +363,7 @@ test("verifyExpectedArtifact treats complete-slice as satisfied when summary, UA test("verifyExpectedArtifact rejects complete-slice when roadmap checkbox is still unchecked", () => { const base = makeTmpBase(); try { - const milestoneDir = join(base, ".gsd", "milestones", "M001"); + const milestoneDir = join(base, ".sf", "milestones", "M001"); const sliceDir = join(milestoneDir, "slices", "S01"); mkdirSync(sliceDir, { recursive: true }); writeFileSync(join(milestoneDir, "M001-ROADMAP.md"), [ @@ -398,8 +398,8 @@ test("verifyExpectedArtifact rejects complete-slice when roadmap checkbox is sti test("verifyExpectedArtifact plan-slice passes when all task plan files exist", () => { const base = makeTmpBase(); try { - const tasksDir = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); - const planPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); + const tasksDir = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"); + const planPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); const planContent = [ "# S01: Test Slice", "", @@ -422,8 +422,8 @@ test("verifyExpectedArtifact plan-slice passes when all task plan files exist", test("verifyExpectedArtifact plan-slice fails when a task plan file is missing (#739)", () => { const base = makeTmpBase(); try { - const tasksDir = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); - const planPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); + const tasksDir = join(base, ".sf", "milestones", "M001", 
"slices", "S01", "tasks"); + const planPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); const planContent = [ "# S01: Test Slice", "", @@ -446,7 +446,7 @@ test("verifyExpectedArtifact plan-slice fails when a task plan file is missing ( test("verifyExpectedArtifact plan-slice fails for plan with no tasks (#699)", () => { const base = makeTmpBase(); try { - const planPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); + const planPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); const planContent = [ "# S01: Test Slice", "", @@ -468,7 +468,7 @@ test("verifyExpectedArtifact plan-slice fails for plan with no tasks (#699)", () test("verifyExpectedArtifact accepts plan-slice with heading-style tasks (### T01 --)", () => { const base = makeTmpBase(); try { - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(sliceDir, "S01-PLAN.md"), [ @@ -499,7 +499,7 @@ test("verifyExpectedArtifact accepts plan-slice with heading-style tasks (### T0 test("verifyExpectedArtifact accepts plan-slice with colon-style heading tasks (### T01:)", () => { const base = makeTmpBase(); try { - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(sliceDir, "S01-PLAN.md"), [ @@ -525,7 +525,7 @@ test("verifyExpectedArtifact accepts plan-slice with colon-style heading tasks ( test("verifyExpectedArtifact execute-task requires checked checkbox or DB status for heading-style plan entry (#1691, #3607)", () => { const base = makeTmpBase(); try { - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", 
"S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(sliceDir, "S01-PLAN.md"), [ @@ -560,13 +560,13 @@ test("#793: invalidateAllCaches clears all caches so deriveState sees fresh disk try { const mid = "M001"; const sid = "S01"; - const planDir = join(base, ".gsd", "milestones", mid, "slices", sid); + const planDir = join(base, ".sf", "milestones", mid, "slices", sid); const tasksDir = join(planDir, "tasks"); mkdirSync(tasksDir, { recursive: true }); - mkdirSync(join(base, ".gsd", "milestones", mid), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", mid), { recursive: true }); writeFileSync( - join(base, ".gsd", "milestones", mid, `${mid}-ROADMAP.md`), + join(base, ".sf", "milestones", mid, `${mid}-ROADMAP.md`), `# M001: Test Milestone\n\n**Vision:** test.\n\n## Slices\n\n- [ ] **${sid}: Slice One** \`risk:low\` \`depends:[]\`\n > After this: done.\n`, ); const planUnchecked = `# ${sid}: Slice One\n\n**Goal:** test.\n\n## Tasks\n\n- [ ] **T01: Task One** \`est:10m\`\n- [ ] **T02: Task Two** \`est:10m\`\n`; @@ -619,19 +619,19 @@ function makeGitBase(): string { return base; } -test("hasImplementationArtifacts returns false when only .gsd/ files committed (#1703)", () => { +test("hasImplementationArtifacts returns false when only .sf/ files committed (#1703)", () => { const base = makeGitBase(); try { - // Create a feature branch and commit only .gsd/ files + // Create a feature branch and commit only .sf/ files execFileSync("git", ["checkout", "-b", "feat/test-milestone"], { cwd: base, stdio: "ignore" }); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); - writeFileSync(join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "# Roadmap"); - writeFileSync(join(base, ".gsd", "milestones", "M001", "M001-SUMMARY.md"), "# Summary"); + mkdirSync(join(base, ".sf", "milestones", "M001"), { 
recursive: true }); + writeFileSync(join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "# Roadmap"); + writeFileSync(join(base, ".sf", "milestones", "M001", "M001-SUMMARY.md"), "# Summary"); execFileSync("git", ["add", "."], { cwd: base, stdio: "ignore" }); execFileSync("git", ["commit", "-m", "chore: add plan files"], { cwd: base, stdio: "ignore" }); const result = hasImplementationArtifacts(base); - assert.equal(result, "absent", "should return absent when only .gsd/ files were committed"); + assert.equal(result, "absent", "should return absent when only .sf/ files were committed"); } finally { cleanup(base); } @@ -640,10 +640,10 @@ test("hasImplementationArtifacts returns false when only .gsd/ files committed ( test("hasImplementationArtifacts returns true when implementation files committed (#1703)", () => { const base = makeGitBase(); try { - // Create a feature branch with both .gsd/ and implementation files + // Create a feature branch with both .sf/ and implementation files execFileSync("git", ["checkout", "-b", "feat/test-impl"], { cwd: base, stdio: "ignore" }); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); - writeFileSync(join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "# Roadmap"); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); + writeFileSync(join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "# Roadmap"); mkdirSync(join(base, "src"), { recursive: true }); writeFileSync(join(base, "src", "feature.ts"), "export function feature() {}"); execFileSync("git", ["add", "."], { cwd: base, stdio: "ignore" }); @@ -669,18 +669,18 @@ test("hasImplementationArtifacts returns true on non-git directory (fail-open)", // ─── verifyExpectedArtifact: complete-milestone requires impl artifacts (#1703) ── -test("verifyExpectedArtifact complete-milestone fails with only .gsd/ files (#1703)", () => { +test("verifyExpectedArtifact complete-milestone fails with only .sf/ files (#1703)", () 
=> { const base = makeGitBase(); try { - // Create feature branch with only .gsd/ files + // Create feature branch with only .sf/ files execFileSync("git", ["checkout", "-b", "feat/ms-only-sf"], { cwd: base, stdio: "ignore" }); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); - writeFileSync(join(base, ".gsd", "milestones", "M001", "M001-SUMMARY.md"), "# Milestone Summary\nDone."); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); + writeFileSync(join(base, ".sf", "milestones", "M001", "M001-SUMMARY.md"), "# Milestone Summary\nDone."); execFileSync("git", ["add", "."], { cwd: base, stdio: "ignore" }); execFileSync("git", ["commit", "-m", "chore: milestone plan files"], { cwd: base, stdio: "ignore" }); const result = verifyExpectedArtifact("complete-milestone", "M001", base); - assert.equal(result, false, "complete-milestone should fail verification when only .gsd/ files present"); + assert.equal(result, false, "complete-milestone should fail verification when only .sf/ files present"); } finally { cleanup(base); } @@ -691,8 +691,8 @@ test("verifyExpectedArtifact complete-milestone passes with impl files (#1703)", try { // Create feature branch with implementation files AND milestone summary execFileSync("git", ["checkout", "-b", "feat/ms-with-impl"], { cwd: base, stdio: "ignore" }); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); - writeFileSync(join(base, ".gsd", "milestones", "M001", "M001-SUMMARY.md"), "# Milestone Summary\nDone."); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); + writeFileSync(join(base, ".sf", "milestones", "M001", "M001-SUMMARY.md"), "# Milestone Summary\nDone."); mkdirSync(join(base, "src"), { recursive: true }); writeFileSync(join(base, "src", "app.ts"), "console.log('hello');"); execFileSync("git", ["add", "."], { cwd: base, stdio: "ignore" }); diff --git a/src/resources/extensions/sf/tests/auto-stale-lock-self-kill.test.ts 
b/src/resources/extensions/sf/tests/auto-stale-lock-self-kill.test.ts index 6c2bd67ab..858b72313 100644 --- a/src/resources/extensions/sf/tests/auto-stale-lock-self-kill.test.ts +++ b/src/resources/extensions/sf/tests/auto-stale-lock-self-kill.test.ts @@ -9,7 +9,7 @@ import { checkRemoteAutoSession, stopAutoRemote } from "../auto.ts"; function makeTmpProject(): string { const dir = mkdtempSync(join(tmpdir(), "sf-stale-lock-test-")); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); return dir; } @@ -44,7 +44,7 @@ test("#2730: checkRemoteAutoSession still detects a genuine remote session (diff unitId: "M001/S01/T02", unitStartedAt: new Date().toISOString(), }; - writeFileSync(join(dir, ".gsd", "auto.lock"), JSON.stringify(lockData, null, 2)); + writeFileSync(join(dir, ".sf", "auto.lock"), JSON.stringify(lockData, null, 2)); const result = checkRemoteAutoSession(dir); assert.equal(result.running, true, "different live PID should be detected as running"); @@ -64,7 +64,7 @@ test("#2730: stopAutoRemote does not send SIGTERM when lock PID matches current assert.equal(result.found, false, "own PID must not be signalled"); // The lock should be cleared as part of the self-detection cleanup - assert.ok(!existsSync(join(dir, ".gsd", "auto.lock")), "stale self-lock should be cleared"); + assert.ok(!existsSync(join(dir, ".sf", "auto.lock")), "stale self-lock should be cleared"); }); test("#2730: stopAutoRemote clears stale lock from dead remote process without error", (t) => { @@ -79,9 +79,9 @@ test("#2730: stopAutoRemote clears stale lock from dead remote process without e unitId: "M001/S02", unitStartedAt: "2026-03-01T00:05:00Z", }; - writeFileSync(join(dir, ".gsd", "auto.lock"), JSON.stringify(lockData, null, 2)); + writeFileSync(join(dir, ".sf", "auto.lock"), JSON.stringify(lockData, null, 2)); const result = stopAutoRemote(dir); assert.equal(result.found, false, "dead remote PID should not be reported as found"); - 
assert.ok(!existsSync(join(dir, ".gsd", "auto.lock")), "stale lock should be cleaned up"); + assert.ok(!existsSync(join(dir, ".sf", "auto.lock")), "stale lock should be cleaned up"); }); diff --git a/src/resources/extensions/sf/tests/auto-start-model-capture.test.ts b/src/resources/extensions/sf/tests/auto-start-model-capture.test.ts index 21ffda15f..9a1360dbd 100644 --- a/src/resources/extensions/sf/tests/auto-start-model-capture.test.ts +++ b/src/resources/extensions/sf/tests/auto-start-model-capture.test.ts @@ -64,7 +64,7 @@ test("bootstrapAutoSession checks manual session override before preferences", ( test("bootstrapAutoSession prefers session model over PREFERENCES.md when provider is custom (#4122)", () => { // Custom providers (Ollama, vLLM, OpenAI-compatible proxies) live in - // ~/.gsd/agent/models.json, not PREFERENCES.md. When the user picks one + // ~/.sf/agent/models.json, not PREFERENCES.md. When the user picks one // via /sf model, that selection must win over any preferredModel from // PREFERENCES.md, otherwise auto-mode tries to start a built-in provider // the user is not logged into and pauses with "Not logged in". 
diff --git a/src/resources/extensions/sf/tests/auto-start-needs-discussion.test.ts b/src/resources/extensions/sf/tests/auto-start-needs-discussion.test.ts index 94c3004a9..4bac55746 100644 --- a/src/resources/extensions/sf/tests/auto-start-needs-discussion.test.ts +++ b/src/resources/extensions/sf/tests/auto-start-needs-discussion.test.ts @@ -37,7 +37,7 @@ import { invalidateAllCaches } from "../cache.ts"; function createBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-needs-discussion-")); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); return base; } @@ -46,19 +46,19 @@ function cleanup(base: string): void { } function writeContextDraft(base: string, mid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-CONTEXT-DRAFT.md`), content); } function writeContext(base: string, mid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-CONTEXT.md`), content); } function writeRoadmap(base: string, mid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-ROADMAP.md`), content); } diff --git a/src/resources/extensions/sf/tests/auto-start-worktree-db-path.test.ts b/src/resources/extensions/sf/tests/auto-start-worktree-db-path.test.ts index fb41974ea..1940afd11 100644 --- a/src/resources/extensions/sf/tests/auto-start-worktree-db-path.test.ts +++ b/src/resources/extensions/sf/tests/auto-start-worktree-db-path.test.ts @@ -21,7 +21,7 @@ assertTrue( ); assertTrue( - !dbLifecycleRegion.includes('join(s.basePath, ".gsd", 
"sf.db")'), + !dbLifecycleRegion.includes('join(s.basePath, ".sf", "sf.db")'), "DB lifecycle no longer derives sf.db directly from the worktree path (#3822)", ); diff --git a/src/resources/extensions/sf/tests/auto-supervisor.test.mjs b/src/resources/extensions/sf/tests/auto-supervisor.test.mjs index 51785fc7d..9a773dcf9 100644 --- a/src/resources/extensions/sf/tests/auto-supervisor.test.mjs +++ b/src/resources/extensions/sf/tests/auto-supervisor.test.mjs @@ -46,7 +46,7 @@ test('writeUnitRuntimeRecord keeps explicit recovery attempt fields', () => { lastProgressKind: 'recovery-retry', }); - const runtime = JSON.parse(readFileSync(join(base, '.gsd/runtime/units/research-milestone-M011.json'), 'utf8')); + const runtime = JSON.parse(readFileSync(join(base, '.sf/runtime/units/research-milestone-M011.json'), 'utf8')); assert.equal(runtime.recoveryAttempts, 2); assert.equal(runtime.lastRecoveryReason, 'idle'); assert.equal(runtime.lastProgressKind, 'recovery-retry'); diff --git a/src/resources/extensions/sf/tests/auto-worktree-auto-resolve.test.ts b/src/resources/extensions/sf/tests/auto-worktree-auto-resolve.test.ts index 88df92cd5..d10253e1f 100644 --- a/src/resources/extensions/sf/tests/auto-worktree-auto-resolve.test.ts +++ b/src/resources/extensions/sf/tests/auto-worktree-auto-resolve.test.ts @@ -1,7 +1,7 @@ /** * auto-worktree-auto-resolve.test.ts — Unit tests for isSafeToAutoResolve. * - * Covers: .gsd/ state files, build artifacts (.tsbuildinfo, .pyc, __pycache__, + * Covers: .sf/ state files, build artifacts (.tsbuildinfo, .pyc, __pycache__, * .DS_Store, .map), and rejection of real source files. 
*/ @@ -14,11 +14,11 @@ import { } from "../auto-worktree.ts"; describe("isSafeToAutoResolve", () => { - // ─── .gsd/ state files ─────────────────────────────────────────────────── - test("returns true for .gsd/ prefixed paths", () => { - assert.ok(isSafeToAutoResolve(".gsd/STATE.md")); - assert.ok(isSafeToAutoResolve(".gsd/milestones/M001/CONTEXT.md")); - assert.ok(isSafeToAutoResolve(".gsd/sf.db")); + // ─── .sf/ state files ─────────────────────────────────────────────────── + test("returns true for .sf/ prefixed paths", () => { + assert.ok(isSafeToAutoResolve(".sf/STATE.md")); + assert.ok(isSafeToAutoResolve(".sf/milestones/M001/CONTEXT.md")); + assert.ok(isSafeToAutoResolve(".sf/sf.db")); }); // ─── Build artifact patterns ───────────────────────────────────────────── diff --git a/src/resources/extensions/sf/tests/block-db-writes.test.ts b/src/resources/extensions/sf/tests/block-db-writes.test.ts index 44a0a4188..073d29719 100644 --- a/src/resources/extensions/sf/tests/block-db-writes.test.ts +++ b/src/resources/extensions/sf/tests/block-db-writes.test.ts @@ -11,53 +11,53 @@ import assert from 'node:assert/strict'; import { isBlockedStateFile, isBashWriteToStateFile } from '../write-intercept.ts'; describe('isBlockedStateFile blocks sf.db paths (#3674)', () => { - test('blocks .gsd/sf.db', () => { - assert.ok(isBlockedStateFile('/project/.gsd/sf.db')); + test('blocks .sf/sf.db', () => { + assert.ok(isBlockedStateFile('/project/.sf/sf.db')); }); - test('blocks .gsd/sf.db-wal', () => { - assert.ok(isBlockedStateFile('/project/.gsd/sf.db-wal')); + test('blocks .sf/sf.db-wal', () => { + assert.ok(isBlockedStateFile('/project/.sf/sf.db-wal')); }); - test('blocks .gsd/sf.db-shm', () => { - assert.ok(isBlockedStateFile('/project/.gsd/sf.db-shm')); + test('blocks .sf/sf.db-shm', () => { + assert.ok(isBlockedStateFile('/project/.sf/sf.db-shm')); }); - test('blocks resolved symlink path under .gsd/projects/', () => { - 
assert.ok(isBlockedStateFile('/home/user/.gsd/projects/myproj/sf.db')); + test('blocks resolved symlink path under .sf/projects/', () => { + assert.ok(isBlockedStateFile('/home/user/.sf/projects/myproj/sf.db')); }); test('still blocks STATE.md', () => { - assert.ok(isBlockedStateFile('/project/.gsd/STATE.md')); + assert.ok(isBlockedStateFile('/project/.sf/STATE.md')); }); - test('does not block other .gsd files', () => { - assert.ok(!isBlockedStateFile('/project/.gsd/DECISIONS.md')); + test('does not block other .sf files', () => { + assert.ok(!isBlockedStateFile('/project/.sf/DECISIONS.md')); }); }); describe('isBashWriteToStateFile blocks DB shell commands (#3674)', () => { test('blocks sqlite3 targeting sf.db', () => { - assert.ok(isBashWriteToStateFile('sqlite3 .gsd/sf.db "INSERT INTO ..."')); + assert.ok(isBashWriteToStateFile('sqlite3 .sf/sf.db "INSERT INTO ..."')); }); test('blocks better-sqlite3 targeting sf.db', () => { - assert.ok(isBashWriteToStateFile('node -e "require(\'better-sqlite3\')(\'.gsd/sf.db\')"')); + assert.ok(isBashWriteToStateFile('node -e "require(\'better-sqlite3\')(\'.sf/sf.db\')"')); }); test('blocks shell redirect to sf.db', () => { - assert.ok(isBashWriteToStateFile('echo data > .gsd/sf.db')); + assert.ok(isBashWriteToStateFile('echo data > .sf/sf.db')); }); test('blocks cp to sf.db', () => { - assert.ok(isBashWriteToStateFile('cp backup.db .gsd/sf.db')); + assert.ok(isBashWriteToStateFile('cp backup.db .sf/sf.db')); }); test('blocks mv to sf.db', () => { - assert.ok(isBashWriteToStateFile('mv temp.db .gsd/sf.db')); + assert.ok(isBashWriteToStateFile('mv temp.db .sf/sf.db')); }); test('does not block reading sf.db with cat', () => { - assert.ok(!isBashWriteToStateFile('cat .gsd/sf.db')); + assert.ok(!isBashWriteToStateFile('cat .sf/sf.db')); }); }); diff --git a/src/resources/extensions/sf/tests/cache-staleness-regression.test.ts b/src/resources/extensions/sf/tests/cache-staleness-regression.test.ts index 3001f2df8..bb4668fa1 100644 
--- a/src/resources/extensions/sf/tests/cache-staleness-regression.test.ts +++ b/src/resources/extensions/sf/tests/cache-staleness-regression.test.ts @@ -23,7 +23,7 @@ import { invalidateAllCaches } from '../cache.ts'; function createBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-cache-stale-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } @@ -32,13 +32,13 @@ function cleanup(base: string): void { } function writeMilestoneFile(base: string, mid: string, suffix: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-${suffix}.md`), content); } function writeSliceFile(base: string, mid: string, sid: string, suffix: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid, 'slices', sid); + const dir = join(base, '.sf', 'milestones', mid, 'slices', sid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${sid}-${suffix}.md`), content); } @@ -88,7 +88,7 @@ describe("cache-staleness-regression", () => { const base = createBase(); try { // Create a milestone in needs-discussion phase (CONTEXT-DRAFT, no CONTEXT) - const mDir = join(base, '.gsd', 'milestones', 'M001'); + const mDir = join(base, '.sf', 'milestones', 'M001'); mkdirSync(mDir, { recursive: true }); writeFileSync(join(mDir, 'M001-CONTEXT-DRAFT.md'), '# Draft\n\nSome ideas.\n'); @@ -174,7 +174,7 @@ describe("cache-staleness-regression", () => { '- [ ] **T02: Second Task** `est:1h`', ].join('\n')); // Write task plan files - const tasksDir = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'tasks'); + const tasksDir = join(base, '.sf', 'milestones', 'M001', 'slices', 'S01', 'tasks'); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(tasksDir, 'T01-PLAN.md'), '# T01\nDo thing.'); 
writeFileSync(join(tasksDir, 'T02-PLAN.md'), '# T02\nDo other thing.'); @@ -224,7 +224,7 @@ describe("cache-staleness-regression", () => { '', '- [ ] **T01: Task** `est:1h`', ].join('\n')); - const tasksDir = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'tasks'); + const tasksDir = join(base, '.sf', 'milestones', 'M001', 'slices', 'S01', 'tasks'); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(tasksDir, 'T01-PLAN.md'), '# T01\nDo it.'); diff --git a/src/resources/extensions/sf/tests/captures.test.ts b/src/resources/extensions/sf/tests/captures.test.ts index a9f4a8817..6a03aee8d 100644 --- a/src/resources/extensions/sf/tests/captures.test.ts +++ b/src/resources/extensions/sf/tests/captures.test.ts @@ -46,10 +46,10 @@ test("captures: appendCapture creates CAPTURES.md on first call", (t) => { const id = appendCapture(tmp, "first thought"); assert.ok(id.startsWith("CAP-"), "ID should start with CAP-"); assert.ok( - existsSync(join(tmp, ".gsd", "CAPTURES.md")), + existsSync(join(tmp, ".sf", "CAPTURES.md")), "CAPTURES.md should exist", ); - const content = readFileSync(join(tmp, ".gsd", "CAPTURES.md"), "utf-8"); + const content = readFileSync(join(tmp, ".sf", "CAPTURES.md"), "utf-8"); assert.ok(content.includes("# Captures"), "should have header"); assert.ok(content.includes(`### ${id}`), "should have entry heading"); assert.ok( @@ -70,7 +70,7 @@ test("captures: appendCapture appends to existing file", (t) => { const id2 = appendCapture(tmp, "thought two"); assert.notStrictEqual(id1, id2, "IDs should be unique"); - const content = readFileSync(join(tmp, ".gsd", "CAPTURES.md"), "utf-8"); + const content = readFileSync(join(tmp, ".sf", "CAPTURES.md"), "utf-8"); assert.ok(content.includes(`### ${id1}`), "should have first entry"); assert.ok(content.includes(`### ${id2}`), "should have second entry"); assert.ok( @@ -193,20 +193,20 @@ test("captures: markCaptureResolved updates entry in place", (t) => { // ─── resolveCapturesPath 
────────────────────────────────────────────────────── -test("captures: resolveCapturesPath returns .gsd/CAPTURES.md for normal path", () => { +test("captures: resolveCapturesPath returns .sf/CAPTURES.md for normal path", () => { const base = join(tmpdir(), "cap-test-project"); const result = resolveCapturesPath(base); - assert.ok(result.endsWith(join(".gsd", "CAPTURES.md"))); + assert.ok(result.endsWith(join(".sf", "CAPTURES.md"))); assert.ok(result.startsWith(base)); }); test("captures: resolveCapturesPath resolves worktree path to project root", () => { const base = join(tmpdir(), "cap-test-project"); - const worktreePath = join(base, ".gsd", "worktrees", "M004"); + const worktreePath = join(base, ".sf", "worktrees", "M004"); const result = resolveCapturesPath(worktreePath); assert.ok( - result.endsWith(join(".gsd", "CAPTURES.md")), - `should end with .gsd/CAPTURES.md, got: ${result}`, + result.endsWith(join(".sf", "CAPTURES.md")), + `should end with .sf/CAPTURES.md, got: ${result}`, ); // Should resolve to project root, not worktree root assert.ok( diff --git a/src/resources/extensions/sf/tests/claude-import-tui.test.ts b/src/resources/extensions/sf/tests/claude-import-tui.test.ts index 7f7e0a602..e5d465196 100644 --- a/src/resources/extensions/sf/tests/claude-import-tui.test.ts +++ b/src/resources/extensions/sf/tests/claude-import-tui.test.ts @@ -306,7 +306,7 @@ describe( }); it('should not persist marketplace agent directories into package sources', async (t) => { - const isolatedAgentDir = join(tempDir, '.gsd', 'agent'); + const isolatedAgentDir = join(tempDir, '.sf', 'agent'); const settingsPath = join(isolatedAgentDir, 'settings.json'); rmSync(isolatedAgentDir, { recursive: true, force: true }); process.env.SF_CODING_AGENT_DIR = isolatedAgentDir; diff --git a/src/resources/extensions/sf/tests/cmux.test.ts b/src/resources/extensions/sf/tests/cmux.test.ts index d62807ccc..101cb6c41 100644 --- a/src/resources/extensions/sf/tests/cmux.test.ts +++ 
b/src/resources/extensions/sf/tests/cmux.test.ts @@ -88,7 +88,7 @@ describe("autoEnableCmuxPreferences", () => { beforeEach(() => { originalCwd = process.cwd(); tmp = fs.mkdtempSync(path.join(tmpdir(), "cmux-auto-test-")); - fs.mkdirSync(path.join(tmp, ".gsd"), { recursive: true }); + fs.mkdirSync(path.join(tmp, ".sf"), { recursive: true }); process.chdir(tmp); }); @@ -98,7 +98,7 @@ describe("autoEnableCmuxPreferences", () => { }); test("writes cmux.enabled true when preferences file exists with no cmux config", () => { - const prefsPath = path.join(tmp, ".gsd", "preferences.md"); + const prefsPath = path.join(tmp, ".sf", "preferences.md"); fs.writeFileSync(prefsPath, [ "---", "version: 1", @@ -123,7 +123,7 @@ describe("autoEnableCmuxPreferences", () => { }); test("preserves existing cmux sub-preferences when auto-enabling", () => { - const prefsPath = path.join(tmp, ".gsd", "preferences.md"); + const prefsPath = path.join(tmp, ".sf", "preferences.md"); fs.writeFileSync(prefsPath, [ "---", "version: 1", diff --git a/src/resources/extensions/sf/tests/codebase-generator.test.ts b/src/resources/extensions/sf/tests/codebase-generator.test.ts index 83a755a5e..ec077f69e 100644 --- a/src/resources/extensions/sf/tests/codebase-generator.test.ts +++ b/src/resources/extensions/sf/tests/codebase-generator.test.ts @@ -21,7 +21,7 @@ import { function makeTmpRepo(): string { const base = join(tmpdir(), `sf-codebase-test-${randomUUID()}`); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); execSync("git init", { cwd: base, stdio: "ignore" }); return base; } @@ -126,11 +126,11 @@ test("generateCodebaseMap: generates from git ls-files", () => { } }); -test("generateCodebaseMap: excludes .gsd/ files", () => { +test("generateCodebaseMap: excludes .sf/ files", () => { const base = makeTmpRepo(); try { addFile(base, "src/main.ts"); - addFile(base, ".gsd/PROJECT.md"); + addFile(base, ".sf/PROJECT.md"); const result = 
generateCodebaseMap(base); assert.ok(result.content.includes("`src/main.ts`")); @@ -333,7 +333,7 @@ test("generateCodebaseMap: truncated=true when file count exceeds maxFiles", () test("generateCodebaseMap: returns empty map for non-git directory", () => { const base = join(tmpdir(), `sf-codebase-test-${randomUUID()}`); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); // No git init try { const result = generateCodebaseMap(base); @@ -488,10 +488,10 @@ test("readCodebaseMap: returns null when file missing", () => { } }); -test("writeCodebaseMap: creates .gsd/ directory if missing", () => { +test("writeCodebaseMap: creates .sf/ directory if missing", () => { const base = join(tmpdir(), `sf-codebase-test-${randomUUID()}`); mkdirSync(base, { recursive: true }); - // Intentionally do NOT pre-create .gsd/ + // Intentionally do NOT pre-create .sf/ try { const outPath = writeCodebaseMap(base, "# Codebase Map\n"); assert.ok(existsSync(outPath)); diff --git a/src/resources/extensions/sf/tests/collect-from-manifest.test.ts b/src/resources/extensions/sf/tests/collect-from-manifest.test.ts index 9ca2eecd9..6dbb42ce0 100644 --- a/src/resources/extensions/sf/tests/collect-from-manifest.test.ts +++ b/src/resources/extensions/sf/tests/collect-from-manifest.test.ts @@ -57,7 +57,7 @@ function makeManifest(entries: Partial<SecretsManifestEntry>[]): SecretsManifest async function writeManifestFile(dir: string, manifest: SecretsManifest): Promise<string> { const { formatSecretsManifest } = await loadFilesExports(); - const milestoneDir = join(dir, ".gsd", "milestones", "M001"); + const milestoneDir = join(dir, ".sf", "milestones", "M001"); mkdirSync(milestoneDir, { recursive: true }); const filePath = join(milestoneDir, "M001-SECRETS.md"); writeFileSync(filePath, formatSecretsManifest(manifest)); diff --git a/src/resources/extensions/sf/tests/commands-backlog.test.ts b/src/resources/extensions/sf/tests/commands-backlog.test.ts 
index f7183a130..3d85e78c3 100644 --- a/src/resources/extensions/sf/tests/commands-backlog.test.ts +++ b/src/resources/extensions/sf/tests/commands-backlog.test.ts @@ -9,7 +9,7 @@ import { randomUUID } from "node:crypto"; function makeTmpBase(): string { const base = join(tmpdir(), `sf-backlog-test-${randomUUID()}`); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); return base; } @@ -18,7 +18,7 @@ function cleanup(base: string): void { } function backlogPath(base: string): string { - return join(base, ".gsd", "BACKLOG.md"); + return join(base, ".sf", "BACKLOG.md"); } function writeBacklog(base: string, content: string): void { diff --git a/src/resources/extensions/sf/tests/commands-extract-learnings.test.ts b/src/resources/extensions/sf/tests/commands-extract-learnings.test.ts index 19315ec22..e9428f195 100644 --- a/src/resources/extensions/sf/tests/commands-extract-learnings.test.ts +++ b/src/resources/extensions/sf/tests/commands-extract-learnings.test.ts @@ -42,13 +42,13 @@ describe("parseExtractLearningsArgs", () => { describe("buildLearningsOutputPath", () => { it("builds the correct output path", () => { - const result = buildLearningsOutputPath("/base/.gsd/milestones/M001", "M001"); - assert.equal(result, "/base/.gsd/milestones/M001/M001-LEARNINGS.md"); + const result = buildLearningsOutputPath("/base/.sf/milestones/M001", "M001"); + assert.equal(result, "/base/.sf/milestones/M001/M001-LEARNINGS.md"); }); it("builds path for different milestone ID", () => { - const result = buildLearningsOutputPath("/project/.gsd/milestones/M005", "M005"); - assert.equal(result, "/project/.gsd/milestones/M005/M005-LEARNINGS.md"); + const result = buildLearningsOutputPath("/project/.sf/milestones/M005", "M005"); + assert.equal(result, "/project/.sf/milestones/M005/M005-LEARNINGS.md"); }); }); @@ -142,8 +142,8 @@ describe("buildExtractLearningsPrompt", () => { const result = buildExtractLearningsPrompt({ milestoneId: 
"M001", milestoneName: "Test Milestone", - outputPath: "/project/.gsd/milestones/M001/M001-LEARNINGS.md", - relativeOutputPath: ".gsd/milestones/M001/M001-LEARNINGS.md", + outputPath: "/project/.sf/milestones/M001/M001-LEARNINGS.md", + relativeOutputPath: ".sf/milestones/M001/M001-LEARNINGS.md", planContent: "# Plan content", summaryContent: "# Summary content", verificationContent: null, @@ -153,7 +153,7 @@ describe("buildExtractLearningsPrompt", () => { }); assert.ok(result.includes("M001")); - assert.ok(result.includes("/project/.gsd/milestones/M001/M001-LEARNINGS.md")); + assert.ok(result.includes("/project/.sf/milestones/M001/M001-LEARNINGS.md")); }); it("includes all 4 learning categories", () => { @@ -161,7 +161,7 @@ describe("buildExtractLearningsPrompt", () => { milestoneId: "M001", milestoneName: "Test Milestone", outputPath: "/out/M001-LEARNINGS.md", - relativeOutputPath: ".gsd/milestones/M001/M001-LEARNINGS.md", + relativeOutputPath: ".sf/milestones/M001/M001-LEARNINGS.md", planContent: "# Plan", summaryContent: "# Summary", verificationContent: null, @@ -181,7 +181,7 @@ describe("buildExtractLearningsPrompt", () => { milestoneId: "M001", milestoneName: "Test Milestone", outputPath: "/out/M001-LEARNINGS.md", - relativeOutputPath: ".gsd/milestones/M001/M001-LEARNINGS.md", + relativeOutputPath: ".sf/milestones/M001/M001-LEARNINGS.md", planContent: "PLAN_CONTENT_UNIQUE_123", summaryContent: "SUMMARY_CONTENT_UNIQUE_456", verificationContent: null, @@ -199,7 +199,7 @@ describe("buildExtractLearningsPrompt", () => { milestoneId: "M001", milestoneName: "Test Milestone", outputPath: "/out/M001-LEARNINGS.md", - relativeOutputPath: ".gsd/milestones/M001/M001-LEARNINGS.md", + relativeOutputPath: ".sf/milestones/M001/M001-LEARNINGS.md", planContent: "# Plan", summaryContent: "# Summary", verificationContent: "VERIFICATION_UNIQUE_789", @@ -217,7 +217,7 @@ describe("buildExtractLearningsPrompt", () => { milestoneId: "M001", milestoneName: "Test Milestone", 
outputPath: "/out/M001-LEARNINGS.md", - relativeOutputPath: ".gsd/milestones/M001/M001-LEARNINGS.md", + relativeOutputPath: ".sf/milestones/M001/M001-LEARNINGS.md", planContent: "# Plan", summaryContent: "# Summary", verificationContent: null, @@ -303,7 +303,7 @@ describe("extractProjectName", () => { beforeEach(() => { tmpBase = join(tmpdir(), `sf-projname-test-${randomUUID()}`); - mkdirSync(join(tmpBase, ".gsd"), { recursive: true }); + mkdirSync(join(tmpBase, ".sf"), { recursive: true }); }); afterEach(() => { @@ -312,7 +312,7 @@ describe("extractProjectName", () => { it("reads name from PROJECT.md frontmatter", () => { writeFileSync( - join(tmpBase, ".gsd", "PROJECT.md"), + join(tmpBase, ".sf", "PROJECT.md"), "---\nname: My Cool Project\nversion: 1\n---\n# Project\n", "utf-8", ); @@ -329,7 +329,7 @@ describe("extractProjectName", () => { it("falls back to directory name when PROJECT.md has no name field", () => { writeFileSync( - join(tmpBase, ".gsd", "PROJECT.md"), + join(tmpBase, ".sf", "PROJECT.md"), "---\nversion: 1\n---\n# Project\n", "utf-8", ); diff --git a/src/resources/extensions/sf/tests/commands-inspect-open-db.test.ts b/src/resources/extensions/sf/tests/commands-inspect-open-db.test.ts index c009847c6..08c51f0a9 100644 --- a/src/resources/extensions/sf/tests/commands-inspect-open-db.test.ts +++ b/src/resources/extensions/sf/tests/commands-inspect-open-db.test.ts @@ -19,7 +19,7 @@ test("/sf inspect opens existing database when it was not yet opened in session" fs.rmSync(tmp, { recursive: true, force: true }); }); - const sfDir = path.join(tmp, ".gsd"); + const sfDir = path.join(tmp, ".sf"); fs.mkdirSync(sfDir, { recursive: true }); const dbPath = path.join(sfDir, "sf.db"); diff --git a/src/resources/extensions/sf/tests/commands-logs.test.ts b/src/resources/extensions/sf/tests/commands-logs.test.ts index 8d06a6661..4c772a7f1 100644 --- a/src/resources/extensions/sf/tests/commands-logs.test.ts +++ 
b/src/resources/extensions/sf/tests/commands-logs.test.ts @@ -10,8 +10,8 @@ import { handleLogs } from "../commands-logs.ts"; function createTestDir(): string { const dir = mkdtempSync(join(tmpdir(), "sf-logs-test-")); - mkdirSync(join(dir, ".gsd", "activity"), { recursive: true }); - mkdirSync(join(dir, ".gsd", "debug"), { recursive: true }); + mkdirSync(join(dir, ".sf", "activity"), { recursive: true }); + mkdirSync(join(dir, ".sf", "debug"), { recursive: true }); return dir; } @@ -32,12 +32,12 @@ function writeActivityLog(dir: string, seq: number, unitType: string, unitId: st const safeId = unitId.replace(/\//g, "-"); const filename = `${String(seq).padStart(3, "0")}-${unitType}-${safeId}.jsonl`; const content = entries.map(e => JSON.stringify(e)).join("\n") + "\n"; - writeFileSync(join(dir, ".gsd", "activity", filename), content); + writeFileSync(join(dir, ".sf", "activity", filename), content); } function writeDebugLog(dir: string, name: string, entries: Record<string, unknown>[]): void { const content = entries.map(e => JSON.stringify(e)).join("\n") + "\n"; - writeFileSync(join(dir, ".gsd", "debug", name), content); + writeFileSync(join(dir, ".sf", "debug", name), content); } // ─── Tests ────────────────────────────────────────────────────────────────── @@ -216,7 +216,7 @@ test("logs clear removes old logs", async (t) => { // Create an old activity log (modify mtime to 10 days ago) writeActivityLog(dir, 1, "execute-task", "M001/S01/T01", [{ type: "toolCall" }]); - const oldFile = join(dir, ".gsd", "activity", "001-execute-task-M001-S01-T01.jsonl"); + const oldFile = join(dir, ".sf", "activity", "001-execute-task-M001-S01-T01.jsonl"); const oldTime = new Date(Date.now() - 10 * 24 * 60 * 60 * 1000); utimesSync(oldFile, oldTime, oldTime); @@ -235,7 +235,7 @@ test("logs clear removes old logs", async (t) => { // Old log should be removed, recent ones kept assert.ok(!existsSync(oldFile), "old log should be removed"); assert.ok( - existsSync(join(dir, ".gsd", 
"activity", "007-execute-task-M001-S01-T07.jsonl")), + existsSync(join(dir, ".sf", "activity", "007-execute-task-M001-S01-T07.jsonl")), "most recent log should be kept", ); }); diff --git a/src/resources/extensions/sf/tests/commands-pr-branch.test.ts b/src/resources/extensions/sf/tests/commands-pr-branch.test.ts index f388ba313..598382cc6 100644 --- a/src/resources/extensions/sf/tests/commands-pr-branch.test.ts +++ b/src/resources/extensions/sf/tests/commands-pr-branch.test.ts @@ -4,10 +4,10 @@ import assert from "node:assert/strict"; // Test the filtering logic used by /sf pr-branch. // Full integration requires git operations, so we test the path filtering. -test("pr-branch: identifies .gsd/ paths", () => { +test("pr-branch: identifies .sf/ paths", () => { const files = [ - ".gsd/milestones/M001/ROADMAP.md", - ".gsd/metrics.json", + ".sf/milestones/M001/ROADMAP.md", + ".sf/metrics.json", "src/main.ts", "package.json", ".planning/PLAN.md", @@ -15,21 +15,21 @@ test("pr-branch: identifies .gsd/ paths", () => { ]; const codeFiles = files.filter( - (f) => !f.startsWith(".gsd/") && !f.startsWith(".planning/") && f !== "PLAN.md", + (f) => !f.startsWith(".sf/") && !f.startsWith(".planning/") && f !== "PLAN.md", ); assert.deepEqual(codeFiles, ["src/main.ts", "package.json"]); }); -test("pr-branch: all .gsd/ files returns empty", () => { +test("pr-branch: all .sf/ files returns empty", () => { const files = [ - ".gsd/milestones/M001/ROADMAP.md", - ".gsd/metrics.json", - ".gsd/BACKLOG.md", + ".sf/milestones/M001/ROADMAP.md", + ".sf/metrics.json", + ".sf/BACKLOG.md", ]; const codeFiles = files.filter( - (f) => !f.startsWith(".gsd/") && !f.startsWith(".planning/") && f !== "PLAN.md", + (f) => !f.startsWith(".sf/") && !f.startsWith(".planning/") && f !== "PLAN.md", ); assert.equal(codeFiles.length, 0); @@ -37,13 +37,13 @@ test("pr-branch: all .gsd/ files returns empty", () => { test("pr-branch: mixed commits with code changes", () => { const files = [ - 
".gsd/milestones/M001/ROADMAP.md", + ".sf/milestones/M001/ROADMAP.md", "src/auth.ts", "src/auth.test.ts", ]; const hasCodeChanges = files.some( - (f) => !f.startsWith(".gsd/") && !f.startsWith(".planning/") && f !== "PLAN.md", + (f) => !f.startsWith(".sf/") && !f.startsWith(".planning/") && f !== "PLAN.md", ); assert.ok(hasCodeChanges); diff --git a/src/resources/extensions/sf/tests/commands-workflow-custom.test.ts b/src/resources/extensions/sf/tests/commands-workflow-custom.test.ts index 83604964f..2e6edb38e 100644 --- a/src/resources/extensions/sf/tests/commands-workflow-custom.test.ts +++ b/src/resources/extensions/sf/tests/commands-workflow-custom.test.ts @@ -74,7 +74,7 @@ function createMockPi() { /** Write a minimal valid workflow definition YAML to the expected location. */ function writeDefinition(basePath: string, name: string, content: string): void { - const defsDir = join(basePath, ".gsd", "workflow-defs"); + const defsDir = join(basePath, ".sf", "workflow-defs"); mkdirSync(defsDir, { recursive: true }); writeFileSync(join(defsDir, `${name}.yaml`), content, "utf-8"); } @@ -141,7 +141,7 @@ describe("workflow catalog registration", () => { writeDefinition(base, "deploy-pipeline", SIMPLE_DEF); writeDefinition(base, "test-suite", SIMPLE_DEF); - // Change cwd so the completion scanner can find `.gsd/workflow-defs/` + // Change cwd so the completion scanner can find `.sf/workflow-defs/` process.chdir(base); const completions = getGsdArgumentCompletions("workflow run "); diff --git a/src/resources/extensions/sf/tests/complete-milestone.test.ts b/src/resources/extensions/sf/tests/complete-milestone.test.ts index 0c5168e3b..dcc927d22 100644 --- a/src/resources/extensions/sf/tests/complete-milestone.test.ts +++ b/src/resources/extensions/sf/tests/complete-milestone.test.ts @@ -7,7 +7,7 @@ import { fileURLToPath } from "node:url"; import { invalidateAllCaches } from '../cache.ts'; import { parseUnitId } from "../unit-id.ts"; -// loadPrompt reads from 
~/.gsd/agent/extensions/sf/prompts/ (main checkout). +// loadPrompt reads from ~/.sf/agent/extensions/sf/prompts/ (main checkout). // In a worktree the file may not exist there yet, so we resolve prompts // relative to this test file's location (the worktree copy). const __dirname = dirname(fileURLToPath(import.meta.url)); @@ -30,24 +30,24 @@ function loadPromptFromWorktree(name: string, vars: Record<string, string> = {}) function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-complete-ms-test-")); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); return base; } function writeRoadmap(base: string, mid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-ROADMAP.md`), content); } function writeMilestoneSummary(base: string, mid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-SUMMARY.md`), content); } function writeMilestoneValidation(base: string, mid: string, verdict: string = "pass"): void { - const dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-VALIDATION.md`), `---\nverdict: ${verdict}\nremediation_round: 0\n---\n\n# Validation\nValidated.`); } @@ -70,7 +70,7 @@ describe("complete-milestone", () => { workingDirectory: "/tmp/test-project", milestoneId: "M001", milestoneTitle: "Test Milestone", - roadmapPath: ".gsd/milestones/M001/M001-ROADMAP.md", + roadmapPath: ".sf/milestones/M001/M001-ROADMAP.md", inlinedContext: "test context block", }); } catch (err) { @@ -87,13 +87,13 @@ describe("complete-milestone", () => { 
workingDirectory: "/tmp/test-project", milestoneId: "M001", milestoneTitle: "Integration Feature", - roadmapPath: ".gsd/milestones/M001/M001-ROADMAP.md", + roadmapPath: ".sf/milestones/M001/M001-ROADMAP.md", inlinedContext: "--- inlined slice summaries and context ---", }); assert.ok(prompt.includes("M001"), "prompt contains milestoneId 'M001'"); assert.ok(prompt.includes("Integration Feature"), "prompt contains milestoneTitle"); - assert.ok(prompt.includes(".gsd/milestones/M001/M001-ROADMAP.md"), "prompt contains roadmapPath"); + assert.ok(prompt.includes(".sf/milestones/M001/M001-ROADMAP.md"), "prompt contains roadmapPath"); assert.ok(prompt.includes("--- inlined slice summaries and context ---"), "prompt contains inlinedContext"); assert.ok(!prompt.includes("{{milestoneId}}"), "no un-substituted {{milestoneId}}"); assert.ok(!prompt.includes("{{milestoneTitle}}"), "no un-substituted {{milestoneTitle}}"); @@ -106,7 +106,7 @@ describe("complete-milestone", () => { workingDirectory: "/tmp/test-project", milestoneId: "M002", milestoneTitle: "Completion Workflow", - roadmapPath: ".gsd/milestones/M002/M002-ROADMAP.md", + roadmapPath: ".sf/milestones/M002/M002-ROADMAP.md", inlinedContext: "context", }); @@ -121,7 +121,7 @@ describe("complete-milestone", () => { workingDirectory: "/tmp/test-project", milestoneId: "M001", milestoneTitle: "Gate Test", - roadmapPath: ".gsd/milestones/M001/M001-ROADMAP.md", + roadmapPath: ".sf/milestones/M001/M001-ROADMAP.md", inlinedContext: "context", }); @@ -242,9 +242,9 @@ describe("complete-milestone", () => { workingDirectory: "/tmp/test-project", milestoneId: "M001", milestoneTitle: "Tool Guidance Test", - roadmapPath: ".gsd/milestones/M001/M001-ROADMAP.md", + roadmapPath: ".sf/milestones/M001/M001-ROADMAP.md", inlinedContext: "context", - milestoneSummaryPath: ".gsd/milestones/M001/M001-SUMMARY.md", + milestoneSummaryPath: ".sf/milestones/M001/M001-SUMMARY.md", skillActivation: "", }); @@ -259,7 +259,7 @@ 
describe("complete-milestone", () => { // The prompt must NOT leave tool choice ambiguous for PROJECT.md // Verify it mentions the required parameter (`content` or `path`) assert.ok( - prompt.includes("`.gsd/PROJECT.md`") || prompt.includes('".gsd/PROJECT.md"'), + prompt.includes("`.sf/PROJECT.md`") || prompt.includes('".sf/PROJECT.md"'), "step 11 must reference the PROJECT.md path explicitly", ); }); @@ -426,7 +426,7 @@ describe("complete-milestone", () => { // Verify isMilestoneComplete returns true const { loadFile } = await import("../files.ts"); - const roadmapPath = join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"); + const roadmapPath = join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"); const roadmapContent = await loadFile(roadmapPath); const roadmap = parseRoadmap(roadmapContent!); assert.ok(isMilestoneComplete(roadmap), "isMilestoneComplete returns true when all slices are [x]"); diff --git a/src/resources/extensions/sf/tests/complete-slice-gate-closure.test.ts b/src/resources/extensions/sf/tests/complete-slice-gate-closure.test.ts index 25915a140..301af19d1 100644 --- a/src/resources/extensions/sf/tests/complete-slice-gate-closure.test.ts +++ b/src/resources/extensions/sf/tests/complete-slice-gate-closure.test.ts @@ -67,11 +67,11 @@ describe("complete-slice closes complete-slice-owned gates", () => { basePath = fs.mkdtempSync(path.join(os.tmpdir(), "sf-slice-gate-handler-")); const sliceDir = path.join( - basePath, ".gsd", "milestones", "M001", "slices", "S01", "tasks", + basePath, ".sf", "milestones", "M001", "slices", "S01", "tasks", ); fs.mkdirSync(sliceDir, { recursive: true }); fs.writeFileSync( - path.join(basePath, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), + path.join(basePath, ".sf", "milestones", "M001", "M001-ROADMAP.md"), [ "# M001: Test Milestone", "", diff --git a/src/resources/extensions/sf/tests/complete-slice-string-coercion.test.ts b/src/resources/extensions/sf/tests/complete-slice-string-coercion.test.ts index 
028f73e15..fe396b85b 100644 --- a/src/resources/extensions/sf/tests/complete-slice-string-coercion.test.ts +++ b/src/resources/extensions/sf/tests/complete-slice-string-coercion.test.ts @@ -174,10 +174,10 @@ describe("handleCompleteSlice with coerced string arrays (#3565)", () => { openDatabase(dbPath); basePath = fs.mkdtempSync(path.join(os.tmpdir(), "sf-coerce-handler-")); - const sliceDir = path.join(basePath, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); + const sliceDir = path.join(basePath, ".sf", "milestones", "M001", "slices", "S01", "tasks"); fs.mkdirSync(sliceDir, { recursive: true }); - const roadmapPath = path.join(basePath, ".gsd", "milestones", "M001", "M001-ROADMAP.md"); + const roadmapPath = path.join(basePath, ".sf", "milestones", "M001", "M001-ROADMAP.md"); fs.writeFileSync( roadmapPath, [ diff --git a/src/resources/extensions/sf/tests/complete-slice.test.ts b/src/resources/extensions/sf/tests/complete-slice.test.ts index 0c58aca98..77044986d 100644 --- a/src/resources/extensions/sf/tests/complete-slice.test.ts +++ b/src/resources/extensions/sf/tests/complete-slice.test.ts @@ -50,15 +50,15 @@ function cleanupDir(dirPath: string): void { } /** - * Create a temp project directory with .gsd structure and roadmap for handler tests. + * Create a temp project directory with .sf structure and roadmap for handler tests. 
*/ function createTempProject(): { basePath: string; roadmapPath: string } { const basePath = fs.mkdtempSync(path.join(os.tmpdir(), 'sf-slice-handler-')); - const sliceDir = path.join(basePath, '.gsd', 'milestones', 'M001', 'slices', 'S01'); + const sliceDir = path.join(basePath, '.sf', 'milestones', 'M001', 'slices', 'S01'); const tasksDir = path.join(sliceDir, 'tasks'); fs.mkdirSync(tasksDir, { recursive: true }); - const roadmapPath = path.join(basePath, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md'); + const roadmapPath = path.join(basePath, '.sf', 'milestones', 'M001', 'M001-ROADMAP.md'); fs.writeFileSync(roadmapPath, `# M001: Test Milestone ## Slices @@ -384,7 +384,7 @@ console.log('\n=== complete-slice: handler with missing roadmap ==='); // Create a temp dir WITHOUT a roadmap file const basePath = fs.mkdtempSync(path.join(os.tmpdir(), 'sf-no-roadmap-')); - const sliceDir = path.join(basePath, '.gsd', 'milestones', 'M001', 'slices', 'S01'); + const sliceDir = path.join(basePath, '.sf', 'milestones', 'M001', 'slices', 'S01'); fs.mkdirSync(sliceDir, { recursive: true }); // Set up DB state diff --git a/src/resources/extensions/sf/tests/complete-task-rollback-evidence.test.ts b/src/resources/extensions/sf/tests/complete-task-rollback-evidence.test.ts index 9611d0914..226548bc7 100644 --- a/src/resources/extensions/sf/tests/complete-task-rollback-evidence.test.ts +++ b/src/resources/extensions/sf/tests/complete-task-rollback-evidence.test.ts @@ -19,7 +19,7 @@ import { clearParseCache } from "../files.js"; function makeTmpBase(): string { const base = join(tmpdir(), `sf-ct-rollback-${randomUUID()}`); // Create the full tasks directory so the success path works - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); return base; } @@ -55,13 +55,13 @@ describe("complete-task rollback cleans up verification_evidence 
(#2724)", () => it("inserts verification_evidence rows on success", async () => { base = makeTmpBase(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001" }); insertSlice({ id: "S01", milestoneId: "M001" }); // Write a minimal slice plan so renderPlanCheckboxes doesn't error writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), "# S01 Plan\n\n## Tasks\n\n- [ ] **T01: Test task**\n", ); @@ -77,12 +77,12 @@ describe("complete-task rollback cleans up verification_evidence (#2724)", () => it("deletes verification_evidence rows on disk-render rollback", async () => { base = makeTmpBase(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001" }); insertSlice({ id: "S01", milestoneId: "M001" }); // Replace the tasks directory with a file so disk write fails (cross-platform) - const tasksDir = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); + const tasksDir = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"); rmSync(tasksDir, { recursive: true, force: true }); writeFileSync(tasksDir, "not-a-directory"); diff --git a/src/resources/extensions/sf/tests/complete-task.test.ts b/src/resources/extensions/sf/tests/complete-task.test.ts index a6bc480d0..7e049e4ab 100644 --- a/src/resources/extensions/sf/tests/complete-task.test.ts +++ b/src/resources/extensions/sf/tests/complete-task.test.ts @@ -50,14 +50,14 @@ function cleanupDir(dirPath: string): void { } /** - * Create a temp project directory with .gsd structure for handler tests. + * Create a temp project directory with .sf structure for handler tests. 
*/ function createTempProject(): { basePath: string; planPath: string } { const basePath = fs.mkdtempSync(path.join(os.tmpdir(), 'sf-handler-')); - const tasksDir = path.join(basePath, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'tasks'); + const tasksDir = path.join(basePath, '.sf', 'milestones', 'M001', 'slices', 'S01', 'tasks'); fs.mkdirSync(tasksDir, { recursive: true }); - const planPath = path.join(basePath, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); + const planPath = path.join(basePath, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); fs.writeFileSync(planPath, `# S01: Test Slice ## Tasks @@ -429,7 +429,7 @@ console.log('\n=== complete-task: handler with missing plan file ==='); // Create a temp dir WITHOUT a plan file const basePath = fs.mkdtempSync(path.join(os.tmpdir(), 'sf-no-plan-')); - const tasksDir = path.join(basePath, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'tasks'); + const tasksDir = path.join(basePath, '.sf', 'milestones', 'M001', 'slices', 'S01', 'tasks'); fs.mkdirSync(tasksDir, { recursive: true }); // Seed milestone + slice so state machine guards pass diff --git a/src/resources/extensions/sf/tests/completed-units-metrics-sync.test.ts b/src/resources/extensions/sf/tests/completed-units-metrics-sync.test.ts index fdc98115d..614c2e868 100644 --- a/src/resources/extensions/sf/tests/completed-units-metrics-sync.test.ts +++ b/src/resources/extensions/sf/tests/completed-units-metrics-sync.test.ts @@ -81,7 +81,7 @@ test("#2313: syncWorktreeStateBack should include metrics.json in ROOT_STATE_FIL test("#2313: functional — completed-units archive creates milestone-specific file", () => { const tmpBase = mkdtempSync(join(tmpdir(), "sf-completed-units-")); - const sfDir = join(tmpBase, ".gsd"); + const sfDir = join(tmpBase, ".sf"); mkdirSync(sfDir, { recursive: true }); // Simulate existing completed-units.json with data diff --git a/src/resources/extensions/sf/tests/context-store.test.ts 
b/src/resources/extensions/sf/tests/context-store.test.ts index 6da54ce72..5bb779b56 100644 --- a/src/resources/extensions/sf/tests/context-store.test.ts +++ b/src/resources/extensions/sf/tests/context-store.test.ts @@ -385,7 +385,7 @@ describe("context-store: queryArtifact", () => { full_content: '# My Project\n\nProject description here.', }); insertArtifact({ - path: '.gsd/milestones/M001/M001-PLAN.md', + path: '.sf/milestones/M001/M001-PLAN.md', artifact_type: 'milestone_plan', milestone_id: 'M001', slice_id: null, @@ -396,7 +396,7 @@ describe("context-store: queryArtifact", () => { const project = queryArtifact('PROJECT.md'); assert.strictEqual(project, '# My Project\n\nProject description here.', 'queryArtifact returns full_content for PROJECT.md'); - const plan = queryArtifact('.gsd/milestones/M001/M001-PLAN.md'); + const plan = queryArtifact('.sf/milestones/M001/M001-PLAN.md'); assert.strictEqual(plan, '# M001 Plan\n\nMilestone content.', 'queryArtifact returns full_content for milestone plan'); }); @@ -475,7 +475,7 @@ Refactor prompt builders to inject relevance-scoped context. 
`; test("S02 with S01 predecessor includes both rows", () => { - const result = formatRoadmapExcerpt(sampleRoadmap, 'S02', '.gsd/milestones/M005/M005-ROADMAP.md'); + const result = formatRoadmapExcerpt(sampleRoadmap, 'S02', '.sf/milestones/M005/M005-ROADMAP.md'); // Should have header assert.match(result, /\| ID \| Slice \| Risk \| Depends \| Done \| After this \|/, 'has header row'); diff --git a/src/resources/extensions/sf/tests/copy-planning-artifacts-samepath.test.ts b/src/resources/extensions/sf/tests/copy-planning-artifacts-samepath.test.ts index 6a73fef13..417fb0f5a 100644 --- a/src/resources/extensions/sf/tests/copy-planning-artifacts-samepath.test.ts +++ b/src/resources/extensions/sf/tests/copy-planning-artifacts-samepath.test.ts @@ -3,7 +3,7 @@ import assert from "node:assert/strict"; import { readFileSync } from "node:fs"; import { join } from "node:path"; -test("copyPlanningArtifacts skips when source and destination .gsd resolve to the same path", () => { +test("copyPlanningArtifacts skips when source and destination .sf resolve to the same path", () => { const srcPath = join(import.meta.dirname, "..", "auto-worktree.ts"); const src = readFileSync(srcPath, "utf-8"); @@ -15,7 +15,7 @@ test("copyPlanningArtifacts skips when source and destination .gsd resolve to th const guardIdx = fnBody.indexOf("if (isSamePath(srcGsd, dstGsd)) return;"); const copyIdx = fnBody.indexOf("safeCopyRecursive(join(srcGsd, \"milestones\")"); - assert.ok(guardIdx !== -1, "copyPlanningArtifacts should guard same-path .gsd copies"); + assert.ok(guardIdx !== -1, "copyPlanningArtifacts should guard same-path .sf copies"); assert.ok(copyIdx !== -1, "copyPlanningArtifacts should still copy milestones when paths differ"); assert.ok(guardIdx < copyIdx, "same-path guard should run before any copy attempt"); }); diff --git a/src/resources/extensions/sf/tests/crash-handler-secondary.test.ts b/src/resources/extensions/sf/tests/crash-handler-secondary.test.ts index 3ed2b46f6..e468f5f51 
100644 --- a/src/resources/extensions/sf/tests/crash-handler-secondary.test.ts +++ b/src/resources/extensions/sf/tests/crash-handler-secondary.test.ts @@ -1,7 +1,7 @@ /** * Regression tests for #3348 secondary issues — crash handler gaps surfaced after #3696 * - * 1. register-extension.ts: writeCrashLog writes to ~/.gsd/crash/ directory + * 1. register-extension.ts: writeCrashLog writes to ~/.sf/crash/ directory * 2. register-extension.ts: _sfRejectionGuard registered for unhandledRejection * 3. register-extension.ts: _sfEpipeGuard exits with code 1 for unrecoverable errors (no log-and-continue) * 4. crash-recovery.ts: emitCrashRecoveredUnitEnd closes open unit-start journal entries @@ -21,7 +21,7 @@ const __dirname = dirname(__filename); function makeTmpBase(): string { const base = join(tmpdir(), `sf-test-${randomUUID()}`); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); return base; } @@ -34,7 +34,7 @@ const registerExtSrc = readFileSync( describe('register-extension crash handler secondary fixes (#3348)', () => { test('writeCrashLog is exported and writes a file to the crash directory', async () => { - // Dynamic import so SF_HOME can be pointed at a temp dir without polluting ~/.gsd + // Dynamic import so SF_HOME can be pointed at a temp dir without polluting ~/.sf const tmpHome = join(tmpdir(), `sf-crash-test-${randomUUID()}`); const origHome = process.env.SF_HOME; process.env.SF_HOME = tmpHome; diff --git a/src/resources/extensions/sf/tests/crash-recovery.test.ts b/src/resources/extensions/sf/tests/crash-recovery.test.ts index 4c9df1d7c..160558b41 100644 --- a/src/resources/extensions/sf/tests/crash-recovery.test.ts +++ b/src/resources/extensions/sf/tests/crash-recovery.test.ts @@ -24,7 +24,7 @@ import type { SFState } from "../types.ts"; function makeTmpBase(): string { const base = join(tmpdir(), `sf-test-${randomUUID()}`); - mkdirSync(join(base, ".gsd"), { recursive: true }); + 
mkdirSync(join(base, ".sf"), { recursive: true }); return base; } @@ -53,7 +53,7 @@ function writeTestLock( } function writeRoadmap(base: string, checked = false): void { - const milestoneDir = join(base, ".gsd", "milestones", "M001"); + const milestoneDir = join(base, ".sf", "milestones", "M001"); mkdirSync(join(milestoneDir, "slices", "S01", "tasks"), { recursive: true }); writeFileSync( join(milestoneDir, "M001-ROADMAP.md"), @@ -84,14 +84,14 @@ function writeRoadmap(base: string, checked = false): void { } function writeCompleteSliceArtifacts(base: string): void { - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); mkdirSync(sliceDir, { recursive: true }); writeFileSync(join(sliceDir, "S01-SUMMARY.md"), "# Summary\nDone.\n", "utf-8"); writeFileSync(join(sliceDir, "S01-UAT.md"), "# UAT\nPassed.\n", "utf-8"); } function writeCompleteMilestoneSummary(base: string): void { - const milestoneDir = join(base, ".gsd", "milestones", "M001"); + const milestoneDir = join(base, ".sf", "milestones", "M001"); mkdirSync(milestoneDir, { recursive: true }); writeFileSync(join(milestoneDir, "M001-SUMMARY.md"), "# Milestone Summary\nDone.\n", "utf-8"); } @@ -104,7 +104,7 @@ function writePausedSession( unitType?: string, unitId?: string, ): void { - const runtimeDir = join(base, ".gsd", "runtime"); + const runtimeDir = join(base, ".sf", "runtime"); mkdirSync(runtimeDir, { recursive: true }); writeFileSync( join(runtimeDir, "paused-session.json"), @@ -114,7 +114,7 @@ function writePausedSession( } function writeActivityLog(base: string, entries: Record<string, unknown>[]): void { - const activityDir = join(base, ".gsd", "activity"); + const activityDir = join(base, ".sf", "activity"); mkdirSync(activityDir, { recursive: true }); writeFileSync( join(activityDir, "001-execute-task-M001-S01-T01.jsonl"), @@ -183,7 +183,7 @@ test("readPausedSessionMetadata handles legacy metadata 
without unitType/unitId" const base = makeTmpBase(); try { // Write metadata without unitType/unitId (simulates older version) - const runtimeDir = join(base, ".gsd", "runtime"); + const runtimeDir = join(base, ".sf", "runtime"); mkdirSync(runtimeDir, { recursive: true }); writeFileSync( join(runtimeDir, "paused-session.json"), diff --git a/src/resources/extensions/sf/tests/db-access-guardrails.test.ts b/src/resources/extensions/sf/tests/db-access-guardrails.test.ts index c95d5c5d7..d6a8f1427 100644 --- a/src/resources/extensions/sf/tests/db-access-guardrails.test.ts +++ b/src/resources/extensions/sf/tests/db-access-guardrails.test.ts @@ -13,12 +13,12 @@ function readPrompt(name: string): string { // ─── Layer 1: system.md global guardrail ────────────────────────────────────── -test("system.md anti-patterns section prohibits direct .gsd/sf.db access", () => { +test("system.md anti-patterns section prohibits direct .sf/sf.db access", () => { const prompt = readPrompt("system"); assert.match( prompt, - /Never query.*\.gsd\/sf\.db.*directly/i, - "system.md must prohibit direct .gsd/sf.db access in the anti-patterns section", + /Never query.*\.sf\/sf\.db.*directly/i, + "system.md must prohibit direct .sf/sf.db access in the anti-patterns section", ); assert.match(prompt, /sqlite3/, "system.md DB guardrail must name the sqlite3 CLI"); assert.match(prompt, /better-sqlite3/, "system.md DB guardrail must name better-sqlite3"); @@ -36,26 +36,26 @@ test("validate-milestone.md contains DB access safety guardrail with tool redire const prompt = readPrompt("validate-milestone"); assert.match(prompt, /DB access safety/i, "validate-milestone.md must have DB access safety section"); assert.match(prompt, /sf_milestone_status/, "validate-milestone.md must name sf_milestone_status as alternative"); - assert.match(prompt, /Do NOT query.*\.gsd\/sf\.db/i, "validate-milestone.md must prohibit direct DB queries"); + assert.match(prompt, /Do NOT query.*\.sf\/sf\.db/i, 
"validate-milestone.md must prohibit direct DB queries"); }); test("complete-milestone.md contains DB access safety guardrail with tool redirect", () => { const prompt = readPrompt("complete-milestone"); assert.match(prompt, /DB access safety/i, "complete-milestone.md must have DB access safety section"); assert.match(prompt, /sf_milestone_status/, "complete-milestone.md must name sf_milestone_status as alternative"); - assert.match(prompt, /Do NOT query.*\.gsd\/sf\.db/i, "complete-milestone.md must prohibit direct DB queries"); + assert.match(prompt, /Do NOT query.*\.sf\/sf\.db/i, "complete-milestone.md must prohibit direct DB queries"); }); test("doctor-heal.md contains DB access guardrail naming sf_milestone_status", () => { const prompt = readPrompt("doctor-heal"); assert.match(prompt, /sf_milestone_status/, "doctor-heal.md must name sf_milestone_status as the DB inspection tool"); - assert.match(prompt, /Do NOT query.*\.gsd\/sf\.db/i, "doctor-heal.md must prohibit direct DB queries"); + assert.match(prompt, /Do NOT query.*\.sf\/sf\.db/i, "doctor-heal.md must prohibit direct DB queries"); }); test("forensics.md contains DB inspection guardrail", () => { const prompt = readPrompt("forensics"); assert.match(prompt, /sf_milestone_status/, "forensics.md must name sf_milestone_status as the DB inspection tool"); - assert.match(prompt, /sqlite3.*\.gsd\/sf\.db/i, "forensics.md must prohibit sqlite3 against .gsd/sf.db"); + assert.match(prompt, /sqlite3.*\.sf\/sf\.db/i, "forensics.md must prohibit sqlite3 against .sf/sf.db"); }); test("reassess-roadmap.md contains DB access safety guardrail", () => { @@ -81,8 +81,8 @@ test("no prompt file contains an unguarded sqlite3 command invocation", () => { const trimmed = line.trim(); // Match lines containing sqlite3 targeting sf.db in any common form: - // sqlite3 .gsd/sf.db, sqlite3 ./.gsd/sf.db, sqlite3 "/path/.gsd/sf.db", - // sqlite3 -header .gsd/sf.db, etc. 
+ // sqlite3 .sf/sf.db, sqlite3 ./.sf/sf.db, sqlite3 "/path/.sf/sf.db", + // sqlite3 -header .sf/sf.db, etc. // Guardrail text that says "Never run" or "Do NOT query" is fine — only flag // lines where these appear without a surrounding prohibition keyword. if (/sqlite3\b.*sf\.db/.test(trimmed)) { diff --git a/src/resources/extensions/sf/tests/db-path-worktree-symlink.test.ts b/src/resources/extensions/sf/tests/db-path-worktree-symlink.test.ts index 90a74efc4..83ad2bfdd 100644 --- a/src/resources/extensions/sf/tests/db-path-worktree-symlink.test.ts +++ b/src/resources/extensions/sf/tests/db-path-worktree-symlink.test.ts @@ -4,8 +4,8 @@ * Regression test for the db_unavailable loop in worktree/symlink layouts. * * The path resolver must handle BOTH worktree path families: - * - /.gsd/worktrees/<MID>/... (direct layout) - * - /.gsd/projects/<hash>/worktrees/<MID>/... (symlink-resolved layout) + * - /.sf/worktrees/<MID>/... (direct layout) + * - /.sf/projects/<hash>/worktrees/<MID>/... (symlink-resolved layout) * * When the second layout is not recognised, ensureDbOpen derives a wrong DB * path, the open fails silently, and every completion tool call returns @@ -30,54 +30,54 @@ console.log("\n=== #2517 Part 1: resolveProjectRootDbPath symlink layout ==="); const { resolveProjectRootDbPath } = await import("../bootstrap/dynamic-tools.js"); // Standard worktree layout (already works) -const standardPath = `/home/user/myproject/.gsd/worktrees/M001/work`; +const standardPath = `/home/user/myproject/.sf/worktrees/M001/work`; const standardResult = resolveProjectRootDbPath(standardPath); assertEq( standardResult, - join("/home/user/myproject", ".gsd", "sf.db"), + join("/home/user/myproject", ".sf", "sf.db"), "Standard worktree layout resolves to project root DB path", ); -// Symlink-resolved layout: /.gsd/projects/<hash>/worktrees/... +// Symlink-resolved layout: /.sf/projects/<hash>/worktrees/... 
// After PR #2952, these paths resolve to the hash-level DB (same as external-state), // because on POSIX getcwd() returns the canonical (symlink-resolved) path anyway, so -// a path like <proj>/.gsd/projects/<hash>/worktrees/ in practice is always -// ~/.gsd/projects/<hash>/worktrees/ after the OS resolves the .gsd symlink. -const symlinkPath = `/home/user/myproject/.gsd/projects/abc123def/worktrees/M001/work`; +// a path like <proj>/.sf/projects/<hash>/worktrees/ in practice is always +// ~/.sf/projects/<hash>/worktrees/ after the OS resolves the .sf symlink. +const symlinkPath = `/home/user/myproject/.sf/projects/abc123def/worktrees/M001/work`; const symlinkResult = resolveProjectRootDbPath(symlinkPath); assertEq( symlinkResult, - join("/home/user/myproject/.gsd/projects/abc123def", "sf.db"), - "/.gsd/projects/<hash>/worktrees/ resolves to hash-level DB (#2517, updated for #2952)", + join("/home/user/myproject/.sf/projects/abc123def", "sf.db"), + "/.sf/projects/<hash>/worktrees/ resolves to hash-level DB (#2517, updated for #2952)", ); // Windows-style separators for symlink layout if (sep === "\\") { - const winSymlinkPath = `C:\\Users\\dev\\project\\.gsd\\projects\\abc123def\\worktrees\\M001\\work`; + const winSymlinkPath = `C:\\Users\\dev\\project\\.sf\\projects\\abc123def\\worktrees\\M001\\work`; const winResult = resolveProjectRootDbPath(winSymlinkPath); assertEq( winResult, - join("C:\\Users\\dev\\project\\.gsd\\projects\\abc123def", "sf.db"), - "Windows /.gsd/projects/<hash>/worktrees/ resolves to hash-level DB", + join("C:\\Users\\dev\\project\\.sf\\projects\\abc123def", "sf.db"), + "Windows /.sf/projects/<hash>/worktrees/ resolves to hash-level DB", ); } else { // On non-Windows, test forward-slash variant explicitly - const fwdSymlinkPath = `/home/user/myproject/.gsd/projects/abc123def/worktrees/M001/work`; + const fwdSymlinkPath = `/home/user/myproject/.sf/projects/abc123def/worktrees/M001/work`; const fwdResult = 
resolveProjectRootDbPath(fwdSymlinkPath); assertEq( fwdResult, - join("/home/user/myproject/.gsd/projects/abc123def", "sf.db"), - "Forward-slash /.gsd/projects/<hash>/worktrees/ resolves to hash-level DB on POSIX", + join("/home/user/myproject/.sf/projects/abc123def", "sf.db"), + "Forward-slash /.sf/projects/<hash>/worktrees/ resolves to hash-level DB on POSIX", ); } // Edge: deeper nesting under projects/<hash>/worktrees -const deepSymlinkPath = `/home/user/myproject/.gsd/projects/deadbeef42/worktrees/M003/sub/dir`; +const deepSymlinkPath = `/home/user/myproject/.sf/projects/deadbeef42/worktrees/M003/sub/dir`; const deepResult = resolveProjectRootDbPath(deepSymlinkPath); assertEq( deepResult, - join("/home/user/myproject/.gsd/projects/deadbeef42", "sf.db"), - "Deep /.gsd/projects/<hash>/worktrees/ path resolves to hash-level DB (#2952)", + join("/home/user/myproject/.sf/projects/deadbeef42", "sf.db"), + "Deep /.sf/projects/<hash>/worktrees/ path resolves to hash-level DB (#2952)", ); // Non-worktree path should be unchanged @@ -85,7 +85,7 @@ const normalPath = `/home/user/myproject`; const normalResult = resolveProjectRootDbPath(normalPath); assertEq( normalResult, - join("/home/user/myproject", ".gsd", "sf.db"), + join("/home/user/myproject", ".sf", "sf.db"), "Non-worktree path is unchanged", ); diff --git a/src/resources/extensions/sf/tests/db-writer.test.ts b/src/resources/extensions/sf/tests/db-writer.test.ts index bd6eb8e8e..497a3ca86 100644 --- a/src/resources/extensions/sf/tests/db-writer.test.ts +++ b/src/resources/extensions/sf/tests/db-writer.test.ts @@ -34,8 +34,8 @@ import type { Decision, Requirement } from '../types.ts'; function makeTmpDir(): string { const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'sf-dbwriter-')); - // Create .gsd directory structure - fs.mkdirSync(path.join(dir, '.gsd'), { recursive: true }); + // Create .sf directory structure + fs.mkdirSync(path.join(dir, '.sf'), { recursive: true }); return dir; } @@ -68,7 +68,7 @@ const 
SAMPLE_DECISIONS: Decision[] = [ when_context: 'M001', scope: 'arch', decision: 'DB location', - choice: '.gsd/sf.db', + choice: '.sf/sf.db', rationale: 'Derived state', revisable: 'No', made_by: 'agent', @@ -307,7 +307,7 @@ describe('db-writer', () => { test('saveDecisionToDb', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); try { @@ -328,7 +328,7 @@ describe('db-writer', () => { assert.deepStrictEqual(dbDecision?.choice, 'Option A', 'DB decision has correct choice'); // Verify markdown file was written - const mdPath = path.join(tmpDir, '.gsd', 'DECISIONS.md'); + const mdPath = path.join(tmpDir, '.sf', 'DECISIONS.md'); assert.ok(fs.existsSync(mdPath), 'DECISIONS.md file created'); const mdContent = fs.readFileSync(mdPath, 'utf-8'); @@ -365,7 +365,7 @@ describe('db-writer', () => { test('parallel saveDecisionToDb calls produce unique IDs', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); try { @@ -406,7 +406,7 @@ describe('db-writer', () => { test('updateRequirementInDb', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); try { @@ -441,7 +441,7 @@ describe('db-writer', () => { assert.deepStrictEqual(updated?.description, 'Test requirement', 'description preserved after update'); // Verify markdown file was written - const mdPath = path.join(tmpDir, '.gsd', 'REQUIREMENTS.md'); + const mdPath = path.join(tmpDir, '.sf', 'REQUIREMENTS.md'); assert.ok(fs.existsSync(mdPath), 'REQUIREMENTS.md file created'); const mdContent = fs.readFileSync(mdPath, 'utf-8'); @@ -460,7 +460,7 @@ describe('db-writer', () => { test('updateRequirementInDb — upserts when not found (#2919)', async () => { const tmpDir = makeTmpDir(); - 
const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); try { @@ -478,7 +478,7 @@ describe('db-writer', () => { test('updateRequirementInDb — seeds from REQUIREMENTS.md when DB empty (#3346)', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); try { @@ -508,7 +508,7 @@ describe('db-writer', () => { '- Source: design', '- Validation: S01 verified', ].join('\n'); - fs.writeFileSync(path.join(tmpDir, '.gsd', 'REQUIREMENTS.md'), reqContent); + fs.writeFileSync(path.join(tmpDir, '.sf', 'REQUIREMENTS.md'), reqContent); // DB is empty — no requirements seeded. Update R005 to "validated". // Before #3346 fix: this would create a skeleton with empty fields. @@ -547,7 +547,7 @@ describe('db-writer', () => { test('saveArtifactToDb', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); try { @@ -575,7 +575,7 @@ describe('db-writer', () => { // Verify file on disk const filePath = path.join( - tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S06', 'tasks', 'T01-SUMMARY.md', + tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S06', 'tasks', 'T01-SUMMARY.md', ); assert.ok(fs.existsSync(filePath), 'artifact file written to disk'); assert.deepStrictEqual(fs.readFileSync(filePath, 'utf-8'), content, 'file content matches'); @@ -587,7 +587,7 @@ describe('db-writer', () => { test('saveArtifactToDb — shrinkage guard preserves larger existing file', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); try { @@ -596,7 +596,7 @@ describe('db-writer', () => { // Pre-create the file with full content (simulating a prior `write` tool call) const relPath = 
'milestones/M001/M001-RESEARCH.md'; - const filePath = path.join(tmpDir, '.gsd', relPath); + const filePath = path.join(tmpDir, '.sf', relPath); fs.mkdirSync(path.dirname(filePath), { recursive: true }); fs.writeFileSync(filePath, fullContent); @@ -633,7 +633,7 @@ describe('db-writer', () => { test('saveArtifactToDb — allows overwrite when new content is similar size', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); try { @@ -641,7 +641,7 @@ describe('db-writer', () => { const newContent = '# Summary v2\n\nUpdated content here with more details.\n'; const relPath = 'milestones/M001/M001-SUMMARY.md'; - const filePath = path.join(tmpDir, '.gsd', relPath); + const filePath = path.join(tmpDir, '.sf', relPath); fs.mkdirSync(path.dirname(filePath), { recursive: true }); fs.writeFileSync(filePath, oldContent); diff --git a/src/resources/extensions/sf/tests/debug-logger.test.ts b/src/resources/extensions/sf/tests/debug-logger.test.ts index 2d23c47e0..660785087 100644 --- a/src/resources/extensions/sf/tests/debug-logger.test.ts +++ b/src/resources/extensions/sf/tests/debug-logger.test.ts @@ -20,7 +20,7 @@ import { function createTempGsdDir(): string { const tmp = mkdtempSync(join(tmpdir(), 'sf-debug-test-')); - mkdirSync(join(tmp, '.gsd'), { recursive: true }); + mkdirSync(join(tmp, '.sf'), { recursive: true }); return tmp; } @@ -39,7 +39,7 @@ test('enableDebug creates log file and sets enabled', () => { assert.ok(logPath, 'log path should be set'); // Normalize path separators for Windows compatibility const normalized = logPath!.replace(/\\/g, '/'); - assert.ok(normalized.includes('.gsd/debug/debug-'), 'log path should be in .gsd/debug/'); + assert.ok(normalized.includes('.sf/debug/debug-'), 'log path should be in .sf/debug/'); assert.ok(logPath!.endsWith('.log'), 'log path should end with .log'); disableDebug(); @@ -156,7 +156,7 @@ 
test('writeDebugSummary includes all counters and disables debug', () => { test('auto-prunes old debug logs', () => { const tmp = createTempGsdDir(); - const debugDir = join(tmp, '.gsd', 'debug'); + const debugDir = join(tmp, '.sf', 'debug'); mkdirSync(debugDir, { recursive: true }); // Create 6 old log files diff --git a/src/resources/extensions/sf/tests/defer-milestone-stamp.test.ts b/src/resources/extensions/sf/tests/defer-milestone-stamp.test.ts index fbcd919fd..1ada0adfb 100644 --- a/src/resources/extensions/sf/tests/defer-milestone-stamp.test.ts +++ b/src/resources/extensions/sf/tests/defer-milestone-stamp.test.ts @@ -12,7 +12,7 @@ import { appendCapture, markCaptureResolved, loadAllCaptures } from "../captures test("defer captures without milestone ID are stamped as executed (#3542)", async () => { const base = mkdtempSync(join(tmpdir(), "sf-stamp-")); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); try { appendCapture(base, "Improve error messages"); const captures = loadAllCaptures(base); diff --git a/src/resources/extensions/sf/tests/deferred-slice-dispatch.test.ts b/src/resources/extensions/sf/tests/deferred-slice-dispatch.test.ts index 603dbaaf4..eb709eda0 100644 --- a/src/resources/extensions/sf/tests/deferred-slice-dispatch.test.ts +++ b/src/resources/extensions/sf/tests/deferred-slice-dispatch.test.ts @@ -30,12 +30,12 @@ import { isDeferredStatus } from "../status-guards.ts"; function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-deferred-dispatch-")); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); return base; } function writeFile(base: string, relativePath: string, content: string): void { - const full = join(base, ".gsd", relativePath); + const full = join(base, ".sf", relativePath); mkdirSync(join(full, ".."), { recursive: true }); 
writeFileSync(full, content); } diff --git a/src/resources/extensions/sf/tests/derive-state-crossval.test.ts b/src/resources/extensions/sf/tests/derive-state-crossval.test.ts index fd0342749..eaa42072d 100644 --- a/src/resources/extensions/sf/tests/derive-state-crossval.test.ts +++ b/src/resources/extensions/sf/tests/derive-state-crossval.test.ts @@ -27,12 +27,12 @@ import type { SFState } from '../types.ts'; function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-crossval-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } function writeFile(base: string, relativePath: string, content: string): void { - const full = join(base, '.gsd', relativePath); + const full = join(base, '.sf', relativePath); mkdirSync(join(full, '..'), { recursive: true }); writeFileSync(full, content); } diff --git a/src/resources/extensions/sf/tests/derive-state-db-disk-reconcile.test.ts b/src/resources/extensions/sf/tests/derive-state-db-disk-reconcile.test.ts index f4681fdb1..217d7445b 100644 --- a/src/resources/extensions/sf/tests/derive-state-db-disk-reconcile.test.ts +++ b/src/resources/extensions/sf/tests/derive-state-db-disk-reconcile.test.ts @@ -2,7 +2,7 @@ * derive-state-db-disk-reconcile.test.ts — #2416 * * After migration to DB-backed state, milestones that exist on disk - * (in .gsd/milestones/) but were never imported into the DB become + * (in .sf/milestones/) but were never imported into the DB become * invisible to deriveStateFromDb(). This test verifies that * deriveStateFromDb reconciles disk milestones with DB milestones. 
*/ @@ -25,12 +25,12 @@ const { assertEq, assertTrue, report } = createTestContext(); function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-disk-reconcile-")); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); return base; } function writeFile(base: string, relativePath: string, content: string): void { - const full = join(base, ".gsd", relativePath); + const full = join(base, ".sf", relativePath); mkdirSync(join(full, ".."), { recursive: true }); writeFileSync(full, content); } @@ -62,7 +62,7 @@ async function main(): Promise<void> { // Set up: M001 in DB, M002 on disk only const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); try { openDatabase(dbPath); diff --git a/src/resources/extensions/sf/tests/derive-state-db.test.ts b/src/resources/extensions/sf/tests/derive-state-db.test.ts index 14dae22e7..cb69eff76 100644 --- a/src/resources/extensions/sf/tests/derive-state-db.test.ts +++ b/src/resources/extensions/sf/tests/derive-state-db.test.ts @@ -20,12 +20,12 @@ import { function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-derive-db-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } function writeFile(base: string, relativePath: string, content: string): void { - const full = join(base, '.gsd', relativePath); + const full = join(base, '.sf', relativePath); mkdirSync(join(full, '..'), { recursive: true }); writeFileSync(full, content); } @@ -275,7 +275,7 @@ describe('derive-state-db', async () => { const base = createFixtureBase(); try { // Write minimal milestone dir (needed for milestone discovery) - mkdirSync(join(base, '.gsd', 'milestones', 'M001'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M001'), { recursive: true }); // 
Write REQUIREMENTS.md to disk (DB content is no longer used by deriveState) writeFile(base, 'REQUIREMENTS.md', REQUIREMENTS_CONTENT); @@ -320,8 +320,8 @@ describe('derive-state-db', async () => { // Create milestone dirs on disk (needed for directory scanning) // Also write roadmap files to disk — resolveMilestoneFile checks file existence // The DB only provides content, not file discovery - mkdirSync(join(base, '.gsd', 'milestones', 'M001'), { recursive: true }); - mkdirSync(join(base, '.gsd', 'milestones', 'M002'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M001'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M002'), { recursive: true }); writeFile(base, 'milestones/M001/M001-ROADMAP.md', completedRoadmap); writeFile(base, 'milestones/M001/M001-VALIDATION.md', `---\nverdict: pass\nremediation_round: 0\n---\n\n# Validation\nPassed.`); writeFile(base, 'milestones/M001/M001-SUMMARY.md', summaryContent); @@ -940,8 +940,8 @@ describe('derive-state-db', async () => { const base = createFixtureBase(); try { // Ghost: milestone dir exists with only META.json, no context/roadmap/summary - mkdirSync(join(base, '.gsd', 'milestones', 'M001'), { recursive: true }); - writeFileSync(join(base, '.gsd', 'milestones', 'M001', 'META.json'), '{}'); + mkdirSync(join(base, '.sf', 'milestones', 'M001'), { recursive: true }); + writeFileSync(join(base, '.sf', 'milestones', 'M001', 'META.json'), '{}'); // Real milestone writeFile(base, 'milestones/M002/M002-CONTEXT.md', '# M002: Real\n\nReal milestone.'); @@ -1062,10 +1062,10 @@ describe('derive-state-db', async () => { // M002: queued milestone — directory + slices dir exists, but no content files. // This is what happens when ensureMilestoneDbRow creates M002 but the DB row // is lost during worktree teardown. 
- mkdirSync(join(base, '.gsd', 'milestones', 'M002', 'slices'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M002', 'slices'), { recursive: true }); // A worktree exists for M002, proving it's a legitimate milestone - mkdirSync(join(base, '.gsd', 'worktrees', 'M002'), { recursive: true }); + mkdirSync(join(base, '.sf', 'worktrees', 'M002'), { recursive: true }); // isGhostMilestone should NOT treat M002 as ghost when worktree exists assert.ok(!isGhostMilestone(base, 'M002'), 'ghost-wt: M002 with worktree is NOT a ghost'); @@ -1100,7 +1100,7 @@ describe('derive-state-db', async () => { writeFile(base, 'milestones/M001/M001-SUMMARY.md', '# M001 Summary\n\nDone.'); // M002: queued milestone — directory exists with CONTEXT file and DB row - mkdirSync(join(base, '.gsd', 'milestones', 'M002', 'slices'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M002', 'slices'), { recursive: true }); writeFile(base, 'milestones/M002/M002-CONTEXT.md', '# M002 Context\n\nPlanned milestone.'); // DB has both M001 complete and M002 queued diff --git a/src/resources/extensions/sf/tests/derive-state-deps.test.ts b/src/resources/extensions/sf/tests/derive-state-deps.test.ts index 63293c49b..4e0d9d26a 100644 --- a/src/resources/extensions/sf/tests/derive-state-deps.test.ts +++ b/src/resources/extensions/sf/tests/derive-state-deps.test.ts @@ -9,24 +9,24 @@ import { deriveState } from '../state.ts'; function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-deps-test-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } function writeRoadmap(base: string, mid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-ROADMAP.md`), content); } function writeMilestoneSummary(base: 
string, mid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-SUMMARY.md`), content); } function writeMilestoneValidation(base: string, mid: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-VALIDATION.md`), `---\nverdict: pass\nremediation_round: 0\n---\n\n# Validation\nPassed.`); } @@ -36,19 +36,19 @@ function writeMilestoneValidation(base: string, mid: string): void { * frontmatter is the raw YAML lines between the --- delimiters. */ function writeContext(base: string, mid: string, frontmatter: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-CONTEXT.md`), `---\n${frontmatter}\n---\n`); } function writeContextDraft(base: string, mid: string, frontmatter: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-CONTEXT-DRAFT.md`), `---\n${frontmatter}\n---\n\n# Draft Context\nThis is a draft.`); } function writeSlicePlan(base: string, mid: string, sid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid, 'slices', sid); + const dir = join(base, '.sf', 'milestones', mid, 'slices', sid); mkdirSync(join(dir, 'tasks'), { recursive: true }); writeFileSync(join(dir, "tasks", "T01-PLAN.md"), "# T01 Plan\n"); writeFileSync(join(dir, `${sid}-PLAN.md`), content); @@ -443,7 +443,7 @@ describe('derive-state-deps', async () => { const base = createFixtureBase(); try { // M001: exists as directory only (no roadmap, no summary) - const m001Dir = join(base, '.gsd', 'milestones', 
'M001'); + const m001Dir = join(base, '.sf', 'milestones', 'M001'); mkdirSync(m001Dir, { recursive: true }); // M002: only CONTEXT-DRAFT.md, depends on M001 diff --git a/src/resources/extensions/sf/tests/derive-state-draft.test.ts b/src/resources/extensions/sf/tests/derive-state-draft.test.ts index 157b1c039..a11bcbc9a 100644 --- a/src/resources/extensions/sf/tests/derive-state-draft.test.ts +++ b/src/resources/extensions/sf/tests/derive-state-draft.test.ts @@ -20,43 +20,43 @@ function assertEq<T>(actual: T, expected: T, message: string): void { function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-draft-test-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } function writeContextDraft(base: string, mid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-CONTEXT-DRAFT.md`), content); } function writeContext(base: string, mid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-CONTEXT.md`), content); } function writeRoadmap(base: string, mid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-ROADMAP.md`), content); } function writePlan(base: string, mid: string, sid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid, 'slices', sid); + const dir = join(base, '.sf', 'milestones', mid, 'slices', sid); mkdirSync(join(dir, 'tasks'), { recursive: true }); writeFileSync(join(dir, "tasks", "T01-PLAN.md"), "# T01 Plan\n"); writeFileSync(join(dir, 
`${sid}-PLAN.md`), content); } function writeMilestoneSummary(base: string, mid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-SUMMARY.md`), content); } function writeMilestoneValidation(base: string, mid: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-VALIDATION.md`), `---\nverdict: pass\nremediation_round: 0\n---\n\n# Validation\nPassed.`); } @@ -193,8 +193,8 @@ async function main(): Promise<void> { writeContextDraft(base, 'M002', '# M002 Draft\n\nSeed.'); // M003: milestone directory with CONTEXT — should be pending - mkdirSync(join(base, '.gsd', 'milestones', 'M003'), { recursive: true }); - writeFileSync(join(base, '.gsd', 'milestones', 'M003', 'M003-CONTEXT.md'), '# M003\n\nPending milestone.'); + mkdirSync(join(base, '.sf', 'milestones', 'M003'), { recursive: true }); + writeFileSync(join(base, '.sf', 'milestones', 'M003', 'M003-CONTEXT.md'), '# M003\n\nPending milestone.'); const state = await deriveState(base); @@ -254,7 +254,7 @@ async function main(): Promise<void> { const base = createFixtureBase(); try { // M001: just a directory, no files at all — ghost milestone, skipped - mkdirSync(join(base, '.gsd', 'milestones', 'M001'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M001'), { recursive: true }); const state = await deriveState(base); @@ -274,8 +274,8 @@ async function main(): Promise<void> { const base = createFixtureBase(); try { // M001: has CONTEXT but no roadmap/summary → becomes active first - mkdirSync(join(base, '.gsd', 'milestones', 'M001'), { recursive: true }); - writeFileSync(join(base, '.gsd', 'milestones', 'M001', 'M001-CONTEXT.md'), '# M001\n\nFirst milestone.'); + mkdirSync(join(base, '.sf', 'milestones', 
'M001'), { recursive: true }); + writeFileSync(join(base, '.sf', 'milestones', 'M001', 'M001-CONTEXT.md'), '# M001\n\nFirst milestone.'); // M002: has CONTEXT-DRAFT but isn't active (M001 is first) writeContextDraft(base, 'M002', '# M002 Draft\n\nSeed.'); diff --git a/src/resources/extensions/sf/tests/derive-state-helpers.test.ts b/src/resources/extensions/sf/tests/derive-state-helpers.test.ts index 8893d5adf..ea1fc5118 100644 --- a/src/resources/extensions/sf/tests/derive-state-helpers.test.ts +++ b/src/resources/extensions/sf/tests/derive-state-helpers.test.ts @@ -28,12 +28,12 @@ import { function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-helpers-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } function writeFile(base: string, relativePath: string, content: string): void { - const full = join(base, '.gsd', relativePath); + const full = join(base, '.sf', relativePath); mkdirSync(join(full, '..'), { recursive: true }); writeFileSync(full, content); } @@ -423,7 +423,7 @@ describe('derive-state-helpers', () => { try { // M003 should come first per queue order, M001 second const queueOrder = JSON.stringify({ order: ['M003', 'M001', 'M002'], updatedAt: new Date().toISOString() }); - writeFileSync(join(base, '.gsd', 'QUEUE-ORDER.json'), queueOrder); + writeFileSync(join(base, '.sf', 'QUEUE-ORDER.json'), queueOrder); writeFile(base, 'milestones/M001/M001-CONTEXT.md', '# M001\n\nContext.'); writeFile(base, 'milestones/M002/M002-CONTEXT.md', '# M002\n\nContext.'); writeFile(base, 'milestones/M003/M003-CONTEXT.md', '# M003\n\nContext.'); @@ -475,7 +475,7 @@ describe('derive-state-helpers', () => { const base = createFixtureBase(); try { // M001: queued shell — no content, no slices - mkdirSync(join(base, '.gsd', 'milestones', 'M001'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M001'), { recursive: true }); // M002: 
real milestone with context writeFile(base, 'milestones/M002/M002-CONTEXT.md', '# M002: Real\n\nActive milestone.'); diff --git a/src/resources/extensions/sf/tests/derive-state.test.ts b/src/resources/extensions/sf/tests/derive-state.test.ts index 7f2f11af0..c8fbcb185 100644 --- a/src/resources/extensions/sf/tests/derive-state.test.ts +++ b/src/resources/extensions/sf/tests/derive-state.test.ts @@ -9,18 +9,18 @@ import { deriveState, isSliceComplete, isMilestoneComplete, isGhostMilestone } f function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-state-test-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } function writeRoadmap(base: string, mid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-ROADMAP.md`), content); } function writePlan(base: string, mid: string, sid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid, 'slices', sid); + const dir = join(base, '.sf', 'milestones', mid, 'slices', sid); const tasksDir = join(dir, 'tasks'); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(dir, `${sid}-PLAN.md`), content); @@ -35,25 +35,25 @@ function writePlan(base: string, mid: string, sid: string, content: string): voi } function writeContinue(base: string, mid: string, sid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid, 'slices', sid); + const dir = join(base, '.sf', 'milestones', mid, 'slices', sid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${sid}-CONTINUE.md`), content); } function writeMilestoneSummary(base: string, mid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { 
recursive: true }); writeFileSync(join(dir, `${mid}-SUMMARY.md`), content); } function writeMilestoneValidation(base: string, mid: string, verdict: string = 'pass'): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-VALIDATION.md`), `---\nverdict: ${verdict}\nremediation_round: 0\n---\n\n# Validation\nValidated.`); } function writeRequirements(base: string, content: string): void { - writeFileSync(join(base, '.gsd', 'REQUIREMENTS.md'), content); + writeFileSync(join(base, '.sf', 'REQUIREMENTS.md'), content); } function cleanup(base: string): void { @@ -89,8 +89,8 @@ describe('derive-state', async () => { const base = createFixtureBase(); try { // Create M001 directory with CONTEXT but no roadmap file - mkdirSync(join(base, '.gsd', 'milestones', 'M001'), { recursive: true }); - writeFileSync(join(base, '.gsd', 'milestones', 'M001', 'M001-CONTEXT.md'), '# First Milestone\n\nContext for M001.'); + mkdirSync(join(base, '.sf', 'milestones', 'M001'), { recursive: true }); + writeFileSync(join(base, '.sf', 'milestones', 'M001', 'M001-CONTEXT.md'), '# First Milestone\n\nContext for M001.'); const state = await deriveState(base); @@ -484,8 +484,8 @@ Continue from step 2. `); // M003: dir with CONTEXT but no roadmap → pending since M002 is already active - mkdirSync(join(base, '.gsd', 'milestones', 'M003'), { recursive: true }); - writeFileSync(join(base, '.gsd', 'milestones', 'M003', 'M003-CONTEXT.md'), '# Third Milestone\n\nContext for M003.'); + mkdirSync(join(base, '.sf', 'milestones', 'M003'), { recursive: true }); + writeFileSync(join(base, '.sf', 'milestones', 'M003', 'M003-CONTEXT.md'), '# Third Milestone\n\nContext for M003.'); const state = await deriveState(base); @@ -696,11 +696,11 @@ Continue from step 2. 
const base = createFixtureBase(); try { // M001, M002: completed milestones with summaries but no roadmaps - const m1dir = join(base, '.gsd', 'milestones', 'M001'); + const m1dir = join(base, '.sf', 'milestones', 'M001'); mkdirSync(m1dir, { recursive: true }); writeFileSync(join(m1dir, 'M001-SUMMARY.md'), '---\nid: M001\n---\n# Bootstrap\nDone.'); - const m2dir = join(base, '.gsd', 'milestones', 'M002'); + const m2dir = join(base, '.sf', 'milestones', 'M002'); mkdirSync(m2dir, { recursive: true }); writeFileSync(join(m2dir, 'M002-SUMMARY.md'), '---\nid: M002\n---\n# Core Features\nDone.'); @@ -729,7 +729,7 @@ Continue from step 2. { const base = createFixtureBase(); try { - const m1dir = join(base, '.gsd', 'milestones', 'M001'); + const m1dir = join(base, '.sf', 'milestones', 'M001'); mkdirSync(m1dir, { recursive: true }); writeFileSync(join(m1dir, 'M001-SUMMARY.md'), '---\ntitle: Done\n---\nAll done.'); @@ -869,7 +869,7 @@ slice: S01 writeMilestoneSummary(base, 'M001', '---\nid: M001\n---\n\n# M001: Foundation\n\n**Done.**'); // M002: depends on M001 — should be active since M001 is complete writeRoadmap(base, 'M002', `# M002: Dependent\n\n**Vision:** Depends on M001.\n\n## Slices\n\n- [ ] **S01: Work** \`risk:low\` \`depends:[]\`\n > Work.\n`); - const contextDir = join(base, '.gsd', 'milestones', 'M002'); + const contextDir = join(base, '.sf', 'milestones', 'M002'); mkdirSync(contextDir, { recursive: true }); writeFileSync(join(contextDir, 'M002-CONTEXT.md'), '---\ndepends_on:\n - M001\n---\n\n# M002 Context\n\nDepends on M001.'); @@ -887,7 +887,7 @@ slice: S01 const base = createFixtureBase(); try { // Create a ghost milestone directory with only META.json - const ghostDir = join(base, '.gsd', 'milestones', 'M001'); + const ghostDir = join(base, '.sf', 'milestones', 'M001'); mkdirSync(ghostDir, { recursive: true }); writeFileSync(join(ghostDir, 'META.json'), JSON.stringify({ id: 'M001' })); @@ -909,12 +909,12 @@ slice: S01 const base = createFixtureBase(); try 
{ // M001: ghost (only META.json) - const ghostDir = join(base, '.gsd', 'milestones', 'M001'); + const ghostDir = join(base, '.sf', 'milestones', 'M001'); mkdirSync(ghostDir, { recursive: true }); writeFileSync(join(ghostDir, 'META.json'), JSON.stringify({ id: 'M001' })); // M002: real milestone with a CONTEXT file - const realDir = join(base, '.gsd', 'milestones', 'M002'); + const realDir = join(base, '.sf', 'milestones', 'M002'); mkdirSync(realDir, { recursive: true }); writeFileSync(join(realDir, 'M002-CONTEXT.md'), '# Real Milestone\n\nThis has content.'); @@ -936,11 +936,11 @@ slice: S01 try { // Create a milestone directory with only an empty slices subdir — no content files. // This would normally be a ghost, but it has a worktree directory. - const milestoneDir = join(base, '.gsd', 'milestones', 'M002'); + const milestoneDir = join(base, '.sf', 'milestones', 'M002'); mkdirSync(join(milestoneDir, 'slices'), { recursive: true }); // Create a worktree directory for M002, simulating an active worktree - const worktreeDir = join(base, '.gsd', 'worktrees', 'M002'); + const worktreeDir = join(base, '.sf', 'worktrees', 'M002'); mkdirSync(worktreeDir, { recursive: true }); // isGhostMilestone should return false because the worktree exists diff --git a/src/resources/extensions/sf/tests/detection.test.ts b/src/resources/extensions/sf/tests/detection.test.ts index 796687b01..00d6d9372 100644 --- a/src/resources/extensions/sf/tests/detection.test.ts +++ b/src/resources/extensions/sf/tests/detection.test.ts @@ -49,22 +49,22 @@ test("detectProjectState: empty directory returns state=none", (t) => { assert.equal(result.v2, undefined); }); -test("detectProjectState: directory with .gsd/milestones/M001 returns v2-sf", (t) => { +test("detectProjectState: directory with .sf/milestones/M001 returns v2-sf", (t) => { const dir = makeTempDir("v2-sf"); t.after(() => cleanup(dir)); - mkdirSync(join(dir, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(dir, 
".sf", "milestones", "M001"), { recursive: true }); const result = detectProjectState(dir); assert.equal(result.state, "v2-sf"); assert.ok(result.v2); assert.equal(result.v2!.milestoneCount, 1); }); -test("detectProjectState: directory with empty .gsd/milestones returns v2-sf-empty", (t) => { +test("detectProjectState: directory with empty .sf/milestones returns v2-sf-empty", (t) => { const dir = makeTempDir("v2-empty"); t.after(() => cleanup(dir)); - mkdirSync(join(dir, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(dir, ".sf", "milestones"), { recursive: true }); const result = detectProjectState(dir); assert.equal(result.state, "v2-sf-empty"); assert.ok(result.v2); @@ -89,18 +89,18 @@ test("detectProjectState: v2 takes priority over v1 when both exist", (t) => { const dir = makeTempDir("both"); t.after(() => cleanup(dir)); - mkdirSync(join(dir, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(dir, ".sf", "milestones", "M001"), { recursive: true }); mkdirSync(join(dir, ".planning"), { recursive: true }); const result = detectProjectState(dir); assert.equal(result.state, "v2-sf"); }); -test("detectProjectState: detects preferences in .gsd/", (t) => { +test("detectProjectState: detects preferences in .sf/", (t) => { const dir = makeTempDir("prefs"); t.after(() => cleanup(dir)); - mkdirSync(join(dir, ".gsd", "milestones"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "PREFERENCES.md"), "---\nversion: 1\n---\n", "utf-8"); + mkdirSync(join(dir, ".sf", "milestones"), { recursive: true }); + writeFileSync(join(dir, ".sf", "PREFERENCES.md"), "---\nversion: 1\n---\n", "utf-8"); const result = detectProjectState(dir); assert.ok(result.v2); assert.equal(result.v2!.hasPreferences, true); @@ -1192,7 +1192,7 @@ test("detectProjectSignals: Spring Boot settings-defined catalog accessor emits // ─── scanProjectFiles: RECURSIVE_SCAN_IGNORED_DIRS ────────────────────── -test("scanProjectFiles: excludes .claude, .gsd, .planning, .plans, 
.cursor, .vscode directories", () => { +test("scanProjectFiles: excludes .claude, .sf, .planning, .plans, .cursor, .vscode directories", () => { const dir = makeTempDir("scan-ignore-dotdirs"); try { // Create project files that should be included @@ -1201,7 +1201,7 @@ test("scanProjectFiles: excludes .claude, .gsd, .planning, .plans, .cursor, .vsc writeFileSync(join(dir, "README.md"), "# Project\n", "utf-8"); // Create tool directories that should be excluded - const excludedDirs = [".claude", ".gsd", ".planning", ".plans", ".cursor", ".vscode"]; + const excludedDirs = [".claude", ".sf", ".planning", ".plans", ".cursor", ".vscode"]; for (const d of excludedDirs) { mkdirSync(join(dir, d), { recursive: true }); writeFileSync(join(dir, d, "config.json"), "{}\n", "utf-8"); diff --git a/src/resources/extensions/sf/tests/dev-engine-wrapper.test.ts b/src/resources/extensions/sf/tests/dev-engine-wrapper.test.ts index 994ea84dc..2398a66e2 100644 --- a/src/resources/extensions/sf/tests/dev-engine-wrapper.test.ts +++ b/src/resources/extensions/sf/tests/dev-engine-wrapper.test.ts @@ -77,9 +77,9 @@ describe("DevWorkflowEngine", () => { const { DevWorkflowEngine } = await import("../dev-workflow-engine.ts"); const engine = new DevWorkflowEngine(); - // Create a minimal temp .gsd structure for deriveState + // Create a minimal temp .sf structure for deriveState const tempDir = mkdtempSync(join(tmpdir(), "sf-engine-test-")); - mkdirSync(join(tempDir, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(tempDir, ".sf", "milestones"), { recursive: true }); t.after(() => rmSync(tempDir, { recursive: true, force: true })); diff --git a/src/resources/extensions/sf/tests/discuss-queued-milestones.test.ts b/src/resources/extensions/sf/tests/discuss-queued-milestones.test.ts index 7a36422dc..1f45de091 100644 --- a/src/resources/extensions/sf/tests/discuss-queued-milestones.test.ts +++ b/src/resources/extensions/sf/tests/discuss-queued-milestones.test.ts @@ -30,7 +30,7 @@ import { 
resolveMilestoneFile } from "../paths.ts"; function createBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-discuss-queued-")); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); return base; } @@ -39,22 +39,22 @@ function cleanup(base: string): void { } function writeMilestoneDir(base: string, mid: string): void { - mkdirSync(join(base, ".gsd", "milestones", mid), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", mid), { recursive: true }); } function writeContext(base: string, mid: string, content: string): void { writeMilestoneDir(base, mid); - writeFileSync(join(base, ".gsd", "milestones", mid, `${mid}-CONTEXT.md`), content); + writeFileSync(join(base, ".sf", "milestones", mid, `${mid}-CONTEXT.md`), content); } function writeContextDraft(base: string, mid: string, content: string): void { writeMilestoneDir(base, mid); - writeFileSync(join(base, ".gsd", "milestones", mid, `${mid}-CONTEXT-DRAFT.md`), content); + writeFileSync(join(base, ".sf", "milestones", mid, `${mid}-CONTEXT-DRAFT.md`), content); } function writeRoadmap(base: string, mid: string, content: string): void { writeMilestoneDir(base, mid); - writeFileSync(join(base, ".gsd", "milestones", mid, `${mid}-ROADMAP.md`), content); + writeFileSync(join(base, ".sf", "milestones", mid, `${mid}-ROADMAP.md`), content); } function readGuidedFlowSource(): string { diff --git a/src/resources/extensions/sf/tests/dispatch-guard.test.ts b/src/resources/extensions/sf/tests/dispatch-guard.test.ts index bacdd15e4..8346aa0cc 100644 --- a/src/resources/extensions/sf/tests/dispatch-guard.test.ts +++ b/src/resources/extensions/sf/tests/dispatch-guard.test.ts @@ -9,8 +9,8 @@ import { openDatabase, closeDatabase, insertMilestone, insertSlice } from "../sf /** Helper: create temp dir and open an in-dir DB for dispatch-guard tests */ function setupRepo(): string { const repo = mkdtempSync(join(tmpdir(), 
"sf-dispatch-guard-")); - mkdirSync(join(repo, ".gsd"), { recursive: true }); - openDatabase(join(repo, ".gsd", "sf.db")); + mkdirSync(join(repo, ".sf"), { recursive: true }); + openDatabase(join(repo, ".sf", "sf.db")); return repo; } @@ -24,8 +24,8 @@ test("dispatch guard blocks when prior milestone has incomplete slices", (t) => const repo = setupRepo(); t.after(() => teardownRepo(repo)); - mkdirSync(join(repo, ".gsd", "milestones", "M002"), { recursive: true }); - mkdirSync(join(repo, ".gsd", "milestones", "M003"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M002"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M003"), { recursive: true }); // Seed DB: M002 with S01 complete, S02 pending insertMilestone({ id: "M002", title: "Previous" }); @@ -38,8 +38,8 @@ test("dispatch guard blocks when prior milestone has incomplete slices", (t) => insertSlice({ id: "S02", milestoneId: "M003", title: "Second", status: "pending", depends: ["S01"], sequence: 2 }); // Need ROADMAP files for milestone discovery (findMilestoneIds reads disk) - writeFileSync(join(repo, ".gsd", "milestones", "M002", "M002-ROADMAP.md"), "# M002\n"); - writeFileSync(join(repo, ".gsd", "milestones", "M003", "M003-ROADMAP.md"), "# M003\n"); + writeFileSync(join(repo, ".sf", "milestones", "M002", "M002-ROADMAP.md"), "# M002\n"); + writeFileSync(join(repo, ".sf", "milestones", "M003", "M003-ROADMAP.md"), "# M003\n"); assert.equal( getPriorSliceCompletionBlocker(repo, "main", "plan-slice", "M003/S01"), @@ -51,8 +51,8 @@ test("dispatch guard blocks later slice in same milestone when earlier incomplet const repo = setupRepo(); t.after(() => teardownRepo(repo)); - mkdirSync(join(repo, ".gsd", "milestones", "M002"), { recursive: true }); - mkdirSync(join(repo, ".gsd", "milestones", "M003"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M002"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M003"), { recursive: true }); 
insertMilestone({ id: "M002", title: "Previous" }); insertSlice({ id: "S01", milestoneId: "M002", title: "Done", status: "complete", depends: [], sequence: 1 }); @@ -62,8 +62,8 @@ test("dispatch guard blocks later slice in same milestone when earlier incomplet insertSlice({ id: "S01", milestoneId: "M003", title: "First", status: "pending", depends: [], sequence: 1 }); insertSlice({ id: "S02", milestoneId: "M003", title: "Second", status: "pending", depends: ["S01"], sequence: 2 }); - writeFileSync(join(repo, ".gsd", "milestones", "M002", "M002-ROADMAP.md"), "# M002\n"); - writeFileSync(join(repo, ".gsd", "milestones", "M003", "M003-ROADMAP.md"), "# M003\n"); + writeFileSync(join(repo, ".sf", "milestones", "M002", "M002-ROADMAP.md"), "# M002\n"); + writeFileSync(join(repo, ".sf", "milestones", "M003", "M003-ROADMAP.md"), "# M003\n"); assert.equal( getPriorSliceCompletionBlocker(repo, "main", "execute-task", "M003/S02/T01"), @@ -75,13 +75,13 @@ test("dispatch guard allows dispatch when all earlier slices complete", (t) => { const repo = setupRepo(); t.after(() => teardownRepo(repo)); - mkdirSync(join(repo, ".gsd", "milestones", "M003"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M003"), { recursive: true }); insertMilestone({ id: "M003", title: "Current" }); insertSlice({ id: "S01", milestoneId: "M003", title: "First", status: "complete", depends: [], sequence: 1 }); insertSlice({ id: "S02", milestoneId: "M003", title: "Second", status: "pending", depends: ["S01"], sequence: 2 }); - writeFileSync(join(repo, ".gsd", "milestones", "M003", "M003-ROADMAP.md"), "# M003\n"); + writeFileSync(join(repo, ".sf", "milestones", "M003", "M003-ROADMAP.md"), "# M003\n"); assert.equal(getPriorSliceCompletionBlocker(repo, "main", "execute-task", "M003/S02/T01"), null); assert.equal(getPriorSliceCompletionBlocker(repo, "main", "plan-milestone", "M003"), null); @@ -94,7 +94,7 @@ test("dispatch guard unblocks slice when positionally-earlier slice depends on i 
const repo = setupRepo(); t.after(() => teardownRepo(repo)); - mkdirSync(join(repo, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M001"), { recursive: true }); insertMilestone({ id: "M001", title: "Test" }); insertSlice({ id: "S01", milestoneId: "M001", title: "Setup", status: "complete", depends: [], sequence: 1 }); @@ -104,7 +104,7 @@ test("dispatch guard unblocks slice when positionally-earlier slice depends on i insertSlice({ id: "S05", milestoneId: "M001", title: "Integration", status: "pending", depends: ["S04", "S06"], sequence: 5 }); insertSlice({ id: "S06", milestoneId: "M001", title: "Data Layer", status: "pending", depends: ["S04"], sequence: 6 }); - writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "# M001\n"); + writeFileSync(join(repo, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "# M001\n"); // S06 depends only on S04 (complete) — should be unblocked assert.equal( @@ -123,14 +123,14 @@ test("dispatch guard falls back to positional ordering when no dependencies decl const repo = setupRepo(); t.after(() => teardownRepo(repo)); - mkdirSync(join(repo, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M001"), { recursive: true }); insertMilestone({ id: "M001", title: "Test" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "complete", depends: [], sequence: 1 }); insertSlice({ id: "S02", milestoneId: "M001", title: "Second", status: "pending", depends: [], sequence: 2 }); insertSlice({ id: "S03", milestoneId: "M001", title: "Third", status: "pending", depends: [], sequence: 3 }); - writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "# M001\n"); + writeFileSync(join(repo, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "# M001\n"); // S03 has no dependencies — positional fallback blocks on S02 assert.equal( @@ -149,14 +149,14 @@ test("dispatch guard ignores positionally-earlier 
reverse dependents for zero-de const repo = setupRepo(); t.after(() => teardownRepo(repo)); - mkdirSync(join(repo, ".gsd", "milestones", "M015"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M015"), { recursive: true }); insertMilestone({ id: "M015", title: "Reverse dependency fallback" }); insertSlice({ id: "S03", milestoneId: "M015", title: "Complete prerequisite", status: "complete", depends: [], sequence: 0 }); insertSlice({ id: "S04", milestoneId: "M015", title: "Depends on S04A", status: "pending", depends: ["S03", "S04A"], sequence: 0 }); insertSlice({ id: "S04A", milestoneId: "M015", title: "No explicit deps", status: "pending", depends: [], sequence: 0 }); - writeFileSync(join(repo, ".gsd", "milestones", "M015", "M015-ROADMAP.md"), "# M015\n"); + writeFileSync(join(repo, ".sf", "milestones", "M015", "M015-ROADMAP.md"), "# M015\n"); // S04A has no declared dependencies and should not be blocked by S04, because // S04 itself depends on S04A. With sequence=0, DB ordering falls back to id. 
@@ -176,7 +176,7 @@ test("dispatch guard treats zero-dependency slices as independent when a milesto const repo = setupRepo(); t.after(() => teardownRepo(repo)); - mkdirSync(join(repo, ".gsd", "milestones", "M022"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M022"), { recursive: true }); insertMilestone({ id: "M022", title: "Mixed dependency milestone" }); insertSlice({ id: "S02", milestoneId: "M022", title: "Core A", status: "complete", depends: [], sequence: 2 }); @@ -185,7 +185,7 @@ test("dispatch guard treats zero-dependency slices as independent when a milesto insertSlice({ id: "S06", milestoneId: "M022", title: "Independent zero-dep slice", status: "pending", depends: [], sequence: 6 }); insertSlice({ id: "S07", milestoneId: "M022", title: "Late prerequisite", status: "pending", depends: ["S02"], sequence: 7 }); - writeFileSync(join(repo, ".gsd", "milestones", "M022", "M022-ROADMAP.md"), "# M022\n"); + writeFileSync(join(repo, ".sf", "milestones", "M022", "M022-ROADMAP.md"), "# M022\n"); assert.equal( getPriorSliceCompletionBlocker(repo, "main", "execute-task", "M022/S06/T02"), @@ -202,7 +202,7 @@ test("dispatch guard allows slice with all declared dependencies complete", (t) const repo = setupRepo(); t.after(() => teardownRepo(repo)); - mkdirSync(join(repo, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M001"), { recursive: true }); insertMilestone({ id: "M001", title: "Test" }); insertSlice({ id: "S01", milestoneId: "M001", title: "Setup", status: "complete", depends: [], sequence: 1 }); @@ -210,7 +210,7 @@ test("dispatch guard allows slice with all declared dependencies complete", (t) insertSlice({ id: "S03", milestoneId: "M001", title: "Feature A", status: "pending", depends: ["S01", "S02"], sequence: 3 }); insertSlice({ id: "S04", milestoneId: "M001", title: "Feature B", status: "pending", depends: ["S01"], sequence: 4 }); - writeFileSync(join(repo, ".gsd", "milestones", "M001", 
"M001-ROADMAP.md"), "# M001\n"); + writeFileSync(join(repo, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "# M001\n"); // S03 depends on S01 (done) and S02 (done) — unblocked assert.equal( @@ -229,8 +229,8 @@ test("dispatch guard skips completed milestone with SUMMARY even if it has unche const repo = setupRepo(); t.after(() => teardownRepo(repo)); - mkdirSync(join(repo, ".gsd", "milestones", "M001"), { recursive: true }); - mkdirSync(join(repo, ".gsd", "milestones", "M002"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M001"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M002"), { recursive: true }); // M001 is complete (has SUMMARY) but has unchecked remediation slices in DB insertMilestone({ id: "M001", title: "Previous" }); @@ -243,10 +243,10 @@ test("dispatch guard skips completed milestone with SUMMARY even if it has unche insertSlice({ id: "S01", milestoneId: "M002", title: "Start", status: "pending", depends: [], sequence: 1 }); // M001 SUMMARY on disk triggers skip - writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "# M001\n"); - writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-SUMMARY.md"), + writeFileSync(join(repo, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "# M001\n"); + writeFileSync(join(repo, ".sf", "milestones", "M001", "M001-SUMMARY.md"), "---\nstatus: complete\n---\n# M001 Summary\nDone.\n"); - writeFileSync(join(repo, ".gsd", "milestones", "M002", "M002-ROADMAP.md"), "# M002\n"); + writeFileSync(join(repo, ".sf", "milestones", "M002", "M002-ROADMAP.md"), "# M002\n"); // M001 has SUMMARY — should be skipped, not block M002/S01 assert.equal( @@ -259,13 +259,13 @@ test("dispatch guard works without git repo", (t) => { const repo = setupRepo(); t.after(() => teardownRepo(repo)); - mkdirSync(join(repo, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M001"), { recursive: true }); insertMilestone({ id: "M001", 
title: "Test" }); insertSlice({ id: "S01", milestoneId: "M001", title: "Done", status: "complete", depends: [], sequence: 1 }); insertSlice({ id: "S02", milestoneId: "M001", title: "Pending", status: "pending", depends: ["S01"], sequence: 2 }); - writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "# M001\n"); + writeFileSync(join(repo, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "# M001\n"); assert.equal(getPriorSliceCompletionBlocker(repo, "main", "plan-slice", "M001/S02"), null); }); @@ -277,9 +277,9 @@ test("dispatch guard skips cross-milestone check when SF_MILESTONE_LOCK is set ( teardownRepo(repo); }); - mkdirSync(join(repo, ".gsd", "milestones", "M010"), { recursive: true }); - mkdirSync(join(repo, ".gsd", "milestones", "M011"), { recursive: true }); - mkdirSync(join(repo, ".gsd", "milestones", "M012"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M010"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M011"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M012"), { recursive: true }); // M010 and M011 have incomplete slices insertMilestone({ id: "M010", title: "Analytics" }); @@ -292,9 +292,9 @@ test("dispatch guard skips cross-milestone check when SF_MILESTONE_LOCK is set ( insertSlice({ id: "S01", milestoneId: "M012", title: "Foundation", status: "pending", depends: [], sequence: 1 }); insertSlice({ id: "S02", milestoneId: "M012", title: "Migrate Pages", status: "pending", depends: ["S01"], sequence: 2 }); - writeFileSync(join(repo, ".gsd", "milestones", "M010", "M010-ROADMAP.md"), "# M010\n"); - writeFileSync(join(repo, ".gsd", "milestones", "M011", "M011-ROADMAP.md"), "# M011\n"); - writeFileSync(join(repo, ".gsd", "milestones", "M012", "M012-ROADMAP.md"), "# M012\n"); + writeFileSync(join(repo, ".sf", "milestones", "M010", "M010-ROADMAP.md"), "# M010\n"); + writeFileSync(join(repo, ".sf", "milestones", "M011", "M011-ROADMAP.md"), "# M011\n"); + 
writeFileSync(join(repo, ".sf", "milestones", "M012", "M012-ROADMAP.md"), "# M012\n"); // Without lock: M012 blocked by M010's incomplete S01 delete process.env.SF_MILESTONE_LOCK; diff --git a/src/resources/extensions/sf/tests/dispatch-missing-task-plans.test.ts b/src/resources/extensions/sf/tests/dispatch-missing-task-plans.test.ts index a7e400429..3c9599f0d 100644 --- a/src/resources/extensions/sf/tests/dispatch-missing-task-plans.test.ts +++ b/src/resources/extensions/sf/tests/dispatch-missing-task-plans.test.ts @@ -45,7 +45,7 @@ function makeContext(basePath: string, stateOverrides?: Partial<SFState>): Dispa // ─── Scaffold helpers ────────────────────────────────────────────────────── function scaffoldSlicePlan(basePath: string, mid: string, sid: string): void { - const dir = join(basePath, ".gsd", "milestones", mid, "slices", sid); + const dir = join(basePath, ".sf", "milestones", mid, "slices", sid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${sid}-PLAN.md`), [ `# ${sid}: Third Slice`, @@ -58,7 +58,7 @@ function scaffoldSlicePlan(basePath: string, mid: string, sid: string): void { } function scaffoldTaskPlan(basePath: string, mid: string, sid: string, tid: string): void { - const dir = join(basePath, ".gsd", "milestones", mid, "slices", sid, "tasks"); + const dir = join(basePath, ".sf", "milestones", mid, "slices", sid, "tasks"); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${tid}-PLAN.md`), [ `# ${tid}: Do something`, diff --git a/src/resources/extensions/sf/tests/dispatch-uat-last-completed.test.ts b/src/resources/extensions/sf/tests/dispatch-uat-last-completed.test.ts index be945f1e9..dcd0f886e 100644 --- a/src/resources/extensions/sf/tests/dispatch-uat-last-completed.test.ts +++ b/src/resources/extensions/sf/tests/dispatch-uat-last-completed.test.ts @@ -15,7 +15,7 @@ function createFixture(): string { const base = mkdtempSync(join(tmpdir(), "sf-dispatch-uat-")); // Milestone M001 with two slices: S01 done, S02 
incomplete - const milestoneDir = join(base, ".gsd", "milestones", "M001"); + const milestoneDir = join(base, ".sf", "milestones", "M001"); mkdirSync(milestoneDir, { recursive: true }); writeFileSync( @@ -114,7 +114,7 @@ test("dispatch uat warns when no completed slices exist", async (t) => { const base = mkdtempSync(join(tmpdir(), "sf-dispatch-uat-none-")); invalidateStateCache(); - const milestoneDir = join(base, ".gsd", "milestones", "M001"); + const milestoneDir = join(base, ".sf", "milestones", "M001"); mkdirSync(milestoneDir, { recursive: true }); writeFileSync( diff --git a/src/resources/extensions/sf/tests/doctor-providers.test.ts b/src/resources/extensions/sf/tests/doctor-providers.test.ts index 780868c20..a56c75dd8 100644 --- a/src/resources/extensions/sf/tests/doctor-providers.test.ts +++ b/src/resources/extensions/sf/tests/doctor-providers.test.ts @@ -272,7 +272,7 @@ test("runProviderChecks optional providers show ok when key set", () => { test("runProviderChecks detects key from auth.json", () => { withEnv({ ANTHROPIC_API_KEY: undefined }, () => { const tmpHome = realpathSync(mkdtempSync(join(tmpdir(), "sf-providers-test-"))); - const agentDir = join(tmpHome, ".gsd", "agent"); + const agentDir = join(tmpHome, ".sf", "agent"); mkdirSync(agentDir, { recursive: true }); // AuthStorage persists credentials with provider ID as the top-level key: @@ -297,7 +297,7 @@ test("runProviderChecks detects key from auth.json", () => { test("runProviderChecks ignores empty placeholder keys in auth.json", () => { withEnv({ ANTHROPIC_API_KEY: undefined, ANTHROPIC_OAUTH_TOKEN: undefined, COPILOT_GITHUB_TOKEN: undefined, GH_TOKEN: undefined, GITHUB_TOKEN: undefined }, () => { const tmpHome = realpathSync(mkdtempSync(join(tmpdir(), "sf-providers-test-"))); - const agentDir = join(tmpHome, ".gsd", "agent"); + const agentDir = join(tmpHome, ".sf", "agent"); mkdirSync(agentDir, { recursive: true }); // Empty key — what onboarding writes when user skips @@ -393,7 +393,7 @@ 
test("runProviderChecks reports ok via Copilot auth.json for Anthropic", () => { GITHUB_TOKEN: undefined, }, () => { const tmpHome = realpathSync(mkdtempSync(join(tmpdir(), "sf-providers-copilot-auth-test-"))); - const agentDir = join(tmpHome, ".gsd", "agent"); + const agentDir = join(tmpHome, ".sf", "agent"); mkdirSync(agentDir, { recursive: true }); // GitHub Copilot OAuth in auth.json @@ -417,9 +417,9 @@ test("runProviderChecks reports ok via Copilot auth.json for Anthropic", () => { test("runProviderChecks uses provider-qualified anthropic-vertex model IDs", () => { const tmpHome = realpathSync(mkdtempSync(join(tmpdir(), "sf-providers-vertex-prefix-home-"))); const repo = realpathSync(mkdtempSync(join(tmpdir(), "sf-providers-vertex-prefix-repo-"))); - mkdirSync(join(repo, ".gsd"), { recursive: true }); + mkdirSync(join(repo, ".sf"), { recursive: true }); writeFileSync( - join(repo, ".gsd", "PREFERENCES.md"), + join(repo, ".sf", "PREFERENCES.md"), [ "---", "models:", @@ -452,9 +452,9 @@ test("runProviderChecks uses provider-qualified anthropic-vertex model IDs", () test("runProviderChecks uses object provider field for anthropic-vertex models", () => { const tmpHome = realpathSync(mkdtempSync(join(tmpdir(), "sf-providers-vertex-provider-home-"))); const repo = realpathSync(mkdtempSync(join(tmpdir(), "sf-providers-vertex-provider-repo-"))); - mkdirSync(join(repo, ".gsd"), { recursive: true }); + mkdirSync(join(repo, ".sf"), { recursive: true }); writeFileSync( - join(repo, ".gsd", "PREFERENCES.md"), + join(repo, ".sf", "PREFERENCES.md"), [ "---", "models:", @@ -489,9 +489,9 @@ test("runProviderChecks uses object provider field for anthropic-vertex models", test("runProviderChecks reports ok for Google via google-gemini-cli auth.json (#2922)", () => { const repo = realpathSync(mkdtempSync(join(tmpdir(), "sf-providers-gemini-cli-repo-"))); - mkdirSync(join(repo, ".gsd"), { recursive: true }); + mkdirSync(join(repo, ".sf"), { recursive: true }); writeFileSync( - 
join(repo, ".gsd", "PREFERENCES.md"), + join(repo, ".sf", "PREFERENCES.md"), [ "---", "models:", @@ -502,7 +502,7 @@ test("runProviderChecks reports ok for Google via google-gemini-cli auth.json (# ); const tmpHome = realpathSync(mkdtempSync(join(tmpdir(), "sf-providers-gemini-cli-home-"))); - const agentDir = join(tmpHome, ".gsd", "agent"); + const agentDir = join(tmpHome, ".sf", "agent"); mkdirSync(agentDir, { recursive: true }); // google-gemini-cli OAuth in auth.json (no google API key) @@ -531,9 +531,9 @@ test("runProviderChecks reports ok for Google via google-gemini-cli auth.json (# test("runProviderChecks reports ok for OpenAI via openai-codex auth.json (#2922)", () => { const repo = realpathSync(mkdtempSync(join(tmpdir(), "sf-providers-codex-repo-"))); - mkdirSync(join(repo, ".gsd"), { recursive: true }); + mkdirSync(join(repo, ".sf"), { recursive: true }); writeFileSync( - join(repo, ".gsd", "PREFERENCES.md"), + join(repo, ".sf", "PREFERENCES.md"), [ "---", "models:", @@ -544,7 +544,7 @@ test("runProviderChecks reports ok for OpenAI via openai-codex auth.json (#2922) ); const tmpHome = realpathSync(mkdtempSync(join(tmpdir(), "sf-providers-codex-home-"))); - const agentDir = join(tmpHome, ".gsd", "agent"); + const agentDir = join(tmpHome, ".sf", "agent"); mkdirSync(agentDir, { recursive: true }); // openai-codex OAuth in auth.json (no openai API key) @@ -576,9 +576,9 @@ test("runProviderChecks reports ok for OpenAI via openai-codex auth.json (#2922) test("runProviderChecks reports ok for claude-code without any API key", () => { const repo = realpathSync(mkdtempSync(join(tmpdir(), "sf-providers-cc-repo-"))); - mkdirSync(join(repo, ".gsd"), { recursive: true }); + mkdirSync(join(repo, ".sf"), { recursive: true }); writeFileSync( - join(repo, ".gsd", "PREFERENCES.md"), + join(repo, ".sf", "PREFERENCES.md"), [ "---", "models:", diff --git a/src/resources/extensions/sf/tests/doctor-scope-db-unavailable.test.ts 
b/src/resources/extensions/sf/tests/doctor-scope-db-unavailable.test.ts index e4d333430..0aa436c67 100644 --- a/src/resources/extensions/sf/tests/doctor-scope-db-unavailable.test.ts +++ b/src/resources/extensions/sf/tests/doctor-scope-db-unavailable.test.ts @@ -29,7 +29,7 @@ test("checkEngineHealth reports db_unavailable when sf.db exists but the DB is c const base = mkdtempSync(join(tmpdir(), "sf-doctor-db-unavailable-")); t.after(() => rmSync(base, { recursive: true, force: true })); - const sfDir = join(base, ".gsd"); + const sfDir = join(base, ".sf"); mkdirSync(sfDir, { recursive: true }); writeFileSync(join(sfDir, "sf.db"), ""); @@ -39,5 +39,5 @@ test("checkEngineHealth reports db_unavailable when sf.db exists but the DB is c const dbIssue = issues.find((issue) => issue.code === "db_unavailable"); assert.ok(dbIssue, "doctor should surface degraded DB mode when a DB file exists"); assert.equal(dbIssue.unitId, "project"); - assert.equal(dbIssue.file, ".gsd/sf.db"); + assert.equal(dbIssue.file, ".sf/sf.db"); }); diff --git a/src/resources/extensions/sf/tests/draft-promotion.test.ts b/src/resources/extensions/sf/tests/draft-promotion.test.ts index c12c02a16..f9c484341 100644 --- a/src/resources/extensions/sf/tests/draft-promotion.test.ts +++ b/src/resources/extensions/sf/tests/draft-promotion.test.ts @@ -23,7 +23,7 @@ function assert(condition: boolean, message: string): void { console.log("=== Draft promotion: full state transition ==="); const tmpBase = mkdtempSync(join(tmpdir(), "sf-draft-promotion-test-")); -const sf = join(tmpBase, ".gsd"); +const sf = join(tmpBase, ".sf"); mkdirSync(join(sf, "milestones", "M001"), { recursive: true }); @@ -79,7 +79,7 @@ assert( console.log("=== No-draft cleanup: no-op ==="); const tmpBase2 = mkdtempSync(join(tmpdir(), "sf-draft-promotion-noop-")); -const sf2 = join(tmpBase2, ".gsd"); +const sf2 = join(tmpBase2, ".sf"); mkdirSync(join(sf2, "milestones", "M001"), { recursive: true }); writeFileSync( @@ -106,7 +106,7 @@ assert( 
console.log("=== Both files: CONTEXT wins, draft cleanable ==="); const tmpBase3 = mkdtempSync(join(tmpdir(), "sf-draft-promotion-both-")); -const sf3 = join(tmpBase3, ".gsd"); +const sf3 = join(tmpBase3, ".sf"); mkdirSync(join(sf3, "milestones", "M001"), { recursive: true }); writeFileSync( diff --git a/src/resources/extensions/sf/tests/ensure-db-open.test.ts b/src/resources/extensions/sf/tests/ensure-db-open.test.ts index b201d865a..9246e14ef 100644 --- a/src/resources/extensions/sf/tests/ensure-db-open.test.ts +++ b/src/resources/extensions/sf/tests/ensure-db-open.test.ts @@ -1,7 +1,7 @@ import { describe, test } from 'node:test'; import assert from 'node:assert/strict'; // ensureDbOpen — Tests that the lazy DB opener creates + migrates the database -// when .gsd/ exists with Markdown content but no sf.db file. +// when .sf/ exists with Markdown content but no sf.db file. // // This covers the bug where interactive (non-auto) sessions got // "SF database is not available" because ensureDbOpen only opened @@ -24,13 +24,13 @@ function cleanupDir(dir: string): void { } // ═══════════════════════════════════════════════════════════════════════════ -// ensureDbOpen creates DB + migrates when .gsd/ has Markdown +// ensureDbOpen creates DB + migrates when .sf/ has Markdown // ═══════════════════════════════════════════════════════════════════════════ describe('ensure-db-open', () => { test('ensureDbOpen: creates DB from Markdown', async () => { const tmpDir = makeTmpDir(); - const sfDir = path.join(tmpDir, '.gsd'); + const sfDir = path.join(tmpDir, '.sf'); fs.mkdirSync(sfDir, { recursive: true }); // Write a minimal DECISIONS.md so migration has content @@ -59,7 +59,7 @@ describe('ensure-db-open', () => { const result = await ensureDbOpen(); - assert.ok(result === true, 'ensureDbOpen should return true when .gsd/ has Markdown'); + assert.ok(result === true, 'ensureDbOpen should return true when .sf/ has Markdown'); assert.ok(fs.existsSync(dbPath), 'DB file should be 
created after ensureDbOpen'); assert.ok(isDbAvailable(), 'DB should be available after ensureDbOpen'); @@ -79,7 +79,7 @@ describe('ensure-db-open', () => { test('ensureDbOpen: explicit basePath opens target project without cwd override', async () => { const tmpDir = makeTmpDir(); - const sfDir = path.join(tmpDir, '.gsd'); + const sfDir = path.join(tmpDir, '.sf'); fs.mkdirSync(sfDir, { recursive: true }); fs.writeFileSync(path.join(sfDir, 'DECISIONS.md'), `# Decisions @@ -108,12 +108,12 @@ describe('ensure-db-open', () => { }); // ═══════════════════════════════════════════════════════════════════════════ - // ensureDbOpen returns false when no .gsd/ exists + // ensureDbOpen returns false when no .sf/ exists // ═══════════════════════════════════════════════════════════════════════════ - test('ensureDbOpen: no .gsd/ returns false', async () => { + test('ensureDbOpen: no .sf/ returns false', async () => { const tmpDir = makeTmpDir(); - // No .gsd/ directory at all + // No .sf/ directory at all try { closeDatabase(); } catch { /* ok */ } const origCwd = process.cwd; @@ -122,7 +122,7 @@ describe('ensure-db-open', () => { try { const { ensureDbOpen } = await import('../bootstrap/dynamic-tools.ts'); const result = await ensureDbOpen(); - assert.ok(result === false, 'ensureDbOpen should return false when no .gsd/ exists'); + assert.ok(result === false, 'ensureDbOpen should return false when no .sf/ exists'); assert.ok(!isDbAvailable(), 'DB should not be available'); } finally { process.cwd = origCwd; @@ -136,7 +136,7 @@ describe('ensure-db-open', () => { test('ensureDbOpen: opens existing DB', async () => { const tmpDir = makeTmpDir(); - const sfDir = path.join(tmpDir, '.gsd'); + const sfDir = path.join(tmpDir, '.sf'); fs.mkdirSync(sfDir, { recursive: true }); // Create a DB file first @@ -163,14 +163,14 @@ describe('ensure-db-open', () => { }); // ═══════════════════════════════════════════════════════════════════════════ - // ensureDbOpen returns false for empty .gsd/ 
(no Markdown, no DB) + // ensureDbOpen returns false for empty .sf/ (no Markdown, no DB) // ═══════════════════════════════════════════════════════════════════════════ - test('ensureDbOpen: empty .gsd/ creates empty DB (#2510)', async () => { + test('ensureDbOpen: empty .sf/ creates empty DB (#2510)', async () => { const tmpDir = makeTmpDir(); - const sfDir = path.join(tmpDir, '.gsd'); + const sfDir = path.join(tmpDir, '.sf'); fs.mkdirSync(sfDir, { recursive: true }); - // .gsd/ exists but no DECISIONS.md, REQUIREMENTS.md, or milestones/ + // .sf/ exists but no DECISIONS.md, REQUIREMENTS.md, or milestones/ try { closeDatabase(); } catch { /* ok */ } const origCwd = process.cwd; @@ -179,7 +179,7 @@ describe('ensure-db-open', () => { try { const { ensureDbOpen } = await import('../bootstrap/dynamic-tools.ts'); const result = await ensureDbOpen(); - assert.ok(result === true, 'ensureDbOpen should create empty DB for fresh .gsd/'); + assert.ok(result === true, 'ensureDbOpen should create empty DB for fresh .sf/'); assert.ok(fs.existsSync(path.join(sfDir, 'sf.db')), 'DB file should be created'); assert.ok(isDbAvailable(), 'DB should be available'); } finally { @@ -192,15 +192,15 @@ describe('ensure-db-open', () => { test('ensureDbOpen: switches open database when basePath changes', async () => { const firstDir = makeTmpDir(); const secondDir = makeTmpDir(); - fs.mkdirSync(path.join(firstDir, '.gsd'), { recursive: true }); - fs.mkdirSync(path.join(secondDir, '.gsd'), { recursive: true }); - fs.writeFileSync(path.join(firstDir, '.gsd', 'DECISIONS.md'), `# Decisions + fs.mkdirSync(path.join(firstDir, '.sf'), { recursive: true }); + fs.mkdirSync(path.join(secondDir, '.sf'), { recursive: true }); + fs.writeFileSync(path.join(firstDir, '.sf', 'DECISIONS.md'), `# Decisions | # | When | Scope | Decision | Choice | Rationale | Revisable | |---|------|-------|----------|--------|-----------|-----------| | D101 | M001 | architecture | First DB | First | First rationale | Yes | `); 
- fs.writeFileSync(path.join(secondDir, '.gsd', 'DECISIONS.md'), `# Decisions + fs.writeFileSync(path.join(secondDir, '.sf', 'DECISIONS.md'), `# Decisions | # | When | Scope | Decision | Choice | Rationale | Revisable | |---|------|-------|----------|--------|-----------|-----------| diff --git a/src/resources/extensions/sf/tests/export-html-all.test.ts b/src/resources/extensions/sf/tests/export-html-all.test.ts index 079340254..9f99c48f2 100644 --- a/src/resources/extensions/sf/tests/export-html-all.test.ts +++ b/src/resources/extensions/sf/tests/export-html-all.test.ts @@ -12,7 +12,7 @@ test("handleExport --html --all generates reports for milestones missing from th const { loadReportsIndex } = await import("../reports.js"); const tmp = join(tmpdir(), `sf-export-all-test-${Date.now()}`); - const sfDir = join(tmp, ".gsd"); + const sfDir = join(tmp, ".sf"); const reportsDir = join(sfDir, "reports"); mkdirSync(reportsDir, { recursive: true }); diff --git a/src/resources/extensions/sf/tests/flag-file-db.test.ts b/src/resources/extensions/sf/tests/flag-file-db.test.ts index ff0eab23f..95acb0262 100644 --- a/src/resources/extensions/sf/tests/flag-file-db.test.ts +++ b/src/resources/extensions/sf/tests/flag-file-db.test.ts @@ -30,12 +30,12 @@ import { function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-flag-file-db-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } function writeFile(base: string, relativePath: string, content: string): void { - const full = join(base, '.gsd', relativePath); + const full = join(base, '.sf', relativePath); mkdirSync(join(full, '..'), { recursive: true }); writeFileSync(full, content); } diff --git a/src/resources/extensions/sf/tests/forensics-context-persist.test.ts b/src/resources/extensions/sf/tests/forensics-context-persist.test.ts index de8772c0f..5dcfbb636 100644 --- 
a/src/resources/extensions/sf/tests/forensics-context-persist.test.ts +++ b/src/resources/extensions/sf/tests/forensics-context-persist.test.ts @@ -61,8 +61,8 @@ describe("forensics context persistence (#2941)", () => { beforeEach(() => { rmSync(tmpBase, { recursive: true, force: true }); - mkdirSync(join(tmpBase, ".gsd", "runtime"), { recursive: true }); - mkdirSync(join(tmpBase, ".gsd", "forensics"), { recursive: true }); + mkdirSync(join(tmpBase, ".sf", "runtime"), { recursive: true }); + mkdirSync(join(tmpBase, ".sf", "forensics"), { recursive: true }); }); afterEach(() => { @@ -72,12 +72,12 @@ describe("forensics context persistence (#2941)", () => { it("writeForensicsMarker creates marker with reportPath and promptContent", async () => { const { writeForensicsMarker } = await import("../forensics.ts"); - const reportPath = join(tmpBase, ".gsd", "forensics", "report-2026-01-01.md"); + const reportPath = join(tmpBase, ".sf", "forensics", "report-2026-01-01.md"); writeFileSync(reportPath, "# Test Report", "utf-8"); writeForensicsMarker(tmpBase, reportPath, "Test forensics prompt content"); - const markerPath = join(tmpBase, ".gsd", "runtime", "active-forensics.json"); + const markerPath = join(tmpBase, ".sf", "runtime", "active-forensics.json"); assert.ok(existsSync(markerPath), "marker file must be created"); const marker = JSON.parse(readFileSync(markerPath, "utf-8")); @@ -96,7 +96,7 @@ describe("forensics context persistence (#2941)", () => { it("readForensicsMarker returns marker data when file exists", async () => { const { readForensicsMarker } = await import("../forensics.ts"); - const markerPath = join(tmpBase, ".gsd", "runtime", "active-forensics.json"); + const markerPath = join(tmpBase, ".sf", "runtime", "active-forensics.json"); const markerData = { reportPath: "/some/report.md", promptContent: "forensics prompt", @@ -113,7 +113,7 @@ describe("forensics context persistence (#2941)", () => { it("clearForensicsMarker removes the marker file", async () 
=> { const { clearForensicsMarker } = await import("../bootstrap/system-context.ts"); - const markerPath = join(tmpBase, ".gsd", "runtime", "active-forensics.json"); + const markerPath = join(tmpBase, ".sf", "runtime", "active-forensics.json"); writeFileSync(markerPath, JSON.stringify({ reportPath: "/x", promptContent: "y", createdAt: new Date().toISOString() }), "utf-8"); assert.ok(existsSync(markerPath), "precondition: marker must exist"); @@ -130,7 +130,7 @@ describe("forensics context persistence (#2941)", () => { it("buildForensicsContextInjection keeps marker for low-entropy resume prompts", async () => { const { buildForensicsContextInjection } = await import("../bootstrap/system-context.ts"); - const markerPath = join(tmpBase, ".gsd", "runtime", "active-forensics.json"); + const markerPath = join(tmpBase, ".sf", "runtime", "active-forensics.json"); writeFileSync(markerPath, JSON.stringify({ reportPath: "/some/report.md", promptContent: "forensics prompt", @@ -145,7 +145,7 @@ describe("forensics context persistence (#2941)", () => { it("buildForensicsContextInjection clears marker on unrelated user prompts", async () => { const { buildForensicsContextInjection } = await import("../bootstrap/system-context.ts"); - const markerPath = join(tmpBase, ".gsd", "runtime", "active-forensics.json"); + const markerPath = join(tmpBase, ".sf", "runtime", "active-forensics.json"); writeFileSync(markerPath, JSON.stringify({ reportPath: "/some/report.md", promptContent: "forensics prompt", diff --git a/src/resources/extensions/sf/tests/freeform-decisions.test.ts b/src/resources/extensions/sf/tests/freeform-decisions.test.ts index 8da24d64d..eb4fb72bc 100644 --- a/src/resources/extensions/sf/tests/freeform-decisions.test.ts +++ b/src/resources/extensions/sf/tests/freeform-decisions.test.ts @@ -20,7 +20,7 @@ import { function makeTmpDir(): string { const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'sf-freeform-')); - fs.mkdirSync(path.join(dir, '.gsd'), { recursive: true }); 
+ fs.mkdirSync(path.join(dir, '.sf'), { recursive: true }); return dir; } @@ -55,8 +55,8 @@ describe('freeform-decisions', () => { test('saveDecisionToDb destroys freeform DECISIONS.md content', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); - const mdPath = path.join(tmpDir, '.gsd', 'DECISIONS.md'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); + const mdPath = path.join(tmpDir, '.sf', 'DECISIONS.md'); openDatabase(dbPath); const freeformContent = `# Project Decisions @@ -151,8 +151,8 @@ describe('freeform-decisions', () => { test('saveDecisionToDb with table-format DECISIONS.md still regenerates normally', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); - const mdPath = path.join(tmpDir, '.gsd', 'DECISIONS.md'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); + const mdPath = path.join(tmpDir, '.sf', 'DECISIONS.md'); openDatabase(dbPath); // Pre-populate with canonical table format @@ -200,8 +200,8 @@ describe('freeform-decisions', () => { test('saveDecisionToDb with no existing DECISIONS.md creates table', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); - const mdPath = path.join(tmpDir, '.gsd', 'DECISIONS.md'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); + const mdPath = path.join(tmpDir, '.sf', 'DECISIONS.md'); openDatabase(dbPath); // No DECISIONS.md exists at all diff --git a/src/resources/extensions/sf/tests/gate-dispatch.test.ts b/src/resources/extensions/sf/tests/gate-dispatch.test.ts index 7e33ec9bd..ef7f57039 100644 --- a/src/resources/extensions/sf/tests/gate-dispatch.test.ts +++ b/src/resources/extensions/sf/tests/gate-dispatch.test.ts @@ -26,8 +26,8 @@ import { invalidateAllCaches } from "../cache.ts"; function setupTestProject(): { tmpDir: string; dbPath: string } { const tmpDir = mkdtempSync(join(tmpdir(), "gate-dispatch-")); - const dbPath = join(tmpDir, ".gsd", "sf.db"); - 
mkdirSync(join(tmpDir, ".gsd"), { recursive: true }); + const dbPath = join(tmpDir, ".sf", "sf.db"); + mkdirSync(join(tmpDir, ".sf"), { recursive: true }); openDatabase(dbPath); // Create milestone @@ -48,7 +48,7 @@ function setupTestProject(): { tmpDir: string; dbPath: string } { }); // Write roadmap file (required for deriveState) - const milestoneDir = join(tmpDir, ".gsd", "milestones", "M001"); + const milestoneDir = join(tmpDir, ".sf", "milestones", "M001"); mkdirSync(milestoneDir, { recursive: true }); writeFileSync( join(milestoneDir, "M001-ROADMAP.md"), diff --git a/src/resources/extensions/sf/tests/graph-context.test.ts b/src/resources/extensions/sf/tests/graph-context.test.ts index b95867dd9..fe4f5b805 100644 --- a/src/resources/extensions/sf/tests/graph-context.test.ts +++ b/src/resources/extensions/sf/tests/graph-context.test.ts @@ -15,7 +15,7 @@ * approach that avoids all module-level mocking. * * Fixture layout per test: - * <tmpDir>/.gsd/graphs/graph.json + * <tmpDir>/.sf/graphs/graph.json * * builtAt controls staleness: old timestamp → stale, recent → fresh. */ @@ -64,12 +64,12 @@ function freshTimestamp(): string { } /** - * Creates a temp project directory with a .gsd/graphs/graph.json file. + * Creates a temp project directory with a .sf/graphs/graph.json file. * Returns the projectDir path. Caller is responsible for cleanup. 
*/ function makeProjectDir(fixture: GraphFixture): string { const projectDir = mkdtempSync(join(tmpdir(), "graph-ctx-test-")); - const sfDir = join(projectDir, ".gsd"); + const sfDir = join(projectDir, ".sf"); const graphsDir = join(sfDir, "graphs"); mkdirSync(graphsDir, { recursive: true }); @@ -135,7 +135,7 @@ describe("inlineGraphSubgraph — null returns", () => { }); it("returns null (no throw) when graph.json is missing", async () => { - // A project dir with no .gsd directory at all — graphQuery returns zero nodes + // A project dir with no .sf directory at all — graphQuery returns zero nodes const projectDir = mkdtempSync(join(tmpdir(), "graph-ctx-nofile-")); try { const result = await inlineGraphSubgraph(projectDir, "auth", { budget: 3000 }); @@ -237,7 +237,7 @@ describe("inlineGraphSubgraph — correct output", () => { // Write a graph.json with an invalid builtAt — graphStatus will catch and return {exists: false} // inlineGraphSubgraph should still return the node block without stale annotation const projectDir = mkdtempSync(join(tmpdir(), "graph-ctx-corrupt-")); - const sfDir = join(projectDir, ".gsd"); + const sfDir = join(projectDir, ".sf"); const graphsDir = join(sfDir, "graphs"); mkdirSync(graphsDir, { recursive: true }); diff --git a/src/resources/extensions/sf/tests/guided-flow-session-isolation.test.ts b/src/resources/extensions/sf/tests/guided-flow-session-isolation.test.ts index b98187fd3..911427954 100644 --- a/src/resources/extensions/sf/tests/guided-flow-session-isolation.test.ts +++ b/src/resources/extensions/sf/tests/guided-flow-session-isolation.test.ts @@ -103,7 +103,7 @@ describe("#2985 Bug 4 — getDiscussionMilestoneId must be keyed by basePath", ( test("checkAutoStartAfterDiscuss ignores missing manifest for single-milestone discuss on established project", () => { const base = mkdtempSync(join(tmpdir(), "sf-auto-start-manifest-")); try { - const sfDir = join(base, ".gsd"); + const sfDir = join(base, ".sf"); const milestoneDir = 
join(sfDir, "milestones", "M001"); mkdirSync(milestoneDir, { recursive: true }); mkdirSync(join(sfDir, "milestones", "M002"), { recursive: true }); diff --git a/src/resources/extensions/sf/tests/guided-flow-state-rebuild.test.ts b/src/resources/extensions/sf/tests/guided-flow-state-rebuild.test.ts index 3edffb4e3..abb756129 100644 --- a/src/resources/extensions/sf/tests/guided-flow-state-rebuild.test.ts +++ b/src/resources/extensions/sf/tests/guided-flow-state-rebuild.test.ts @@ -26,12 +26,12 @@ import { function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-guided-state-")); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); return base; } function writeFile(base: string, relativePath: string, content: string): void { - const full = join(base, ".gsd", relativePath); + const full = join(base, ".sf", relativePath); mkdirSync(join(full, ".."), { recursive: true }); writeFileSync(full, content); } diff --git a/src/resources/extensions/sf/tests/headless-query.test.ts b/src/resources/extensions/sf/tests/headless-query.test.ts index 515322b71..be68b4fc7 100644 --- a/src/resources/extensions/sf/tests/headless-query.test.ts +++ b/src/resources/extensions/sf/tests/headless-query.test.ts @@ -19,36 +19,36 @@ import { invalidateStateCache } from '../state.ts' function createFixture(): string { const base = mkdtempSync(join(tmpdir(), 'sf-query-test-')) - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }) + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }) return base } function writeRoadmap(base: string, mid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid) + const dir = join(base, '.sf', 'milestones', mid) mkdirSync(dir, { recursive: true }) writeFileSync(join(dir, `${mid}-ROADMAP.md`), content) } function writeContext(base: string, mid: string): void { - const dir = join(base, '.gsd', 'milestones', mid) 
+ const dir = join(base, '.sf', 'milestones', mid) mkdirSync(dir, { recursive: true }) writeFileSync(join(dir, `${mid}-CONTEXT.md`), `---\ntitle: Test Milestone\n---\n\n# Context\nTest.`) } function writeSlicePlan(base: string, mid: string, sid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid, 'slices', sid) + const dir = join(base, '.sf', 'milestones', mid, 'slices', sid) mkdirSync(join(dir, 'tasks'), { recursive: true }) writeFileSync(join(dir, `${sid}-PLAN.md`), content) } function writeTaskPlan(base: string, mid: string, sid: string, tid: string): void { - const dir = join(base, '.gsd', 'milestones', mid, 'slices', sid, 'tasks') + const dir = join(base, '.sf', 'milestones', mid, 'slices', sid, 'tasks') mkdirSync(dir, { recursive: true }) writeFileSync(join(dir, `${tid}-PLAN.md`), `---\nestimated_steps: 3\nestimated_files: 2\n---\n\n# ${tid}: Test Task\nDo something.`) } function writeParallelStatus(base: string, mid: string, cost: number): void { - const dir = join(base, '.gsd', 'parallel') + const dir = join(base, '.sf', 'parallel') mkdirSync(dir, { recursive: true }) writeFileSync(join(dir, `${mid}.status.json`), JSON.stringify({ milestoneId: mid, @@ -169,7 +169,7 @@ describe('headless query', () => { > Done. 
`) writeFileSync( - join(base, '.gsd', 'milestones', 'M001', 'M001-SUMMARY.md'), + join(base, '.sf', 'milestones', 'M001', 'M001-SUMMARY.md'), '# M001 Summary\n\nComplete.', ) diff --git a/src/resources/extensions/sf/tests/health-widget.test.ts b/src/resources/extensions/sf/tests/health-widget.test.ts index e493b65c9..a638fbd4c 100644 --- a/src/resources/extensions/sf/tests/health-widget.test.ts +++ b/src/resources/extensions/sf/tests/health-widget.test.ts @@ -43,18 +43,18 @@ function activeData(overrides: Partial<HealthWidgetData> = {}): HealthWidgetData }; } -test("detectHealthWidgetProjectState: no .gsd returns none", (t) => { +test("detectHealthWidgetProjectState: no .sf returns none", (t) => { const dir = makeTempDir("none"); t.after(() => { cleanup(dir); }); assert.equal(detectHealthWidgetProjectState(dir), "none"); }); -test("detectHealthWidgetProjectState: bootstrapped .gsd without milestones returns initialized", (t) => { +test("detectHealthWidgetProjectState: bootstrapped .sf without milestones returns initialized", (t) => { const dir = makeTempDir("initialized"); t.after(() => { cleanup(dir); }); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); assert.equal(detectHealthWidgetProjectState(dir), "initialized"); }); @@ -62,7 +62,7 @@ test("detectHealthWidgetProjectState: milestone without metrics returns active", const dir = makeTempDir("active"); t.after(() => { cleanup(dir); }); - mkdirSync(join(dir, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(dir, ".sf", "milestones", "M001"), { recursive: true }); assert.equal(detectHealthWidgetProjectState(dir), "active"); }); @@ -170,9 +170,9 @@ test("detectHealthWidgetProjectState: metrics file alone does not imply project" const dir = makeTempDir("metrics-only"); t.after(() => { cleanup(dir); }); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); writeFileSync( - join(dir, ".gsd", 
"metrics.json"), + join(dir, ".sf", "metrics.json"), JSON.stringify({ version: 1, projectStartedAt: Date.now(), units: [] }), "utf-8", ); @@ -181,7 +181,7 @@ test("detectHealthWidgetProjectState: metrics file alone does not imply project" test("session_start bootstraps the health widget alongside notifications", async (t) => { const dir = makeTempDir("bootstrap"); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); const originalCwd = process.cwd(); process.chdir(dir); diff --git a/src/resources/extensions/sf/tests/init-wizard.test.ts b/src/resources/extensions/sf/tests/init-wizard.test.ts index 8beb51ffc..041421e15 100644 --- a/src/resources/extensions/sf/tests/init-wizard.test.ts +++ b/src/resources/extensions/sf/tests/init-wizard.test.ts @@ -63,11 +63,11 @@ test("init-wizard: v1 .planning/ triggers v1-planning state", (t) => { } }); -test("init-wizard: existing .gsd/ with milestones skips init", (t) => { +test("init-wizard: existing .sf/ with milestones skips init", (t) => { const dir = makeTempDir("existing"); try { - mkdirSync(join(dir, ".gsd", "milestones", "M001"), { recursive: true }); - mkdirSync(join(dir, ".gsd", "milestones", "M002"), { recursive: true }); + mkdirSync(join(dir, ".sf", "milestones", "M001"), { recursive: true }); + mkdirSync(join(dir, ".sf", "milestones", "M002"), { recursive: true }); const detection = detectProjectState(dir); assert.equal(detection.state, "v2-sf"); @@ -78,10 +78,10 @@ test("init-wizard: existing .gsd/ with milestones skips init", (t) => { } }); -test("init-wizard: empty .gsd/ (no milestones) returns v2-sf-empty", (t) => { +test("init-wizard: empty .sf/ (no milestones) returns v2-sf-empty", (t) => { const dir = makeTempDir("empty-sf"); try { - mkdirSync(join(dir, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(dir, ".sf", "milestones"), { recursive: true }); const detection = detectProjectState(dir); assert.equal(detection.state, "v2-sf-empty"); @@ 
-119,11 +119,11 @@ test("init-wizard: project signals populate from Node.js project", (t) => { } }); -test("init-wizard: v2 .gsd/ preferences detected", (t) => { +test("init-wizard: v2 .sf/ preferences detected", (t) => { const dir = makeTempDir("prefs-detect"); try { - mkdirSync(join(dir, ".gsd", "milestones"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "PREFERENCES.md"), "---\nversion: 1\nmode: solo\n---\n", "utf-8"); + mkdirSync(join(dir, ".sf", "milestones"), { recursive: true }); + writeFileSync(join(dir, ".sf", "PREFERENCES.md"), "---\nversion: 1\nmode: solo\n---\n", "utf-8"); const detection = detectProjectState(dir); assert.ok(detection.v2); @@ -136,8 +136,8 @@ test("init-wizard: v2 .gsd/ preferences detected", (t) => { test("init-wizard: v2 uppercase PREFERENCES.md also detected", (t) => { const dir = makeTempDir("prefs-upper"); try { - mkdirSync(join(dir, ".gsd", "milestones"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "PREFERENCES.md"), "---\nversion: 1\n---\n", "utf-8"); + mkdirSync(join(dir, ".sf", "milestones"), { recursive: true }); + writeFileSync(join(dir, ".sf", "PREFERENCES.md"), "---\nversion: 1\n---\n", "utf-8"); const detection = detectProjectState(dir); assert.ok(detection.v2); @@ -150,8 +150,8 @@ test("init-wizard: v2 uppercase PREFERENCES.md also detected", (t) => { test("init-wizard: CONTEXT.md detected in v2", (t) => { const dir = makeTempDir("context"); try { - mkdirSync(join(dir, ".gsd", "milestones"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "CONTEXT.md"), "# Project Context\n", "utf-8"); + mkdirSync(join(dir, ".sf", "milestones"), { recursive: true }); + writeFileSync(join(dir, ".sf", "CONTEXT.md"), "# Project Context\n", "utf-8"); const detection = detectProjectState(dir); assert.ok(detection.v2); @@ -178,11 +178,11 @@ test("init-wizard: multiple project files detected together", (t) => { } }); -test("init-wizard: v1 with both .planning/ and .gsd/ prioritizes v2", (t) => { +test("init-wizard: 
v1 with both .planning/ and .sf/ prioritizes v2", (t) => { const dir = makeTempDir("both-v1-v2"); try { mkdirSync(join(dir, ".planning", "phases"), { recursive: true }); - mkdirSync(join(dir, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(dir, ".sf", "milestones", "M001"), { recursive: true }); const detection = detectProjectState(dir); // v2 should take priority diff --git a/src/resources/extensions/sf/tests/integration-edge.test.ts b/src/resources/extensions/sf/tests/integration-edge.test.ts index 944cea971..812905e5e 100644 --- a/src/resources/extensions/sf/tests/integration-edge.test.ts +++ b/src/resources/extensions/sf/tests/integration-edge.test.ts @@ -49,7 +49,7 @@ function generateDecisionsMarkdown(count: number): string { test('integration-edge: empty project', () => { const base = mkdtempSync(join(tmpdir(), 'sf-int-edge-empty-')); - const sfDir = join(base, '.gsd'); + const sfDir = join(base, '.sf'); mkdirSync(sfDir, { recursive: true }); const dbPath = join(sfDir, 'test-edge-empty.db'); @@ -106,7 +106,7 @@ test('integration-edge: empty project', () => { test('integration-edge: partial migration', () => { const base = mkdtempSync(join(tmpdir(), 'sf-int-edge-partial-')); - const sfDir = join(base, '.gsd'); + const sfDir = join(base, '.sf'); mkdirSync(sfDir, { recursive: true }); // Write DECISIONS.md but NOT REQUIREMENTS.md @@ -160,7 +160,7 @@ test('integration-edge: partial migration', () => { test('integration-edge: fallback mode', () => { const base = mkdtempSync(join(tmpdir(), 'sf-int-edge-fallback-')); - const sfDir = join(base, '.gsd'); + const sfDir = join(base, '.sf'); mkdirSync(sfDir, { recursive: true }); const decisionsMarkdown = generateDecisionsMarkdown(4); diff --git a/src/resources/extensions/sf/tests/integration/all-milestones-complete-merge.test.ts b/src/resources/extensions/sf/tests/integration/all-milestones-complete-merge.test.ts index 9a656baf7..71d956298 100644 --- 
a/src/resources/extensions/sf/tests/integration/all-milestones-complete-merge.test.ts +++ b/src/resources/extensions/sf/tests/integration/all-milestones-complete-merge.test.ts @@ -51,9 +51,9 @@ function createTempRepo(): string { run("git config user.email test@test.com", dir); run("git config user.name Test", dir); writeFileSync(join(dir, "README.md"), "# test\n"); - // Mirror production: .gsd/worktrees/ is gitignored so autoCommitDirtyState + // Mirror production: .sf/worktrees/ is gitignored so autoCommitDirtyState // doesn't pick up the worktrees directory as dirty state (#1127 fix). - writeFileSync(join(dir, ".gitignore"), ".gsd/worktrees/\n"); + writeFileSync(join(dir, ".gitignore"), ".sf/worktrees/\n"); run("git add .", dir); run("git commit -m init", dir); run("git branch -M main", dir); @@ -61,7 +61,7 @@ function createTempRepo(): string { } function createMilestoneArtifacts(dir: string, mid: string): void { - const msDir = join(dir, ".gsd", "milestones", mid); + const msDir = join(dir, ".sf", "milestones", mid); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "CONTEXT.md"), `# ${mid} Context\n`); const roadmap = [ @@ -159,7 +159,7 @@ test("single milestone worktree is merged to main when all complete (#962)", (t) // Simulate the fix: merge before stopping (what the "all complete" path now does) const roadmapPath = join( tempDir, - ".gsd", + ".sf", "milestones", "M001", "M001-ROADMAP.md", @@ -220,7 +220,7 @@ test("last milestone worktree is merged when it's the final one (#962)", (t) => run("git add .", wt1); run('git commit -m "feat(M001): m001 work"', wt1); const roadmap1 = readFileSync( - join(tempDir, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), + join(tempDir, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "utf-8", ); mergeMilestoneToMain(tempDir, "M001", roadmap1); @@ -231,7 +231,7 @@ test("last milestone worktree is merged when it's the final one (#962)", (t) => run("git add .", wt2); run('git commit -m "feat(M002): m002 
work"', wt2); const roadmap2 = readFileSync( - join(tempDir, ".gsd", "milestones", "M002", "M002-ROADMAP.md"), + join(tempDir, ".sf", "milestones", "M002", "M002-ROADMAP.md"), "utf-8", ); mergeMilestoneToMain(tempDir, "M002", roadmap2); diff --git a/src/resources/extensions/sf/tests/integration/atomic-task-closeout.test.ts b/src/resources/extensions/sf/tests/integration/atomic-task-closeout.test.ts index 7302c36b8..e360a318d 100644 --- a/src/resources/extensions/sf/tests/integration/atomic-task-closeout.test.ts +++ b/src/resources/extensions/sf/tests/integration/atomic-task-closeout.test.ts @@ -19,7 +19,7 @@ function makeTmp(name: string): string { test("doctor does not touch task with checkbox AND summary both present", async () => { const base = makeTmp("doctor-ok"); - const sf = join(base, ".gsd"); + const sf = join(base, ".sf"); const m = join(sf, "milestones", "M001"); const s = join(m, "slices", "S01"); const t = join(s, "tasks"); diff --git a/src/resources/extensions/sf/tests/integration/auto-preflight.test.ts b/src/resources/extensions/sf/tests/integration/auto-preflight.test.ts index 9f89004f4..155856a5a 100644 --- a/src/resources/extensions/sf/tests/integration/auto-preflight.test.ts +++ b/src/resources/extensions/sf/tests/integration/auto-preflight.test.ts @@ -8,7 +8,7 @@ import { runSFDoctor, selectDoctorScope, filterDoctorIssues } from "../../doctor test("auto-preflight scopes to active milestone, ignoring historical", async (t) => { const tmpBase = mkdtempSync(join(tmpdir(), "sf-auto-preflight-test-")); - const sf = join(tmpBase, ".gsd"); + const sf = join(tmpBase, ".sf"); mkdirSync(join(sf, "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); mkdirSync(join(sf, "milestones", "M009", "slices", "S01", "tasks"), { recursive: true }); diff --git a/src/resources/extensions/sf/tests/integration/auto-recovery.test.ts b/src/resources/extensions/sf/tests/integration/auto-recovery.test.ts index 736da058f..a6447fc0f 100644 --- 
a/src/resources/extensions/sf/tests/integration/auto-recovery.test.ts +++ b/src/resources/extensions/sf/tests/integration/auto-recovery.test.ts @@ -29,8 +29,8 @@ import { renderPlanFromDb } from "../../markdown-renderer.ts"; function makeTmpBase(): string { const base = join(tmpdir(), `sf-test-${randomUUID()}`); - // Create .gsd/milestones/M001/slices/S01/tasks/ structure - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); + // Create .sf/milestones/M001/slices/S01/tasks/ structure + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); return base; } @@ -153,7 +153,7 @@ test("verifyExpectedArtifact passes for run-uat when ASSESSMENT file exists (#28 t.after(() => cleanup(base)); // Write the ASSESSMENT file (what sf_summary_save actually produces) - const assessPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-ASSESSMENT.md"); + const assessPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-ASSESSMENT.md"); writeFileSync(assessPath, "---\nverdict: PASS\n---\n# UAT Assessment\n"); const verified = verifyExpectedArtifact("run-uat", "M001/S01", base); @@ -260,11 +260,11 @@ test("verifyExpectedArtifact detects roadmap [x] change despite parse cache", (t assert.equal(sliceBefore!.done, false); // Now write the post-edit roadmap to disk and create required artifacts - const roadmapPath = join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"); + const roadmapPath = join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"); writeFileSync(roadmapPath, roadmapAfter); - const summaryPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"); + const summaryPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"); writeFileSync(summaryPath, "# Summary\nDone."); - const uatPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-UAT.md"); + const uatPath = join(base, ".sf", 
"milestones", "M001", "slices", "S01", "S01-UAT.md"); writeFileSync(uatPath, "# UAT\nPassed."); // verifyExpectedArtifact should see the [x] despite the parse cache @@ -279,7 +279,7 @@ test("verifyExpectedArtifact rejects plan-slice with empty scaffold", (t) => { const base = makeTmpBase(); t.after(() => cleanup(base)); - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); mkdirSync(sliceDir, { recursive: true }); writeFileSync(join(sliceDir, "S01-PLAN.md"), "# S01: Test Slice\n\n## Tasks\n\n"); assert.strictEqual( @@ -293,7 +293,7 @@ test("verifyExpectedArtifact accepts plan-slice with actual tasks", (t) => { const base = makeTmpBase(); t.after(() => cleanup(base)); - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(sliceDir, "S01-PLAN.md"), [ @@ -317,7 +317,7 @@ test("verifyExpectedArtifact accepts plan-slice with completed tasks", (t) => { const base = makeTmpBase(); t.after(() => cleanup(base)); - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(sliceDir, "S01-PLAN.md"), [ @@ -343,8 +343,8 @@ test("verifyExpectedArtifact plan-slice passes when all task plan files exist", const base = makeTmpBase(); t.after(() => cleanup(base)); - const tasksDir = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); - const planPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); + const tasksDir = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"); + const planPath = join(base, ".sf", "milestones", "M001", "slices", 
"S01", "S01-PLAN.md"); const planContent = [ "# S01: Test Slice", "", @@ -365,8 +365,8 @@ test("verifyExpectedArtifact plan-slice fails when a task plan file is missing ( const base = makeTmpBase(); t.after(() => cleanup(base)); - const tasksDir = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); - const planPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); + const tasksDir = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"); + const planPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); const planContent = [ "# S01: Test Slice", "", @@ -387,7 +387,7 @@ test("verifyExpectedArtifact plan-slice fails for plan with no tasks (#699)", (t const base = makeTmpBase(); t.after(() => cleanup(base)); - const planPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); + const planPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); const planContent = [ "# S01: Test Slice", "", @@ -407,7 +407,7 @@ test("verifyExpectedArtifact accepts plan-slice with heading-style tasks (### T0 const base = makeTmpBase(); t.after(() => cleanup(base)); - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(sliceDir, "S01-PLAN.md"), [ @@ -436,7 +436,7 @@ test("verifyExpectedArtifact accepts plan-slice with colon-style heading tasks ( const base = makeTmpBase(); t.after(() => cleanup(base)); - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(sliceDir, "S01-PLAN.md"), [ @@ -460,7 +460,7 @@ test("verifyExpectedArtifact execute-task rejects heading-style 
plan without che const base = makeTmpBase(); t.after(() => cleanup(base)); - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(sliceDir, "S01-PLAN.md"), [ @@ -484,7 +484,7 @@ test("verifyExpectedArtifact execute-task rejects heading-style plan without che test("verifyExpectedArtifact plan-slice passes for rendered slice/task plan artifacts from DB", async () => { const base = makeTmpBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); try { insertMilestone({ id: "M001", title: "Milestone", status: "active" }); @@ -557,7 +557,7 @@ test("verifyExpectedArtifact plan-slice passes for rendered slice/task plan arti test("verifyExpectedArtifact plan-slice fails after deleting a rendered task plan file", async () => { const base = makeTmpBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); try { insertMilestone({ id: "M001", title: "Milestone", status: "active" }); @@ -630,13 +630,13 @@ test("#793: invalidateAllCaches clears all caches so deriveState sees fresh disk const mid = "M001"; const sid = "S01"; - const planDir = join(base, ".gsd", "milestones", mid, "slices", sid); + const planDir = join(base, ".sf", "milestones", mid, "slices", sid); const tasksDir = join(planDir, "tasks"); mkdirSync(tasksDir, { recursive: true }); - mkdirSync(join(base, ".gsd", "milestones", mid), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", mid), { recursive: true }); writeFileSync( - join(base, ".gsd", "milestones", mid, `${mid}-ROADMAP.md`), + join(base, ".sf", "milestones", mid, `${mid}-ROADMAP.md`), `# M001: Test Milestone\n\n**Vision:** test.\n\n## Slices\n\n- [ ] **${sid}: Slice One** \`risk:low\` \`depends:[]\`\n > After this: 
done.\n`, ); const planUnchecked = `# ${sid}: Slice One\n\n**Goal:** test.\n\n## Tasks\n\n- [ ] **T01: Task One** \`est:10m\`\n- [ ] **T02: Task Two** \`est:10m\`\n`; @@ -684,30 +684,30 @@ function makeGitBase(): string { return base; } -test("hasImplementationArtifacts returns 'absent' when only .gsd/ files committed (#1703)", (t) => { +test("hasImplementationArtifacts returns 'absent' when only .sf/ files committed (#1703)", (t) => { const base = makeGitBase(); t.after(() => cleanup(base)); - // Create a feature branch and commit only .gsd/ files + // Create a feature branch and commit only .sf/ files execFileSync("git", ["checkout", "-b", "feat/test-milestone"], { cwd: base, stdio: "ignore" }); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); - writeFileSync(join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "# Roadmap"); - writeFileSync(join(base, ".gsd", "milestones", "M001", "M001-SUMMARY.md"), "# Summary"); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); + writeFileSync(join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "# Roadmap"); + writeFileSync(join(base, ".sf", "milestones", "M001", "M001-SUMMARY.md"), "# Summary"); execFileSync("git", ["add", "."], { cwd: base, stdio: "ignore" }); execFileSync("git", ["commit", "-m", "chore: add plan files"], { cwd: base, stdio: "ignore" }); const result = hasImplementationArtifacts(base); - assert.equal(result, "absent", "should return 'absent' when only .gsd/ files were committed"); + assert.equal(result, "absent", "should return 'absent' when only .sf/ files were committed"); }); test("hasImplementationArtifacts returns 'present' when implementation files committed (#1703)", (t) => { const base = makeGitBase(); t.after(() => cleanup(base)); - // Create a feature branch with both .gsd/ and implementation files + // Create a feature branch with both .sf/ and implementation files execFileSync("git", ["checkout", "-b", "feat/test-impl"], { cwd: 
base, stdio: "ignore" }); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); - writeFileSync(join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "# Roadmap"); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); + writeFileSync(join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "# Roadmap"); mkdirSync(join(base, "src"), { recursive: true }); writeFileSync(join(base, "src", "feature.ts"), "export function feature() {}"); execFileSync("git", ["add", "."], { cwd: base, stdio: "ignore" }); @@ -728,19 +728,19 @@ test("hasImplementationArtifacts returns 'unknown' on non-git directory (fail-op // ─── verifyExpectedArtifact: complete-milestone requires impl artifacts (#1703) ── -test("verifyExpectedArtifact complete-milestone fails with only .gsd/ files (#1703)", (t) => { +test("verifyExpectedArtifact complete-milestone fails with only .sf/ files (#1703)", (t) => { const base = makeGitBase(); t.after(() => cleanup(base)); - // Create feature branch with only .gsd/ files + // Create feature branch with only .sf/ files execFileSync("git", ["checkout", "-b", "feat/ms-only-sf"], { cwd: base, stdio: "ignore" }); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); - writeFileSync(join(base, ".gsd", "milestones", "M001", "M001-SUMMARY.md"), "# Milestone Summary\nDone."); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); + writeFileSync(join(base, ".sf", "milestones", "M001", "M001-SUMMARY.md"), "# Milestone Summary\nDone."); execFileSync("git", ["add", "."], { cwd: base, stdio: "ignore" }); execFileSync("git", ["commit", "-m", "chore: milestone plan files"], { cwd: base, stdio: "ignore" }); const result = verifyExpectedArtifact("complete-milestone", "M001", base); - assert.equal(result, false, "complete-milestone should fail verification when only .gsd/ files present"); + assert.equal(result, false, "complete-milestone should fail verification when only .sf/ files 
present"); }); // ─── reconcileMergeState: silent nativeCommit failure (#2542) ───────────── @@ -855,8 +855,8 @@ test("verifyExpectedArtifact complete-milestone passes with impl files (#1703)", // Create feature branch with implementation files AND milestone summary execFileSync("git", ["checkout", "-b", "feat/ms-with-impl"], { cwd: base, stdio: "ignore" }); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); - writeFileSync(join(base, ".gsd", "milestones", "M001", "M001-SUMMARY.md"), "# Milestone Summary\nDone."); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); + writeFileSync(join(base, ".sf", "milestones", "M001", "M001-SUMMARY.md"), "# Milestone Summary\nDone."); mkdirSync(join(base, "src"), { recursive: true }); writeFileSync(join(base, "src", "app.ts"), "console.log('hello');"); execFileSync("git", ["add", "."], { cwd: base, stdio: "ignore" }); diff --git a/src/resources/extensions/sf/tests/integration/auto-secrets-gate.test.ts b/src/resources/extensions/sf/tests/integration/auto-secrets-gate.test.ts index c84415f1a..917ac82aa 100644 --- a/src/resources/extensions/sf/tests/integration/auto-secrets-gate.test.ts +++ b/src/resources/extensions/sf/tests/integration/auto-secrets-gate.test.ts @@ -7,7 +7,7 @@ * 2. Pending keys exist — gate triggers collection * 3. No pending keys — gate skips silently * - * Uses temp directories with real .gsd/milestones/M001/ structure, mirroring + * Uses temp directories with real .sf/milestones/M001/ structure, mirroring * the pattern from manifest-status.test.ts. */ @@ -25,9 +25,9 @@ function makeTempDir(prefix: string): string { return dir; } -/** Create the .gsd/milestones/M001/ directory structure and write a secrets manifest. */ +/** Create the .sf/milestones/M001/ directory structure and write a secrets manifest. 
*/ function writeManifest(base: string, content: string): void { - const mDir = join(base, '.gsd', 'milestones', 'M001'); + const mDir = join(base, '.sf', 'milestones', 'M001'); mkdirSync(mDir, { recursive: true }); writeFileSync(join(mDir, 'M001-SECRETS.md'), content); } @@ -47,7 +47,7 @@ test('secrets gate: no manifest exists — getManifestStatus returns null', asyn const tmp = makeTempDir('gate-no-manifest'); t.after(() => rmSync(tmp, { recursive: true, force: true })); - // No .gsd directory at all + // No .sf directory at all const result = await getManifestStatus(tmp, 'M001'); assert.strictEqual(result, null, 'should return null when no manifest file exists'); }); @@ -124,7 +124,7 @@ test('secrets gate: pending keys exist — gate triggers collection, manifest up // status ("pending") because collectSecretsFromManifest only updates entries // that flow through collectOneSecret. At runtime, getManifestStatus overrides // env-present entries to "existing" regardless of manifest status. 
- const manifestPath = join(tmp, '.gsd', 'milestones', 'M001', 'M001-SECRETS.md'); + const manifestPath = join(tmp, '.sf', 'milestones', 'M001', 'M001-SECRETS.md'); const updatedContent = readFileSync(manifestPath, 'utf8'); assert.ok( updatedContent.includes('**Status:** skipped'), diff --git a/src/resources/extensions/sf/tests/integration/auto-stash-merge.test.ts b/src/resources/extensions/sf/tests/integration/auto-stash-merge.test.ts index dd5b8563f..867d3b5d6 100644 --- a/src/resources/extensions/sf/tests/integration/auto-stash-merge.test.ts +++ b/src/resources/extensions/sf/tests/integration/auto-stash-merge.test.ts @@ -25,8 +25,8 @@ function createTempRepo(): string { run("git config user.email test@test.com", dir); run("git config user.name Test", dir); writeFileSync(join(dir, "README.md"), "# test\n"); - mkdirSync(join(dir, ".gsd"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "STATE.md"), "# State\n"); + mkdirSync(join(dir, ".sf"), { recursive: true }); + writeFileSync(join(dir, ".sf", "STATE.md"), "# State\n"); run("git add .", dir); run("git commit -m init", dir); run("git branch -M main", dir); diff --git a/src/resources/extensions/sf/tests/integration/auto-worktree-milestone-merge.test.ts b/src/resources/extensions/sf/tests/integration/auto-worktree-milestone-merge.test.ts index 6bd87e1d8..24f990b96 100644 --- a/src/resources/extensions/sf/tests/integration/auto-worktree-milestone-merge.test.ts +++ b/src/resources/extensions/sf/tests/integration/auto-worktree-milestone-merge.test.ts @@ -36,8 +36,8 @@ function createTempRepo(): string { run("git config user.email test@test.com", dir); run("git config user.name Test", dir); writeFileSync(join(dir, "README.md"), "# test\n"); - mkdirSync(join(dir, ".gsd"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "STATE.md"), "# State\n"); + mkdirSync(join(dir, ".sf"), { recursive: true }); + writeFileSync(join(dir, ".sf", "STATE.md"), "# State\n"); run("git add .", dir); run("git commit -m init", 
dir); run("git branch -M main", dir); @@ -54,7 +54,7 @@ function createTempRepoWithExternalGsd(): { repo: string; externalState: string run("git config user.name Test", repo); mkdirSync(join(externalState, "worktrees"), { recursive: true }); - symlinkSync(externalState, join(repo, ".gsd")); + symlinkSync(externalState, join(repo, ".sf")); writeFileSync(join(repo, "README.md"), "# test\n"); writeFileSync(join(externalState, "STATE.md"), "# State\n"); @@ -81,7 +81,7 @@ function addSliceToMilestone( commits: Array<{ file: string; content: string; message: string }>, ): void { const normalizedPath = wtPath.replaceAll("\\", "/"); - const marker = "/.gsd/worktrees/"; + const marker = "/.sf/worktrees/"; const idx = normalizedPath.indexOf(marker); const worktreeName = idx !== -1 ? normalizedPath.slice(idx + marker.length).split("/")[0] : null; @@ -152,7 +152,7 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => { const branches = run("git branch", repo); assert.ok(!branches.includes("milestone/M010"), "milestone branch deleted"); - const worktreeDir = join(repo, ".gsd", "worktrees", "M010"); + const worktreeDir = join(repo, ".sf", "worktrees", "M010"); assert.ok(!existsSync(worktreeDir), "worktree directory removed"); assert.strictEqual(getAutoWorktreeOriginalBase(), null, "originalBase cleared after merge"); @@ -251,7 +251,7 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => { assert.strictEqual(typeof result.pushed, "boolean", "pushed flag remains boolean"); }); - test("auto-resolve .gsd/ state file conflicts", () => { + test("auto-resolve .sf/ state file conflicts", () => { const repo = freshRepo(); const wtPath = createAutoWorktree(repo, "M050"); @@ -259,12 +259,12 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => { { file: "feature.ts", content: "export const feature = true;\n", message: "add feature" }, ]); - writeFileSync(join(wtPath, ".gsd", "STATE.md"), "# State\n\n## Updated on milestone 
branch\n"); + writeFileSync(join(wtPath, ".sf", "STATE.md"), "# State\n\n## Updated on milestone branch\n"); run("git add .", wtPath); run('git commit -m "chore: update state on milestone branch"', wtPath); run("git checkout main", repo); - writeFileSync(join(repo, ".gsd", "STATE.md"), "# State\n\n## Updated on main\n"); + writeFileSync(join(repo, ".sf", "STATE.md"), "# State\n\n## Updated on main\n"); run("git add .", repo); run('git commit -m "chore: update state on main"', repo); @@ -277,11 +277,11 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => { let threw = false; try { const result = mergeMilestoneToMain(repo, "M050", roadmap); - assert.ok(result.commitMessage.includes("feat:") && result.commitMessage.includes("SF-Milestone: M050"), "merge commit created despite .gsd conflict"); + assert.ok(result.commitMessage.includes("feat:") && result.commitMessage.includes("SF-Milestone: M050"), "merge commit created despite .sf conflict"); } catch (err) { threw = true; } - assert.ok(!threw, "auto-resolves .gsd/ state file conflicts without throwing"); + assert.ok(!threw, "auto-resolves .sf/ state file conflicts without throwing"); assert.ok(existsSync(join(repo, "feature.ts")), "feature.ts merged to main"); }); @@ -318,8 +318,8 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => { run("git config user.email test@test.com", dir); run("git config user.name Test", dir); writeFileSync(join(dir, "README.md"), "# master-branch repo\n"); - mkdirSync(join(dir, ".gsd"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "STATE.md"), "# State\n"); + mkdirSync(join(dir, ".sf"), { recursive: true }); + writeFileSync(join(dir, ".sf", "STATE.md"), "# State\n"); run("git add .", dir); run("git commit -m init", dir); const defaultBranch = run("git rev-parse --abbrev-ref HEAD", dir); @@ -330,7 +330,7 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => { { file: "master-feature.ts", content: "export const 
masterFeature = true;\n", message: "add master feature" }, ]); - const metaFile = join(dir, ".gsd", "milestones", "M070", "M070-META.json"); + const metaFile = join(dir, ".sf", "milestones", "M070", "M070-META.json"); assert.ok(!existsSync(metaFile), "no META.json — integration branch not captured"); const roadmap = makeRoadmap("M070", "Master branch milestone", [ @@ -394,7 +394,7 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => { assert.ok(!threw, `empty milestone with no code changes should not throw (got: ${errMsg})`); }); - test("#1738 bug 3: synced .gsd/ dirs cleaned before merge", () => { + test("#1738 bug 3: synced .sf/ dirs cleaned before merge", () => { const repo = freshRepo(); const wtPath = createAutoWorktree(repo, "M090"); @@ -402,15 +402,15 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => { { file: "sync-test.ts", content: "export const sync = true;\n", message: "add sync-test" }, ]); - const msDir = join(repo, ".gsd", "milestones", "M090", "slices", "S01"); + const msDir = join(repo, ".sf", "milestones", "M090", "slices", "S01"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "S01-PLAN.md"), "# synced plan\n"); writeFileSync( - join(repo, ".gsd", "milestones", "M090", "M090-ROADMAP.md"), + join(repo, ".sf", "milestones", "M090", "M090-ROADMAP.md"), "# synced roadmap\n", ); - const runtimeDir = join(repo, ".gsd", "runtime", "units"); + const runtimeDir = join(repo, ".sf", "runtime", "units"); mkdirSync(runtimeDir, { recursive: true }); writeFileSync(join(runtimeDir, "unit-001.json"), '{"stale": true}'); @@ -425,7 +425,7 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => { } catch (err: unknown) { threw = true; } - assert.ok(!threw, "#1738 merge does not fail on synced .gsd/ files"); + assert.ok(!threw, "#1738 merge does not fail on synced .sf/ files"); assert.ok(existsSync(join(repo, "sync-test.ts")), "sync-test.ts on main after merge"); }); @@ -646,13 +646,13 @@ 
describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => { ); }); - test("#1906: codeFilesChanged=false when only .gsd/ metadata merged", () => { + test("#1906: codeFilesChanged=false when only .sf/ metadata merged", () => { const repo = freshRepo(); const wtPath = createAutoWorktree(repo, "M180"); - mkdirSync(join(wtPath, ".gsd", "milestones", "M180"), { recursive: true }); + mkdirSync(join(wtPath, ".sf", "milestones", "M180"), { recursive: true }); writeFileSync( - join(wtPath, ".gsd", "milestones", "M180", "SUMMARY.md"), + join(wtPath, ".sf", "milestones", "M180", "SUMMARY.md"), "# M180 Summary\n\nThis milestone was planned but not implemented.\n", ); run("git add .", wtPath); @@ -662,7 +662,7 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => { const result = mergeMilestoneToMain(repo, "M180", roadmap); assert.strictEqual(result.codeFilesChanged, false, - "#1906: codeFilesChanged must be false when only .gsd/ files were merged"); + "#1906: codeFilesChanged must be false when only .sf/ files were merged"); }); test("#2156: mergeMilestoneToMain removes external-state worktrees using the milestone branch name", () => { @@ -676,15 +676,15 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => { const realWtPath = realpathSync(wtPath); assert.ok( realWtPath.startsWith(externalState), - `worktree should be registered under external .gsd state, got ${realWtPath}`, + `worktree should be registered under external .sf state, got ${realWtPath}`, ); - // Recreate the exact divergence from #1852: local .gsd/ is replaced with a + // Recreate the exact divergence from #1852: local .sf/ is replaced with a // stale real directory, so worktreePath() no longer matches git's record. 
- unlinkSync(join(repo, ".gsd")); - mkdirSync(join(repo, ".gsd", "worktrees", "M215"), { recursive: true }); - writeFileSync(join(repo, ".gsd", "STATE.md"), "# Local stale state\n"); - writeFileSync(join(repo, ".gsd", "worktrees", "M215", "stale.txt"), "stale local artifact\n"); + unlinkSync(join(repo, ".sf")); + mkdirSync(join(repo, ".sf", "worktrees", "M215"), { recursive: true }); + writeFileSync(join(repo, ".sf", "STATE.md"), "# Local stale state\n"); + writeFileSync(join(repo, ".sf", "worktrees", "M215", "stale.txt"), "stale local artifact\n"); const roadmap = makeRoadmap("M215", "External cleanup", [ { id: "S01", title: "External cleanup" }, diff --git a/src/resources/extensions/sf/tests/integration/auto-worktree.test.ts b/src/resources/extensions/sf/tests/integration/auto-worktree.test.ts index 4f924a08b..bda00d201 100644 --- a/src/resources/extensions/sf/tests/integration/auto-worktree.test.ts +++ b/src/resources/extensions/sf/tests/integration/auto-worktree.test.ts @@ -59,8 +59,8 @@ describe("auto-worktree lifecycle", () => { test("create → detect → teardown", () => { tempDir = createTempRepo(); - // Create .gsd/milestones/M003 with a dummy file (simulates planning artifacts) - const msDir = join(tempDir, ".gsd", "milestones", "M003"); + // Create .sf/milestones/M003 with a dummy file (simulates planning artifacts) + const msDir = join(tempDir, ".sf", "milestones", "M003"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "CONTEXT.md"), "# M003 Context\n"); run("git add .", tempDir); @@ -76,7 +76,7 @@ describe("auto-worktree lifecycle", () => { assert.strictEqual(branch, "milestone/M003", "git branch is milestone/M003"); assert.ok( - existsSync(join(wtPath, ".gsd", "milestones", "M003", "CONTEXT.md")), + existsSync(join(wtPath, ".sf", "milestones", "M003", "CONTEXT.md")), "planning files inherited in worktree", ); @@ -111,7 +111,7 @@ describe("auto-worktree lifecycle", () => { test("re-entry: create again, exit without teardown, re-enter", 
() => { tempDir = createTempRepo(); - const msDir = join(tempDir, ".gsd", "milestones", "M003"); + const msDir = join(tempDir, ".sf", "milestones", "M003"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "CONTEXT.md"), "# M003 Context\n"); run("git add .", tempDir); @@ -144,7 +144,7 @@ describe("auto-worktree lifecycle", () => { test("coexistence with manual worktree", async () => { tempDir = createTempRepo(); - const msDir = join(tempDir, ".gsd", "milestones", "M003"); + const msDir = join(tempDir, ".sf", "milestones", "M003"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "CONTEXT.md"), "# M003 Context\n"); run("git add .", tempDir); @@ -171,7 +171,7 @@ describe("auto-worktree lifecycle", () => { test("split-brain prevention: originalBase cleared after teardown", () => { tempDir = createTempRepo(); - const msDir = join(tempDir, ".gsd", "milestones", "M003"); + const msDir = join(tempDir, ".sf", "milestones", "M003"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "CONTEXT.md"), "# M003 Context\n"); run("git add .", tempDir); @@ -185,7 +185,7 @@ describe("auto-worktree lifecycle", () => { test("#1526: getMainBranch returns milestone/<MID> in auto-worktree", async () => { tempDir = createTempRepo(); - const msDir = join(tempDir, ".gsd", "milestones", "M005"); + const msDir = join(tempDir, ".sf", "milestones", "M005"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "CONTEXT.md"), "# M005 Context\n"); run("git add .", tempDir); @@ -209,7 +209,7 @@ describe("auto-worktree lifecycle", () => { test("#1713: stale worktree directory without .git file", async () => { tempDir = createTempRepo(); - const msDir = join(tempDir, ".gsd", "milestones", "M010"); + const msDir = join(tempDir, ".sf", "milestones", "M010"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "CONTEXT.md"), "# M010 Context\n"); run("git add .", tempDir); @@ -234,14 +234,14 @@ describe("auto-worktree lifecycle", () => 
{ test("#778: reconcile plan checkboxes on re-attach", async () => { tempDir = createTempRepo(); - const msDir = join(tempDir, ".gsd", "milestones", "M003"); + const msDir = join(tempDir, ".sf", "milestones", "M003"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "CONTEXT.md"), "# M003 Context\n"); run("git add .", tempDir); run("git commit -m \"add milestone\"", tempDir); - const planRelPath = join(".gsd", "milestones", "M004", "slices", "S01", "S01-PLAN.md"); - const planDir = join(tempDir, ".gsd", "milestones", "M004", "slices", "S01"); + const planRelPath = join(".sf", "milestones", "M004", "slices", "S01", "S01-PLAN.md"); + const planDir = join(tempDir, ".sf", "milestones", "M004", "slices", "S01"); const { mkdirSync: mkdir, writeFileSync: write, readFileSync: read } = await import("node:fs"); // Plan on integration branch (project root): T01 [x], T02 [x] @@ -290,16 +290,16 @@ describe("auto-worktree lifecycle", () => { test("#2791: mcp.json copied into worktree via copyPlanningArtifacts", () => { tempDir = createTempRepo(); - const msDir = join(tempDir, ".gsd", "milestones", "M003"); + const msDir = join(tempDir, ".sf", "milestones", "M003"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "CONTEXT.md"), "# M003 Context\n"); run("git add .", tempDir); run("git commit -m \"add milestone\"", tempDir); - // Create mcp.json in .gsd/ AFTER the commit (untracked, like real usage). - // copyPlanningArtifacts should copy it into the worktree's .gsd/. + // Create mcp.json in .sf/ AFTER the commit (untracked, like real usage). + // copyPlanningArtifacts should copy it into the worktree's .sf/. 
writeFileSync( - join(tempDir, ".gsd", "mcp.json"), + join(tempDir, ".sf", "mcp.json"), JSON.stringify({ servers: { test: { command: "echo" } } }), ); @@ -307,8 +307,8 @@ describe("auto-worktree lifecycle", () => { try { assert.ok( - existsSync(join(wtPath, ".gsd", "mcp.json")), - "mcp.json should be copied into worktree .gsd/ on creation", + existsSync(join(wtPath, ".sf", "mcp.json")), + "mcp.json should be copied into worktree .sf/ on creation", ); } finally { teardownAutoWorktree(tempDir, "M003"); @@ -317,7 +317,7 @@ describe("auto-worktree lifecycle", () => { test("#2791: mcp.json synced via syncSfStateToWorktree (ROOT_STATE_FILES)", () => { tempDir = createTempRepo(); - const msDir = join(tempDir, ".gsd", "milestones", "M003"); + const msDir = join(tempDir, ".sf", "milestones", "M003"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "CONTEXT.md"), "# M003 Context\n"); run("git add .", tempDir); @@ -327,9 +327,9 @@ describe("auto-worktree lifecycle", () => { const wtPath = createAutoWorktree(tempDir, "M003"); try { - // Now add mcp.json to the main .gsd/ after worktree was created + // Now add mcp.json to the main .sf/ after worktree was created writeFileSync( - join(tempDir, ".gsd", "mcp.json"), + join(tempDir, ".sf", "mcp.json"), JSON.stringify({ servers: { test: { command: "echo" } } }), ); @@ -338,7 +338,7 @@ describe("auto-worktree lifecycle", () => { assert.ok(synced.includes("mcp.json"), "mcp.json should be in the synced list"); assert.ok( - existsSync(join(wtPath, ".gsd", "mcp.json")), + existsSync(join(wtPath, ".sf", "mcp.json")), "mcp.json should exist in worktree after sync", ); } finally { diff --git a/src/resources/extensions/sf/tests/integration/doctor-completion-deferral.test.ts b/src/resources/extensions/sf/tests/integration/doctor-completion-deferral.test.ts index 65424a1e2..f01a031b9 100644 --- a/src/resources/extensions/sf/tests/integration/doctor-completion-deferral.test.ts +++ 
b/src/resources/extensions/sf/tests/integration/doctor-completion-deferral.test.ts @@ -19,7 +19,7 @@ function makeTmp(name: string): string { } function buildScaffold(base: string) { - const sf = join(base, ".gsd"); + const sf = join(base, ".sf"); const m = join(sf, "milestones", "M001"); const s = join(m, "slices", "S01", "tasks"); mkdirSync(s, { recursive: true }); @@ -80,9 +80,9 @@ test("doctor does not report any reconciliation issue codes", async (t) => { } // No summary or UAT stubs should be created - const sliceSummaryPath = join(tmp, ".gsd", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"); + const sliceSummaryPath = join(tmp, ".sf", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"); assert.ok(!existsSync(sliceSummaryPath), "should NOT have created summary stub"); - const sliceUatPath = join(tmp, ".gsd", "milestones", "M001", "slices", "S01", "S01-UAT.md"); + const sliceUatPath = join(tmp, ".sf", "milestones", "M001", "slices", "S01", "S01-UAT.md"); assert.ok(!existsSync(sliceUatPath), "should NOT have created UAT stub"); }); diff --git a/src/resources/extensions/sf/tests/integration/doctor-delimiter-fix.test.ts b/src/resources/extensions/sf/tests/integration/doctor-delimiter-fix.test.ts index 934e02a50..4aebe6dab 100644 --- a/src/resources/extensions/sf/tests/integration/doctor-delimiter-fix.test.ts +++ b/src/resources/extensions/sf/tests/integration/doctor-delimiter-fix.test.ts @@ -14,7 +14,7 @@ import { runSFDoctor } from "../../doctor.js"; test("doctor fix=true sanitizes em-dash in milestone title", async (t) => { const tmpBase = mkdtempSync(join(tmpdir(), "sf-doctor-delim-")); - const sf = join(tmpBase, ".gsd"); + const sf = join(tmpBase, ".sf"); const mDir = join(sf, "milestones", "M001"); const sDir = join(mDir, "slices", "S01"); const tDir = join(sDir, "tasks"); @@ -60,7 +60,7 @@ test("doctor fix=true sanitizes em-dash in milestone title", async (t) => { test("doctor fix=false still reports delimiter_in_title as warning", async (t) 
=> { const tmpBase = mkdtempSync(join(tmpdir(), "sf-doctor-delim-nf-")); - const sf = join(tmpBase, ".gsd"); + const sf = join(tmpBase, ".sf"); const mDir = join(sf, "milestones", "M001"); const sDir = join(mDir, "slices", "S01"); const tDir = join(sDir, "tasks"); diff --git a/src/resources/extensions/sf/tests/integration/doctor-enhancements.test.ts b/src/resources/extensions/sf/tests/integration/doctor-enhancements.test.ts index 47e7aab93..4b01cdd2b 100644 --- a/src/resources/extensions/sf/tests/integration/doctor-enhancements.test.ts +++ b/src/resources/extensions/sf/tests/integration/doctor-enhancements.test.ts @@ -10,7 +10,7 @@ import { formatDoctorReportJson } from "../../doctor-format.js"; function makeBase(): { base: string; sf: string; mDir: string } { const base = mkdtempSync(join(tmpdir(), "sf-doctor-enh-")); - const sf = join(base, ".gsd"); + const sf = join(base, ".sf"); const mDir = join(sf, "milestones", "M001"); mkdirSync(join(mDir, "slices"), { recursive: true }); return { base, sf, mDir }; diff --git a/src/resources/extensions/sf/tests/integration/doctor-environment-worktree.test.ts b/src/resources/extensions/sf/tests/integration/doctor-environment-worktree.test.ts index 9eac9c0ca..885cb30ad 100644 --- a/src/resources/extensions/sf/tests/integration/doctor-environment-worktree.test.ts +++ b/src/resources/extensions/sf/tests/integration/doctor-environment-worktree.test.ts @@ -8,7 +8,7 @@ import assert from 'node:assert/strict'; * absent by design (worktrees symlink to the project root's node_modules and * the symlink may not yet exist at check time). * - * Fix: when the basePath contains `.gsd/worktrees/`, resolve the project root + * Fix: when the basePath contains `.sf/worktrees/`, resolve the project root * and check its node_modules instead. 
*/ @@ -45,8 +45,8 @@ describe('doctor-environment-worktree', async () => { mkdirSync(join(projectRoot, "node_modules"), { recursive: true }); cleanups.push(projectRoot); - // Simulate a worktree inside .gsd/worktrees/<name>/ - const worktreeDir = join(projectRoot, ".gsd", "worktrees", "slice-abc"); + // Simulate a worktree inside .sf/worktrees/<name>/ + const worktreeDir = join(projectRoot, ".sf", "worktrees", "slice-abc"); mkdirSync(worktreeDir, { recursive: true }); writeFileSync( join(worktreeDir, "package.json"), @@ -73,7 +73,7 @@ describe('doctor-environment-worktree', async () => { cleanups.push(projectRoot); // No node_modules at project root either - const worktreeDir = join(projectRoot, ".gsd", "worktrees", "slice-xyz"); + const worktreeDir = join(projectRoot, ".sf", "worktrees", "slice-xyz"); mkdirSync(worktreeDir, { recursive: true }); writeFileSync( join(worktreeDir, "package.json"), @@ -94,7 +94,7 @@ describe('doctor-environment-worktree', async () => { mkdirSync(join(projectRoot, "node_modules"), { recursive: true }); cleanups.push(projectRoot); - const worktreeDir = join(projectRoot, ".gsd", "worktrees", "slice-pr"); + const worktreeDir = join(projectRoot, ".sf", "worktrees", "slice-pr"); mkdirSync(worktreeDir, { recursive: true }); writeFileSync( join(worktreeDir, "package.json"), @@ -131,7 +131,7 @@ describe('doctor-environment-worktree', async () => { mkdirSync(join(projectRoot, "node_modules"), { recursive: true }); cleanups.push(projectRoot); - // Create a directory that doesn't have .gsd/worktrees in path but + // Create a directory that doesn't have .sf/worktrees in path but // has SF_WORKTREE env pointing to project root const someDir = createDir({ "package.json": JSON.stringify({ name: "test-project" }), diff --git a/src/resources/extensions/sf/tests/integration/doctor-false-positives.test.ts b/src/resources/extensions/sf/tests/integration/doctor-false-positives.test.ts index 77dfee7bd..1864e9152 100644 --- 
a/src/resources/extensions/sf/tests/integration/doctor-false-positives.test.ts +++ b/src/resources/extensions/sf/tests/integration/doctor-false-positives.test.ts @@ -11,7 +11,7 @@ import { parsePlan } from "../../parsers-legacy.js"; function makeBase(): { base: string; sf: string; mDir: string } { const base = mkdtempSync(join(tmpdir(), "sf-doctor-fp-")); - const sf = join(base, ".gsd"); + const sf = join(base, ".sf"); const mDir = join(sf, "milestones", "M001"); mkdirSync(join(mDir, "slices"), { recursive: true }); return { base, sf, mDir }; @@ -35,17 +35,17 @@ describe('doctor false-positives (#3105)', async () => { // Bug 1: Orphaned worktree directory recreated by appendDoctorHistory // ═══════════════════════════════════════════════════════════════════════════ - test('Bug 1: orphaned worktree check ignores dirs containing only .gsd/doctor-history.jsonl', async () => { - // Simulate: a worktree dir that only contains .gsd/doctor-history.jsonl + test('Bug 1: orphaned worktree check ignores dirs containing only .sf/doctor-history.jsonl', async () => { + // Simulate: a worktree dir that only contains .sf/doctor-history.jsonl // (created by appendDoctorHistory writing to the worktree-scoped path). // The orphan check should NOT warn about this directory. 
const { base, sf } = makeBase(); writeRoadmap(join(sf, "milestones", "M001"), `# M001: Test\n\n## Slices\n- [ ] **S01: Slice** \`risk:low\` \`depends:[]\`\n > After this: done\n`); writeSlice(join(sf, "milestones", "M001"), "S01", "# S01: Slice\n\n**Goal:** G\n**Demo:** D\n\n## Tasks\n- [ ] **T01: Task** `est:10m`\n Pending.\n"); - // Create a worktree directory that only has .gsd/doctor-history.jsonl + // Create a worktree directory that only has .sf/doctor-history.jsonl const wtDir = join(sf, "worktrees", "M042"); - const wtGsdDir = join(wtDir, ".gsd"); + const wtGsdDir = join(wtDir, ".sf"); mkdirSync(wtGsdDir, { recursive: true }); writeFileSync(join(wtGsdDir, "doctor-history.jsonl"), '{"ts":"2026-01-01","ok":true}\n'); @@ -56,7 +56,7 @@ describe('doctor false-positives (#3105)', async () => { i => i.code === "worktree_directory_orphaned" && i.unitId === "M042" ); assert.equal(orphanIssues.length, 0, - "should not warn about worktree dir that only contains .gsd/doctor-history.jsonl"); + "should not warn about worktree dir that only contains .sf/doctor-history.jsonl"); rmSync(base, { recursive: true, force: true }); }); diff --git a/src/resources/extensions/sf/tests/integration/doctor-fixlevel.test.ts b/src/resources/extensions/sf/tests/integration/doctor-fixlevel.test.ts index 33e99760d..26f669955 100644 --- a/src/resources/extensions/sf/tests/integration/doctor-fixlevel.test.ts +++ b/src/resources/extensions/sf/tests/integration/doctor-fixlevel.test.ts @@ -24,13 +24,13 @@ function makeTmp(name: string): string { } /** - * Build a minimal .gsd structure: milestone with one slice, one task + * Build a minimal .sf structure: milestone with one slice, one task * marked done with a summary — but no slice summary and roadmap unchecked. * Previously this triggered reconciliation; now it should produce no * reconciliation issue codes. 
*/ function buildScaffold(base: string) { - const sf = join(base, ".gsd"); + const sf = join(base, ".sf"); const m = join(sf, "milestones", "M001"); const s = join(m, "slices", "S01", "tasks"); mkdirSync(s, { recursive: true }); @@ -105,11 +105,11 @@ test("fixLevel:all — no reconciliation issue codes are reported", async (t) => } // Summary and UAT stubs should NOT be created (no reconciliation) - const sliceSummaryPath = join(tmp, ".gsd", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"); + const sliceSummaryPath = join(tmp, ".sf", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"); assert.ok(!existsSync(sliceSummaryPath), "should NOT have created summary stub"); // Roadmap should remain unchecked (no reconciliation) - const roadmapContent = readFileSync(join(tmp, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "utf8"); + const roadmapContent = readFileSync(join(tmp, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "utf8"); assert.ok(roadmapContent.includes("- [ ] **S01"), "roadmap should remain unchecked"); }); @@ -123,7 +123,7 @@ test("legacy roadmap fallback: future slices are treated as pending, active slic // Force the legacy parser branch. 
try { closeDatabase(); } catch { /* noop */ } - const sf = join(tmp, ".gsd"); + const sf = join(tmp, ".sf"); const m = join(sf, "milestones", "M001"); const s01 = join(m, "slices", "S01", "tasks"); mkdirSync(s01, { recursive: true }); @@ -184,7 +184,7 @@ test("db skipped slices do not report missing directories", async (t) => { rmSync(tmp, { recursive: true, force: true }); }); - const sf = join(tmp, ".gsd"); + const sf = join(tmp, ".sf"); const m = join(sf, "milestones", "M001"); mkdirSync(m, { recursive: true }); @@ -232,7 +232,7 @@ test("fixLevel:all — delimiter_in_title still fixable", async (t) => { const tmp = makeTmp("delimiter-fix"); t.after(() => rmSync(tmp, { recursive: true, force: true })); - const sf = join(tmp, ".gsd"); + const sf = join(tmp, ".sf"); const m = join(sf, "milestones", "M001"); const s = join(m, "slices", "S01", "tasks"); mkdirSync(s, { recursive: true }); diff --git a/src/resources/extensions/sf/tests/integration/doctor-git.test.ts b/src/resources/extensions/sf/tests/integration/doctor-git.test.ts index 39b01f2f9..0b04dbc2c 100644 --- a/src/resources/extensions/sf/tests/integration/doctor-git.test.ts +++ b/src/resources/extensions/sf/tests/integration/doctor-git.test.ts @@ -33,8 +33,8 @@ function createRepoWithCompletedMilestone(): string { run("git commit -m init", dir); run("git branch -M main", dir); - // Create .gsd structure with milestone M001 — all slices done → complete - const msDir = join(dir, ".gsd", "milestones", "M001"); + // Create .sf structure with milestone M001 — all slices done → complete + const msDir = join(dir, ".sf", "milestones", "M001"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "ROADMAP.md"), `--- id: M001 @@ -57,16 +57,16 @@ Test _None_ `); - // Commit .gsd files + // Commit .sf files run("git add -A", dir); run("git commit -m \"add milestone\"", dir); return dir; } -/** Write a .gsd/PREFERENCES.md with the given git isolation mode. 
*/ +/** Write a .sf/PREFERENCES.md with the given git isolation mode. */ function writePreferencesFile(dir: string, isolation: "none" | "worktree" | "branch"): void { - const sfDir = join(dir, ".gsd"); + const sfDir = join(dir, ".sf"); mkdirSync(sfDir, { recursive: true }); writeFileSync(join(sfDir, "PREFERENCES.md"), `---\ngit:\n isolation: "${isolation}"\n---\n`); } @@ -83,7 +83,7 @@ function createRepoWithActiveMilestone(): string { run("git commit -m init", dir); run("git branch -M main", dir); - const msDir = join(dir, ".gsd", "milestones", "M001"); + const msDir = join(dir, ".sf", "milestones", "M001"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "ROADMAP.md"), `--- id: M001 @@ -126,9 +126,9 @@ describe('doctor-git', async () => { const dir = createRepoWithCompletedMilestone(); cleanups.push(dir); - // Create worktree with milestone/M001 branch under .gsd/worktrees/ - mkdirSync(join(dir, ".gsd", "worktrees"), { recursive: true }); - run("git worktree add -b milestone/M001 .gsd/worktrees/M001", dir); + // Create worktree with milestone/M001 branch under .sf/worktrees/ + mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); + run("git worktree add -b milestone/M001 .sf/worktrees/M001", dir); const detect = await runSFDoctor(dir, { isolationMode: "worktree" }); const orphanIssues = detect.issues.filter(i => i.code === "orphaned_auto_worktree"); @@ -154,11 +154,11 @@ describe('doctor-git', async () => { const dir = createRepoWithCompletedMilestone(); cleanups.push(dir); - // Create worktree with milestone/M001 branch under .gsd/worktrees/ - mkdirSync(join(dir, ".gsd", "worktrees"), { recursive: true }); - run("git worktree add -b milestone/M001 .gsd/worktrees/M001", dir); + // Create worktree with milestone/M001 branch under .sf/worktrees/ + mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); + run("git worktree add -b milestone/M001 .sf/worktrees/M001", dir); - const wtPath = realpathSync(join(dir, ".gsd", "worktrees", 
"M001")); + const wtPath = realpathSync(join(dir, ".sf", "worktrees", "M001")); // Simulate the deadlock: set cwd inside the orphaned worktree const previousCwd = process.cwd(); @@ -247,10 +247,10 @@ describe('doctor-git', async () => { cleanups.push(dir); // Force-add a runtime file - const activityDir = join(dir, ".gsd", "activity"); + const activityDir = join(dir, ".sf", "activity"); mkdirSync(activityDir, { recursive: true }); writeFileSync(join(activityDir, "test.log"), "log data\n"); - run("git add -f .gsd/activity/test.log", dir); + run("git add -f .sf/activity/test.log", dir); run("git commit -m \"track runtime file\"", dir); const detect = await runSFDoctor(dir); @@ -261,7 +261,7 @@ describe('doctor-git', async () => { assert.ok(fixed.fixesApplied.some(f => f.includes("untracked")), "fix untracks runtime files"); // Verify file is no longer tracked - const tracked = run("git ls-files .gsd/activity/", dir); + const tracked = run("git ls-files .sf/activity/", dir); assert.deepStrictEqual(tracked, "", "runtime file untracked after fix"); }); @@ -270,8 +270,8 @@ describe('doctor-git', async () => { const dir = realpathSync(mkdtempSync(join(tmpdir(), "doc-git-test-"))); cleanups.push(dir); - // Create minimal .gsd structure (no git) - mkdirSync(join(dir, ".gsd"), { recursive: true }); + // Create minimal .sf structure (no git) + mkdirSync(join(dir, ".sf"), { recursive: true }); const result = await runSFDoctor(dir); const gitIssues = result.issues.filter(i => @@ -288,9 +288,9 @@ describe('doctor-git', async () => { const dir = createRepoWithActiveMilestone(); cleanups.push(dir); - // Create worktree for in-progress milestone under .gsd/worktrees/ - mkdirSync(join(dir, ".gsd", "worktrees"), { recursive: true }); - run("git worktree add -b milestone/M001 .gsd/worktrees/M001", dir); + // Create worktree for in-progress milestone under .sf/worktrees/ + mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); + run("git worktree add -b milestone/M001 
.sf/worktrees/M001", dir); const detect = await runSFDoctor(dir, { isolationMode: "worktree" }); const orphanIssues = detect.issues.filter(i => i.code === "orphaned_auto_worktree"); @@ -302,15 +302,15 @@ describe('doctor-git', async () => { // ─── Test 7: none-mode skips orphaned worktree check ─────────────── // NOTE: loadEffectiveSFPreferences() resolves PROJECT_PREFERENCES_PATH // at module load time from process.cwd(). We write the prefs file to - // the test runner's cwd .gsd/PREFERENCES.md and clean up afterwards. + // the test runner's cwd .sf/PREFERENCES.md and clean up afterwards. if (process.platform !== "win32") { test('none-mode skips orphaned worktree', async () => { const dir = createRepoWithCompletedMilestone(); cleanups.push(dir); - // Create worktree with milestone/M001 branch under .gsd/worktrees/ - mkdirSync(join(dir, ".gsd", "worktrees"), { recursive: true }); - run("git worktree add -b milestone/M001 .gsd/worktrees/M001", dir); + // Create worktree with milestone/M001 branch under .sf/worktrees/ + mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); + run("git worktree add -b milestone/M001 .sf/worktrees/M001", dir); const result = await runSFDoctor(dir, { isolationMode: "none" }); const orphanIssues = result.issues.filter(i => i.code === "orphaned_auto_worktree"); @@ -342,7 +342,7 @@ describe('doctor-git', async () => { cleanups.push(dir); // Write integration branch metadata for M001 pointing to a non-existent branch - const metaPath = join(dir, ".gsd", "milestones", "M001", "M001-META.json"); + const metaPath = join(dir, ".sf", "milestones", "M001", "M001-META.json"); writeFileSync(metaPath, JSON.stringify({ integrationBranch: "feat/does-not-exist" }, null, 2)); const detect = await runSFDoctor(dir); @@ -365,7 +365,7 @@ describe('doctor-git', async () => { cleanups.push(dir); // Write integration branch metadata for M001 pointing to "main" (which exists) - const metaPath = join(dir, ".gsd", "milestones", "M001", "M001-META.json"); 
+ const metaPath = join(dir, ".sf", "milestones", "M001", "M001-META.json"); writeFileSync(metaPath, JSON.stringify({ integrationBranch: "main" }, null, 2)); const detect = await runSFDoctor(dir); @@ -380,7 +380,7 @@ describe('doctor-git', async () => { const dir = createRepoWithActiveMilestone(); cleanups.push(dir); - const metaPath = join(dir, ".gsd", "milestones", "M001", "M001-META.json"); + const metaPath = join(dir, ".sf", "milestones", "M001", "M001-META.json"); writeFileSync(metaPath, JSON.stringify({ integrationBranch: "feat/does-not-exist" }, null, 2)); const detect = await runSFDoctor(dir); @@ -409,9 +409,9 @@ describe('doctor-git', async () => { cleanups.push(dir); run("git branch trunk", dir); - writeFileSync(join(dir, ".gsd", "PREFERENCES.md"), `---\ngit:\n isolation: "worktree"\n main_branch: "trunk"\n---\n`); + writeFileSync(join(dir, ".sf", "PREFERENCES.md"), `---\ngit:\n isolation: "worktree"\n main_branch: "trunk"\n---\n`); - const metaPath = join(dir, ".gsd", "milestones", "M001", "M001-META.json"); + const metaPath = join(dir, ".sf", "milestones", "M001", "M001-META.json"); writeFileSync(metaPath, JSON.stringify({ integrationBranch: "feat/does-not-exist" }, null, 2)); const previousCwd = process.cwd(); @@ -447,7 +447,7 @@ describe('doctor-git', async () => { cleanups.push(dir); // Create a worktrees/ dir with an entry that is NOT in git worktree list - const orphanDir = join(dir, ".gsd", "worktrees", "orphan-feature"); + const orphanDir = join(dir, ".sf", "worktrees", "orphan-feature"); mkdirSync(orphanDir, { recursive: true }); writeFileSync(join(orphanDir, "some-file.txt"), "leftover content\n"); @@ -476,9 +476,9 @@ describe('doctor-git', async () => { const dir = createRepoWithActiveMilestone(); cleanups.push(dir); - // Create a real registered worktree under .gsd/worktrees/ - mkdirSync(join(dir, ".gsd", "worktrees"), { recursive: true }); - run("git worktree add -b worktree/feature-1 .gsd/worktrees/feature-1", dir); + // Create a real 
registered worktree under .sf/worktrees/ + mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); + run("git worktree add -b worktree/feature-1 .sf/worktrees/feature-1", dir); const detect = await runSFDoctor(dir); const orphanDirIssues = detect.issues.filter(i => i.code === "worktree_directory_orphaned"); @@ -507,10 +507,10 @@ describe('doctor-git', async () => { cleanups.push(dir); // Force-add a runtime file - const activityDir = join(dir, ".gsd", "activity"); + const activityDir = join(dir, ".sf", "activity"); mkdirSync(activityDir, { recursive: true }); writeFileSync(join(activityDir, "test.log"), "log data\n"); - run("git add -f .gsd/activity/test.log", dir); + run("git add -f .sf/activity/test.log", dir); run("git commit -m \"track runtime file\"", dir); const result = await runSFDoctor(dir, { isolationMode: "none" }); @@ -518,26 +518,26 @@ describe('doctor-git', async () => { assert.ok(trackedIssues.length > 0, "none-mode: tracked runtime files IS detected"); }); - // ─── Test: Symlinked .gsd does not cause false orphan detection ──── + // ─── Test: Symlinked .sf does not cause false orphan detection ──── if (process.platform !== "win32") { - test('worktree_directory_orphaned (symlinked .gsd not false-positive)', async () => { + test('worktree_directory_orphaned (symlinked .sf not false-positive)', async () => { const dir = createRepoWithActiveMilestone(); cleanups.push(dir); - // Move .gsd to an external location and replace with a symlink. - // This simulates the ~/.gsd/projects/<hash> layout where .gsd is a symlink. + // Move .sf to an external location and replace with a symlink. + // This simulates the ~/.sf/projects/<hash> layout where .sf is a symlink. 
const externalGsd = join(realpathSync(mkdtempSync(join(tmpdir(), "doc-git-symlink-"))), "sf-data"); cleanups.push(externalGsd); - renameSync(join(dir, ".gsd"), externalGsd); - symlinkSync(externalGsd, join(dir, ".gsd")); + renameSync(join(dir, ".sf"), externalGsd); + symlinkSync(externalGsd, join(dir, ".sf")); - // Create a real registered worktree under the (now symlinked) .gsd/worktrees/ - mkdirSync(join(dir, ".gsd", "worktrees"), { recursive: true }); - run("git worktree add -b worktree/symlink-test .gsd/worktrees/symlink-test", dir); + // Create a real registered worktree under the (now symlinked) .sf/worktrees/ + mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); + run("git worktree add -b worktree/symlink-test .sf/worktrees/symlink-test", dir); const detect = await runSFDoctor(dir); const orphanDirIssues = detect.issues.filter(i => i.code === "worktree_directory_orphaned"); - assert.deepStrictEqual(orphanDirIssues.length, 0, "registered worktree via symlinked .gsd NOT flagged as orphaned"); + assert.deepStrictEqual(orphanDirIssues.length, 0, "registered worktree via symlinked .sf NOT flagged as orphaned"); }); } else { } @@ -549,9 +549,9 @@ describe('doctor-git', async () => { cleanups.push(dir); // Create a worktree, make a commit, then merge the branch into main - mkdirSync(join(dir, ".gsd", "worktrees"), { recursive: true }); - run("git worktree add -b worktree/merged-feature .gsd/worktrees/merged-feature", dir); - const wtPath = join(dir, ".gsd", "worktrees", "merged-feature"); + mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); + run("git worktree add -b worktree/merged-feature .sf/worktrees/merged-feature", dir); + const wtPath = join(dir, ".sf", "worktrees", "merged-feature"); writeFileSync(join(wtPath, "feature.txt"), "feature\n"); run("git add -A", wtPath); run("git -c user.email=test@test.com -c user.name=Test commit -m \"feature work\"", wtPath); @@ -579,9 +579,9 @@ describe('doctor-git', async () => { const dir = 
createRepoWithActiveMilestone(); cleanups.push(dir); - mkdirSync(join(dir, ".gsd", "worktrees"), { recursive: true }); - run("git worktree add -b milestone/M001 .gsd/worktrees/M001", dir); - const wtPath = join(dir, ".gsd", "worktrees", "M001"); + mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); + run("git worktree add -b milestone/M001 .sf/worktrees/M001", dir); + const wtPath = join(dir, ".sf", "worktrees", "M001"); writeFileSync(join(wtPath, "feature.txt"), "feature\n"); run("git add -A", wtPath); run("git -c user.email=test@test.com -c user.name=Test commit -m \"feature work\"", wtPath); @@ -603,9 +603,9 @@ describe('doctor-git', async () => { const dir = createRepoWithActiveMilestone(); cleanups.push(dir); - mkdirSync(join(dir, ".gsd", "worktrees"), { recursive: true }); - run("git worktree add -b worktree/active-feature .gsd/worktrees/active-feature", dir); - const wtPath = join(dir, ".gsd", "worktrees", "active-feature"); + mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); + run("git worktree add -b worktree/active-feature .sf/worktrees/active-feature", dir); + const wtPath = join(dir, ".sf", "worktrees", "active-feature"); writeFileSync(join(wtPath, "wip.txt"), "work in progress\n"); run("git add -A", wtPath); run("git -c user.email=test@test.com -c user.name=Test commit -m \"wip\"", wtPath); diff --git a/src/resources/extensions/sf/tests/integration/doctor-proactive.test.ts b/src/resources/extensions/sf/tests/integration/doctor-proactive.test.ts index af04680ca..32db89fad 100644 --- a/src/resources/extensions/sf/tests/integration/doctor-proactive.test.ts +++ b/src/resources/extensions/sf/tests/integration/doctor-proactive.test.ts @@ -37,13 +37,13 @@ function createGitRepo(): string { run("git add .", dir); run("git commit -m init", dir); run("git branch -M main", dir); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); return dir; } function createRepoWithActiveMilestone(): 
string { const dir = createGitRepo(); - const msDir = join(dir, ".gsd", "milestones", "M001"); + const msDir = join(dir, ".sf", "milestones", "M001"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "ROADMAP.md"), `--- id: M001 @@ -199,7 +199,7 @@ describe('doctor-proactive', async () => { test('health gate: clean state', async () => { const dir = realpathSync(mkdtempSync(join(tmpdir(), "doc-proactive-"))); cleanups.push(dir); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); const result = await preDispatchHealthGate(dir); assert.ok(result.proceed, "gate passes on clean state"); @@ -210,8 +210,8 @@ describe('doctor-proactive', async () => { const dir = realpathSync(mkdtempSync(join(tmpdir(), "doc-proactive-"))); cleanups.push(dir); // Create milestones dir but no STATE.md — mimics fresh worktree - mkdirSync(join(dir, ".gsd", "milestones", "M001"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "# Roadmap\n"); + mkdirSync(join(dir, ".sf", "milestones", "M001"), { recursive: true }); + writeFileSync(join(dir, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "# Roadmap\n"); const result = await preDispatchHealthGate(dir); assert.ok(result.proceed, "gate must NOT block when STATE.md is missing (deadlock #889)"); @@ -222,10 +222,10 @@ describe('doctor-proactive', async () => { test('health gate: stale crash lock auto-cleared', async () => { const dir = realpathSync(mkdtempSync(join(tmpdir(), "doc-proactive-"))); cleanups.push(dir); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); // Write a stale lock - writeFileSync(join(dir, ".gsd", "auto.lock"), JSON.stringify({ + writeFileSync(join(dir, ".sf", "auto.lock"), JSON.stringify({ pid: 9999999, startedAt: "2026-03-10T00:00:00Z", unitType: "execute-task", unitId: "M001/S01/T01", unitStartedAt: "2026-03-10T00:01:00Z", completedUnits: 3, @@ -234,7 
+234,7 @@ describe('doctor-proactive', async () => { const result = await preDispatchHealthGate(dir); assert.ok(result.proceed, "gate passes after auto-clearing stale lock"); assert.ok(result.fixesApplied.some(f => f.includes("cleared stale auto.lock")), "reports lock cleared"); - assert.ok(!existsSync(join(dir, ".gsd", "auto.lock")), "lock file removed"); + assert.ok(!existsSync(join(dir, ".sf", "auto.lock")), "lock file removed"); }); test('health gate: corrupt merge state auto-healed', async () => { @@ -260,10 +260,10 @@ describe('doctor-proactive', async () => { test('health gate: STATE.md missing — auto-healed', async () => { const dir = realpathSync(mkdtempSync(join(tmpdir(), "doc-proactive-"))); cleanups.push(dir); - // Minimal .gsd structure: milestones dir exists but no STATE.md - mkdirSync(join(dir, ".gsd", "milestones"), { recursive: true }); + // Minimal .sf structure: milestones dir exists but no STATE.md + mkdirSync(join(dir, ".sf", "milestones"), { recursive: true }); - const stateFile = join(dir, ".gsd", "STATE.md"); + const stateFile = join(dir, ".sf", "STATE.md"); assert.ok(!existsSync(stateFile), "STATE.md does not exist before gate"); const result = await preDispatchHealthGate(dir); @@ -280,7 +280,7 @@ describe('doctor-proactive', async () => { const dir = createRepoWithActiveMilestone(); cleanups.push(dir); - const metaPath = join(dir, ".gsd", "milestones", "M001", "M001-META.json"); + const metaPath = join(dir, ".sf", "milestones", "M001", "M001-META.json"); writeFileSync(metaPath, JSON.stringify({ integrationBranch: "feature/missing" }, null, 2)); const result = await preDispatchHealthGate(dir); @@ -297,8 +297,8 @@ describe('doctor-proactive', async () => { cleanups.push(dir); run("git branch trunk", dir); - writeFileSync(join(dir, ".gsd", "PREFERENCES.md"), `---\ngit:\n main_branch: "trunk"\n---\n`); - const metaPath = join(dir, ".gsd", "milestones", "M001", "M001-META.json"); + writeFileSync(join(dir, ".sf", "PREFERENCES.md"), `---\ngit:\n 
main_branch: "trunk"\n---\n`); + const metaPath = join(dir, ".sf", "milestones", "M001", "M001-META.json"); writeFileSync(metaPath, JSON.stringify({ integrationBranch: "feature/missing" }, null, 2)); const previousCwd = process.cwd(); diff --git a/src/resources/extensions/sf/tests/integration/doctor-roadmap-summary-atomicity.test.ts b/src/resources/extensions/sf/tests/integration/doctor-roadmap-summary-atomicity.test.ts index 779d5ed7e..f4e725482 100644 --- a/src/resources/extensions/sf/tests/integration/doctor-roadmap-summary-atomicity.test.ts +++ b/src/resources/extensions/sf/tests/integration/doctor-roadmap-summary-atomicity.test.ts @@ -21,7 +21,7 @@ function makeTmp(name: string): string { } function buildScaffold(base: string) { - const sf = join(base, ".gsd"); + const sf = join(base, ".sf"); const m = join(sf, "milestones", "M001"); const s = join(m, "slices", "S01", "tasks"); mkdirSync(s, { recursive: true }); @@ -67,14 +67,14 @@ test("fixLevel:task — roadmap checkbox is never toggled by doctor (reconciliat const report = await runSFDoctor(tmp, { fix: true, fixLevel: "task" }); // Roadmap must remain unchecked — doctor no longer touches checkboxes - const roadmapContent = readFileSync(join(tmp, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "utf8"); + const roadmapContent = readFileSync(join(tmp, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "utf8"); assert.ok( roadmapContent.includes("- [ ] **S01"), "roadmap should remain unchecked — doctor no longer toggles checkboxes" ); // No summary or UAT stubs created - const sliceSummaryPath = join(tmp, ".gsd", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"); + const sliceSummaryPath = join(tmp, ".sf", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"); assert.ok(!existsSync(sliceSummaryPath), "summary should NOT be created"); }); @@ -87,13 +87,13 @@ test("fixLevel:all — roadmap checkbox is never toggled by doctor (reconciliati const report = await runSFDoctor(tmp, { fix: true }); // Even at 
fixLevel:all, doctor no longer creates stubs or toggles checkboxes - const roadmapContent = readFileSync(join(tmp, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "utf8"); + const roadmapContent = readFileSync(join(tmp, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "utf8"); assert.ok( roadmapContent.includes("- [ ] **S01"), "roadmap should remain unchecked — reconciliation removed" ); - const sliceSummaryPath = join(tmp, ".gsd", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"); + const sliceSummaryPath = join(tmp, ".sf", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"); assert.ok(!existsSync(sliceSummaryPath), "summary should NOT be created"); }); diff --git a/src/resources/extensions/sf/tests/integration/doctor-runtime.test.ts b/src/resources/extensions/sf/tests/integration/doctor-runtime.test.ts index 142a7451d..d62544f81 100644 --- a/src/resources/extensions/sf/tests/integration/doctor-runtime.test.ts +++ b/src/resources/extensions/sf/tests/integration/doctor-runtime.test.ts @@ -19,10 +19,10 @@ function run(cmd: string, cwd: string): string { return execSync(cmd, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim(); } -/** Create a minimal .gsd project with a milestone for STATE.md tests. */ +/** Create a minimal .sf project with a milestone for STATE.md tests. */ function createMinimalProject(): string { const dir = realpathSync(mkdtempSync(join(tmpdir(), "doc-runtime-test-"))); - const msDir = join(dir, ".gsd", "milestones", "M001"); + const msDir = join(dir, ".sf", "milestones", "M001"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "M001-ROADMAP.md"), `# M001: Test @@ -42,7 +42,7 @@ function createMinimalProject(): string { return dir; } -/** Create a minimal git repo with .gsd for gitignore tests. */ +/** Create a minimal git repo with .sf for gitignore tests. 
*/ function createGitProject(): string { const dir = realpathSync(mkdtempSync(join(tmpdir(), "doc-runtime-git-"))); run("git init", dir); @@ -73,7 +73,7 @@ describe('doctor-runtime', async () => { unitStartedAt: "2026-03-10T00:01:00Z", completedUnits: 3, }; - writeFileSync(join(dir, ".gsd", "auto.lock"), JSON.stringify(lockData, null, 2)); + writeFileSync(join(dir, ".sf", "auto.lock"), JSON.stringify(lockData, null, 2)); const detect = await runSFDoctor(dir); const lockIssues = detect.issues.filter(i => i.code === "stale_crash_lock"); @@ -83,7 +83,7 @@ describe('doctor-runtime', async () => { const fixed = await runSFDoctor(dir, { fix: true }); assert.ok(fixed.fixesApplied.some(f => f.includes("cleared stale auto.lock")), "fix clears stale lock"); - assert.ok(!existsSync(join(dir, ".gsd", "auto.lock")), "auto.lock removed after fix"); + assert.ok(!existsSync(join(dir, ".sf", "auto.lock")), "auto.lock removed after fix"); }); // ─── Test 2: No false positive for missing lock ─────────────────── @@ -109,7 +109,7 @@ describe('doctor-runtime', async () => { }, savedAt: "2026-03-10T00:00:00Z", }; - writeFileSync(join(dir, ".gsd", "hook-state.json"), JSON.stringify(hookState, null, 2)); + writeFileSync(join(dir, ".sf", "hook-state.json"), JSON.stringify(hookState, null, 2)); const detect = await runSFDoctor(dir); const hookIssues = detect.issues.filter(i => i.code === "stale_hook_state"); @@ -120,7 +120,7 @@ describe('doctor-runtime', async () => { assert.ok(fixed.fixesApplied.some(f => f.includes("cleared stale hook-state.json")), "fix clears hook state"); // Verify the file was cleaned - const content = JSON.parse(readFileSync(join(dir, ".gsd", "hook-state.json"), "utf-8")); + const content = JSON.parse(readFileSync(join(dir, ".sf", "hook-state.json"), "utf-8")); assert.deepStrictEqual(Object.keys(content.cycleCounts).length, 0, "hook state cycle counts cleared"); }); @@ -130,7 +130,7 @@ describe('doctor-runtime', async () => { cleanups.push(dir); // Create an activity 
dir with > 500 files - const activityDir = join(dir, ".gsd", "activity"); + const activityDir = join(dir, ".sf", "activity"); mkdirSync(activityDir, { recursive: true }); for (let i = 0; i < 510; i++) { writeFileSync(join(activityDir, `${String(i).padStart(3, "0")}-execute-task-M001-S01-T01.jsonl`), `{"test":${i}}\n`); @@ -148,7 +148,7 @@ describe('doctor-runtime', async () => { cleanups.push(dir); // No STATE.md exists by default in our minimal setup - const stateFilePath = join(dir, ".gsd", "STATE.md"); + const stateFilePath = join(dir, ".sf", "STATE.md"); assert.ok(!existsSync(stateFilePath), "STATE.md does not exist initially"); const detect = await runSFDoctor(dir); @@ -173,7 +173,7 @@ describe('doctor-runtime', async () => { cleanups.push(dir); // Write a STATE.md with wrong phase/milestone info - const stateFilePath = join(dir, ".gsd", "STATE.md"); + const stateFilePath = join(dir, ".sf", "STATE.md"); writeFileSync(stateFilePath, `# SF State **Active Milestone:** None @@ -211,8 +211,8 @@ None const dir = createGitProject(); cleanups.push(dir); - // Create .gsd dir so checks can run - mkdirSync(join(dir, ".gsd"), { recursive: true }); + // Create .sf dir so checks can run + mkdirSync(join(dir, ".sf"), { recursive: true }); // Write a .gitignore missing SF runtime patterns writeFileSync(join(dir, ".gitignore"), `node_modules/ @@ -222,32 +222,32 @@ None const detect = await runSFDoctor(dir); const gitignoreIssues = detect.issues.filter(i => i.code === "gitignore_missing_patterns"); assert.ok(gitignoreIssues.length > 0, "detects missing gitignore patterns"); - assert.ok(gitignoreIssues[0]?.message.includes(".gsd"), "message lists missing .gsd pattern"); + assert.ok(gitignoreIssues[0]?.message.includes(".sf"), "message lists missing .sf pattern"); const fixed = await runSFDoctor(dir, { fix: true }); assert.ok(fixed.fixesApplied.some(f => f.includes("added missing SF runtime patterns")), "fix adds patterns"); - // Verify .gsd entry was added (external state 
symlink) + // Verify .sf entry was added (external state symlink) const content = readFileSync(join(dir, ".gitignore"), "utf-8"); - assert.ok(content.includes(".gsd"), "gitignore now has .gsd entry"); + assert.ok(content.includes(".sf"), "gitignore now has .sf entry"); }); } else { } - // ─── Test 8: No false positive when gitignore has blanket .gsd/ ─── + // ─── Test 8: No false positive when gitignore has blanket .sf/ ─── if (process.platform !== "win32") { - test('gitignore — blanket .gsd/', async () => { + test('gitignore — blanket .sf/', async () => { const dir = createGitProject(); cleanups.push(dir); - mkdirSync(join(dir, ".gsd"), { recursive: true }); - writeFileSync(join(dir, ".gitignore"), `.gsd/ + mkdirSync(join(dir, ".sf"), { recursive: true }); + writeFileSync(join(dir, ".gitignore"), `.sf/ node_modules/ `); const detect = await runSFDoctor(dir); const gitignoreIssues = detect.issues.filter(i => i.code === "gitignore_missing_patterns"); - assert.deepStrictEqual(gitignoreIssues.length, 0, "no missing patterns when blanket .gsd/ present"); + assert.deepStrictEqual(gitignoreIssues.length, 0, "no missing patterns when blanket .sf/ present"); }); } else { } @@ -262,7 +262,7 @@ node_modules/ "execute-task/M001/S01/T99", // T99 doesn't exist "complete-slice/M001/S99", // S99 doesn't exist ]; - writeFileSync(join(dir, ".gsd", "completed-units.json"), JSON.stringify(completedKeys)); + writeFileSync(join(dir, ".sf", "completed-units.json"), JSON.stringify(completedKeys)); const detect = await runSFDoctor(dir); const orphanIssues = detect.issues.filter(i => i.code === "orphaned_completed_units"); @@ -273,13 +273,13 @@ node_modules/ assert.ok(fixed.fixesApplied.some(f => f.includes("removed") && f.includes("orphaned")), "fix removes orphaned keys"); // Verify keys were cleaned - const content = JSON.parse(readFileSync(join(dir, ".gsd", "completed-units.json"), "utf-8")); + const content = JSON.parse(readFileSync(join(dir, ".sf", "completed-units.json"), "utf-8")); 
assert.deepStrictEqual(content.length, 0, "all orphaned keys removed"); }); // ─── Test: Stranded lock directory detection & fix ──────────────── // Skip on Windows: proper-lockfile uses advisory file locking on Windows, - // not the directory-based mechanism. The .gsd.lock/ directory pattern is + // not the directory-based mechanism. The .sf.lock/ directory pattern is // a POSIX-specific lockfile implementation detail. if (process.platform !== "win32") { test('stranded_lock_directory', async () => { @@ -287,8 +287,8 @@ node_modules/ cleanups.push(dir); // Create the proper-lockfile lock directory without a live lock holder. - // The lock dir sits at <parent of .gsd>/.gsd.lock (i.e., <basePath>/.gsd.lock). - const lockDir = join(dir, ".gsd.lock"); + // The lock dir sits at <parent of .sf>/.sf.lock (i.e., <basePath>/.sf.lock). + const lockDir = join(dir, ".sf.lock"); mkdirSync(lockDir, { recursive: true }); const detect = await runSFDoctor(dir); @@ -311,7 +311,7 @@ node_modules/ cleanups.push(dir); // Create lock dir + auto.lock with PID 1 (init/launchd — always alive, never our own PID) - const lockDir = join(dir, ".gsd.lock"); + const lockDir = join(dir, ".sf.lock"); mkdirSync(lockDir, { recursive: true }); const liveLockData = { pid: 1, @@ -321,7 +321,7 @@ node_modules/ unitStartedAt: new Date().toISOString(), completedUnits: 1, }; - writeFileSync(join(dir, ".gsd", "auto.lock"), JSON.stringify(liveLockData, null, 2)); + writeFileSync(join(dir, ".sf", "auto.lock"), JSON.stringify(liveLockData, null, 2)); const detect = await runSFDoctor(dir); const strandedIssues = detect.issues.filter(i => i.code === "stranded_lock_directory"); @@ -344,7 +344,7 @@ node_modules/ "execute-task/M001/S01/T99", // artifact missing "complete-slice/M001/S99", // artifact missing ]; - writeFileSync(join(dir, ".gsd", "completed-units.json"), JSON.stringify(completedKeys)); + writeFileSync(join(dir, ".sf", "completed-units.json"), JSON.stringify(completedKeys)); // fixLevel="task" — the 
level used by auto-post-unit after every task const taskLevelFix = await runSFDoctor(dir, { fix: true, fixLevel: "task" }); @@ -352,7 +352,7 @@ node_modules/ assert.ok(taskLevelOrphan.length > 0, "orphaned_completed_units detected at task fixLevel"); // Verify keys were NOT removed — the fix must be suppressed at task level - const afterTaskFix = JSON.parse(readFileSync(join(dir, ".gsd", "completed-units.json"), "utf-8")); + const afterTaskFix = JSON.parse(readFileSync(join(dir, ".sf", "completed-units.json"), "utf-8")); assert.deepStrictEqual(afterTaskFix.length, 2, "completed-unit keys preserved at fixLevel=task (data loss prevention)"); assert.ok( !taskLevelFix.fixesApplied.some(f => f.includes("orphaned")), @@ -365,7 +365,7 @@ node_modules/ allLevelFix.fixesApplied.some(f => f.includes("orphaned")), "orphaned-units fix applied at fixLevel=all (manual doctor)", ); - const afterAllFix = JSON.parse(readFileSync(join(dir, ".gsd", "completed-units.json"), "utf-8")); + const afterAllFix = JSON.parse(readFileSync(join(dir, ".sf", "completed-units.json"), "utf-8")); assert.deepStrictEqual(afterAllFix.length, 0, "orphaned keys removed at fixLevel=all"); }); diff --git a/src/resources/extensions/sf/tests/integration/doctor.test.ts b/src/resources/extensions/sf/tests/integration/doctor.test.ts index 91f2f4841..056a1ff1b 100644 --- a/src/resources/extensions/sf/tests/integration/doctor.test.ts +++ b/src/resources/extensions/sf/tests/integration/doctor.test.ts @@ -6,7 +6,7 @@ import { tmpdir } from "node:os"; import { formatDoctorReport, runSFDoctor, summarizeDoctorIssues, filterDoctorIssues, selectDoctorScope, validateTitle } from "../../doctor.js"; const tmpBase = mkdtempSync(join(tmpdir(), "sf-doctor-test-")); -const sf = join(tmpBase, ".gsd"); +const sf = join(tmpBase, ".sf"); const mDir = join(sf, "milestones", "M001"); const sDir = join(mDir, "slices", "S01"); const tDir = join(sDir, "tasks"); @@ -96,7 +96,7 @@ describe('doctor', async () => { // ─── Milestone summary 
detection: missing summary ────────────────────── test('doctor detects missing milestone summary', async () => { const msBase = mkdtempSync(join(tmpdir(), "sf-doctor-ms-test-")); - const msGsd = join(msBase, ".gsd"); + const msGsd = join(msBase, ".sf"); const msMDir = join(msGsd, "milestones", "M001"); const msSDir = join(msMDir, "slices", "S01"); const msTDir = join(msSDir, "tasks"); @@ -163,7 +163,7 @@ parent: M001 // ─── Milestone summary detection: summary present (no false positive) ── test('doctor does NOT flag milestone with summary', async () => { const msBase = mkdtempSync(join(tmpdir(), "sf-doctor-ms-ok-test-")); - const msGsd = join(msBase, ".gsd"); + const msGsd = join(msBase, ".sf"); const msMDir = join(msGsd, "milestones", "M001"); const msSDir = join(msMDir, "slices", "S01"); const msTDir = join(msSDir, "tasks"); @@ -222,7 +222,7 @@ parent: M001 // ─── blocker_discovered_no_replan detection ──────────────────────────── test('doctor detects blocker_discovered_no_replan', async () => { const bBase = mkdtempSync(join(tmpdir(), "sf-doctor-blocker-test-")); - const bGsd = join(bBase, ".gsd"); + const bGsd = join(bBase, ".sf"); const bMDir = join(bGsd, "milestones", "M001"); const bSDir = join(bMDir, "slices", "S01"); const bTDir = join(bSDir, "tasks"); @@ -288,7 +288,7 @@ Discovered an issue. // ─── blocker_discovered with REPLAN.md (no false positive) ───────────── test('doctor does NOT flag blocker when REPLAN.md exists', async () => { const bBase = mkdtempSync(join(tmpdir(), "sf-doctor-blocker-ok-test-")); - const bGsd = join(bBase, ".gsd"); + const bGsd = join(bBase, ".sf"); const bMDir = join(bGsd, "milestones", "M001"); const bSDir = join(bMDir, "slices", "S01"); const bTDir = join(bSDir, "tasks"); @@ -344,7 +344,7 @@ Discovered an issue. 
// ─── Must-have verification: all addressed → no issue ───────────────── test('doctor: done task with must-haves all addressed → no issue', async () => { const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-ok-")); - const mhGsd = join(mhBase, ".gsd"); + const mhGsd = join(mhBase, ".sf"); const mhMDir = join(mhGsd, "milestones", "M001"); const mhSDir = join(mhMDir, "slices", "S01"); const mhTDir = join(mhSDir, "tasks"); @@ -371,7 +371,7 @@ Discovered an issue. // ─── Must-have verification: not addressed → warning fired ─────────── test('doctor: done task with must-haves NOT addressed → warning', async () => { const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-fail-")); - const mhGsd = join(mhBase, ".gsd"); + const mhGsd = join(mhBase, ".sf"); const mhMDir = join(mhGsd, "milestones", "M001"); const mhSDir = join(mhMDir, "slices", "S01"); const mhTDir = join(mhSDir, "tasks"); @@ -401,7 +401,7 @@ Discovered an issue. // ─── Must-have verification: no task plan → no issue ───────────────── test('doctor: done task with no task plan file → no issue', async () => { const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-noplan-")); - const mhGsd = join(mhBase, ".gsd"); + const mhGsd = join(mhBase, ".sf"); const mhMDir = join(mhGsd, "milestones", "M001"); const mhSDir = join(mhMDir, "slices", "S01"); const mhTDir = join(mhSDir, "tasks"); @@ -425,7 +425,7 @@ Discovered an issue. // ─── Must-have verification: plan exists but no Must-Haves section → no issue test('doctor: done task with plan but no Must-Haves section → no issue', async () => { const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-nosect-")); - const mhGsd = join(mhBase, ".gsd"); + const mhGsd = join(mhBase, ".sf"); const mhMDir = join(mhGsd, "milestones", "M001"); const mhSDir = join(mhMDir, "slices", "S01"); const mhTDir = join(mhSDir, "tasks"); @@ -484,7 +484,7 @@ Discovered an issue. 
// ─── doctor detects delimiter_in_title for milestone ─────────────────── test('doctor detects em dash in milestone title', async () => { const dtBase = mkdtempSync(join(tmpdir(), "sf-doctor-dt-test-")); - const dtGsd = join(dtBase, ".gsd"); + const dtGsd = join(dtBase, ".sf"); const dtMDir = join(dtGsd, "milestones", "M001"); const dtSDir = join(dtMDir, "slices", "S01"); const dtTDir = join(dtSDir, "tasks"); @@ -511,7 +511,7 @@ Discovered an issue. // ─── doctor detects delimiter_in_title for slice ──────────────────────── test('doctor detects em dash in slice title', async () => { const dtBase = mkdtempSync(join(tmpdir(), "sf-doctor-dt-slice-")); - const dtGsd = join(dtBase, ".gsd"); + const dtGsd = join(dtBase, ".sf"); const dtMDir = join(dtGsd, "milestones", "M001"); const dtSDir = join(dtMDir, "slices", "S01"); const dtTDir = join(dtSDir, "tasks"); @@ -536,7 +536,7 @@ Discovered an issue. // ─── doctor does NOT flag clean titles ────────────────────────────────── test('doctor does NOT flag milestone with clean title', async () => { const dtBase = mkdtempSync(join(tmpdir(), "sf-doctor-dt-clean-")); - const dtGsd = join(dtBase, ".gsd"); + const dtGsd = join(dtBase, ".sf"); const dtMDir = join(dtGsd, "milestones", "M001"); const dtSDir = join(dtMDir, "slices", "S01"); const dtTDir = join(dtSDir, "tasks"); @@ -559,7 +559,7 @@ Discovered an issue. // Simulate a roadmap where expandDependencies did NOT expand (pre-fix stored artifact) // by writing a dep that looks like a range but doesn't match any real slice. const base = mkdtempSync(join(tmpdir(), "sf-doctor-udep-")); - const mDir2 = join(base, ".gsd", "milestones", "M001"); + const mDir2 = join(base, ".sf", "milestones", "M001"); const sDir2 = join(mDir2, "slices", "S01"); const tDir2 = join(sDir2, "tasks"); mkdirSync(tDir2, { recursive: true }); @@ -587,7 +587,7 @@ Discovered an issue. 
// ─── unresolvable_dependency: valid deps do not warn ───────────────── test('doctor: no unresolvable_dependency for valid deps', async () => { const base = mkdtempSync(join(tmpdir(), "sf-doctor-udep-ok-")); - const mDir2 = join(base, ".gsd", "milestones", "M001"); + const mDir2 = join(base, ".sf", "milestones", "M001"); const sDir2 = join(mDir2, "slices", "S01"); const tDir2 = join(sDir2, "tasks"); mkdirSync(tDir2, { recursive: true }); diff --git a/src/resources/extensions/sf/tests/integration/e2e-workflow-pipeline-integration.test.ts b/src/resources/extensions/sf/tests/integration/e2e-workflow-pipeline-integration.test.ts index 4b3ae61be..e0345b93f 100644 --- a/src/resources/extensions/sf/tests/integration/e2e-workflow-pipeline-integration.test.ts +++ b/src/resources/extensions/sf/tests/integration/e2e-workflow-pipeline-integration.test.ts @@ -148,7 +148,7 @@ function setupProject(overrides?: Record<string, string>): { runDir: string; } { const basePath = makeTmpDir(); - const defsDir = join(basePath, ".gsd", "workflow-defs"); + const defsDir = join(basePath, ".sf", "workflow-defs"); mkdirSync(defsDir, { recursive: true }); writeFileSync(join(defsDir, "e2e-pipeline.yaml"), E2E_DEFINITION_YAML, "utf-8"); diff --git a/src/resources/extensions/sf/tests/integration/feature-branch-lifecycle-integration.test.ts b/src/resources/extensions/sf/tests/integration/feature-branch-lifecycle-integration.test.ts index e9504d4c7..f524ef2b2 100644 --- a/src/resources/extensions/sf/tests/integration/feature-branch-lifecycle-integration.test.ts +++ b/src/resources/extensions/sf/tests/integration/feature-branch-lifecycle-integration.test.ts @@ -75,9 +75,9 @@ function createFeatureBranchRepo(featureBranch: string): string { writeFileSync(join(dir, "README.md"), "# project\n"); // Mirror production: SF runtime dirs are gitignored so autoCommitDirtyState // doesn't pick up the worktrees directory as dirty state (#1127 fix). 
- writeFileSync(join(dir, ".gitignore"), ".gsd/worktrees/\n"); - mkdirSync(join(dir, ".gsd"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "STATE.md"), "# State\n"); + writeFileSync(join(dir, ".gitignore"), ".sf/worktrees/\n"); + mkdirSync(join(dir, ".sf"), { recursive: true }); + writeFileSync(join(dir, ".sf", "STATE.md"), "# State\n"); run("git add .", dir); run("git commit -m init", dir); run("git branch -M main", dir); @@ -111,7 +111,7 @@ function addSliceToMilestone( commits: Array<{ file: string; content: string; message: string }>, ): void { const normalizedPath = wtPath.replaceAll("\\", "/"); - const marker = "/.gsd/worktrees/"; + const marker = "/.sf/worktrees/"; const idx = normalizedPath.indexOf(marker); const worktreeName = idx !== -1 ? normalizedPath.slice(idx + marker.length).split("/")[0] @@ -175,7 +175,7 @@ describe('feature-branch-lifecycle-integration', async () => { assert.ok(statusBefore.includes("wip-types.ts"), "wip-types.ts is uncommitted"); // ── Simulate what startAuto does: commit dirty state, capture integration branch ── - // startAuto bootstraps .gsd/ which commits .gsd/ files. It also calls + // startAuto bootstraps .sf/ which commits .sf/ files. It also calls // captureIntegrationBranch which commits META.json. But user's dirty // files need to be committed first so the worktree branches from a // commit that includes them. @@ -183,12 +183,12 @@ describe('feature-branch-lifecycle-integration', async () => { // In production, the first dispatch unit (research-milestone) would // auto-commit via autoCommitCurrentBranch. But the worktree is created // BEFORE any unit runs. So we simulate the pre-worktree state: - // SF bootstraps .gsd/ and captureIntegrationBranch commits metadata. + // SF bootstraps .sf/ and captureIntegrationBranch commits metadata. // The user's dirty files are NOT auto-committed pre-worktree — they // stay in the original working directory. 
// Create milestone directory (happens during guided-flow) - mkdirSync(join(repo, ".gsd", "milestones", milestoneId), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", milestoneId), { recursive: true }); // Write integration branch metadata (what captureIntegrationBranch does) writeIntegrationBranch(repo, milestoneId, featureBranch); @@ -285,7 +285,7 @@ describe('feature-branch-lifecycle-integration', async () => { run(`git checkout ${featureBranch}`, repo); // ── Assert: worktree cleaned up ── - const worktreeDir = join(repo, ".gsd", "worktrees", milestoneId); + const worktreeDir = join(repo, ".sf", "worktrees", milestoneId); assert.ok(!existsSync(worktreeDir), "worktree directory removed"); // Milestone branch deleted @@ -305,9 +305,9 @@ describe('feature-branch-lifecycle-integration', async () => { }); // ================================================================ - // Test 2: Uncommitted .gsd/ planning files are available in worktree + // Test 2: Uncommitted .sf/ planning files are available in worktree // - // When auto-mode starts, .gsd/ files may be untracked/uncommitted. + // When auto-mode starts, .sf/ files may be untracked/uncommitted. // Planning artifacts should be carried into the worktree even if // they weren't committed on the feature branch. 
// ================================================================ @@ -317,17 +317,17 @@ describe('feature-branch-lifecycle-integration', async () => { const milestoneId = nextMilestoneId([], true); // Write planning files that are NOT committed - mkdirSync(join(repo, ".gsd", "milestones", milestoneId, "slices", "S01", "tasks"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", milestoneId, "slices", "S01", "tasks"), { recursive: true }); writeFileSync( - join(repo, ".gsd", "milestones", milestoneId, `${milestoneId}-ROADMAP.md`), + join(repo, ".sf", "milestones", milestoneId, `${milestoneId}-ROADMAP.md`), makeRoadmap(milestoneId, "Planning test", [{ id: "S01", title: "First" }]), ); writeFileSync( - join(repo, ".gsd", "milestones", milestoneId, "slices", "S01", "S01-PLAN.md"), + join(repo, ".sf", "milestones", milestoneId, "slices", "S01", "S01-PLAN.md"), "# S01: First\n\n**Goal:** Test\n**Demo:** Test\n\n## Tasks\n- [ ] **T01: Do it** `est:10m`\n", ); - writeFileSync(join(repo, ".gsd", "PROJECT.md"), "# Planning Test Project\n"); - writeFileSync(join(repo, ".gsd", "DECISIONS.md"), "# Decisions\n\n## D001\nTest decision.\n"); + writeFileSync(join(repo, ".sf", "PROJECT.md"), "# Planning Test Project\n"); + writeFileSync(join(repo, ".sf", "DECISIONS.md"), "# Decisions\n\n## D001\nTest decision.\n"); // These files are untracked assert.ok(run("git status --short", repo).length > 0, "repo has untracked files"); @@ -337,10 +337,10 @@ describe('feature-branch-lifecycle-integration', async () => { const wtPath = createAutoWorktree(repo, milestoneId); tempDirs.push(wtPath); - // With external state, worktree .gsd is a symlink to shared state. + // With external state, worktree .sf is a symlink to shared state. // Verify symlink was created (planning files are shared, not copied). 
- const wtGsd = join(wtPath, ".gsd"); - assert.ok(existsSync(wtGsd), "worktree .gsd exists (symlink or dir)"); + const wtGsd = join(wtPath, ".sf"); + assert.ok(existsSync(wtGsd), "worktree .sf exists (symlink or dir)"); // Clean up: chdir back before teardown process.chdir(savedCwd); @@ -360,7 +360,7 @@ describe('feature-branch-lifecycle-integration', async () => { // First milestone const mid1 = nextMilestoneId([], true); - mkdirSync(join(repo, ".gsd", "milestones", mid1), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", mid1), { recursive: true }); writeIntegrationBranch(repo, mid1, featureBranch); const wt1 = createAutoWorktree(repo, mid1); @@ -379,7 +379,7 @@ describe('feature-branch-lifecycle-integration', async () => { assert.ok(mid1 !== mid2, "second milestone has different ID"); assert.match(mid2, /^M002-[a-z0-9]{6}$/, "second milestone is M002-xxxxxx"); - mkdirSync(join(repo, ".gsd", "milestones", mid2), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", mid2), { recursive: true }); writeIntegrationBranch(repo, mid2, featureBranch); const wt2 = createAutoWorktree(repo, mid2); diff --git a/src/resources/extensions/sf/tests/integration/git-locale.test.ts b/src/resources/extensions/sf/tests/integration/git-locale.test.ts index 100cfddd0..aac372009 100644 --- a/src/resources/extensions/sf/tests/integration/git-locale.test.ts +++ b/src/resources/extensions/sf/tests/integration/git-locale.test.ts @@ -55,13 +55,13 @@ describe('git-locale', async () => { // ─── nativeAddAllWithExclusions: non-English locale does not throw ─── test('nativeAddAllWithExclusions: non-English locale does not throw', () => { - // Simulate what happens on a German system: .gsd is gitignored, + // Simulate what happens on a German system: .sf is gitignored, // exclusion pathspecs trigger an advisory warning exit code 1. // With LC_ALL=C the English stderr guard should match and suppress. 
const repo = initTempRepo(); - writeFileSync(join(repo, ".gitignore"), ".gsd\n"); - createFile(repo, ".gsd/STATE.md", "# State"); + writeFileSync(join(repo, ".gitignore"), ".sf\n"); + createFile(repo, ".sf/STATE.md", "# State"); createFile(repo, "src/app.ts", "export const x = 1;"); // Save original LC_ALL / LANG and force German locale env @@ -86,7 +86,7 @@ describe('git-locale', async () => { assert.ok( !threw, - "nativeAddAllWithExclusions must not throw on non-English locale when .gsd is gitignored (#1997)" + "nativeAddAllWithExclusions must not throw on non-English locale when .sf is gitignored (#1997)" ); const staged = git(repo, "diff", "--cached", "--name-only"); diff --git a/src/resources/extensions/sf/tests/integration/git-service.test.ts b/src/resources/extensions/sf/tests/integration/git-service.test.ts index 1677940aa..a2adf3fd1 100644 --- a/src/resources/extensions/sf/tests/integration/git-service.test.ts +++ b/src/resources/extensions/sf/tests/integration/git-service.test.ts @@ -253,21 +253,21 @@ describe('git-service', async () => { ); const expectedPaths = [ - ".gsd/activity/", - ".gsd/forensics/", - ".gsd/runtime/", - ".gsd/worktrees/", - ".gsd/parallel/", - ".gsd/auto.lock", - ".gsd/metrics.json", - ".gsd/completed-units*.json", - ".gsd/state-manifest.json", - ".gsd/STATE.md", - ".gsd/sf.db*", - ".gsd/journal/", - ".gsd/doctor-history.jsonl", - ".gsd/event-log.jsonl", - ".gsd/DISCUSSION-MANIFEST.json", + ".sf/activity/", + ".sf/forensics/", + ".sf/runtime/", + ".sf/worktrees/", + ".sf/parallel/", + ".sf/auto.lock", + ".sf/metrics.json", + ".sf/completed-units*.json", + ".sf/state-manifest.json", + ".sf/STATE.md", + ".sf/sf.db*", + ".sf/journal/", + ".sf/doctor-history.jsonl", + ".sf/event-log.jsonl", + ".sf/DISCUSSION-MANIFEST.json", ]; assert.deepStrictEqual( @@ -277,12 +277,12 @@ describe('git-service', async () => { ); assert.ok( - RUNTIME_EXCLUSION_PATHS.includes(".gsd/activity/"), - "includes .gsd/activity/" + 
RUNTIME_EXCLUSION_PATHS.includes(".sf/activity/"), + "includes .sf/activity/" ); assert.ok( - RUNTIME_EXCLUSION_PATHS.includes(".gsd/STATE.md"), - "includes .gsd/STATE.md" + RUNTIME_EXCLUSION_PATHS.includes(".sf/STATE.md"), + "includes .sf/STATE.md" ); // ─── runGit ──────────────────────────────────────────────────────────── @@ -353,12 +353,12 @@ describe('git-service', async () => { const svc = new GitServiceImpl(repo); // Create runtime files (should be excluded from staging) - createFile(repo, ".gsd/activity/log.jsonl", "log data"); - createFile(repo, ".gsd/runtime/state.json", '{"state":true}'); - createFile(repo, ".gsd/STATE.md", "# State"); - createFile(repo, ".gsd/auto.lock", "lock"); - createFile(repo, ".gsd/metrics.json", "{}"); - createFile(repo, ".gsd/worktrees/wt/file.txt", "wt data"); + createFile(repo, ".sf/activity/log.jsonl", "log data"); + createFile(repo, ".sf/runtime/state.json", '{"state":true}'); + createFile(repo, ".sf/STATE.md", "# State"); + createFile(repo, ".sf/auto.lock", "lock"); + createFile(repo, ".sf/metrics.json", "{}"); + createFile(repo, ".sf/worktrees/wt/file.txt", "wt data"); // Create a real file (should be staged) createFile(repo, "src/code.ts", 'console.log("hello");'); @@ -370,20 +370,20 @@ describe('git-service', async () => { // Verify only src/code.ts is in the commit const showStat = run("git show --stat --format= HEAD", repo); assert.ok(showStat.includes("src/code.ts"), "src/code.ts is in the commit"); - assert.ok(!showStat.includes(".gsd/activity"), ".gsd/activity/ excluded from commit"); - assert.ok(!showStat.includes(".gsd/runtime"), ".gsd/runtime/ excluded from commit"); - assert.ok(!showStat.includes("STATE.md"), ".gsd/STATE.md excluded from commit"); - assert.ok(!showStat.includes("auto.lock"), ".gsd/auto.lock excluded from commit"); - assert.ok(!showStat.includes("metrics.json"), ".gsd/metrics.json excluded from commit"); - assert.ok(!showStat.includes(".gsd/worktrees"), ".gsd/worktrees/ excluded from commit"); + 
assert.ok(!showStat.includes(".sf/activity"), ".sf/activity/ excluded from commit"); + assert.ok(!showStat.includes(".sf/runtime"), ".sf/runtime/ excluded from commit"); + assert.ok(!showStat.includes("STATE.md"), ".sf/STATE.md excluded from commit"); + assert.ok(!showStat.includes("auto.lock"), ".sf/auto.lock excluded from commit"); + assert.ok(!showStat.includes("metrics.json"), ".sf/metrics.json excluded from commit"); + assert.ok(!showStat.includes(".sf/worktrees"), ".sf/worktrees/ excluded from commit"); // Verify runtime files are still untracked - // git status --short may collapse to "?? .gsd/" or show individual files + // git status --short may collapse to "?? .sf/" or show individual files // Use --untracked-files=all to force individual listing const statusOut = run("git status --short --untracked-files=all", repo); - assert.ok(statusOut.includes(".gsd/activity/"), "activity still untracked after commit"); - assert.ok(statusOut.includes(".gsd/runtime/"), "runtime still untracked after commit"); - assert.ok(statusOut.includes(".gsd/STATE.md"), "STATE.md still untracked after commit"); + assert.ok(statusOut.includes(".sf/activity/"), "activity still untracked after commit"); + assert.ok(statusOut.includes(".sf/runtime/"), "runtime still untracked after commit"); + assert.ok(statusOut.includes(".sf/STATE.md"), "STATE.md still untracked after commit"); rmSync(repo, { recursive: true, force: true }); }); @@ -391,43 +391,43 @@ describe('git-service', async () => { // ─── GitServiceImpl: smart staging excludes tracked runtime files ────── test('GitServiceImpl: smart staging excludes tracked runtime files', () => { - // Reproduces the real bug: .gsd/ runtime files that are already tracked - // (in the git index) must be excluded from staging even when .gsd/ is + // Reproduces the real bug: .sf/ runtime files that are already tracked + // (in the git index) must be excluded from staging even when .sf/ is // in .gitignore. 
The old pathspec-exclude approach failed silently in // this case and fell back to `git add -A`, staging everything. // // The fix has three layers: // 1. Auto-cleanup: git rm --cached removes tracked runtime files from index // 2. Stage-then-unstage: git add -A + git reset HEAD replaces pathspec excludes - // 3. Pre-checkout discard: git checkout -- .gsd/ clears dirty runtime files + // 3. Pre-checkout discard: git checkout -- .sf/ clears dirty runtime files const repo = initTempRepo(); const svc = new GitServiceImpl(repo); - // Simulate a repo where .gsd/ files were previously force-added - createFile(repo, ".gsd/metrics.json", '{"version":1}'); - createFile(repo, ".gsd/completed-units.json", '["unit1"]'); - createFile(repo, ".gsd/activity/log.jsonl", '{"ts":1}'); + // Simulate a repo where .sf/ files were previously force-added + createFile(repo, ".sf/metrics.json", '{"version":1}'); + createFile(repo, ".sf/completed-units.json", '["unit1"]'); + createFile(repo, ".sf/activity/log.jsonl", '{"ts":1}'); createFile(repo, "src/real.ts", "real code"); - // Force-add .gsd/ files to simulate historical tracking - runGit(repo, ["add", "-f", ".gsd/metrics.json", ".gsd/completed-units.json", ".gsd/activity/log.jsonl", "src/real.ts"]); + // Force-add .sf/ files to simulate historical tracking + runGit(repo, ["add", "-f", ".sf/metrics.json", ".sf/completed-units.json", ".sf/activity/log.jsonl", "src/real.ts"]); runGit(repo, ["commit", "-F", "-"], { input: "init with tracked runtime files" }); - // Add .gitignore with .gsd/ (matches real-world setup from ensureGitignore) - createFile(repo, ".gitignore", ".gsd/\n"); + // Add .gitignore with .sf/ (matches real-world setup from ensureGitignore) + createFile(repo, ".gitignore", ".sf/\n"); runGit(repo, ["add", ".gitignore"]); runGit(repo, ["commit", "-F", "-"], { input: "add gitignore" }); // Verify runtime files are tracked (precondition) - const tracked = run("git ls-files .gsd/", repo); + const tracked = run("git ls-files 
.sf/", repo); assert.ok(tracked.includes("metrics.json"), "precondition: metrics.json tracked"); assert.ok(tracked.includes("completed-units.json"), "precondition: completed-units.json tracked"); assert.ok(tracked.includes("activity/log.jsonl"), "precondition: activity log tracked"); // Now modify both runtime and real files - createFile(repo, ".gsd/metrics.json", '{"version":2}'); - createFile(repo, ".gsd/completed-units.json", '["unit1","unit2"]'); - createFile(repo, ".gsd/activity/log.jsonl", '{"ts":2}'); + createFile(repo, ".sf/metrics.json", '{"version":2}'); + createFile(repo, ".sf/completed-units.json", '["unit1","unit2"]'); + createFile(repo, ".sf/activity/log.jsonl", '{"ts":2}'); createFile(repo, "src/real.ts", "updated code"); // autoCommit should commit real.ts. The first call also runs auto-cleanup @@ -440,12 +440,12 @@ describe('git-service', async () => { // After the commit, runtime files must no longer be in the git index. // They remain on disk but are untracked (protected by .gitignore). 
- const trackedAfter = run("git ls-files .gsd/", repo); - assert.deepStrictEqual(trackedAfter, "", "no .gsd/ runtime files remain in the index"); + const trackedAfter = run("git ls-files .sf/", repo); + assert.deepStrictEqual(trackedAfter, "", "no .sf/ runtime files remain in the index"); // Verify a second autoCommit with changed runtime files does NOT stage them - createFile(repo, ".gsd/metrics.json", '{"version":3}'); - createFile(repo, ".gsd/completed-units.json", '["unit1","unit2","unit3"]'); + createFile(repo, ".sf/metrics.json", '{"version":3}'); + createFile(repo, ".sf/completed-units.json", '["unit1","unit2","unit3"]'); createFile(repo, "src/real.ts", "third version"); const msg2 = svc.autoCommit("execute-task", "M001/S01/T02"); @@ -511,7 +511,7 @@ describe('git-service', async () => { const svc = new GitServiceImpl(repo); // Create only runtime files - createFile(repo, ".gsd/activity/x.jsonl", "data"); + createFile(repo, ".sf/activity/x.jsonl", "data"); const result = svc.autoCommit("task", "T02"); assert.deepStrictEqual(result, null, "autoCommit returns null when only runtime files are dirty"); @@ -529,35 +529,35 @@ describe('git-service', async () => { const repo = initTempRepo(); const svc = new GitServiceImpl(repo); - // Create both a .gsd/ planning file and a regular source file - createFile(repo, ".gsd/milestones/M001/M001-ROADMAP.md", "- [x] S01"); + // Create both a .sf/ planning file and a regular source file + createFile(repo, ".sf/milestones/M001/M001-ROADMAP.md", "- [x] S01"); createFile(repo, "src/feature.ts", "export const y = 2;"); - // Auto-commit with .gsd/ excluded (simulates pre-switch) - const msg = svc.autoCommit("pre-switch", "main", [".gsd/"]); - assert.deepStrictEqual(msg, "chore: auto-commit after pre-switch\n\nSF-Unit: main", "pre-switch autoCommit with .gsd/ exclusion commits"); + // Auto-commit with .sf/ excluded (simulates pre-switch) + const msg = svc.autoCommit("pre-switch", "main", [".sf/"]); + assert.deepStrictEqual(msg, 
"chore: auto-commit after pre-switch\n\nSF-Unit: main", "pre-switch autoCommit with .sf/ exclusion commits"); - // Verify .gsd/ file was NOT committed + // Verify .sf/ file was NOT committed const show = run("git show --stat HEAD", repo); - assert.ok(!show.includes("ROADMAP"), ".gsd/ files excluded from pre-switch auto-commit"); - assert.ok(show.includes("feature.ts"), "non-.gsd/ files included in pre-switch auto-commit"); + assert.ok(!show.includes("ROADMAP"), ".sf/ files excluded from pre-switch auto-commit"); + assert.ok(show.includes("feature.ts"), "non-.sf/ files included in pre-switch auto-commit"); rmSync(repo, { recursive: true, force: true }); }); - // ─── GitServiceImpl: autoCommit extraExclusions — only .gsd/ dirty ──── + // ─── GitServiceImpl: autoCommit extraExclusions — only .sf/ dirty ──── - test('GitServiceImpl: autoCommit extraExclusions — only .gsd/ dirty', () => { + test('GitServiceImpl: autoCommit extraExclusions — only .sf/ dirty', () => { const repo = initTempRepo(); const svc = new GitServiceImpl(repo); - // Create only .gsd/ planning files - createFile(repo, ".gsd/milestones/M001/M001-ROADMAP.md", "- [x] S01"); - createFile(repo, ".gsd/STATE.md", "state content"); + // Create only .sf/ planning files + createFile(repo, ".sf/milestones/M001/M001-ROADMAP.md", "- [x] S01"); + createFile(repo, ".sf/STATE.md", "state content"); - // Auto-commit with .gsd/ excluded — nothing else to commit - const result = svc.autoCommit("pre-switch", "main", [".gsd/"]); - assert.deepStrictEqual(result, null, "autoCommit returns null when only .gsd/ files are dirty and excluded"); + // Auto-commit with .sf/ excluded — nothing else to commit + const result = svc.autoCommit("pre-switch", "main", [".sf/"]); + assert.deepStrictEqual(result, null, "autoCommit returns null when only .sf/ files are dirty and excluded"); rmSync(repo, { recursive: true, force: true }); }); @@ -1122,19 +1122,19 @@ describe('git-service', async () => { runGit(repo, ["config", "user.name", 
"Test"]); // Create and track runtime files (simulates pre-.gitignore state) - mkdirSync(join(repo, ".gsd", "activity"), { recursive: true }); - mkdirSync(join(repo, ".gsd", "runtime"), { recursive: true }); - writeFileSync(join(repo, ".gsd", "completed-units.json"), '["u1"]'); - writeFileSync(join(repo, ".gsd", "metrics.json"), '{}'); - writeFileSync(join(repo, ".gsd", "STATE.md"), "# State"); - writeFileSync(join(repo, ".gsd", "activity", "log.jsonl"), "{}"); - writeFileSync(join(repo, ".gsd", "runtime", "data.json"), "{}"); + mkdirSync(join(repo, ".sf", "activity"), { recursive: true }); + mkdirSync(join(repo, ".sf", "runtime"), { recursive: true }); + writeFileSync(join(repo, ".sf", "completed-units.json"), '["u1"]'); + writeFileSync(join(repo, ".sf", "metrics.json"), '{}'); + writeFileSync(join(repo, ".sf", "STATE.md"), "# State"); + writeFileSync(join(repo, ".sf", "activity", "log.jsonl"), "{}"); + writeFileSync(join(repo, ".sf", "runtime", "data.json"), "{}"); writeFileSync(join(repo, "src.ts"), "code"); runGit(repo, ["add", "-A"]); runGit(repo, ["commit", "-m", "init"]); // Precondition: runtime files are tracked - const trackedBefore = run("git ls-files .gsd/", repo); + const trackedBefore = run("git ls-files .sf/", repo); assert.ok(trackedBefore.includes("completed-units.json"), "untrack: precondition — completed-units tracked"); assert.ok(trackedBefore.includes("metrics.json"), "untrack: precondition — metrics tracked"); @@ -1142,7 +1142,7 @@ describe('git-service', async () => { untrackRuntimeFiles(repo); // Runtime files should be removed from the index - const trackedAfter = run("git ls-files .gsd/", repo); + const trackedAfter = run("git ls-files .sf/", repo); assert.deepStrictEqual(trackedAfter, "", "untrack: all runtime files removed from index"); // Non-runtime files remain tracked @@ -1150,9 +1150,9 @@ describe('git-service', async () => { assert.ok(srcTracked.includes("src.ts"), "untrack: non-runtime files remain tracked"); // Files still exist 
on disk - assert.ok(existsSync(join(repo, ".gsd", "completed-units.json")), + assert.ok(existsSync(join(repo, ".sf", "completed-units.json")), "untrack: completed-units.json still on disk"); - assert.ok(existsSync(join(repo, ".gsd", "metrics.json")), + assert.ok(existsSync(join(repo, ".sf", "metrics.json")), "untrack: metrics.json still on disk"); // Idempotent — running again doesn't error @@ -1173,18 +1173,18 @@ describe('git-service', async () => { runGit(repo, ["add", "-A"]); runGit(repo, ["commit", "-m", "init"]); - // Create .gsd/ runtime files + milestone artifacts + a normal source file - mkdirSync(join(repo, ".gsd", "milestones", "M001"), { recursive: true }); - mkdirSync(join(repo, ".gsd", "runtime"), { recursive: true }); - mkdirSync(join(repo, ".gsd", "activity"), { recursive: true }); - writeFileSync(join(repo, ".gsd", "milestones", "M001", "ROADMAP.md"), "# Roadmap"); - writeFileSync(join(repo, ".gsd", "PREFERENCES.md"), "---\nversion: 1\n---"); - writeFileSync(join(repo, ".gsd", "STATE.md"), "# State"); - writeFileSync(join(repo, ".gsd", "runtime", "units.json"), "{}"); - writeFileSync(join(repo, ".gsd", "activity", "log.jsonl"), "{}"); + // Create .sf/ runtime files + milestone artifacts + a normal source file + mkdirSync(join(repo, ".sf", "milestones", "M001"), { recursive: true }); + mkdirSync(join(repo, ".sf", "runtime"), { recursive: true }); + mkdirSync(join(repo, ".sf", "activity"), { recursive: true }); + writeFileSync(join(repo, ".sf", "milestones", "M001", "ROADMAP.md"), "# Roadmap"); + writeFileSync(join(repo, ".sf", "PREFERENCES.md"), "---\nversion: 1\n---"); + writeFileSync(join(repo, ".sf", "STATE.md"), "# State"); + writeFileSync(join(repo, ".sf", "runtime", "units.json"), "{}"); + writeFileSync(join(repo, ".sf", "activity", "log.jsonl"), "{}"); writeFileSync(join(repo, "src.ts"), "const x = 1;"); - // smartStage excludes only runtime paths, not all of .gsd/ (#1326) + // smartStage excludes only runtime paths, not all of .sf/ (#1326) 
const svc = new GitServiceImpl(repo); const msg = svc.commit({ message: "test commit" }); assert.ok(msg !== null, "smartStage: commit succeeds"); @@ -1192,11 +1192,11 @@ describe('git-service', async () => { const committed = run("git show --name-only HEAD", repo); assert.ok(committed.includes("src.ts"), "smartStage: source files ARE in commit"); // Runtime files should NOT be committed - assert.ok(!committed.includes(".gsd/STATE.md"), "smartStage: STATE.md excluded (runtime)"); - assert.ok(!committed.includes(".gsd/runtime/"), "smartStage: runtime/ excluded"); - assert.ok(!committed.includes(".gsd/activity/"), "smartStage: activity/ excluded"); + assert.ok(!committed.includes(".sf/STATE.md"), "smartStage: STATE.md excluded (runtime)"); + assert.ok(!committed.includes(".sf/runtime/"), "smartStage: runtime/ excluded"); + assert.ok(!committed.includes(".sf/activity/"), "smartStage: activity/ excluded"); // Milestone artifacts SHOULD be committed when not gitignored (#1326) - assert.ok(committed.includes(".gsd/milestones/"), "smartStage: milestone artifacts ARE committed"); + assert.ok(committed.includes(".sf/milestones/"), "smartStage: milestone artifacts ARE committed"); rmSync(repo, { recursive: true, force: true }); }); @@ -1213,7 +1213,7 @@ describe('git-service', async () => { assert.deepStrictEqual(readIntegrationBranch(repo, "M001"), "f-123-new-thing", "writeIntegrationBranch: metadata file exists on disk"); - // No commit — .gsd/ is managed externally + // No commit — .sf/ is managed externally const commitsAfter = run("git rev-list --count HEAD", repo); assert.deepStrictEqual(commitsBefore, commitsAfter, "writeIntegrationBranch: no git commit created for integration branch"); @@ -1221,20 +1221,20 @@ describe('git-service', async () => { rmSync(repo, { recursive: true, force: true }); }); - // ─── ensureGitignore: always adds .gsd to gitignore ────────────────── + // ─── ensureGitignore: always adds .sf to gitignore ────────────────── - test('ensureGitignore: 
adds .gsd entry', async () => { + test('ensureGitignore: adds .sf entry', async () => { const { ensureGitignore } = await import("../../gitignore.ts"); const repo = mkdtempSync(join(tmpdir(), "sf-gitignore-external-state-")); - // Should add .gsd to gitignore (external state dir is a symlink) + // Should add .sf to gitignore (external state dir is a symlink) const modified = ensureGitignore(repo); assert.ok(modified, "ensureGitignore: gitignore was modified"); const { readFileSync } = await import("node:fs"); const content = readFileSync(join(repo, ".gitignore"), "utf-8"); const lines = content.split("\n").map(l => l.trim()).filter(l => l && !l.startsWith("#")); - assert.ok(lines.includes(".gsd"), "ensureGitignore: .gitignore contains .gsd"); + assert.ok(lines.includes(".sf"), "ensureGitignore: .gitignore contains .sf"); // Idempotent — calling again doesn't add duplicates const modified2 = ensureGitignore(repo); @@ -1243,25 +1243,25 @@ describe('git-service', async () => { rmSync(repo, { recursive: true, force: true }); }); - // ─── nativeAddAllWithExclusions: symlinked .gsd fallback ─────────────── + // ─── nativeAddAllWithExclusions: symlinked .sf fallback ─────────────── - test('nativeAddAllWithExclusions: symlinked .gsd fallback', () => { - // When .gsd is a symlink, git rejects `:!.gsd/...` pathspecs with + test('nativeAddAllWithExclusions: symlinked .sf fallback', () => { + // When .sf is a symlink, git rejects `:!.sf/...` pathspecs with // "fatal: pathspec '...' is beyond a symbolic link". The fix falls // back to `git add -u` (tracked files only), NOT `git add -A`. 
const repo = initTempRepo(); - // Create the real .gsd directory outside the repo, then symlink it + // Create the real .sf directory outside the repo, then symlink it const externalGsd = mkdtempSync(join(tmpdir(), "sf-external-")); mkdirSync(join(externalGsd, "activity"), { recursive: true }); writeFileSync(join(externalGsd, "activity", "log.jsonl"), "log data"); writeFileSync(join(externalGsd, "STATE.md"), "# State"); - // Symlink .gsd -> external directory - symlinkSync(externalGsd, join(repo, ".gsd")); + // Symlink .sf -> external directory + symlinkSync(externalGsd, join(repo, ".sf")); - // Add .gitignore so .gsd/ is ignored - writeFileSync(join(repo, ".gitignore"), ".gsd\n"); + // Add .gitignore so .sf/ is ignored + writeFileSync(join(repo, ".gitignore"), ".sf\n"); // Create a tracked file and commit it, then modify it createFile(repo, "src/app.ts", "export const x = 1;"); @@ -1273,7 +1273,7 @@ describe('git-service', async () => { // This is the key scenario: large untracked dirs that git add -A would traverse createFile(repo, "data/large-model.bin", "pretend this is 10GB"); - // nativeAddAllWithExclusions should NOT throw despite .gsd being a symlink + // nativeAddAllWithExclusions should NOT throw despite .sf being a symlink let threw = false; try { nativeAddAllWithExclusions(repo, RUNTIME_EXCLUSION_PATHS); @@ -1281,31 +1281,31 @@ describe('git-service', async () => { threw = true; console.error(" unexpected error:", e); } - assert.ok(!threw, "nativeAddAllWithExclusions does not throw with symlinked .gsd"); + assert.ok(!threw, "nativeAddAllWithExclusions does not throw with symlinked .sf"); // Verify the tracked modified file was staged const staged = run("git diff --cached --name-only", repo); - assert.ok(staged.includes("src/app.ts"), "modified tracked file staged despite symlinked .gsd"); + assert.ok(staged.includes("src/app.ts"), "modified tracked file staged despite symlinked .sf"); // CRITICAL: untracked files must NOT be staged — the symlink 
fallback // should use `git add -u` (tracked only), not `git add -A` (all files). // Using `git add -A` on a repo with large untracked data dirs hangs. (#1977) assert.ok(!staged.includes("data/large-model.bin"), "symlink fallback must not stage untracked files (would hang on large repos)"); - assert.ok(!staged.includes(".gsd"), ".gsd content not staged"); + assert.ok(!staged.includes(".sf"), ".sf content not staged"); rmSync(repo, { recursive: true, force: true }); rmSync(externalGsd, { recursive: true, force: true }); }); - // ─── nativeAddAllWithExclusions: non-symlinked .gsd still works ─────── + // ─── nativeAddAllWithExclusions: non-symlinked .sf still works ─────── - test('nativeAddAllWithExclusions: non-symlinked .gsd still works', () => { + test('nativeAddAllWithExclusions: non-symlinked .sf still works', () => { // Verify the normal (non-symlink) case still works with pathspec exclusions const repo = initTempRepo(); - createFile(repo, ".gsd/activity/log.jsonl", "log data"); - createFile(repo, ".gsd/STATE.md", "# State"); + createFile(repo, ".sf/activity/log.jsonl", "log data"); + createFile(repo, ".sf/STATE.md", "# State"); createFile(repo, "src/code.ts", "export const y = 2;"); let threw = false; @@ -1314,10 +1314,10 @@ describe('git-service', async () => { } catch { threw = true; } - assert.ok(!threw, "nativeAddAllWithExclusions works with normal .gsd directory"); + assert.ok(!threw, "nativeAddAllWithExclusions works with normal .sf directory"); const staged = run("git diff --cached --name-only", repo); - assert.ok(staged.includes("src/code.ts"), "real file staged with normal .gsd"); + assert.ok(staged.includes("src/code.ts"), "real file staged with normal .sf"); rmSync(repo, { recursive: true, force: true }); }); @@ -1429,24 +1429,24 @@ describe('git-service', async () => { rmSync(repo, { recursive: true, force: true }); }); - // ─── autoCommit: symlinked .gsd does NOT stage milestone artifacts (#2247) ── + // ─── autoCommit: symlinked .sf does NOT 
stage milestone artifacts (#2247) ── - test('autoCommit: symlinked .gsd does NOT stage milestone artifacts (#2247)', () => { - // When .gsd is a symlink (external state project), .gsd/ files live outside + test('autoCommit: symlinked .sf does NOT stage milestone artifacts (#2247)', () => { + // When .sf is a symlink (external state project), .sf/ files live outside // the repo by design. smartStage() must NOT force-stage them into git — the // .gitignore exclusion is correct and intentional. const repo = initTempRepo(); - // Create an external .gsd directory and symlink it into the repo + // Create an external .sf directory and symlink it into the repo const externalGsd = mkdtempSync(join(tmpdir(), "sf-external-symlink-")); mkdirSync(join(externalGsd, "milestones", "M009"), { recursive: true }); mkdirSync(join(externalGsd, "activity"), { recursive: true }); mkdirSync(join(externalGsd, "runtime"), { recursive: true }); - symlinkSync(externalGsd, join(repo, ".gsd")); + symlinkSync(externalGsd, join(repo, ".sf")); - // .gitignore blocks .gsd (as ensureGitignore would do for symlink projects) - writeFileSync(join(repo, ".gitignore"), ".gsd\n"); + // .gitignore blocks .sf (as ensureGitignore would do for symlink projects) + writeFileSync(join(repo, ".gitignore"), ".sf\n"); run('git add .gitignore', repo); run('git commit -m "add gitignore"', repo); @@ -1471,8 +1471,8 @@ describe('git-service', async () => { const committed = run("git show --name-only HEAD", repo); assert.ok(committed.includes("src/feature.ts"), "symlink autoCommit: source file committed"); - assert.ok(!committed.includes(".gsd/milestones/"), - "symlink autoCommit: .gsd/milestones/ files are NOT staged (external state stays external)"); + assert.ok(!committed.includes(".sf/milestones/"), + "symlink autoCommit: .sf/milestones/ files are NOT staged (external state stays external)"); try { rmSync(repo, { recursive: true, force: true }); } catch {} try { rmSync(externalGsd, { recursive: true, force: true }); 
} catch {} diff --git a/src/resources/extensions/sf/tests/integration/gitignore-staging-2570.test.ts b/src/resources/extensions/sf/tests/integration/gitignore-staging-2570.test.ts index 200a429f7..7b02fcd5f 100644 --- a/src/resources/extensions/sf/tests/integration/gitignore-staging-2570.test.ts +++ b/src/resources/extensions/sf/tests/integration/gitignore-staging-2570.test.ts @@ -2,8 +2,8 @@ * gitignore-staging-2570.test.ts — Regression tests for #2570. * * Verifies that: - * 1. isGsdGitignored() detects when .gsd is covered by .gitignore - * 2. The rethink prompt uses {{commitInstruction}} instead of hardcoded git add .gsd/ + * 1. isGsdGitignored() detects when .sf is covered by .gitignore + * 2. The rethink prompt uses {{commitInstruction}} instead of hardcoded git add .sf/ * 3. rethink.ts passes the correct commitInstruction based on gitignore state * * Uses real temporary git repos — no mocks. @@ -53,25 +53,25 @@ function cleanup(dir: string): void { // ─── isGsdGitignored ───────────────────────────────────────────────── -test("isGsdGitignored returns true when .gsd is in .gitignore (#2570)", (t) => { +test("isGsdGitignored returns true when .sf is in .gitignore (#2570)", (t) => { const dir = makeTempRepo(); t.after(() => { cleanup(dir); }); - writeFileSync(join(dir, ".gitignore"), ".gsd\n"); + writeFileSync(join(dir, ".gitignore"), ".sf\n"); assert.equal(isGsdGitignored(dir), true); }); -test("isGsdGitignored returns true when .gsd/ (with slash) is in .gitignore", (t) => { +test("isGsdGitignored returns true when .sf/ (with slash) is in .gitignore", (t) => { const dir = makeTempRepo(); t.after(() => { cleanup(dir); }); - writeFileSync(join(dir, ".gitignore"), ".gsd/\n"); - // Create .gsd directory so git check-ignore can match the directory-only pattern - mkdirSync(join(dir, ".gsd"), { recursive: true }); + writeFileSync(join(dir, ".gitignore"), ".sf/\n"); + // Create .sf directory so git check-ignore can match the directory-only pattern + 
mkdirSync(join(dir, ".sf"), { recursive: true }); assert.equal(isGsdGitignored(dir), true); }); -test("isGsdGitignored returns false when .gsd is NOT in .gitignore", (t) => { +test("isGsdGitignored returns false when .sf is NOT in .gitignore", (t) => { const dir = makeTempRepo(); t.after(() => { cleanup(dir); }); @@ -89,7 +89,7 @@ test("isGsdGitignored returns false when no .gitignore exists", (t) => { // ─── rethink.md prompt template ───────────────────────────────────── -test("rethink.md prompt uses {{commitInstruction}} not hardcoded git add .gsd/ (#2570)", () => { +test("rethink.md prompt uses {{commitInstruction}} not hardcoded git add .sf/ (#2570)", () => { const promptPath = join( import.meta.dirname!, "..", @@ -99,10 +99,10 @@ test("rethink.md prompt uses {{commitInstruction}} not hardcoded git add .gsd/ ( ); const content = readFileSync(promptPath, "utf-8"); - // Must NOT contain hardcoded `git add .gsd/` + // Must NOT contain hardcoded `git add .sf/` assert.ok( - !content.includes("git add .gsd/"), - `rethink.md must not contain hardcoded "git add .gsd/" — use {{commitInstruction}} instead.\nFound: ${content.match(/.*git add .gsd\/.*/)?.[0]}`, + !content.includes("git add .sf/"), + `rethink.md must not contain hardcoded "git add .sf/" — use {{commitInstruction}} instead.\nFound: ${content.match(/.*git add .sf\/.*/)?.[0]}`, ); // Must contain the {{commitInstruction}} placeholder @@ -112,9 +112,9 @@ test("rethink.md prompt uses {{commitInstruction}} not hardcoded git add .gsd/ ( ); }); -// ─── smartStage respects .gitignore for .gsd/ (#2570) ─────────────── +// ─── smartStage respects .gitignore for .sf/ (#2570) ─────────────── -test("smartStage does not stage .gsd/ files when .gsd is gitignored (#2570)", async (t) => { +test("smartStage does not stage .sf/ files when .sf is gitignored (#2570)", async (t) => { // This imports GitServiceImpl to test through the public commit() method // which calls smartStage() internally. 
const { GitServiceImpl } = await import("../../git-service.ts"); @@ -122,29 +122,29 @@ test("smartStage does not stage .gsd/ files when .gsd is gitignored (#2570)", as const dir = makeTempRepo(); t.after(() => { cleanup(dir); }); - // Add .gsd to .gitignore - writeFileSync(join(dir, ".gitignore"), ".gsd\nnode_modules/\n"); + // Add .sf to .gitignore + writeFileSync(join(dir, ".gitignore"), ".sf\nnode_modules/\n"); git(dir, "add", ".gitignore"); - git(dir, "commit", "-m", "add gitignore with .gsd"); + git(dir, "commit", "-m", "add gitignore with .sf"); - // Create .gsd/ milestone artifacts (NOT tracked, NOT symlinked) - mkdirSync(join(dir, ".gsd", "milestones", "M001", "slices", "S01"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), "# Plan"); - writeFileSync(join(dir, ".gsd", "DECISIONS.md"), "# Decisions"); + // Create .sf/ milestone artifacts (NOT tracked, NOT symlinked) + mkdirSync(join(dir, ".sf", "milestones", "M001", "slices", "S01"), { recursive: true }); + writeFileSync(join(dir, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), "# Plan"); + writeFileSync(join(dir, ".sf", "DECISIONS.md"), "# Decisions"); // Create a normal source file writeFileSync(join(dir, "src.ts"), "export const x = 1;"); // Commit through GitServiceImpl (uses smartStage internally) const svc = new GitServiceImpl(dir); - const msg = svc.commit({ message: "test: should not include .gsd files" }); + const msg = svc.commit({ message: "test: should not include .sf files" }); assert.ok(msg !== null, "commit should succeed"); // Check what was committed const committed = git(dir, "show", "--name-only", "HEAD"); assert.ok(committed.includes("src.ts"), "source files ARE committed"); assert.ok( - !committed.includes(".gsd/"), - `gitignored .gsd/ files must NOT be staged by smartStage.\nCommitted files: ${committed}`, + !committed.includes(".sf/"), + `gitignored .sf/ files must NOT be staged by smartStage.\nCommitted 
files: ${committed}`, ); }); diff --git a/src/resources/extensions/sf/tests/integration/gitignore-tracked-sf.test.ts b/src/resources/extensions/sf/tests/integration/gitignore-tracked-sf.test.ts index 90952ace7..0407d4178 100644 --- a/src/resources/extensions/sf/tests/integration/gitignore-tracked-sf.test.ts +++ b/src/resources/extensions/sf/tests/integration/gitignore-tracked-sf.test.ts @@ -1,9 +1,9 @@ /** * gitignore-tracked-sf.test.ts — Regression tests for #1364. * - * Verifies that ensureGitignore() does NOT add ".gsd" to .gitignore - * when .gsd/ contains git-tracked files, and that migrateToExternalState() - * aborts migration for tracked .gsd/ directories. + * Verifies that ensureGitignore() does NOT add ".sf" to .gitignore + * when .sf/ contains git-tracked files, and that migrateToExternalState() + * aborts migration for tracked .sf/ directories. * * Uses real temporary git repos — no mocks. */ @@ -53,55 +53,55 @@ function cleanup(dir: string): void { // ─── hasGitTrackedGsdFiles ─────────────────────────────────────────── -test("hasGitTrackedGsdFiles returns false when .gsd/ does not exist", (t) => { +test("hasGitTrackedGsdFiles returns false when .sf/ does not exist", (t) => { const dir = makeTempRepo(); t.after(() => { cleanup(dir); }); assert.equal(hasGitTrackedGsdFiles(dir), false); }); -test("hasGitTrackedGsdFiles returns true when .gsd/ has tracked files", (t) => { +test("hasGitTrackedGsdFiles returns true when .sf/ has tracked files", (t) => { const dir = makeTempRepo(); t.after(() => { cleanup(dir); }); - mkdirSync(join(dir, ".gsd", "milestones"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "PROJECT.md"), "# Test Project\n"); - git(dir, "add", ".gsd/PROJECT.md"); + mkdirSync(join(dir, ".sf", "milestones"), { recursive: true }); + writeFileSync(join(dir, ".sf", "PROJECT.md"), "# Test Project\n"); + git(dir, "add", ".sf/PROJECT.md"); git(dir, "commit", "-m", "add sf"); assert.equal(hasGitTrackedGsdFiles(dir), true); }); 
-test("hasGitTrackedGsdFiles returns false when .gsd/ exists but is untracked", (t) => { +test("hasGitTrackedGsdFiles returns false when .sf/ exists but is untracked", (t) => { const dir = makeTempRepo(); t.after(() => { cleanup(dir); }); - mkdirSync(join(dir, ".gsd"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "STATE.md"), "state\n"); + mkdirSync(join(dir, ".sf"), { recursive: true }); + writeFileSync(join(dir, ".sf", "STATE.md"), "state\n"); // Not git-added — should return false assert.equal(hasGitTrackedGsdFiles(dir), false); }); -// ─── ensureGitignore — tracked .gsd/ protection ───────────────────── +// ─── ensureGitignore — tracked .sf/ protection ───────────────────── -test("ensureGitignore does NOT add .gsd when .gsd/ has tracked files (#1364)", (t) => { +test("ensureGitignore does NOT add .sf when .sf/ has tracked files (#1364)", (t) => { const dir = makeTempRepo(); try { - // Set up .gsd/ with tracked files - mkdirSync(join(dir, ".gsd", "milestones"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "PROJECT.md"), "# Test Project\n"); - writeFileSync(join(dir, ".gsd", "DECISIONS.md"), "# Decisions\n"); - git(dir, "add", ".gsd/"); + // Set up .sf/ with tracked files + mkdirSync(join(dir, ".sf", "milestones"), { recursive: true }); + writeFileSync(join(dir, ".sf", "PROJECT.md"), "# Test Project\n"); + writeFileSync(join(dir, ".sf", "DECISIONS.md"), "# Decisions\n"); + git(dir, "add", ".sf/"); git(dir, "commit", "-m", "track sf state"); // Run ensureGitignore ensureGitignore(dir); - // Verify .gsd is NOT in .gitignore + // Verify .sf is NOT in .gitignore const gitignore = readFileSync(join(dir, ".gitignore"), "utf-8"); const lines = gitignore.split("\n").map((l) => l.trim()); assert.ok( - !lines.includes(".gsd"), - `Expected .gsd NOT to appear in .gitignore, but it does:\n${gitignore}`, + !lines.includes(".sf"), + `Expected .sf NOT to appear in .gitignore, but it does:\n${gitignore}`, ); // Other baseline patterns should still be present 
@@ -112,18 +112,18 @@ test("ensureGitignore does NOT add .gsd when .gsd/ has tracked files (#1364)", ( } }); -test("ensureGitignore adds .gsd when .gsd/ has NO tracked files", (t) => { +test("ensureGitignore adds .sf when .sf/ has NO tracked files", (t) => { const dir = makeTempRepo(); try { - // Run ensureGitignore (no .gsd/ at all) + // Run ensureGitignore (no .sf/ at all) ensureGitignore(dir); - // Verify .gsd IS in .gitignore + // Verify .sf IS in .gitignore const gitignore = readFileSync(join(dir, ".gitignore"), "utf-8"); const lines = gitignore.split("\n").map((l) => l.trim()); assert.ok( - lines.includes(".gsd"), - `Expected .gsd in .gitignore, but it's missing:\n${gitignore}`, + lines.includes(".sf"), + `Expected .sf in .gitignore, but it's missing:\n${gitignore}`, ); } finally { cleanup(dir); @@ -141,24 +141,24 @@ test("ensureGitignore respects manageGitignore: false", (t) => { // ─── ensureGitignore — verify no tracked files become invisible ───── -test("ensureGitignore with tracked .gsd/ does not cause git to see files as deleted", (t) => { +test("ensureGitignore with tracked .sf/ does not cause git to see files as deleted", (t) => { const dir = makeTempRepo(); try { - // Create tracked .gsd/ files - mkdirSync(join(dir, ".gsd", "milestones", "M001"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "PROJECT.md"), "# Project\n"); + // Create tracked .sf/ files + mkdirSync(join(dir, ".sf", "milestones", "M001"), { recursive: true }); + writeFileSync(join(dir, ".sf", "PROJECT.md"), "# Project\n"); writeFileSync( - join(dir, ".gsd", "milestones", "M001", "M001-CONTEXT.md"), + join(dir, ".sf", "milestones", "M001", "M001-CONTEXT.md"), "# M001\n", ); - git(dir, "add", ".gsd/"); + git(dir, "add", ".sf/"); git(dir, "commit", "-m", "track sf state"); // Run ensureGitignore ensureGitignore(dir); - // git status should show NO deleted files under .gsd/ - const status = git(dir, "status", "--porcelain", ".gsd/"); + // git status should show NO deleted files 
under .sf/ + const status = git(dir, "status", "--porcelain", ".sf/"); // Filter for deletions (lines starting with " D" or "D ") const deletions = status @@ -168,7 +168,7 @@ test("ensureGitignore with tracked .gsd/ does not cause git to see files as dele assert.equal( deletions.length, 0, - `Expected no deleted .gsd/ files, but found:\n${deletions.join("\n")}`, + `Expected no deleted .sf/ files, but found:\n${deletions.join("\n")}`, ); } finally { cleanup(dir); @@ -178,10 +178,10 @@ test("ensureGitignore with tracked .gsd/ does not cause git to see files as dele test("hasGitTrackedGsdFiles returns true (fail-safe) when git is not available", (t) => { const dir = makeTempRepo(); try { - // Create and track .gsd/ files - mkdirSync(join(dir, ".gsd"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "PROJECT.md"), "# Project\n"); - git(dir, "add", ".gsd/"); + // Create and track .sf/ files + mkdirSync(join(dir, ".sf"), { recursive: true }); + writeFileSync(join(dir, ".sf", "PROJECT.md"), "# Project\n"); + git(dir, "add", ".sf/"); git(dir, "commit", "-m", "track sf"); // Corrupt the git index to simulate git failure @@ -197,30 +197,30 @@ test("hasGitTrackedGsdFiles returns true (fail-safe) when git is not available", } }); -// ─── migrateToExternalState — tracked .gsd/ protection ────────────── +// ─── migrateToExternalState — tracked .sf/ protection ────────────── -test("migrateToExternalState aborts when .gsd/ has tracked files (#1364)", (t) => { +test("migrateToExternalState aborts when .sf/ has tracked files (#1364)", (t) => { const dir = makeTempRepo(); try { - // Create tracked .gsd/ files - mkdirSync(join(dir, ".gsd", "milestones"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "PROJECT.md"), "# Project\n"); - git(dir, "add", ".gsd/"); + // Create tracked .sf/ files + mkdirSync(join(dir, ".sf", "milestones"), { recursive: true }); + writeFileSync(join(dir, ".sf", "PROJECT.md"), "# Project\n"); + git(dir, "add", ".sf/"); git(dir, "commit", "-m", 
"track sf state"); // Attempt migration — should abort without moving anything const result = migrateToExternalState(dir); - assert.equal(result.migrated, false, "Should NOT migrate tracked .gsd/"); + assert.equal(result.migrated, false, "Should NOT migrate tracked .sf/"); assert.equal(result.error, undefined, "Should not report an error — just skip"); - // .gsd/ should still be a real directory, not a symlink - assert.ok(existsSync(join(dir, ".gsd", "PROJECT.md")), ".gsd/PROJECT.md should still exist"); + // .sf/ should still be a real directory, not a symlink + assert.ok(existsSync(join(dir, ".sf", "PROJECT.md")), ".sf/PROJECT.md should still exist"); - // No .gsd.migrating should exist + // No .sf.migrating should exist assert.ok( - !existsSync(join(dir, ".gsd.migrating")), - ".gsd.migrating should not exist", + !existsSync(join(dir, ".sf.migrating")), + ".sf.migrating should not exist", ); } finally { cleanup(dir); @@ -230,13 +230,13 @@ test("migrateToExternalState aborts when .gsd/ has tracked files (#1364)", (t) = test("migrateToExternalState cleans git index so tracked files don't show as deleted (#1364 path 2)", (t) => { const dir = makeTempRepo(); try { - // Track .gsd/ files, then untrack them so migration proceeds - mkdirSync(join(dir, ".gsd", "milestones", "M001"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "PROJECT.md"), "# Project\n"); - writeFileSync(join(dir, ".gsd", "milestones", "M001", "PLAN.md"), "# Plan\n"); - git(dir, "add", ".gsd/"); + // Track .sf/ files, then untrack them so migration proceeds + mkdirSync(join(dir, ".sf", "milestones", "M001"), { recursive: true }); + writeFileSync(join(dir, ".sf", "PROJECT.md"), "# Project\n"); + writeFileSync(join(dir, ".sf", "milestones", "M001", "PLAN.md"), "# Plan\n"); + git(dir, "add", ".sf/"); git(dir, "commit", "-m", "track sf state"); - git(dir, "rm", "-r", "--cached", ".gsd/"); + git(dir, "rm", "-r", "--cached", ".sf/"); git(dir, "commit", "-m", "untrack sf (simulates pre-migration 
project)"); const result = migrateToExternalState(dir); diff --git a/src/resources/extensions/sf/tests/integration/headless-command.ts b/src/resources/extensions/sf/tests/integration/headless-command.ts index 212242edc..db49629e4 100644 --- a/src/resources/extensions/sf/tests/integration/headless-command.ts +++ b/src/resources/extensions/sf/tests/integration/headless-command.ts @@ -2,13 +2,13 @@ * Integration test for `sf headless` CLI subcommand * * Validates that the headless CLI entry point works end-to-end: - * 1. Creates a temp dir with a complete .gsd/ project fixture + * 1. Creates a temp dir with a complete .sf/ project fixture * 2. Initializes a git repo in the temp dir * 3. Spawns `node dist/loader.js headless --json next` as a child process * 4. Waits for the process to exit (with a 5-minute timeout) * 5. Validates exit code, JSONL stdout, stderr progress, and task artifact * - * Auth: Uses OAuth credentials from ~/.gsd/agent/auth.json (Claude Code Max). + * Auth: Uses OAuth credentials from ~/.sf/agent/auth.json (Claude Code Max). * Falls back to ANTHROPIC_API_KEY env var if OAuth is not configured (D013). * * Usage: @@ -29,7 +29,7 @@ const TIMEOUT_MS = parseInt(process.env.HEADLESS_TIMEOUT_MS ?? "300000", 10); // const DRY_RUN = process.argv.includes("--dry-run"); // ── Fixture Data ───────────────────────────────────────────────────────────── -// A complete .gsd/ project state that deriveState() can parse. +// A complete .sf/ project state that deriveState() can parse. // The trivial task asks the agent to create a single file — zero questions needed. 
const FIXTURE_PROJECT_MD = `# Project @@ -247,8 +247,8 @@ function createFixture(): string { execSync('git config user.email "test@test.com"', { cwd: tmpDir, stdio: "pipe" }); execSync('git config user.name "Test"', { cwd: tmpDir, stdio: "pipe" }); - // Create .gsd/ structure - const sfDir = join(tmpDir, ".gsd"); + // Create .sf/ structure + const sfDir = join(tmpDir, ".sf"); const milestonesDir = join(sfDir, "milestones"); const m001Dir = join(milestonesDir, "M001"); const slicesDir = join(m001Dir, "slices"); @@ -267,11 +267,11 @@ function createFixture(): string { // Add .gitignore for runtime files writeFileSync(join(tmpDir, ".gitignore"), [ - ".gsd/auto.lock", - ".gsd/completed-units.json", - ".gsd/metrics.json", - ".gsd/activity/", - ".gsd/runtime/", + ".sf/auto.lock", + ".sf/completed-units.json", + ".sf/metrics.json", + ".sf/activity/", + ".sf/runtime/", ].join("\n") + "\n"); // Initial commit so SF has a clean git state @@ -330,12 +330,12 @@ async function main(): Promise<void> { // Validate fixture structure const requiredFiles = [ - ".gsd/PROJECT.md", - ".gsd/STATE.md", - ".gsd/milestones/M001/M001-CONTEXT.md", - ".gsd/milestones/M001/M001-ROADMAP.md", - ".gsd/milestones/M001/slices/S01/S01-PLAN.md", - ".gsd/milestones/M001/slices/S01/tasks/T01-PLAN.md", + ".sf/PROJECT.md", + ".sf/STATE.md", + ".sf/milestones/M001/M001-CONTEXT.md", + ".sf/milestones/M001/M001-ROADMAP.md", + ".sf/milestones/M001/slices/S01/S01-PLAN.md", + ".sf/milestones/M001/slices/S01/tasks/T01-PLAN.md", ]; for (const file of requiredFiles) { @@ -351,9 +351,9 @@ async function main(): Promise<void> { // ── Step 2: Validate environment ──────────────────────────────────────── console.log("\n[2/6] Validating environment..."); - // Auth: prefer OAuth credentials from ~/.gsd/agent/auth.json (D013). + // Auth: prefer OAuth credentials from ~/.sf/agent/auth.json (D013). // Fall back to ANTHROPIC_API_KEY env var if present. 
- const authJsonPath = join(homedir(), ".gsd", "agent", "auth.json"); + const authJsonPath = join(homedir(), ".sf", "agent", "auth.json"); let hasOAuth = false; if (existsSync(authJsonPath)) { try { @@ -365,12 +365,12 @@ async function main(): Promise<void> { } if (hasOAuth) { - console.log(" OK OAuth credentials found in ~/.gsd/agent/auth.json (Claude Code Max)"); + console.log(" OK OAuth credentials found in ~/.sf/agent/auth.json (Claude Code Max)"); } else if (process.env.ANTHROPIC_API_KEY) { console.log(" OK ANTHROPIC_API_KEY present (env var fallback)"); } else { console.error(" FAIL: No auth available. Need either:"); - console.error(" - OAuth credentials in ~/.gsd/agent/auth.json (Claude Code Max)"); + console.error(" - OAuth credentials in ~/.sf/agent/auth.json (Claude Code Max)"); console.error(" - ANTHROPIC_API_KEY environment variable"); cleanup(fixtureDir); process.exit(1); diff --git a/src/resources/extensions/sf/tests/integration/idle-recovery.test.ts b/src/resources/extensions/sf/tests/integration/idle-recovery.test.ts index 13fc457f0..f2be258b7 100644 --- a/src/resources/extensions/sf/tests/integration/idle-recovery.test.ts +++ b/src/resources/extensions/sf/tests/integration/idle-recovery.test.ts @@ -13,7 +13,7 @@ import assert from 'node:assert/strict'; function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-idle-recovery-test-")); - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); return base; } @@ -111,7 +111,7 @@ test('writeBlockerPlaceholder: creates directory if missing', () => { const base = mkdtempSync(join(tmpdir(), "sf-idle-recovery-test-")); try { // Only create milestone dir, not slice dir - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); // resolveSlicePath 
needs the slice dir to exist to resolve, so this should return null const result = writeBlockerPlaceholder("research-slice", "M001/S01", base, "test reason"); // Since the slice dir doesn't exist, resolveExpectedArtifactPath returns null @@ -170,10 +170,10 @@ const ROADMAP_COMPLETE = `# M001: Test Milestone test('verifyExpectedArtifact: complete-slice — all artifacts present + roadmap marked [x] returns true', () => { const base = createFixtureBase(); try { - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); writeFileSync(join(sliceDir, "S01-SUMMARY.md"), "# Summary\n", "utf-8"); writeFileSync(join(sliceDir, "S01-UAT.md"), "# UAT\n", "utf-8"); - writeFileSync(join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), ROADMAP_COMPLETE, "utf-8"); + writeFileSync(join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), ROADMAP_COMPLETE, "utf-8"); const result = verifyExpectedArtifact("complete-slice", "M001/S01", base); assert.ok(result === true, "SUMMARY + UAT + roadmap [x] should verify as true"); } finally { @@ -184,10 +184,10 @@ test('verifyExpectedArtifact: complete-slice — all artifacts present + roadmap test('verifyExpectedArtifact: complete-slice — SUMMARY + UAT present but roadmap NOT marked [x] returns false', () => { const base = createFixtureBase(); try { - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); writeFileSync(join(sliceDir, "S01-SUMMARY.md"), "# Summary\n", "utf-8"); writeFileSync(join(sliceDir, "S01-UAT.md"), "# UAT\n", "utf-8"); - writeFileSync(join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), ROADMAP_INCOMPLETE, "utf-8"); + writeFileSync(join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), ROADMAP_INCOMPLETE, "utf-8"); const result = verifyExpectedArtifact("complete-slice", "M001/S01", base); assert.ok(result === 
false, "roadmap not marked [x] should return false (crash recovery scenario)"); } finally { @@ -198,10 +198,10 @@ test('verifyExpectedArtifact: complete-slice — SUMMARY + UAT present but roadm test('verifyExpectedArtifact: complete-slice — SUMMARY present but UAT missing returns false', () => { const base = createFixtureBase(); try { - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); writeFileSync(join(sliceDir, "S01-SUMMARY.md"), "# Summary\n", "utf-8"); // no UAT file - writeFileSync(join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), ROADMAP_COMPLETE, "utf-8"); + writeFileSync(join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), ROADMAP_COMPLETE, "utf-8"); const result = verifyExpectedArtifact("complete-slice", "M001/S01", base); assert.ok(result === false, "missing UAT should return false"); } finally { @@ -212,7 +212,7 @@ test('verifyExpectedArtifact: complete-slice — SUMMARY present but UAT missing test('verifyExpectedArtifact: complete-slice — no roadmap file present is lenient (returns true)', () => { const base = createFixtureBase(); try { - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); writeFileSync(join(sliceDir, "S01-SUMMARY.md"), "# Summary\n", "utf-8"); writeFileSync(join(sliceDir, "S01-UAT.md"), "# UAT\n", "utf-8"); // no roadmap file @@ -228,7 +228,7 @@ test('verifyExpectedArtifact: complete-slice — no roadmap file present is leni test('buildLoopRemediationSteps: execute-task returns concrete steps', () => { const base = mkdtempSync(join(tmpdir(), "sf-loop-remediation-test-")); try { - mkdirSync(join(base, ".gsd", "milestones", "M002", "slices", "S03", "tasks"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M002", "slices", "S03", "tasks"), { recursive: true }); const result = 
buildLoopRemediationSteps("execute-task", "M002/S03/T01", base); assert.ok(result !== null, "should return remediation steps"); assert.ok(result!.includes("sf undo-task"), "steps include undo-task command"); @@ -242,7 +242,7 @@ test('buildLoopRemediationSteps: execute-task returns concrete steps', () => { test('buildLoopRemediationSteps: plan-slice returns concrete steps', () => { const base = mkdtempSync(join(tmpdir(), "sf-loop-remediation-test-")); try { - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01"), { recursive: true }); const result = buildLoopRemediationSteps("plan-slice", "M001/S01", base); assert.ok(result !== null, "should return remediation steps for plan-slice"); assert.ok(result!.includes("S01-PLAN.md"), "steps mention the slice plan file"); @@ -255,7 +255,7 @@ test('buildLoopRemediationSteps: plan-slice returns concrete steps', () => { test('buildLoopRemediationSteps: research-slice returns concrete steps', () => { const base = mkdtempSync(join(tmpdir(), "sf-loop-remediation-test-")); try { - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01"), { recursive: true }); const result = buildLoopRemediationSteps("research-slice", "M001/S01", base); assert.ok(result !== null, "should return remediation steps for research-slice"); assert.ok(result!.includes("S01-RESEARCH.md"), "steps mention the slice research file"); @@ -301,9 +301,9 @@ test('writeBlockerPlaceholder: updates DB task status for execute-task (#2531)', const { openDatabase, closeDatabase, insertMilestone, insertSlice, insertTask, getTask, isDbAvailable } = await import("../../sf-db.ts"); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); // Create the tasks directory (required for artifact path resolution) - mkdirSync(join(base, ".gsd", 
"milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); openDatabase(dbPath); try { @@ -336,8 +336,8 @@ test('writeBlockerPlaceholder: does NOT update DB for non-execute-task types', a const { openDatabase, closeDatabase, insertMilestone, insertSlice, getSlice, isDbAvailable } = await import("../../sf-db.ts"); - const dbPath = join(base, ".gsd", "sf.db"); - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01"), { recursive: true }); + const dbPath = join(base, ".sf", "sf.db"); + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01"), { recursive: true }); openDatabase(dbPath); try { @@ -364,8 +364,8 @@ test('writeBlockerPlaceholder: updates DB slice status for complete-slice (#2653 const { openDatabase, closeDatabase, insertMilestone, insertSlice, getSlice, isDbAvailable } = await import("../../sf-db.ts"); - const dbPath = join(base, ".gsd", "sf.db"); - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01"), { recursive: true }); + const dbPath = join(base, ".sf", "sf.db"); + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01"), { recursive: true }); openDatabase(dbPath); try { diff --git a/src/resources/extensions/sf/tests/integration/inherited-repo-home-dir.test.ts b/src/resources/extensions/sf/tests/integration/inherited-repo-home-dir.test.ts index a88ef24a3..b5a30b010 100644 --- a/src/resources/extensions/sf/tests/integration/inherited-repo-home-dir.test.ts +++ b/src/resources/extensions/sf/tests/integration/inherited-repo-home-dir.test.ts @@ -2,10 +2,10 @@ * inherited-repo-home-dir.test.ts — Regression test for #2393. 
* * When the user's home directory IS a git repo (common with dotfile - * managers like yadm), isInheritedRepo() must not treat ~/.gsd (the - * global SF state directory) as a project .gsd belonging to the home + * managers like yadm), isInheritedRepo() must not treat ~/.sf (the + * global SF state directory) as a project .sf belonging to the home * repo. Without the fix, isInheritedRepo() returns false for project - * subdirectories because it sees ~/.gsd and concludes the parent repo + * subdirectories because it sees ~/.sf and concludes the parent repo * has already been initialised with SF — causing the wrong project * state to be loaded. */ @@ -50,15 +50,15 @@ describe("isInheritedRepo when git root is HOME (#2393)", () => { run("git", ["add", ".bashrc"], fakeHome); run("git", ["commit", "-m", "init dotfiles"], fakeHome); - // Create a plain ~/.gsd directory at fakeHome — this simulates the - // global SF home directory, NOT a project .gsd. - mkdirSync(join(fakeHome, ".gsd", "projects"), { recursive: true }); + // Create a plain ~/.sf directory at fakeHome — this simulates the + // global SF home directory, NOT a project .sf. + mkdirSync(join(fakeHome, ".sf", "projects"), { recursive: true }); - // Save and override env. Point SF_HOME at fakeHome/.gsd so the + // Save and override env. Point SF_HOME at fakeHome/.sf so the // function recognizes it as the global state directory. 
origGsdHome = process.env.SF_HOME; origGsdStateDir = process.env.SF_STATE_DIR; - process.env.SF_HOME = join(fakeHome, ".gsd"); + process.env.SF_HOME = join(fakeHome, ".sf"); stateDir = mkdtempSync(join(tmpdir(), "sf-state-")); process.env.SF_STATE_DIR = stateDir; }); @@ -73,37 +73,37 @@ describe("isInheritedRepo when git root is HOME (#2393)", () => { rmSync(stateDir, { recursive: true, force: true }); }); - test("subdirectory of home-as-git-root is detected as inherited even when ~/.gsd exists", () => { + test("subdirectory of home-as-git-root is detected as inherited even when ~/.sf exists", () => { // Create a project directory inside fake HOME const projectDir = join(fakeHome, "projects", "my-app"); mkdirSync(projectDir, { recursive: true }); - // The bug: isInheritedRepo sees ~/.gsd and returns false, thinking + // The bug: isInheritedRepo sees ~/.sf and returns false, thinking // the home repo is a legitimate SF project. It should return true - // because ~/.gsd is the global state dir, not a project .gsd. + // because ~/.sf is the global state dir, not a project .sf. assert.strictEqual( isInheritedRepo(projectDir), true, "project inside home-as-git-root must be detected as inherited repo, " + - "even when ~/.gsd (global state dir) exists", + "even when ~/.sf (global state dir) exists", ); }); - test("subdirectory with a real project .gsd symlink at git root is NOT inherited", () => { + test("subdirectory with a real project .sf symlink at git root is NOT inherited", () => { // Simulate a legitimately initialised SF project at the home repo root: - // .gsd is a symlink to an external state directory. + // .sf is a symlink to an external state directory. 
const externalState = join(stateDir, "projects", "home-project"); mkdirSync(externalState, { recursive: true }); - const sfDir = join(fakeHome, ".gsd"); + const sfDir = join(fakeHome, ".sf"); - // Remove the plain directory and replace with a symlink (real project .gsd) + // Remove the plain directory and replace with a symlink (real project .sf) rmSync(sfDir, { recursive: true, force: true }); symlinkSync(externalState, sfDir); const projectDir = join(fakeHome, "projects", "my-app"); mkdirSync(projectDir, { recursive: true }); - // When .gsd at root IS a project symlink, subdirectories are legitimate children + // When .sf at root IS a project symlink, subdirectories are legitimate children assert.strictEqual( isInheritedRepo(projectDir), false, @@ -120,7 +120,7 @@ describe("isInheritedRepo when git root is HOME (#2393)", () => { }); }); -describe("isInheritedRepo with stale .gsd at parent git root", () => { +describe("isInheritedRepo with stale .sf at parent git root", () => { let parentRepo: string; beforeEach(() => { @@ -137,20 +137,20 @@ describe("isInheritedRepo with stale .gsd at parent git root", () => { rmSync(parentRepo, { recursive: true, force: true }); }); - test("stale .gsd dir at parent git root does not suppress inherited detection", () => { - // Simulate a stale .gsd directory at the parent git root (e.g. from a + test("stale .sf dir at parent git root does not suppress inherited detection", () => { + // Simulate a stale .sf directory at the parent git root (e.g. from a // prior doctor run or accidental init). This is a real directory, NOT // a symlink, and NOT the global SF home. 
- mkdirSync(join(parentRepo, ".gsd"), { recursive: true }); + mkdirSync(join(parentRepo, ".sf"), { recursive: true }); const projectDir = join(parentRepo, "my-project"); mkdirSync(projectDir, { recursive: true }); - // Without fix: isProjectGsd(join(root, ".gsd")) returns true because - // the stale .gsd is a real directory that isn't the global SF home, + // Without fix: isProjectGsd(join(root, ".sf")) returns true because + // the stale .sf is a real directory that isn't the global SF home, // causing isInheritedRepo to return false (false negative). // - // The stale .gsd at parent is still treated as a "project .gsd" by + // The stale .sf at parent is still treated as a "project .sf" by // isProjectGsd(), so the git root check at line 128 returns false. // This is the expected behavior for that check — the defense-in-depth // fix in auto-start.ts handles this case by checking for local .git. @@ -159,31 +159,31 @@ describe("isInheritedRepo with stale .gsd at parent git root", () => { assert.strictEqual( isInheritedRepo(projectDir), false, - "stale .gsd dir at git root still causes isInheritedRepo to return false " + + "stale .sf dir at git root still causes isInheritedRepo to return false " + "(defense-in-depth in auto-start.ts handles this case)", ); }); - test("basePath's own .gsd symlink does not suppress inherited detection", () => { - // Create a project subdir with its own .gsd symlink (set up during + test("basePath's own .sf symlink does not suppress inherited detection", () => { + // Create a project subdir with its own .sf symlink (set up during // the discuss phase, before auto-mode bootstrap runs). 
const projectDir = join(parentRepo, "my-project"); mkdirSync(projectDir, { recursive: true }); const externalState = mkdtempSync(join(tmpdir(), "sf-ext-state-")); - symlinkSync(externalState, join(projectDir, ".gsd")); + symlinkSync(externalState, join(projectDir, ".sf")); // Before fix: the walk-up loop started at normalizedBase (projectDir), - // found .gsd at projectDir, and returned false — even though projectDir - // has no .git of its own. The .gsd at basePath is irrelevant to whether + // found .sf at projectDir, and returned false — even though projectDir + // has no .git of its own. The .sf at basePath is irrelevant to whether // the git repo is inherited from a parent. // // After fix: the walk-up starts at dirname(normalizedBase), skipping - // basePath's own .gsd. + // basePath's own .sf. assert.strictEqual( isInheritedRepo(projectDir), true, - "project's own .gsd symlink must not suppress inherited repo detection", + "project's own .sf symlink must not suppress inherited repo detection", ); rmSync(externalState, { recursive: true, force: true }); diff --git a/src/resources/extensions/sf/tests/integration/integration-lifecycle.test.ts b/src/resources/extensions/sf/tests/integration/integration-lifecycle.test.ts index dbd3da29c..7d425c45a 100644 --- a/src/resources/extensions/sf/tests/integration/integration-lifecycle.test.ts +++ b/src/resources/extensions/sf/tests/integration/integration-lifecycle.test.ts @@ -119,9 +119,9 @@ const ROADMAP_CONTENT = `# M001: Test Milestone\n\n**Vision:** Integration test // ═══════════════════════════════════════════════════════════════════════════ test('integration-lifecycle: full pipeline', async () => { - // ── Step 1: Set up temp dir with realistic .gsd/ structure ────────── + // ── Step 1: Set up temp dir with realistic .sf/ structure ────────── const base = mkdtempSync(join(tmpdir(), 'sf-int-lifecycle-')); - const sfDir = join(base, '.gsd'); + const sfDir = join(base, '.sf'); mkdirSync(sfDir, { recursive: true }); 
mkdirSync(join(sfDir, 'milestones', 'M001'), { recursive: true }); mkdirSync(join(sfDir, 'milestones', 'M002'), { recursive: true }); diff --git a/src/resources/extensions/sf/tests/integration/integration-mixed-milestones.test.ts b/src/resources/extensions/sf/tests/integration/integration-mixed-milestones.test.ts index aed33d496..033bbe4da 100644 --- a/src/resources/extensions/sf/tests/integration/integration-mixed-milestones.test.ts +++ b/src/resources/extensions/sf/tests/integration/integration-mixed-milestones.test.ts @@ -29,31 +29,31 @@ import assert from 'node:assert/strict'; function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-integration-mixed-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } function writeRoadmap(base: string, mid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-ROADMAP.md`), content); } function writePlan(base: string, mid: string, sid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid, 'slices', sid); + const dir = join(base, '.sf', 'milestones', mid, 'slices', sid); mkdirSync(join(dir, 'tasks'), { recursive: true }); writeFileSync(join(dir, "tasks", "T01-PLAN.md"), "# T01 Plan\n"); writeFileSync(join(dir, `${sid}-PLAN.md`), content); } function writeMilestoneSummary(base: string, mid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-SUMMARY.md`), content); } function writeMilestoneValidation(base: string, mid: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true 
}); writeFileSync(join(dir, `${mid}-VALIDATION.md`), `---\nverdict: pass\nremediation_round: 0\n---\n\n# Validation\nPassed.`); } @@ -68,7 +68,7 @@ function run(command: string, cwd: string): string { function createGitRepo(): string { const base = mkdtempSync(join(tmpdir(), 'sf-integration-git-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); run('git init -b main', base); run("git config user.name 'Integration Test'", base); run("git config user.email 'test@example.com'", base); @@ -338,7 +338,7 @@ test('Group 4: inlinePriorMilestoneSummary with mixed formats', async () => { const base = createFixtureBase(); try { // M001 — completed with summary - mkdirSync(join(base, '.gsd', 'milestones', 'M001'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M001'), { recursive: true }); writeMilestoneSummary(base, 'M001', `# M001: Legacy Feature Summary **Completed legacy feature** @@ -351,7 +351,7 @@ Built the legacy feature successfully. 
`); // M002-abc123 — active milestone (just needs directory to exist) - mkdirSync(join(base, '.gsd', 'milestones', 'M002-abc123'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M002-abc123'), { recursive: true }); const result = await inlinePriorMilestoneSummary('M002-abc123', base); diff --git a/src/resources/extensions/sf/tests/integration/integration-proof.test.ts b/src/resources/extensions/sf/tests/integration/integration-proof.test.ts index 6edf57360..1a29b10c7 100644 --- a/src/resources/extensions/sf/tests/integration/integration-proof.test.ts +++ b/src/resources/extensions/sf/tests/integration/integration-proof.test.ts @@ -111,14 +111,14 @@ function makeCtx(): { notifications: Array<{ message: string; level: string }>; } /** - * Create a temp directory with a realistic .gsd/ structure: + * Create a temp directory with a realistic .sf/ structure: * - M001-ROADMAP.md with one slice (S01, two tasks T01/T02) * - S01-PLAN.md with two task checkboxes * - REQUIREMENTS.md and DECISIONS.md stubs to keep doctor happy */ function createRealisticFixture(): string { const base = makeTempDir(); - const sfDir = join(base, ".gsd"); + const sfDir = join(base, ".sf"); const mDir = join(sfDir, "milestones", "M001"); const sliceDir = join(mDir, "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); @@ -276,7 +276,7 @@ function makeCompleteSliceParams(): any { test("full lifecycle: migration through completion through doctor", async (t) => { const base = createRealisticFixture(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); t.after(() => { closeDatabase(); @@ -336,7 +336,7 @@ test("full lifecycle: migration through completion through doctor", async (t) => } // Verify plan checkboxes toggled - const planPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); + const planPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); const planAfterTasks = 
readFileSync(planPath, "utf-8"); assert.match(planAfterTasks, /\[x\]\s+\*\*T01:/, "T01 should be checked in plan"); assert.match(planAfterTasks, /\[x\]\s+\*\*T02:/, "T02 should be checked in plan"); @@ -357,7 +357,7 @@ test("full lifecycle: migration through completion through doctor", async (t) => } // Verify roadmap checkbox toggled - const roadmapPath = join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"); + const roadmapPath = join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"); const roadmapAfter = readFileSync(roadmapPath, "utf-8"); assert.ok(roadmapAfter.includes("\u2705"), "S01 should be checked in roadmap (✅ emoji in table format)"); @@ -396,7 +396,7 @@ test("full lifecycle: migration through completion through doctor", async (t) => // ── (i) Rogue file detection (R008) ────────────────────────────── // Write a fake summary for a non-DB-tracked task T99 - const rogueDir = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); + const rogueDir = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"); writeFileSync(join(rogueDir, "T99-SUMMARY.md"), "# Rogue Summary\n", "utf-8"); // Clear path cache so resolveTaskFile sees the newly written file @@ -415,7 +415,7 @@ test("full lifecycle: migration through completion through doctor", async (t) => test("recovery: DB loss → migrateFromMarkdown restores state, stale render detection", async (t) => { const base = createRealisticFixture(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); t.after(() => { closeDatabase(); @@ -503,7 +503,7 @@ test("recovery: DB loss → migrateFromMarkdown restores state, stale render det test("undo/reset: undo task and reset slice revert DB + markdown", async (t) => { const base = createRealisticFixture(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); t.after(() => { closeDatabase(); @@ -534,7 +534,7 @@ test("undo/reset: undo task and reset slice revert DB + 
markdown", async (t) => // T01 summary file should be deleted const t1SummaryPath = join( base, - ".gsd", + ".sf", "milestones", "M001", "slices", @@ -545,7 +545,7 @@ test("undo/reset: undo task and reset slice revert DB + markdown", async (t) => assert.equal(existsSync(t1SummaryPath), false, "T01 summary should be deleted after undo"); // Plan checkbox should be unchecked - const planPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); + const planPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); const planAfterUndo = readFileSync(planPath, "utf-8"); assert.match(planAfterUndo, /\[ \]\s+\*\*T01:/, "T01 should be unchecked in plan after undo"); @@ -581,7 +581,7 @@ test("undo/reset: undo task and reset slice revert DB + markdown", async (t) => assert.equal(existsSync(t1SummaryPath), false, "T01 summary should be deleted after reset"); const t2SummaryPath = join( base, - ".gsd", + ".sf", "milestones", "M001", "slices", @@ -594,7 +594,7 @@ test("undo/reset: undo task and reset slice revert DB + markdown", async (t) => // Slice summary and UAT should be deleted const sliceSummaryPath = join( base, - ".gsd", + ".sf", "milestones", "M001", "slices", @@ -603,7 +603,7 @@ test("undo/reset: undo task and reset slice revert DB + markdown", async (t) => ); const sliceUatPath = join( base, - ".gsd", + ".sf", "milestones", "M001", "slices", diff --git a/src/resources/extensions/sf/tests/integration/merge-cwd-restore.test.ts b/src/resources/extensions/sf/tests/integration/merge-cwd-restore.test.ts index ff1b0e979..d5d33b527 100644 --- a/src/resources/extensions/sf/tests/integration/merge-cwd-restore.test.ts +++ b/src/resources/extensions/sf/tests/integration/merge-cwd-restore.test.ts @@ -47,9 +47,9 @@ function createTempRepo(): string { run("git config user.email test@test.com", dir); run("git config user.name Test", dir); writeFileSync(join(dir, "README.md"), "# test\n"); - writeFileSync(join(dir, ".gitignore"), 
".gsd/worktrees/\n"); - mkdirSync(join(dir, ".gsd"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "STATE.md"), "# State\n"); + writeFileSync(join(dir, ".gitignore"), ".sf/worktrees/\n"); + mkdirSync(join(dir, ".sf"), { recursive: true }); + writeFileSync(join(dir, ".sf", "STATE.md"), "# State\n"); run("git add .", dir); run("git commit -m init", dir); return dir; @@ -137,9 +137,9 @@ describe("merge cwd restore (#2929)", () => { writeFileSync(join(repo, "dirty-from-m020.txt"), "should not be committed\n"); // Set up roadmap so mergeMilestoneToMain can find milestone metadata - mkdirSync(join(repo, ".gsd", "milestones", "M010"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M010"), { recursive: true }); writeFileSync( - join(repo, ".gsd", "milestones", "M010", "M010-ROADMAP.md"), + join(repo, ".sf", "milestones", "M010", "M010-ROADMAP.md"), makeRoadmap("M010", "First milestone"), ); diff --git a/src/resources/extensions/sf/tests/integration/migrate-command.test.ts b/src/resources/extensions/sf/tests/integration/migrate-command.test.ts index f00dc473f..35aa17ecb 100644 --- a/src/resources/extensions/sf/tests/integration/migrate-command.test.ts +++ b/src/resources/extensions/sf/tests/integration/migrate-command.test.ts @@ -314,7 +314,7 @@ test('Full pipeline: parse → transform → preview → write → deriveState', assert.ok(result.paths.length > 0, 'pipeline: files written'); // Key files exist - const sf = join(writeTarget, '.gsd'); + const sf = join(writeTarget, '.sf'); assert.ok(existsSync(join(sf, 'PROJECT.md')), 'pipeline: PROJECT.md written'); assert.ok(existsSync(join(sf, 'STATE.md')), 'pipeline: STATE.md written'); assert.ok(existsSync(join(sf, 'REQUIREMENTS.md')), 'pipeline: REQUIREMENTS.md written'); @@ -342,17 +342,17 @@ test('Full pipeline: parse → transform → preview → write → deriveState', } }); - // ─── Test 6: .gsd/ exists detection ──────────────────────────────────── + // ─── Test 6: .sf/ exists detection 
──────────────────────────────────── -test('.gsd/ exists detection', () => { +test('.sf/ exists detection', () => { const base = mkdtempSync(join(tmpdir(), 'sf-cmd-exists-')); try { - // No .gsd/ yet - assert.ok(!existsSync(join(base, '.gsd')), 'exists-detection: .gsd absent initially'); + // No .sf/ yet + assert.ok(!existsSync(join(base, '.sf')), 'exists-detection: .sf absent initially'); - // Create .gsd/ - mkdirSync(join(base, '.gsd'), { recursive: true }); - assert.ok(existsSync(join(base, '.gsd')), 'exists-detection: .gsd detected after creation'); + // Create .sf/ + mkdirSync(join(base, '.sf'), { recursive: true }); + assert.ok(existsSync(join(base, '.sf')), 'exists-detection: .sf detected after creation'); } finally { rmSync(base, { recursive: true, force: true }); } diff --git a/src/resources/extensions/sf/tests/integration/milestone-transition-worktree.test.ts b/src/resources/extensions/sf/tests/integration/milestone-transition-worktree.test.ts index b5a6814ab..dc3ffff26 100644 --- a/src/resources/extensions/sf/tests/integration/milestone-transition-worktree.test.ts +++ b/src/resources/extensions/sf/tests/integration/milestone-transition-worktree.test.ts @@ -45,7 +45,7 @@ function createTempRepo(): string { } function createMilestoneArtifacts(dir: string, mid: string): void { - const msDir = join(dir, ".gsd", "milestones", mid); + const msDir = join(dir, ".sf", "milestones", mid); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "CONTEXT.md"), `# ${mid} Context\n`); const roadmap = [ @@ -86,7 +86,7 @@ test("worktree swap on milestone transition: merge old, create new", () => { run("git commit -m \"feat(M001): add feature\"", wt1); // Phase 2: Simulate milestone transition — merge M001, exit worktree - const roadmapPath = join(tempDir, ".gsd", "milestones", "M001", "M001-ROADMAP.md"); + const roadmapPath = join(tempDir, ".sf", "milestones", "M001", "M001-ROADMAP.md"); const roadmapContent = readFileSync(roadmapPath, "utf-8"); 
mergeMilestoneToMain(tempDir, "M001", roadmapContent); diff --git a/src/resources/extensions/sf/tests/integration/parallel-merge.test.ts b/src/resources/extensions/sf/tests/integration/parallel-merge.test.ts index 23f0d3381..585bd529c 100644 --- a/src/resources/extensions/sf/tests/integration/parallel-merge.test.ts +++ b/src/resources/extensions/sf/tests/integration/parallel-merge.test.ts @@ -57,11 +57,11 @@ function createTempRepo(): string { run("git config user.email test@test.com", dir); run("git config user.name Test", dir); writeFileSync(join(dir, "README.md"), "# test\n"); - // Mirror production: .gsd/worktrees/ is gitignored so autoCommitDirtyState + // Mirror production: .sf/worktrees/ is gitignored so autoCommitDirtyState // doesn't pick up the worktrees directory as dirty state (#1127 fix). - writeFileSync(join(dir, ".gitignore"), ".gsd/worktrees/\n"); - mkdirSync(join(dir, ".gsd"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "STATE.md"), "# State\n"); + writeFileSync(join(dir, ".gitignore"), ".sf/worktrees/\n"); + mkdirSync(join(dir, ".sf"), { recursive: true }); + writeFileSync(join(dir, ".sf", "STATE.md"), "# State\n"); run("git add .", dir); run("git commit -m init", dir); return dir; @@ -85,9 +85,9 @@ function cleanup(dir: string): void { try { rmSync(dir, { recursive: true, force: true }); } catch { /* */ } } -/** Set up a milestone roadmap file in .gsd/milestones/<MID>/ */ +/** Set up a milestone roadmap file in .sf/milestones/<MID>/ */ function setupRoadmap(repo: string, mid: string, title: string, slices: string[]): void { - const dir = join(repo, ".gsd", "milestones", mid); + const dir = join(repo, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); const sliceLines = slices.map(s => `- [x] **${s}**`).join("\n"); writeFileSync( @@ -235,7 +235,7 @@ test("formatMergeResults — mixed results", () => { test("mergeCompletedMilestone — missing roadmap returns error result", async () => { const base = join(tmpdir(), 
`parallel-merge-noroadmap-${Date.now()}`); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); try { const result = await mergeCompletedMilestone(base, "M999"); assert.equal(result.success, false); @@ -269,7 +269,7 @@ test("mergeCompletedMilestone — clean merge, session status cleaned up", async cost: 1.5, lastHeartbeat: Date.now(), startedAt: Date.now() - 60000, - worktreePath: join(repo, ".gsd", "worktrees", "M010"), + worktreePath: join(repo, ".sf", "worktrees", "M010"), }); // Verify session status exists before merge @@ -478,7 +478,7 @@ test("mergeAllCompleted — by-completion order respects startedAt", async () => /** Set up a worktree DB with a milestone marked complete */ function setupWorktreeDb(basePath: string, mid: string): void { - const wtGsdDir = join(basePath, ".gsd", "worktrees", mid, ".gsd"); + const wtGsdDir = join(basePath, ".sf", "worktrees", mid, ".sf"); mkdirSync(wtGsdDir, { recursive: true }); const dbPath = join(wtGsdDir, "sf.db"); openDatabase(dbPath); diff --git a/src/resources/extensions/sf/tests/integration/parallel-workers-multi-milestone-e2e.test.ts b/src/resources/extensions/sf/tests/integration/parallel-workers-multi-milestone-e2e.test.ts index a18b09b17..29c3bfc1b 100644 --- a/src/resources/extensions/sf/tests/integration/parallel-workers-multi-milestone-e2e.test.ts +++ b/src/resources/extensions/sf/tests/integration/parallel-workers-multi-milestone-e2e.test.ts @@ -48,16 +48,16 @@ import { function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-e2e-parallel-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } function writeMetricsLedger(base: string, ledger: MetricsLedger): void { - writeFileSync(join(base, '.gsd', 'metrics.json'), JSON.stringify(ledger, null, 2)); + writeFileSync(join(base, '.sf', 'metrics.json'), JSON.stringify(ledger, null, 2)); } 
function readMetricsLedger(base: string): MetricsLedger { - return JSON.parse(readFileSync(join(base, '.gsd', 'metrics.json'), 'utf-8')); + return JSON.parse(readFileSync(join(base, '.sf', 'metrics.json'), 'utf-8')); } function makeUnit(overrides: Partial<UnitMetrics> = {}): UnitMetrics { @@ -89,8 +89,8 @@ test('E2E: Parallel workers across milestones', () => { const base = createFixtureBase(); // Create milestone directories - mkdirSync(join(base, '.gsd', 'milestones', 'M001'), { recursive: true }); - mkdirSync(join(base, '.gsd', 'milestones', 'M002'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M001'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M002'), { recursive: true }); // Simulate M001 parallel workers (batch 1) const batch1Id = "batch-m001"; diff --git a/src/resources/extensions/sf/tests/integration/paths.test.ts b/src/resources/extensions/sf/tests/integration/paths.test.ts index ff897412d..a3a3f5311 100644 --- a/src/resources/extensions/sf/tests/integration/paths.test.ts +++ b/src/resources/extensions/sf/tests/integration/paths.test.ts @@ -22,44 +22,44 @@ function initGit(dir: string): void { } describe('paths', () => { - test('Case 1: .gsd exists at basePath — fast path', () => { + test('Case 1: .sf exists at basePath — fast path', () => { const root = tmp(); try { - mkdirSync(join(root, ".gsd")); + mkdirSync(join(root, ".sf")); _clearGsdRootCache(); const result = sfRoot(root); - assert.deepStrictEqual(result, join(root, ".gsd"), "fast path: returns basePath/.gsd"); + assert.deepStrictEqual(result, join(root, ".sf"), "fast path: returns basePath/.sf"); } finally { cleanup(root); } }); - test('Case 2: .gsd exists at git root, cwd is a subdirectory', () => { + test('Case 2: .sf exists at git root, cwd is a subdirectory', () => { const root = tmp(); try { initGit(root); - mkdirSync(join(root, ".gsd")); + mkdirSync(join(root, ".sf")); const sub = join(root, "src", "deep"); mkdirSync(sub, { recursive: true }); 
_clearGsdRootCache(); const result = sfRoot(sub); - assert.deepStrictEqual(result, join(root, ".gsd"), "git-root probe: finds .gsd at git root from subdirectory"); + assert.deepStrictEqual(result, join(root, ".sf"), "git-root probe: finds .sf at git root from subdirectory"); } finally { cleanup(root); } }); - test('Case 3: .gsd in an ancestor — walk-up finds it', () => { + test('Case 3: .sf in an ancestor — walk-up finds it', () => { const root = tmp(); try { initGit(root); const project = join(root, "project"); - mkdirSync(join(project, ".gsd"), { recursive: true }); + mkdirSync(join(project, ".sf"), { recursive: true }); const deep = join(project, "src", "deep"); mkdirSync(deep, { recursive: true }); _clearGsdRootCache(); const result = sfRoot(deep); - assert.deepStrictEqual(result, join(project, ".gsd"), "walk-up: finds .gsd in ancestor when git root has none"); + assert.deepStrictEqual(result, join(project, ".sf"), "walk-up: finds .sf in ancestor when git root has none"); } finally { cleanup(root); } }); - test('Case 4: .gsd nowhere — fallback returns original basePath/.gsd', () => { + test('Case 4: .sf nowhere — fallback returns original basePath/.sf', () => { const root = tmp(); try { initGit(root); @@ -67,14 +67,14 @@ describe('paths', () => { mkdirSync(sub, { recursive: true }); _clearGsdRootCache(); const result = sfRoot(sub); - assert.deepStrictEqual(result, join(sub, ".gsd"), "fallback: returns basePath/.gsd when .gsd not found anywhere"); + assert.deepStrictEqual(result, join(sub, ".sf"), "fallback: returns basePath/.sf when .sf not found anywhere"); } finally { cleanup(root); } }); test('Case 5: cache — second call returns same value without re-probing', () => { const root = tmp(); try { - mkdirSync(join(root, ".gsd")); + mkdirSync(join(root, ".sf")); _clearGsdRootCache(); const first = sfRoot(root); const second = sfRoot(root); @@ -83,16 +83,16 @@ describe('paths', () => { } finally { cleanup(root); } }); - test('Case 6: .gsd at basePath takes 
precedence over ancestor .gsd', () => { + test('Case 6: .sf at basePath takes precedence over ancestor .sf', () => { const outer = tmp(); try { initGit(outer); - mkdirSync(join(outer, ".gsd")); + mkdirSync(join(outer, ".sf")); const inner = join(outer, "nested"); - mkdirSync(join(inner, ".gsd"), { recursive: true }); + mkdirSync(join(inner, ".sf"), { recursive: true }); _clearGsdRootCache(); const result = sfRoot(inner); - assert.deepStrictEqual(result, join(inner, ".gsd"), "precedence: nearest .gsd wins over ancestor"); + assert.deepStrictEqual(result, join(inner, ".sf"), "precedence: nearest .sf wins over ancestor"); } finally { cleanup(outer); } }); }); diff --git a/src/resources/extensions/sf/tests/integration/queue-completed-milestone-perf.test.ts b/src/resources/extensions/sf/tests/integration/queue-completed-milestone-perf.test.ts index 49c2ecb83..6b1cd58ab 100644 --- a/src/resources/extensions/sf/tests/integration/queue-completed-milestone-perf.test.ts +++ b/src/resources/extensions/sf/tests/integration/queue-completed-milestone-perf.test.ts @@ -24,7 +24,7 @@ const { assertTrue, assertEq, report } = createTestContext(); // ─── Fixture: project with many completed milestones ───────────────────── const tmpBase = mkdtempSync(join(tmpdir(), "sf-queue-perf-")); -const sf = join(tmpBase, ".gsd"); +const sf = join(tmpBase, ".sf"); mkdirSync(join(sf, "milestones"), { recursive: true }); const COMPLETED_COUNT = 25; diff --git a/src/resources/extensions/sf/tests/integration/queue-reorder-e2e.test.ts b/src/resources/extensions/sf/tests/integration/queue-reorder-e2e.test.ts index 12819ab1f..df38e92ba 100644 --- a/src/resources/extensions/sf/tests/integration/queue-reorder-e2e.test.ts +++ b/src/resources/extensions/sf/tests/integration/queue-reorder-e2e.test.ts @@ -25,7 +25,7 @@ import { parseContextDependsOn } from '../../files.ts'; function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-reorder-e2e-')); - mkdirSync(join(base, '.gsd', 
'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } @@ -34,18 +34,18 @@ function cleanup(base: string): void { } function writeMilestoneDir(base: string, mid: string): void { - mkdirSync(join(base, '.gsd', 'milestones', mid), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', mid), { recursive: true }); } function writeContext(base: string, mid: string, frontmatter: string, body: string = ''): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); const fm = frontmatter ? `---\n${frontmatter}\n---\n\n` : ''; writeFileSync(join(dir, `${mid}-CONTEXT.md`), `${fm}# ${mid}: Test\n\n${body}`); } function writeCompleteMilestone(base: string, mid: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-ROADMAP.md`), `# ${mid}: Complete @@ -61,7 +61,7 @@ function writeCompleteMilestone(base: string, mid: string): void { } function readContextFile(base: string, mid: string): string { - return readFileSync(join(base, '.gsd', 'milestones', mid, `${mid}-CONTEXT.md`), 'utf-8'); + return readFileSync(join(base, '.sf', 'milestones', mid, `${mid}-CONTEXT.md`), 'utf-8'); } // ═══════════════════════════════════════════════════════════════════════════ @@ -126,7 +126,7 @@ test('E2E: reorder with depends_on removal', async () => { // Remove depends_on from M008-CONTEXT.md (simulating what handleQueueReorder does) const contextContent = readContextFile(base, 'M008'); const newContent = contextContent.replace(/---\ndepends_on: \[M009\]\n---\n\n/, ''); - writeFileSync(join(base, '.gsd', 'milestones', 'M008', 'M008-CONTEXT.md'), newContent); + writeFileSync(join(base, '.sf', 'milestones', 'M008', 'M008-CONTEXT.md'), newContent); // Verify: depends_on is gone const updatedContent = 
readContextFile(base, 'M008'); @@ -244,8 +244,8 @@ test('E2E: non-milestone directories filtered from findMilestoneIds (#1494)', () writeContext(base, 'M001', '', 'First'); writeContext(base, 'M002', '', 'Second'); // Create a rogue non-milestone directory - mkdirSync(join(base, '.gsd', 'milestones', 'slices'), { recursive: true }); - mkdirSync(join(base, '.gsd', 'milestones', 'temp-backup'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'slices'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'temp-backup'), { recursive: true }); invalidateStateCache(); const ids = findMilestoneIds(base); @@ -279,7 +279,7 @@ test('E2E: depends_on inline format preserved after partial removal', () => { // Simulate removing only M009 dep (keep M010) const content = readContextFile(base, 'M008'); const updated = content.replace('depends_on: [M009, M010]', 'depends_on: [M010]'); - writeFileSync(join(base, '.gsd', 'milestones', 'M008', 'M008-CONTEXT.md'), updated); + writeFileSync(join(base, '.sf', 'milestones', 'M008', 'M008-CONTEXT.md'), updated); // Verify only M010 remains const contentAfter = readContextFile(base, 'M008'); @@ -299,7 +299,7 @@ test('E2E: DB-backed path respects queue order (#2556)', async () => { const base = createFixtureBase(); try { const { openDatabase, closeDatabase, insertMilestone, isDbAvailable } = await import('../../sf-db.ts'); - const dbPath = join(base, '.gsd', 'sf.db'); + const dbPath = join(base, '.sf', 'sf.db'); // Create milestone directories (required for findMilestoneIds) writeMilestoneDir(base, 'M006'); diff --git a/src/resources/extensions/sf/tests/integration/quick-branch-lifecycle.test.ts b/src/resources/extensions/sf/tests/integration/quick-branch-lifecycle.test.ts index 5acae7f38..574b8b8ad 100644 --- a/src/resources/extensions/sf/tests/integration/quick-branch-lifecycle.test.ts +++ b/src/resources/extensions/sf/tests/integration/quick-branch-lifecycle.test.ts @@ -26,8 +26,8 @@ function 
createTestRepo(): string { run("git init -b main", repo); run(`git config user.name "SF Test"`, repo); run(`git config user.email "test@sf.dev"`, repo); - mkdirSync(join(repo, ".gsd", "runtime"), { recursive: true }); - mkdirSync(join(repo, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(repo, ".sf", "runtime"), { recursive: true }); + mkdirSync(join(repo, ".sf", "milestones", "M001"), { recursive: true }); writeFileSync(join(repo, "README.md"), "init\n"); run("git add -A", repo); run(`git commit -m "init"`, repo); @@ -129,7 +129,7 @@ test('cleanupQuickBranch: merges back and cleans up (same session)', async () => slug: "fix-typo", description: "fix typo", }; - const runtimeDir = join(repo, ".gsd", "runtime"); + const runtimeDir = join(repo, ".sf", "runtime"); mkdirSync(runtimeDir, { recursive: true }); writeFileSync(join(runtimeDir, "quick-return.json"), JSON.stringify(returnState) + "\n"); @@ -174,7 +174,7 @@ test('cleanupQuickBranch: recovers from disk state (cross-session)', async () => run(`git commit -m "add-docs"`, repo); // Write disk state manually (simulates what handleQuick would persist) - const runtimeDir = join(repo, ".gsd", "runtime"); + const runtimeDir = join(repo, ".sf", "runtime"); mkdirSync(runtimeDir, { recursive: true }); writeFileSync(join(runtimeDir, "quick-return.json"), JSON.stringify({ basePath: repo, diff --git a/src/resources/extensions/sf/tests/integration/run-uat.test.ts b/src/resources/extensions/sf/tests/integration/run-uat.test.ts index 162fd898d..6f956e8d2 100644 --- a/src/resources/extensions/sf/tests/integration/run-uat.test.ts +++ b/src/resources/extensions/sf/tests/integration/run-uat.test.ts @@ -27,7 +27,7 @@ function loadPromptFromWorktree(name: string, vars: Record<string, string> = {}) function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-run-uat-test-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { 
recursive: true }); return base; } @@ -38,7 +38,7 @@ function writeSliceFile( suffix: string, content: string, ): void { - const dir = join(base, '.gsd', 'milestones', mid, 'slices', sid); + const dir = join(base, '.sf', 'milestones', mid, 'slices', sid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${sid}-${suffix}.md`), content); } @@ -170,8 +170,8 @@ test('(j) case sensitivity', () => { test('(k) run-uat prompt template', () => { const milestoneId = 'M001'; const sliceId = 'S01'; - const uatPath = '.gsd/milestones/M001/slices/S01/S01-UAT.md'; - const uatResultPath = '.gsd/milestones/M001/slices/S01/S01-ASSESSMENT.md'; + const uatPath = '.sf/milestones/M001/slices/S01/S01-UAT.md'; + const uatResultPath = '.sf/milestones/M001/slices/S01/S01-ASSESSMENT.md'; const uatType = 'live-runtime'; const inlinedContext = '<!-- no context -->'; let promptResult: string | undefined; @@ -233,8 +233,8 @@ test('(k2) run-uat prompt references sf_summary_save, not direct write', () => { workingDirectory: '/tmp/test-project', milestoneId: 'M001', sliceId: 'S01', - uatPath: '.gsd/milestones/M001/slices/S01/S01-UAT.md', - uatResultPath: '.gsd/milestones/M001/slices/S01/S01-UAT.md', + uatPath: '.sf/milestones/M001/slices/S01/S01-UAT.md', + uatResultPath: '.sf/milestones/M001/slices/S01/S01-UAT.md', uatType: 'artifact-driven', inlinedContext: '<!-- no context -->', }); @@ -306,7 +306,7 @@ test('test block at line 307', () => { test('(m) non-artifact UAT skip', async () => { const base = createFixtureBase(); try { - const roadmapDir = join(base, '.gsd', 'milestones', 'M001'); + const roadmapDir = join(base, '.sf', 'milestones', 'M001'); mkdirSync(roadmapDir, { recursive: true }); writeFileSync( join(roadmapDir, 'M001-ROADMAP.md'), @@ -393,8 +393,8 @@ test('(p) run-uat prompt allows PASS when human-only checks remain as NEEDS-HUMA workingDirectory: '/tmp/test-project', milestoneId: 'M001', sliceId: 'S01', - uatPath: '.gsd/milestones/M001/slices/S01/S01-UAT.md', - 
uatResultPath: '.gsd/milestones/M001/slices/S01/S01-UAT.md', + uatPath: '.sf/milestones/M001/slices/S01/S01-UAT.md', + uatResultPath: '.sf/milestones/M001/slices/S01/S01-UAT.md', uatType: 'mixed', inlinedContext: '<!-- no context -->', }); @@ -418,7 +418,7 @@ test('(p) run-uat prompt allows PASS when human-only checks remain as NEEDS-HUMA test('(n) stale replay guard', async () => { const base = createFixtureBase(); try { - const roadmapDir = join(base, '.gsd', 'milestones', 'M001'); + const roadmapDir = join(base, '.sf', 'milestones', 'M001'); mkdirSync(roadmapDir, { recursive: true }); writeFileSync( join(roadmapDir, 'M001-ROADMAP.md'), @@ -466,7 +466,7 @@ test('(q) verdict in ASSESSMENT file skips UAT dispatch (file-based path)', asyn // but checkNeedsRunUat only checked S{sid}-UAT.md — causing a stuck loop. const base = createFixtureBase(); try { - const roadmapDir = join(base, '.gsd', 'milestones', 'M001'); + const roadmapDir = join(base, '.sf', 'milestones', 'M001'); mkdirSync(roadmapDir, { recursive: true }); writeFileSync( join(roadmapDir, 'M001-ROADMAP.md'), @@ -515,7 +515,7 @@ test('(r) no ASSESSMENT file still dispatches UAT (no false skip)', async () => // normally. The ASSESSMENT check must not cause a false-negative skip. const base = createFixtureBase(); try { - const roadmapDir = join(base, '.gsd', 'milestones', 'M001'); + const roadmapDir = join(base, '.sf', 'milestones', 'M001'); mkdirSync(roadmapDir, { recursive: true }); writeFileSync( join(roadmapDir, 'M001-ROADMAP.md'), @@ -562,7 +562,7 @@ test('(s) ASSESSMENT without verdict does not skip UAT dispatch', async () => { // NOT suppress UAT dispatch — only a file with an actual verdict should. 
const base = createFixtureBase(); try { - const roadmapDir = join(base, '.gsd', 'milestones', 'M001'); + const roadmapDir = join(base, '.sf', 'milestones', 'M001'); mkdirSync(roadmapDir, { recursive: true }); writeFileSync( join(roadmapDir, 'M001-ROADMAP.md'), diff --git a/src/resources/extensions/sf/tests/integration/state-machine-edge-cases.test.ts b/src/resources/extensions/sf/tests/integration/state-machine-edge-cases.test.ts index 9492134ac..23de467ed 100644 --- a/src/resources/extensions/sf/tests/integration/state-machine-edge-cases.test.ts +++ b/src/resources/extensions/sf/tests/integration/state-machine-edge-cases.test.ts @@ -94,12 +94,12 @@ function makeTempDir(): string { } /** - * Create a standard .gsd/ fixture with M001 containing S01 (2 tasks) and S02 (1 task). + * Create a standard .sf/ fixture with M001 containing S01 (2 tasks) and S02 (1 task). * Same structure as state-machine-live-validation.test.ts for consistency. */ function createFullFixture(): string { const base = makeTempDir(); - const sfDir = join(base, ".gsd"); + const sfDir = join(base, ".sf"); const m001Dir = join(sfDir, "milestones", "M001"); const s01Dir = join(m001Dir, "slices", "S01"); const s01Tasks = join(s01Dir, "tasks"); @@ -194,7 +194,7 @@ function createFullFixture(): string { */ function createMultiMilestoneFixture(): string { const base = makeTempDir(); - const sfDir = join(base, ".gsd"); + const sfDir = join(base, ".sf"); for (const mid of ["M001", "M002", "M003"]) { const mDir = join(sfDir, "milestones", mid); @@ -296,7 +296,7 @@ describe("state derivation failures", () => { test("file deleted between deriveState calls produces consistent result", async () => { // Simulates race condition: PLAN file exists on first derive, deleted before second base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", 
title: "First", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "pending" }); @@ -306,7 +306,7 @@ describe("state derivation failures", () => { assert.equal(stateBefore.phase, "executing"); // Delete the task plan file mid-flow - const planPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks", "T01-PLAN.md"); + const planPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks", "T01-PLAN.md"); if (existsSync(planPath)) unlinkSync(planPath); invalidateAllCaches(); @@ -321,11 +321,11 @@ describe("state derivation failures", () => { test("partial DB write: milestone inserted but no slices → pre-planning", async () => { base = makeTempDir(); - const mDir = join(base, ".gsd", "milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); mkdirSync(mDir, { recursive: true }); writeFileSync(join(mDir, "M001-CONTEXT.md"), "# M001: Test\n\n## Purpose\nTest.\n"); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); // Only insert milestone — no slices, no roadmap insertMilestone({ id: "M001", title: "Partial", status: "active" }); @@ -338,7 +338,7 @@ describe("state derivation failures", () => { test("cache staleness: derive within TTL returns same result after DB mutation", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "pending" }); @@ -363,13 +363,13 @@ describe("state derivation failures", () => { test("corrupt ROADMAP: binary content does not crash deriveState", async () => { base = makeTempDir(); - const mDir = join(base, ".gsd", "milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); mkdirSync(mDir, { recursive: 
true }); writeFileSync(join(mDir, "M001-CONTEXT.md"), "# M001: Corrupt\n\n## Purpose\nTest.\n"); // Write binary garbage as ROADMAP writeFileSync(join(mDir, "M001-ROADMAP.md"), Buffer.from([0x00, 0xFF, 0xFE, 0x89, 0x50, 0x4E, 0x47])); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Corrupt", status: "active" }); invalidateAllCaches(); @@ -380,12 +380,12 @@ describe("state derivation failures", () => { test("0-byte ROADMAP file is treated as no roadmap (pre-planning)", async () => { base = makeTempDir(); - const mDir = join(base, ".gsd", "milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); mkdirSync(mDir, { recursive: true }); writeFileSync(join(mDir, "M001-CONTEXT.md"), "# M001: Empty\n\n## Purpose\nTest.\n"); writeFileSync(join(mDir, "M001-ROADMAP.md"), ""); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Empty", status: "active" }); invalidateAllCaches(); @@ -395,7 +395,7 @@ describe("state derivation failures", () => { test("ROADMAP with no ## Slices section derives pre-planning", async () => { base = makeTempDir(); - const mDir = join(base, ".gsd", "milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); mkdirSync(mDir, { recursive: true }); writeFileSync(join(mDir, "M001-CONTEXT.md"), "# M001: No Slices\n\n## Purpose\nTest.\n"); writeFileSync( @@ -418,7 +418,7 @@ describe("state derivation failures", () => { ].join("\n"), ); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "No Slices", status: "active" }); invalidateAllCaches(); @@ -468,13 +468,13 @@ describe("transition boundary failures", () => { test("mid-transition: CONTEXT.md created between derives transitions needs-discussion → pre-planning correctly", async () => { base = makeTempDir(); - const mDir = join(base, ".gsd", 
"milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); mkdirSync(mDir, { recursive: true }); // Start with only CONTEXT-DRAFT → needs-discussion writeFileSync(join(mDir, "M001-CONTEXT-DRAFT.md"), "# Draft\nSome draft.\n"); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); invalidateAllCaches(); const state1 = await deriveState(base); assert.equal(state1.phase, "needs-discussion"); @@ -490,7 +490,7 @@ describe("transition boundary failures", () => { test("cascading slice dependencies: S02 depends S01, S03 depends S02 — only S01 eligible", async () => { base = makeTempDir(); - const mDir = join(base, ".gsd", "milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); // Create 3 slices with chain deps for (const sid of ["S01", "S02", "S03"]) { @@ -546,7 +546,7 @@ describe("transition boundary failures", () => { ].join("\n"), ); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Chain", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "Base", status: "pending", depends: [] }); insertSlice({ id: "S02", milestoneId: "M001", title: "Middle", status: "pending", depends: ["S01"] }); @@ -565,7 +565,7 @@ describe("transition boundary failures", () => { test("cascading deps: completing S01 unblocks S02 (not S03)", async () => { base = makeTempDir(); - const mDir = join(base, ".gsd", "milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); for (const sid of ["S01", "S02", "S03"]) { const sDir = join(mDir, "slices", sid, "tasks"); mkdirSync(sDir, { recursive: true }); @@ -614,7 +614,7 @@ describe("transition boundary failures", () => { ].join("\n"), ); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Chain", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "Base", status: 
"complete", depends: [] }); insertSlice({ id: "S02", milestoneId: "M001", title: "Middle", status: "pending", depends: ["S01"] }); @@ -633,7 +633,7 @@ describe("transition boundary failures", () => { test("multi-milestone deps: M002 depends M001, M003 depends M002 — blocked correctly", async () => { base = createMultiMilestoneFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "First", status: "active" }); insertMilestone({ id: "M002", title: "Second", status: "active", depends_on: ["M001"] }); insertMilestone({ id: "M003", title: "Third", status: "active", depends_on: ["M002"] }); @@ -654,7 +654,7 @@ describe("transition boundary failures", () => { test("blocker_discovered in task transitions to replanning-slice", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "complete", blockerDiscovered: true }); @@ -668,7 +668,7 @@ describe("transition boundary failures", () => { test("replan loop protection: replan already done skips replanning-slice", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "complete", blockerDiscovered: true }); @@ -693,7 +693,7 @@ describe("transition boundary failures", () => { test("blocked state: all slices have unmet deps → fallback picks slice", async () => { base = makeTempDir(); - const mDir = join(base, ".gsd", "milestones", "M001"); + const mDir = 
join(base, ".sf", "milestones", "M001"); mkdirSync(join(mDir, "slices", "S01", "tasks"), { recursive: true }); mkdirSync(join(mDir, "slices", "S02", "tasks"), { recursive: true }); @@ -726,7 +726,7 @@ describe("transition boundary failures", () => { ].join("\n"), ); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Blocked", status: "active" }); // Circular deps: S01→S02 and S02→S01 — both blocked insertSlice({ id: "S01", milestoneId: "M001", title: "A", status: "pending", depends: ["S02"] }); @@ -756,7 +756,7 @@ describe("dispatch failure modes", () => { test("dispatch with null activeSlice in executing phase → stop (error)", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); const ctx = buildDispatchCtx(base, "M001", { @@ -773,7 +773,7 @@ describe("dispatch failure modes", () => { test("dispatch for unhandled phase → stop with diagnostic", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); const ctx = buildDispatchCtx(base, "M001", { phase: "paused" as any, @@ -787,7 +787,7 @@ describe("dispatch failure modes", () => { test("dispatch: summarizing with null activeSlice → stop (error)", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); const ctx = buildDispatchCtx(base, "M001", { phase: "summarizing", @@ -805,7 +805,7 @@ describe("dispatch failure modes", () => { test("dispatch: evaluating-gates without gate config → skip (gates omitted)", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: 
"First", status: "in_progress" }); @@ -826,7 +826,7 @@ describe("dispatch failure modes", () => { test("dispatch: needs-discussion → discuss-milestone dispatch", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); const ctx = buildDispatchCtx(base, "M001", { phase: "needs-discussion", @@ -841,7 +841,7 @@ describe("dispatch failure modes", () => { test("dispatch: complete phase → stop with info level", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); const ctx = buildDispatchCtx(base, "M001", { phase: "complete", @@ -884,7 +884,7 @@ describe("completion and verification failures", () => { test("needs-remediation VALIDATION blocks milestone completion dispatch", async () => { base = createFullFixture(); - const mDir = join(base, ".gsd", "milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); writeFileSync( join(mDir, "M001-VALIDATION.md"), [ @@ -899,7 +899,7 @@ describe("completion and verification failures", () => { ].join("\n"), ); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "complete" }); insertSlice({ id: "S02", milestoneId: "M001", title: "Second", status: "complete" }); @@ -920,7 +920,7 @@ describe("completion and verification failures", () => { test("missing slice SUMMARY blocks milestone validation dispatch", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); // Use "pending" status — closed slices (complete/done/skipped) are // excluded from SUMMARY checks per #3620. 
@@ -980,7 +980,7 @@ describe("completion and verification failures", () => { test("all slices done + no VALIDATION → validating-milestone (not completing)", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "complete" }); insertSlice({ id: "S02", milestoneId: "M001", title: "Second", status: "complete" }); @@ -1000,11 +1000,11 @@ describe("completion and verification failures", () => { test("all slices done + terminal VALIDATION + no SUMMARY → completing-milestone", async () => { base = createFullFixture(); writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-VALIDATION.md"), + join(base, ".sf", "milestones", "M001", "M001-VALIDATION.md"), "---\nverdict: pass\n---\n# Validation\nPassed.\n", ); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "complete" }); insertSlice({ id: "S02", milestoneId: "M001", title: "Second", status: "complete" }); @@ -1059,10 +1059,10 @@ describe("ghost milestone edge cases", () => { test("empty directory with DB row is NOT a ghost (#2921)", () => { base = makeTempDir(); - const mDir = join(base, ".gsd", "milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); mkdirSync(mDir, { recursive: true }); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Queued", status: "active" }); assert.equal(isGhostMilestone(base, "M001"), false, "DB row means not a ghost"); @@ -1070,17 +1070,17 @@ describe("ghost milestone edge cases", () => { test("empty directory with worktree is NOT a ghost (#2921)", () => { base = makeTempDir(); - const mDir = join(base, 
".gsd", "milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); mkdirSync(mDir, { recursive: true }); // Simulate worktree existence - mkdirSync(join(base, ".gsd", "worktrees", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "worktrees", "M001"), { recursive: true }); assert.equal(isGhostMilestone(base, "M001"), false, "worktree means not a ghost"); }); test("empty directory without DB or worktree IS a ghost", () => { base = makeTempDir(); - const mDir = join(base, ".gsd", "milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); mkdirSync(mDir, { recursive: true }); assert.equal(isGhostMilestone(base, "M001"), true, "no DB, no worktree, no files → ghost"); @@ -1088,7 +1088,7 @@ describe("ghost milestone edge cases", () => { test("directory with only META.json is still a ghost", () => { base = makeTempDir(); - const mDir = join(base, ".gsd", "milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); mkdirSync(mDir, { recursive: true }); writeFileSync(join(mDir, "META.json"), '{"created":"2026-01-01"}'); @@ -1097,7 +1097,7 @@ describe("ghost milestone edge cases", () => { test("ghost milestones are skipped in state derivation", async () => { base = makeTempDir(); - const sfDir = join(base, ".gsd", "milestones"); + const sfDir = join(base, ".sf", "milestones"); // M001 is ghost — empty dir mkdirSync(join(sfDir, "M001"), { recursive: true }); @@ -1126,17 +1126,17 @@ describe("dispatch guard integration", () => { test("skip_milestone_validation preference writes pass-through VALIDATION", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "complete" }); insertSlice({ id: "S02", milestoneId: "M001", title: "Second", status: "complete" }); // Write slice SUMMARYs so the missing 
SUMMARY guard doesn't fire writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"), "# S01 Summary\nDone.\n", ); writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S02", "S02-SUMMARY.md"), + join(base, ".sf", "milestones", "M001", "slices", "S02", "S02-SUMMARY.md"), "# S02 Summary\nDone.\n", ); @@ -1151,7 +1151,7 @@ describe("dispatch guard integration", () => { assert.equal(result.action, "skip", "skip_milestone_validation should produce skip action"); // Should have written a pass-through VALIDATION file - const validationPath = join(base, ".gsd", "milestones", "M001", "M001-VALIDATION.md"); + const validationPath = join(base, ".sf", "milestones", "M001", "M001-VALIDATION.md"); assert.ok(existsSync(validationPath), "VALIDATION file should be written"); const content = readFileSync(validationPath, "utf-8"); assert.ok(content.includes("verdict: pass"), "should contain pass verdict"); @@ -1160,11 +1160,11 @@ describe("dispatch guard integration", () => { test("rewrite-docs circuit breaker: exceeding MAX attempts resolves all overrides", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); // Write a rewrite count at the max - const runtimeDir = join(base, ".gsd", "runtime"); + const runtimeDir = join(base, ".sf", "runtime"); mkdirSync(runtimeDir, { recursive: true }); writeFileSync( join(runtimeDir, "rewrite-count.json"), @@ -1178,7 +1178,7 @@ describe("dispatch guard integration", () => { test("replanning-slice with null activeSlice → stop (error)", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); const ctx = buildDispatchCtx(base, "M001", { phase: "replanning-slice", diff --git 
a/src/resources/extensions/sf/tests/integration/state-machine-live-validation.test.ts b/src/resources/extensions/sf/tests/integration/state-machine-live-validation.test.ts index ff8963250..4e994b67f 100644 --- a/src/resources/extensions/sf/tests/integration/state-machine-live-validation.test.ts +++ b/src/resources/extensions/sf/tests/integration/state-machine-live-validation.test.ts @@ -76,7 +76,7 @@ function makeTempDir(): string { } /** - * Create a realistic .gsd/ fixture with: + * Create a realistic .sf/ fixture with: * - M001 milestone with ROADMAP, CONTEXT * - S01 slice with PLAN (2 tasks T01, T02) * - S02 slice with PLAN (1 task T01) @@ -85,7 +85,7 @@ function makeTempDir(): string { */ function createFullFixture(): string { const base = makeTempDir(); - const sfDir = join(base, ".gsd"); + const sfDir = join(base, ".sf"); const m001Dir = join(sfDir, "milestones", "M001"); const s01Dir = join(m001Dir, "slices", "S01"); const s01Tasks = join(s01Dir, "tasks"); @@ -299,7 +299,7 @@ describe("state-machine-live-validation", () => { describe("happy path: full lifecycle M001 → complete", () => { test("step 1: empty project derives pre-planning", async () => { base = makeTempDir(); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); const state = await deriveState(base); assert.equal(state.phase, "pre-planning"); assert.equal(state.activeMilestone, null); @@ -307,7 +307,7 @@ describe("state-machine-live-validation", () => { test("step 2: milestone with CONTEXT-DRAFT derives needs-discussion", async () => { base = makeTempDir(); - const mDir = join(base, ".gsd", "milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); mkdirSync(mDir, { recursive: true }); writeFileSync(join(mDir, "M001-CONTEXT-DRAFT.md"), "# Draft\nDraft context.\n"); invalidateStateCache(); @@ -318,7 +318,7 @@ describe("state-machine-live-validation", () => { test("step 3: full fixture with 
ROADMAP+PLAN derives planning or executing", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); invalidateStateCache(); const state = await deriveState(base); // Without DB migration, filesystem path is used — should be planning or executing @@ -330,7 +330,7 @@ describe("state-machine-live-validation", () => { test("step 4: complete T01 in S01 — handler succeeds, DB reflects completion", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); // Seed DB with hierarchy insertMilestone({ id: "M001", title: "Live Validation", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First Feature", status: "in_progress" }); @@ -346,18 +346,18 @@ describe("state-machine-live-validation", () => { assert.ok(isClosedStatus(task!.status), `T01 status should be closed, got: ${task!.status}`); // Verify SUMMARY.md written to disk - const summaryPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks", "T01-SUMMARY.md"); + const summaryPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks", "T01-SUMMARY.md"); assert.ok(existsSync(summaryPath), "T01-SUMMARY.md should exist on disk"); // Verify event log entry - const events = readEvents(join(base, ".gsd", "event-log.jsonl")); + const events = readEvents(join(base, ".sf", "event-log.jsonl")); const taskEvent = events.find(e => e.cmd === "complete-task" && (e.params as any).taskId === "T01"); assert.ok(taskEvent, "event log should contain complete-task for T01"); }); test("step 5: complete T02 in S01 — both tasks now done", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Live Validation", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First Feature", status: "in_progress" }); 
insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", title: "Implementation", status: "complete" }); @@ -374,7 +374,7 @@ describe("state-machine-live-validation", () => { test("step 6: complete slice S01 — all tasks done, slice closes", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Live Validation", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First Feature", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", title: "Impl", status: "complete" }); @@ -388,13 +388,13 @@ describe("state-machine-live-validation", () => { assert.ok(isClosedStatus(slice!.status), `S01 should be closed, got: ${slice!.status}`); // SUMMARY.md on disk - const summaryPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"); + const summaryPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"); assert.ok(existsSync(summaryPath), "S01-SUMMARY.md should exist"); }); test("step 7: complete S02 task + slice — both slices done", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Live Validation", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "complete" }); insertSlice({ id: "S02", milestoneId: "M001", title: "Second", status: "in_progress" }); @@ -417,7 +417,7 @@ describe("state-machine-live-validation", () => { test("step 8: complete milestone M001 — full lifecycle done", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Live Validation", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "complete" }); insertSlice({ id: "S02", 
milestoneId: "M001", title: "Second", status: "complete" }); @@ -433,7 +433,7 @@ describe("state-machine-live-validation", () => { assert.ok(isClosedStatus(milestone!.status), `M001 should be closed, got: ${milestone!.status}`); // SUMMARY.md on disk - const summaryPath = join(base, ".gsd", "milestones", "M001", "M001-SUMMARY.md"); + const summaryPath = join(base, ".sf", "milestones", "M001", "M001-SUMMARY.md"); assert.ok(existsSync(summaryPath), "M001-SUMMARY.md should exist"); }); }); @@ -445,7 +445,7 @@ describe("state-machine-live-validation", () => { describe("completion guards — edge cases", () => { test("cannot complete task with empty taskId", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); const result = await handleCompleteTask(makeTaskParams("", "S01", "M001") as any, base); assert.ok("error" in result); assert.match((result as any).error, /taskId is required/); @@ -453,7 +453,7 @@ describe("state-machine-live-validation", () => { test("cannot complete task in closed milestone", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Done", status: "complete" }); insertSlice({ id: "S01", milestoneId: "M001" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "pending" }); @@ -465,7 +465,7 @@ describe("state-machine-live-validation", () => { test("cannot complete task in closed slice", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", status: "complete" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "pending" }); @@ -477,7 +477,7 @@ describe("state-machine-live-validation", () => { test("double task completion returns error 
(H5-related)", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "complete" }); @@ -489,7 +489,7 @@ describe("state-machine-live-validation", () => { test("cannot complete slice with zero tasks — vacuous truth guard", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", status: "in_progress" }); // No tasks inserted @@ -501,7 +501,7 @@ describe("state-machine-live-validation", () => { test("cannot complete slice with incomplete tasks", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "complete" }); @@ -514,7 +514,7 @@ describe("state-machine-live-validation", () => { test("double slice completion returns error", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", status: "complete" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "complete" }); @@ -526,7 +526,7 @@ describe("state-machine-live-validation", () => { test("cannot complete milestone with zero slices", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); 
insertMilestone({ id: "M001", title: "Active", status: "active" }); const result = await handleCompleteMilestone(makeMilestoneParams("M001") as any, base); @@ -536,7 +536,7 @@ describe("state-machine-live-validation", () => { test("cannot complete milestone with incomplete slices", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", status: "complete" }); insertSlice({ id: "S02", milestoneId: "M001", status: "in_progress" }); @@ -550,7 +550,7 @@ describe("state-machine-live-validation", () => { test("cannot complete milestone with incomplete tasks in complete slice (deep check)", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); // Slice marked complete but task is still pending — simulates inconsistent state insertSlice({ id: "S01", milestoneId: "M001", status: "complete" }); @@ -563,7 +563,7 @@ describe("state-machine-live-validation", () => { test("cannot complete milestone without verificationPassed=true", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", status: "complete" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "complete" }); @@ -577,7 +577,7 @@ describe("state-machine-live-validation", () => { test("double milestone completion returns error", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Done", status: "complete" }); insertSlice({ id: "S01", milestoneId: "M001", status: 
"complete" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "complete" }); @@ -595,7 +595,7 @@ describe("state-machine-live-validation", () => { describe("reopen operations", () => { test("reopen task: resets completed task to pending", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "complete" }); @@ -612,7 +612,7 @@ describe("state-machine-live-validation", () => { test("cannot reopen task that is not complete", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "pending" }); @@ -627,7 +627,7 @@ describe("state-machine-live-validation", () => { test("cannot reopen task in closed slice — must reopen slice first", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", status: "complete" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "complete" }); @@ -642,7 +642,7 @@ describe("state-machine-live-validation", () => { test("cannot reopen task in closed milestone", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Done", status: "complete" }); insertSlice({ id: "S01", milestoneId: "M001", status: "complete" }); insertTask({ id: "T01", sliceId: 
"S01", milestoneId: "M001", status: "complete" }); @@ -657,7 +657,7 @@ describe("state-machine-live-validation", () => { test("reopen slice: resets slice to in_progress and all tasks to pending", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", status: "complete" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "complete" }); @@ -681,7 +681,7 @@ describe("state-machine-live-validation", () => { test("cannot reopen slice in closed milestone", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Done", status: "complete" }); insertSlice({ id: "S01", milestoneId: "M001", status: "complete" }); @@ -697,7 +697,7 @@ describe("state-machine-live-validation", () => { // This test documents the H5 finding: there is no handleReopenMilestone function. // A completed milestone can only be undone via direct DB manipulation. 
base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Done", status: "complete" }); const milestone = getMilestone("M001"); @@ -717,7 +717,7 @@ describe("state-machine-live-validation", () => { describe("phantom parent auto-creation (H6)", () => { test("completing task for non-existent milestone/slice auto-creates them", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); // No milestone or slice pre-inserted — handler will auto-create const result = await handleCompleteTask(makeTaskParams("T01", "S99", "M099") as any, base); @@ -735,7 +735,7 @@ describe("state-machine-live-validation", () => { test("completing slice for non-existent milestone auto-creates it", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); // Insert task to satisfy completion guard insertMilestone({ id: "M099" }); insertSlice({ id: "S99", milestoneId: "M099" }); @@ -753,7 +753,7 @@ describe("state-machine-live-validation", () => { describe("state derivation with live DB", () => { test("deriveStateFromDb reflects task completion immediately", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "pending" }); @@ -780,7 +780,7 @@ describe("state-machine-live-validation", () => { test("deriveStateFromDb reflects slice completion → next slice or validating", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: 
"active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "complete" }); insertSlice({ id: "S02", milestoneId: "M001", title: "Second", status: "in_progress" }); @@ -796,7 +796,7 @@ describe("state-machine-live-validation", () => { test("deriveStateFromDb with all slices done → validating-milestone", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "complete" }); insertSlice({ id: "S02", milestoneId: "M001", title: "Second", status: "complete" }); @@ -810,7 +810,7 @@ describe("state-machine-live-validation", () => { test("ghost milestone is skipped by deriveState", async () => { base = makeTempDir(); - const sfDir = join(base, ".gsd", "milestones"); + const sfDir = join(base, ".sf", "milestones"); // M001 is ghost — empty dir mkdirSync(join(sfDir, "M001"), { recursive: true }); // M002 has content @@ -833,7 +833,7 @@ describe("state-machine-live-validation", () => { describe("event log integrity across operations", () => { test("full operation sequence produces correct event log", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "pending" }); @@ -846,7 +846,7 @@ describe("state-machine-live-validation", () => { // Complete S01 await handleCompleteSlice(makeSliceParams("S01", "M001") as any, base); - const events = readEvents(join(base, ".gsd", "event-log.jsonl")); + const events = readEvents(join(base, ".sf", "event-log.jsonl")); // Should have 3 events: 2 task completions + 1 slice completion assert.ok(events.length >= 3, 
`expected ≥3 events, got ${events.length}`); @@ -874,7 +874,7 @@ describe("state-machine-live-validation", () => { test("reopen operations produce events", async () => { base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "complete" }); @@ -884,7 +884,7 @@ describe("state-machine-live-validation", () => { base, ); - const events = readEvents(join(base, ".gsd", "event-log.jsonl")); + const events = readEvents(join(base, ".sf", "event-log.jsonl")); const reopenEvent = events.find(e => e.cmd === "reopen-task"); assert.ok(reopenEvent, "should have reopen-task event"); assert.equal((reopenEvent!.params as any).taskId, "T01"); @@ -902,7 +902,7 @@ describe("state-machine-live-validation", () => { // post-mutation hook runs, preventing the reconciler from auto-correcting // the task back to "complete". 
base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "pending" }); @@ -911,7 +911,7 @@ describe("state-machine-live-validation", () => { const r1 = await handleCompleteTask(makeTaskParams("T01", "S01", "M001") as any, base); assert.ok(!("error" in r1), `first complete: ${JSON.stringify(r1)}`); - const summaryPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks", "T01-SUMMARY.md"); + const summaryPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks", "T01-SUMMARY.md"); assert.ok(existsSync(summaryPath), "SUMMARY.md exists after completion"); // Reopen — now deletes SUMMARY.md from disk (M12 fix) @@ -932,7 +932,7 @@ describe("state-machine-live-validation", () => { // M12 fix: reopen-slice now deletes all SUMMARY.md and UAT.md artifacts // from disk, preventing reconciler interference. 
base = createFullFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "pending" }); diff --git a/src/resources/extensions/sf/tests/integration/state-machine-runtime-failures.test.ts b/src/resources/extensions/sf/tests/integration/state-machine-runtime-failures.test.ts index 46db06b99..fd41236e3 100644 --- a/src/resources/extensions/sf/tests/integration/state-machine-runtime-failures.test.ts +++ b/src/resources/extensions/sf/tests/integration/state-machine-runtime-failures.test.ts @@ -88,14 +88,14 @@ function makeTempDir(): string { function createMinimalFixture(): string { const base = makeTempDir(); - const mDir = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); + const mDir = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"); mkdirSync(mDir, { recursive: true }); writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-CONTEXT.md"), + join(base, ".sf", "milestones", "M001", "M001-CONTEXT.md"), "# M001: Runtime Test\n\n## Purpose\nTest runtime failures.\n", ); writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), + join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), [ "# M001: Runtime Test", "", @@ -118,7 +118,7 @@ function createMinimalFixture(): string { ].join("\n"), ); writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), [ "# S01: Feature", "", @@ -518,7 +518,7 @@ describe("filesystem race conditions", () => { test("ROADMAP deleted during derive cycle → graceful degradation", async () => { base = createMinimalFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); 
insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "Feature", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "pending" }); @@ -528,7 +528,7 @@ describe("filesystem race conditions", () => { assert.equal(state1.phase, "executing"); // Delete ROADMAP mid-flow - const roadmapPath = join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"); + const roadmapPath = join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"); unlinkSync(roadmapPath); invalidateAllCaches(); @@ -539,10 +539,10 @@ describe("filesystem race conditions", () => { test("CONTEXT deleted during derive → falls back gracefully", async () => { base = createMinimalFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); - const contextPath = join(base, ".gsd", "milestones", "M001", "M001-CONTEXT.md"); + const contextPath = join(base, ".sf", "milestones", "M001", "M001-CONTEXT.md"); unlinkSync(contextPath); invalidateAllCaches(); @@ -553,13 +553,13 @@ describe("filesystem race conditions", () => { test("entire slice directory deleted → derive produces valid state", async () => { base = createMinimalFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "Feature", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "pending" }); // Delete entire S01 directory - rmSync(join(base, ".gsd", "milestones", "M001", "slices", "S01"), { recursive: true, force: true }); + rmSync(join(base, ".sf", "milestones", "M001", "slices", "S01"), { recursive: true, force: true }); invalidateAllCaches(); const state = await deriveStateFromDb(base); @@ -569,23 +569,23 @@ describe("filesystem race 
conditions", () => { test("task PLAN file deleted between dispatch and execution → recovery dispatch", async () => { base = createMinimalFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "Feature", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "pending" }); // Delete T01-PLAN.md - const planPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks", "T01-PLAN.md"); + const planPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks", "T01-PLAN.md"); unlinkSync(planPath); // Also write milestone RESEARCH so research-slice rule doesn't fire first writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-RESEARCH.md"), + join(base, ".sf", "milestones", "M001", "M001-RESEARCH.md"), "# Research\nDone.\n", ); // Write slice RESEARCH so research-slice rule for non-S01 doesn't fire writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-RESEARCH.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-RESEARCH.md"), "# S01 Research\nDone.\n", ); @@ -618,11 +618,11 @@ describe("filesystem race conditions", () => { test("worktree directory disappearance: isGhostMilestone still works", () => { const tmpBase = makeTempDir(); - const mDir = join(tmpBase, ".gsd", "milestones", "M001"); + const mDir = join(tmpBase, ".sf", "milestones", "M001"); mkdirSync(mDir, { recursive: true }); // Create worktree dir then delete it (simulates external deletion) - const wtDir = join(tmpBase, ".gsd", "worktrees", "M001"); + const wtDir = join(tmpBase, ".sf", "worktrees", "M001"); mkdirSync(wtDir, { recursive: true }); // With worktree → not a ghost @@ -763,7 +763,7 @@ describe("state consistency under DB mutations", () => { test("rapid DB mutations produce consistent deriveStateFromDb results", async () => { 
base = createMinimalFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "Feature", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "pending" }); @@ -794,7 +794,7 @@ describe("state consistency under DB mutations", () => { test("DB milestone status change is reflected after cache invalidation", async () => { base = createMinimalFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "Feature", status: "complete" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "complete" }); @@ -808,7 +808,7 @@ describe("state consistency under DB mutations", () => { updateMilestoneStatus("M001", "complete", new Date().toISOString()); // Write SUMMARY to make it truly complete writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-SUMMARY.md"), + join(base, ".sf", "milestones", "M001", "M001-SUMMARY.md"), "# M001 Summary\nDone.\n", ); @@ -820,7 +820,7 @@ describe("state consistency under DB mutations", () => { test("deriveState is idempotent: same inputs produce same outputs", async () => { base = createMinimalFixture(); - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: "M001", title: "Active", status: "active" }); insertSlice({ id: "S01", milestoneId: "M001", title: "Feature", status: "in_progress" }); insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", status: "pending" }); diff --git a/src/resources/extensions/sf/tests/integration/token-savings.test.ts b/src/resources/extensions/sf/tests/integration/token-savings.test.ts index 94a8f17a9..a92d24c4e 100644 --- 
a/src/resources/extensions/sf/tests/integration/token-savings.test.ts +++ b/src/resources/extensions/sf/tests/integration/token-savings.test.ts @@ -145,10 +145,10 @@ A test project for validating token savings with DB-scoped content. console.log('\n=== token-savings: plan-slice prompt ≥30% character savings ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-token-savings-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); - writeFileSync(join(base, '.gsd', 'DECISIONS.md'), decisionsMarkdown); - writeFileSync(join(base, '.gsd', 'REQUIREMENTS.md'), requirementsMarkdown); - writeFileSync(join(base, '.gsd', 'PROJECT.md'), PROJECT_CONTENT); + mkdirSync(join(base, '.sf'), { recursive: true }); + writeFileSync(join(base, '.sf', 'DECISIONS.md'), decisionsMarkdown); + writeFileSync(join(base, '.sf', 'REQUIREMENTS.md'), requirementsMarkdown); + writeFileSync(join(base, '.sf', 'PROJECT.md'), PROJECT_CONTENT); // Open :memory: DB and import openDatabase(':memory:'); @@ -164,8 +164,8 @@ console.log('\n=== token-savings: plan-slice prompt ≥30% character savings === const dbRequirementsContent = formatRequirementsForPrompt(scopedRequirements); // ── Full-markdown equivalents (what inlineGsdRootFile would return) ── - const fullDecisionsContent = readFileSync(join(base, '.gsd', 'DECISIONS.md'), 'utf-8'); - const fullRequirementsContent = readFileSync(join(base, '.gsd', 'REQUIREMENTS.md'), 'utf-8'); + const fullDecisionsContent = readFileSync(join(base, '.sf', 'DECISIONS.md'), 'utf-8'); + const fullRequirementsContent = readFileSync(join(base, '.sf', 'REQUIREMENTS.md'), 'utf-8'); // DB-scoped total vs full-markdown total const dbTotal = dbDecisionsContent.length + dbRequirementsContent.length; @@ -225,10 +225,10 @@ console.log('\n=== token-savings: plan-slice prompt ≥30% character savings === console.log('\n=== token-savings: research-milestone prompt shows meaningful savings ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-token-savings-')); - mkdirSync(join(base, 
'.gsd'), { recursive: true }); - writeFileSync(join(base, '.gsd', 'DECISIONS.md'), decisionsMarkdown); - writeFileSync(join(base, '.gsd', 'REQUIREMENTS.md'), requirementsMarkdown); - writeFileSync(join(base, '.gsd', 'PROJECT.md'), PROJECT_CONTENT); + mkdirSync(join(base, '.sf'), { recursive: true }); + writeFileSync(join(base, '.sf', 'DECISIONS.md'), decisionsMarkdown); + writeFileSync(join(base, '.sf', 'REQUIREMENTS.md'), requirementsMarkdown); + writeFileSync(join(base, '.sf', 'PROJECT.md'), PROJECT_CONTENT); openDatabase(':memory:'); migrateFromMarkdown(base); @@ -239,8 +239,8 @@ console.log('\n=== token-savings: research-milestone prompt shows meaningful sav const dbDecisionsContent = formatDecisionsForPrompt(scopedDecisions); const dbRequirementsContent = formatRequirementsForPrompt(allRequirements); - const fullDecisionsContent = readFileSync(join(base, '.gsd', 'DECISIONS.md'), 'utf-8'); - const fullRequirementsContent = readFileSync(join(base, '.gsd', 'REQUIREMENTS.md'), 'utf-8'); + const fullDecisionsContent = readFileSync(join(base, '.sf', 'DECISIONS.md'), 'utf-8'); + const fullRequirementsContent = readFileSync(join(base, '.sf', 'REQUIREMENTS.md'), 'utf-8'); // Decisions should still show savings (8 of 24 scoped to M001) const decisionsSavings = ((fullDecisionsContent.length - dbDecisionsContent.length) / fullDecisionsContent.length) * 100; @@ -273,10 +273,10 @@ console.log('\n=== token-savings: research-milestone prompt shows meaningful sav console.log('\n=== token-savings: quality — correct scoping, no cross-contamination ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-token-savings-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); - writeFileSync(join(base, '.gsd', 'DECISIONS.md'), decisionsMarkdown); - writeFileSync(join(base, '.gsd', 'REQUIREMENTS.md'), requirementsMarkdown); - writeFileSync(join(base, '.gsd', 'PROJECT.md'), PROJECT_CONTENT); + mkdirSync(join(base, '.sf'), { recursive: true }); + writeFileSync(join(base, '.sf', 
'DECISIONS.md'), decisionsMarkdown); + writeFileSync(join(base, '.sf', 'REQUIREMENTS.md'), requirementsMarkdown); + writeFileSync(join(base, '.sf', 'PROJECT.md'), PROJECT_CONTENT); openDatabase(':memory:'); migrateFromMarkdown(base); diff --git a/src/resources/extensions/sf/tests/integration/worktree-e2e.test.ts b/src/resources/extensions/sf/tests/integration/worktree-e2e.test.ts index 656621323..2dcd1f31a 100644 --- a/src/resources/extensions/sf/tests/integration/worktree-e2e.test.ts +++ b/src/resources/extensions/sf/tests/integration/worktree-e2e.test.ts @@ -38,8 +38,8 @@ function createTempRepo(): string { run("git config user.email test@test.com", dir); run("git config user.name Test", dir); writeFileSync(join(dir, "README.md"), "# test\n"); - mkdirSync(join(dir, ".gsd"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "STATE.md"), "# State\n"); + mkdirSync(join(dir, ".sf"), { recursive: true }); + writeFileSync(join(dir, ".sf", "STATE.md"), "# State\n"); run("git add .", dir); run("git commit -m init", dir); run("git branch -M main", dir); @@ -64,7 +64,7 @@ function addSliceToMilestone( commits: Array<{ file: string; content: string; message: string }>, ): void { const normalizedPath = wtPath.replaceAll("\\", "/"); - const marker = "/.gsd/worktrees/"; + const marker = "/.sf/worktrees/"; const idx = normalizedPath.indexOf(marker); const worktreeName = idx !== -1 ? 
normalizedPath.slice(idx + marker.length).split("/")[0] : null; @@ -179,7 +179,7 @@ describe('worktree-e2e', async () => { tempDirs.push(repo); // Create completed milestone roadmap - const msDir = join(repo, ".gsd", "milestones", "M001"); + const msDir = join(repo, ".sf", "milestones", "M001"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "ROADMAP.md"), `--- id: M001 @@ -205,8 +205,8 @@ _None_ run("git commit -m \"add milestone\"", repo); // Create orphaned worktree - mkdirSync(join(repo, ".gsd", "worktrees"), { recursive: true }); - run("git worktree add -b milestone/M001 .gsd/worktrees/M001", repo); + mkdirSync(join(repo, ".sf", "worktrees"), { recursive: true }); + run("git worktree add -b milestone/M001 .sf/worktrees/M001", repo); // Detect const detect = await runSFDoctor(repo, { isolationMode: "worktree" }); diff --git a/src/resources/extensions/sf/tests/interrupted-session-auto.test.ts b/src/resources/extensions/sf/tests/interrupted-session-auto.test.ts index 5ae945c94..f422bf8fc 100644 --- a/src/resources/extensions/sf/tests/interrupted-session-auto.test.ts +++ b/src/resources/extensions/sf/tests/interrupted-session-auto.test.ts @@ -9,7 +9,7 @@ import { assessInterruptedSession } from "../interrupted-session.ts"; function makeTmpBase(): string { const base = join(tmpdir(), `sf-auto-interrupted-${randomUUID()}`); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); return base; } @@ -18,7 +18,7 @@ function cleanup(base: string): void { } function writeRoadmap(base: string, checked = false): void { - const milestoneDir = join(base, ".gsd", "milestones", "M001"); + const milestoneDir = join(base, ".sf", "milestones", "M001"); mkdirSync(join(milestoneDir, "slices", "S01", "tasks"), { recursive: true }); writeFileSync( join(milestoneDir, "M001-ROADMAP.md"), @@ -49,7 +49,7 @@ function writeRoadmap(base: string, checked = false): void { } function writeCompleteArtifacts(base: string): void 
{ - const milestoneDir = join(base, ".gsd", "milestones", "M001"); + const milestoneDir = join(base, ".sf", "milestones", "M001"); const sliceDir = join(milestoneDir, "slices", "S01"); mkdirSync(sliceDir, { recursive: true }); writeFileSync(join(sliceDir, "S01-SUMMARY.md"), "# Summary\nDone.\n", "utf-8"); @@ -59,7 +59,7 @@ function writeCompleteArtifacts(base: string): void { function writeLock(base: string, unitType: string, unitId: string): void { writeFileSync( - join(base, ".gsd", "auto.lock"), + join(base, ".sf", "auto.lock"), JSON.stringify({ pid: 999999999, startedAt: new Date().toISOString(), @@ -72,7 +72,7 @@ function writeLock(base: string, unitType: string, unitId: string): void { } function writePausedSession(base: string, milestoneId = "M001", stepMode = false): void { - const runtimeDir = join(base, ".gsd", "runtime"); + const runtimeDir = join(base, ".sf", "runtime"); mkdirSync(runtimeDir, { recursive: true }); writeFileSync( join(runtimeDir, "paused-session.json"), diff --git a/src/resources/extensions/sf/tests/interrupted-session-ui.test.ts b/src/resources/extensions/sf/tests/interrupted-session-ui.test.ts index 2dbb39fac..2c682c3b2 100644 --- a/src/resources/extensions/sf/tests/interrupted-session-ui.test.ts +++ b/src/resources/extensions/sf/tests/interrupted-session-ui.test.ts @@ -9,7 +9,7 @@ import { assessInterruptedSession } from "../interrupted-session.ts"; function makeTmpBase(): string { const base = join(tmpdir(), `sf-smart-entry-${randomUUID()}`); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); return base; } @@ -18,7 +18,7 @@ function cleanup(base: string): void { } function writeRoadmap(base: string, checked = false): void { - const milestoneDir = join(base, ".gsd", "milestones", "M001"); + const milestoneDir = join(base, ".sf", "milestones", "M001"); mkdirSync(join(milestoneDir, "slices", "S01", "tasks"), { recursive: true }); writeFileSync( join(milestoneDir, 
"M001-ROADMAP.md"), @@ -49,7 +49,7 @@ function writeRoadmap(base: string, checked = false): void { } function writeCompleteArtifacts(base: string): void { - const milestoneDir = join(base, ".gsd", "milestones", "M001"); + const milestoneDir = join(base, ".sf", "milestones", "M001"); const sliceDir = join(milestoneDir, "slices", "S01"); mkdirSync(sliceDir, { recursive: true }); writeFileSync(join(sliceDir, "S01-SUMMARY.md"), "# Summary\nDone.\n", "utf-8"); @@ -58,7 +58,7 @@ function writeCompleteArtifacts(base: string): void { } function writePausedSession(base: string, milestoneId = "M001", stepMode = false): void { - const runtimeDir = join(base, ".gsd", "runtime"); + const runtimeDir = join(base, ".sf", "runtime"); mkdirSync(runtimeDir, { recursive: true }); writeFileSync( join(runtimeDir, "paused-session.json"), @@ -69,7 +69,7 @@ function writePausedSession(base: string, milestoneId = "M001", stepMode = false function writeLock(base: string, unitType: string, unitId: string): void { writeFileSync( - join(base, ".gsd", "auto.lock"), + join(base, ".sf", "auto.lock"), JSON.stringify({ pid: 999999999, startedAt: new Date().toISOString(), diff --git a/src/resources/extensions/sf/tests/journal-integration.test.ts b/src/resources/extensions/sf/tests/journal-integration.test.ts index eb6048e0d..b10a33f36 100644 --- a/src/resources/extensions/sf/tests/journal-integration.test.ts +++ b/src/resources/extensions/sf/tests/journal-integration.test.ts @@ -285,7 +285,7 @@ test("runDispatch checks prior-slice completion against the project root in work const ic = makeIC(deps, { s: { ...makeSession(), - basePath: "/tmp/project/.gsd/worktrees/M029-xoklo9", + basePath: "/tmp/project/.sf/worktrees/M029-xoklo9", originalBasePath: "/tmp/project", } as any, }); diff --git a/src/resources/extensions/sf/tests/journal-query-tool.test.ts b/src/resources/extensions/sf/tests/journal-query-tool.test.ts index d592baf9c..487c6f19d 100644 --- 
a/src/resources/extensions/sf/tests/journal-query-tool.test.ts +++ b/src/resources/extensions/sf/tests/journal-query-tool.test.ts @@ -20,7 +20,7 @@ function makeMockPi() { function makeTmpBase(): string { const base = join(tmpdir(), `sf-journal-tool-test-${randomUUID()}`); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); return base; } diff --git a/src/resources/extensions/sf/tests/journal.test.ts b/src/resources/extensions/sf/tests/journal.test.ts index e5c402828..6317c0634 100644 --- a/src/resources/extensions/sf/tests/journal.test.ts +++ b/src/resources/extensions/sf/tests/journal.test.ts @@ -22,7 +22,7 @@ import { function makeTmpBase(): string { const base = join(tmpdir(), `sf-journal-test-${randomUUID()}`); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); return base; } @@ -55,7 +55,7 @@ describe("emitJournalEvent", () => { const entry = makeEntry(); emitJournalEvent(base, entry); - const filePath = join(base, ".gsd", "journal", "2025-03-21.jsonl"); + const filePath = join(base, ".sf", "journal", "2025-03-21.jsonl"); assert.ok(existsSync(filePath), "JSONL file should exist"); const raw = readFileSync(filePath, "utf-8").trim(); @@ -71,7 +71,7 @@ describe("emitJournalEvent", () => { emitJournalEvent(base, makeEntry({ seq: 1, eventType: "dispatch-match" })); emitJournalEvent(base, makeEntry({ seq: 2, eventType: "unit-start" })); - const filePath = join(base, ".gsd", "journal", "2025-03-21.jsonl"); + const filePath = join(base, ".sf", "journal", "2025-03-21.jsonl"); const lines = readFileSync(filePath, "utf-8").trim().split("\n"); assert.equal(lines.length, 3, "Should have 3 lines"); @@ -90,7 +90,7 @@ describe("emitJournalEvent", () => { }); emitJournalEvent(base, entry); - const filePath = join(base, ".gsd", "journal", "2025-03-21.jsonl"); + const filePath = join(base, ".sf", "journal", "2025-03-21.jsonl"); const parsed = 
JSON.parse(readFileSync(filePath, "utf-8").trim()); assert.equal(parsed.rule, "my-dispatch-rule"); assert.deepEqual(parsed.causedBy, { flowId: "flow-prior", seq: 3 }); @@ -99,7 +99,7 @@ describe("emitJournalEvent", () => { }); test("silently catches read-only directory errors", () => { - const journalDir = join(base, ".gsd", "journal"); + const journalDir = join(base, ".sf", "journal"); mkdirSync(journalDir, { recursive: true }); // Make the journal directory read-only @@ -123,13 +123,13 @@ describe("emitJournalEvent — auto-creates parent directory", () => { let base: string; beforeEach(() => { base = join(tmpdir(), `sf-journal-test-${randomUUID()}`); - // Don't create .gsd/ — emitJournalEvent should handle it via mkdirSync recursive + // Don't create .sf/ — emitJournalEvent should handle it via mkdirSync recursive }); afterEach(() => { cleanup(base); }); test("auto-creates nonexistent parent directory", () => { emitJournalEvent(base, makeEntry()); - const filePath = join(base, ".gsd", "journal", "2025-03-21.jsonl"); + const filePath = join(base, ".sf", "journal", "2025-03-21.jsonl"); assert.ok(existsSync(filePath), "File should exist even when parent dirs did not"); }); }); @@ -153,7 +153,7 @@ describe("daily rotation", () => { emitJournalEvent(base, makeEntry({ ts: "2025-03-21T00:00:01.000Z" })); emitJournalEvent(base, makeEntry({ ts: "2025-03-22T12:00:00.000Z" })); - const journalDir = join(base, ".gsd", "journal"); + const journalDir = join(base, ".sf", "journal"); assert.ok(existsSync(join(journalDir, "2025-03-20.jsonl"))); assert.ok(existsSync(join(journalDir, "2025-03-21.jsonl"))); assert.ok(existsSync(join(journalDir, "2025-03-22.jsonl"))); @@ -276,7 +276,7 @@ describe("queryJournal", () => { }); test("skips malformed JSON lines gracefully", () => { - const journalDir = join(base, ".gsd", "journal"); + const journalDir = join(base, ".sf", "journal"); mkdirSync(journalDir, { recursive: true }); // Write a file with a mix of valid and invalid lines diff --git 
a/src/resources/extensions/sf/tests/knowledge.test.ts b/src/resources/extensions/sf/tests/knowledge.test.ts index 2edd1ec62..04ccd3342 100644 --- a/src/resources/extensions/sf/tests/knowledge.test.ts +++ b/src/resources/extensions/sf/tests/knowledge.test.ts @@ -30,7 +30,7 @@ test('knowledge: KNOWLEDGE key exists in SF_ROOT_FILES', () => { test('knowledge: resolveSfRootFile returns canonical path when KNOWLEDGE.md exists', () => { const tmp = realpathSync(mkdtempSync(join(tmpdir(), 'sf-knowledge-'))); - const sfDir = join(tmp, '.gsd'); + const sfDir = join(tmp, '.sf'); mkdirSync(sfDir, { recursive: true }); writeFileSync(join(sfDir, 'KNOWLEDGE.md'), '# Project Knowledge\n'); @@ -42,7 +42,7 @@ test('knowledge: resolveSfRootFile returns canonical path when KNOWLEDGE.md exis test('knowledge: resolveSfRootFile resolves when legacy knowledge.md exists', () => { const tmp = realpathSync(mkdtempSync(join(tmpdir(), 'sf-knowledge-'))); - const sfDir = join(tmp, '.gsd'); + const sfDir = join(tmp, '.sf'); mkdirSync(sfDir, { recursive: true }); writeFileSync(join(sfDir, 'knowledge.md'), '# Project Knowledge\n'); @@ -61,7 +61,7 @@ test('knowledge: resolveSfRootFile resolves when legacy knowledge.md exists', () test('knowledge: resolveSfRootFile returns canonical path when file does not exist', () => { const tmp = realpathSync(mkdtempSync(join(tmpdir(), 'sf-knowledge-'))); - const sfDir = join(tmp, '.gsd'); + const sfDir = join(tmp, '.sf'); mkdirSync(sfDir, { recursive: true }); const resolved = resolveSfRootFile(tmp, 'KNOWLEDGE'); @@ -74,7 +74,7 @@ test('knowledge: resolveSfRootFile returns canonical path when file does not exi test('knowledge: inlineGsdRootFile returns content when KNOWLEDGE.md exists', async () => { const tmp = mkdtempSync(join(tmpdir(), 'sf-knowledge-')); - const sfDir = join(tmp, '.gsd'); + const sfDir = join(tmp, '.sf'); mkdirSync(sfDir, { recursive: true }); writeFileSync(join(sfDir, 'KNOWLEDGE.md'), '# Project Knowledge\n\n## Rules\n\nK001: Use real DB'); 
@@ -88,7 +88,7 @@ test('knowledge: inlineGsdRootFile returns content when KNOWLEDGE.md exists', as test('knowledge: inlineGsdRootFile returns null when KNOWLEDGE.md does not exist', async () => { const tmp = mkdtempSync(join(tmpdir(), 'sf-knowledge-')); - const sfDir = join(tmp, '.gsd'); + const sfDir = join(tmp, '.sf'); mkdirSync(sfDir, { recursive: true }); const result = await inlineGsdRootFile(tmp, 'knowledge.md', 'Project Knowledge'); @@ -101,7 +101,7 @@ test('knowledge: inlineGsdRootFile returns null when KNOWLEDGE.md does not exist test('knowledge: appendKnowledge creates KNOWLEDGE.md with rule when file does not exist', async () => { const tmp = mkdtempSync(join(tmpdir(), 'sf-knowledge-')); - const sfDir = join(tmp, '.gsd'); + const sfDir = join(tmp, '.sf'); mkdirSync(sfDir, { recursive: true }); await appendKnowledge(tmp, 'rule', 'Use real DB for integration tests', 'M001/S01'); @@ -117,7 +117,7 @@ test('knowledge: appendKnowledge creates KNOWLEDGE.md with rule when file does n test('knowledge: appendKnowledge appends to existing KNOWLEDGE.md with auto-incrementing ID', async () => { const tmp = mkdtempSync(join(tmpdir(), 'sf-knowledge-')); - const sfDir = join(tmp, '.gsd'); + const sfDir = join(tmp, '.sf'); mkdirSync(sfDir, { recursive: true }); // Create initial file with one rule @@ -136,7 +136,7 @@ test('knowledge: appendKnowledge appends to existing KNOWLEDGE.md with auto-incr test('knowledge: appendKnowledge handles pattern type', async () => { const tmp = mkdtempSync(join(tmpdir(), 'sf-knowledge-')); - const sfDir = join(tmp, '.gsd'); + const sfDir = join(tmp, '.sf'); mkdirSync(sfDir, { recursive: true }); await appendKnowledge(tmp, 'pattern', 'Middleware chain for auth', 'M001'); @@ -150,7 +150,7 @@ test('knowledge: appendKnowledge handles pattern type', async () => { test('knowledge: appendKnowledge handles lesson type', async () => { const tmp = mkdtempSync(join(tmpdir(), 'sf-knowledge-')); - const sfDir = join(tmp, '.gsd'); + const sfDir = 
join(tmp, '.sf'); mkdirSync(sfDir, { recursive: true }); await appendKnowledge(tmp, 'lesson', 'API timeout on large payloads', 'M002'); @@ -168,7 +168,7 @@ test('loadKnowledgeBlock: returns empty block when neither file exists', () => { const tmp = realpathSync(mkdtempSync(join(tmpdir(), 'sf-kb-'))); const sfHome = join(tmp, 'home'); const cwd = join(tmp, 'project'); - mkdirSync(join(cwd, '.gsd'), { recursive: true }); + mkdirSync(join(cwd, '.sf'), { recursive: true }); mkdirSync(join(sfHome, 'agent'), { recursive: true }); const result = loadKnowledgeBlock(sfHome, cwd); @@ -182,9 +182,9 @@ test('loadKnowledgeBlock: uses project knowledge alone when no global file', () const tmp = realpathSync(mkdtempSync(join(tmpdir(), 'sf-kb-'))); const sfHome = join(tmp, 'home'); const cwd = join(tmp, 'project'); - mkdirSync(join(cwd, '.gsd'), { recursive: true }); + mkdirSync(join(cwd, '.sf'), { recursive: true }); mkdirSync(join(sfHome, 'agent'), { recursive: true }); - writeFileSync(join(cwd, '.gsd', 'KNOWLEDGE.md'), 'K001: Use real DB'); + writeFileSync(join(cwd, '.sf', 'KNOWLEDGE.md'), 'K001: Use real DB'); const result = loadKnowledgeBlock(sfHome, cwd); assert.ok(result.block.includes('[KNOWLEDGE — Rules, patterns, and lessons learned]')); @@ -200,7 +200,7 @@ test('loadKnowledgeBlock: uses global knowledge alone when no project file', () const tmp = realpathSync(mkdtempSync(join(tmpdir(), 'sf-kb-'))); const sfHome = join(tmp, 'home'); const cwd = join(tmp, 'project'); - mkdirSync(join(cwd, '.gsd'), { recursive: true }); + mkdirSync(join(cwd, '.sf'), { recursive: true }); mkdirSync(join(sfHome, 'agent'), { recursive: true }); writeFileSync(join(sfHome, 'agent', 'KNOWLEDGE.md'), 'G001: Respond in English'); @@ -218,10 +218,10 @@ test('loadKnowledgeBlock: merges global before project when both exist', () => { const tmp = realpathSync(mkdtempSync(join(tmpdir(), 'sf-kb-'))); const sfHome = join(tmp, 'home'); const cwd = join(tmp, 'project'); - mkdirSync(join(cwd, '.gsd'), { 
recursive: true }); + mkdirSync(join(cwd, '.sf'), { recursive: true }); mkdirSync(join(sfHome, 'agent'), { recursive: true }); writeFileSync(join(sfHome, 'agent', 'KNOWLEDGE.md'), 'G001: Global rule'); - writeFileSync(join(cwd, '.gsd', 'KNOWLEDGE.md'), 'K001: Project rule'); + writeFileSync(join(cwd, '.sf', 'KNOWLEDGE.md'), 'K001: Project rule'); const result = loadKnowledgeBlock(sfHome, cwd); assert.ok(result.block.includes('## Global Knowledge')); @@ -238,7 +238,7 @@ test('loadKnowledgeBlock: reports globalSizeKb above 4KB threshold', () => { const tmp = realpathSync(mkdtempSync(join(tmpdir(), 'sf-kb-'))); const sfHome = join(tmp, 'home'); const cwd = join(tmp, 'project'); - mkdirSync(join(cwd, '.gsd'), { recursive: true }); + mkdirSync(join(cwd, '.sf'), { recursive: true }); mkdirSync(join(sfHome, 'agent'), { recursive: true }); // Write > 4KB of content writeFileSync(join(sfHome, 'agent', 'KNOWLEDGE.md'), 'x'.repeat(5000)); diff --git a/src/resources/extensions/sf/tests/manifest-status.test.ts b/src/resources/extensions/sf/tests/manifest-status.test.ts index 71fe506bf..69444dd15 100644 --- a/src/resources/extensions/sf/tests/manifest-status.test.ts +++ b/src/resources/extensions/sf/tests/manifest-status.test.ts @@ -5,7 +5,7 @@ * pending, collected, skipped, and existing arrays based on * manifest status and environment presence. * - * Uses temp directories with real .gsd/milestones/M001/ structure. + * Uses temp directories with real .sf/milestones/M001/ structure. */ import { describe, test, beforeEach, afterEach } from 'node:test'; @@ -21,9 +21,9 @@ function makeTempDir(prefix: string): string { return dir; } -/** Create the .gsd/milestones/M001/ directory structure and write a secrets manifest. */ +/** Create the .sf/milestones/M001/ directory structure and write a secrets manifest. 
*/ function writeManifest(base: string, content: string): void { - const mDir = join(base, '.gsd', 'milestones', 'M001'); + const mDir = join(base, '.sf', 'milestones', 'M001'); mkdirSync(mDir, { recursive: true }); writeFileSync(join(mDir, 'M001-SECRETS.md'), content); } @@ -182,7 +182,7 @@ describe('getManifestStatus: simple temp dir tests', () => { // ─── Missing manifest ──────────────────────────────────────────────────────── test('missing manifest — returns null', async () => { - // No .gsd directory at all + // No .sf directory at all const result = await getManifestStatus(tmp, 'M001'); assert.strictEqual(result, null); }); diff --git a/src/resources/extensions/sf/tests/markdown-renderer.test.ts b/src/resources/extensions/sf/tests/markdown-renderer.test.ts index 8607eba26..687a2f066 100644 --- a/src/resources/extensions/sf/tests/markdown-renderer.test.ts +++ b/src/resources/extensions/sf/tests/markdown-renderer.test.ts @@ -46,7 +46,7 @@ import assert from 'node:assert/strict'; function makeTmpDir(): string { const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'sf-renderer-')); - fs.mkdirSync(path.join(dir, '.gsd'), { recursive: true }); + fs.mkdirSync(path.join(dir, '.sf'), { recursive: true }); return dir; } @@ -68,7 +68,7 @@ function clearAllCaches(): void { * so that path resolvers work correctly. 
*/ function scaffoldDirs(tmpDir: string, mid: string, sliceIds: string[]): void { - const msDir = path.join(tmpDir, '.gsd', 'milestones', mid); + const msDir = path.join(tmpDir, '.sf', 'milestones', mid); fs.mkdirSync(msDir, { recursive: true }); for (const sid of sliceIds) { @@ -245,7 +245,7 @@ test('── markdown-renderer: getArtifact accessor ──', () => { test('── markdown-renderer: renderRoadmapCheckboxes round-trip ──', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -262,7 +262,7 @@ test('── markdown-renderer: renderRoadmapCheckboxes round-trip ──', asyn { id: 'S01', title: 'Core setup', done: false }, { id: 'S02', title: 'Rendering', done: false }, ]); - const roadmapPath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md'); + const roadmapPath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'M001-ROADMAP.md'); fs.writeFileSync(roadmapPath, roadmapContent); clearAllCaches(); @@ -297,7 +297,7 @@ test('── markdown-renderer: renderRoadmapCheckboxes round-trip ──', asyn test('── markdown-renderer: renderRoadmapCheckboxes bidirectional ──', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -314,7 +314,7 @@ test('── markdown-renderer: renderRoadmapCheckboxes bidirectional ──', a { id: 'S01', title: 'Core setup', done: true }, { id: 'S02', title: 'Rendering', done: false }, ]); - const roadmapPath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md'); + const roadmapPath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'M001-ROADMAP.md'); fs.writeFileSync(roadmapPath, roadmapContent); clearAllCaches(); @@ -341,7 +341,7 @@ test('── markdown-renderer: renderRoadmapCheckboxes bidirectional ──', a test('── markdown-renderer: renderPlanCheckboxes round-trip ──', 
async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -360,7 +360,7 @@ test('── markdown-renderer: renderPlanCheckboxes round-trip ──', async ( { id: 'T02', title: 'Second task', done: false }, { id: 'T03', title: 'Third task', done: false }, ]); - const planPath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); + const planPath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); fs.writeFileSync(planPath, planContent); clearAllCaches(); @@ -387,7 +387,7 @@ test('── markdown-renderer: renderPlanCheckboxes round-trip ──', async ( test('── markdown-renderer: renderPlanCheckboxes bidirectional ──', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -404,7 +404,7 @@ test('── markdown-renderer: renderPlanCheckboxes bidirectional ──', asyn { id: 'T01', title: 'First task', done: true }, // checked but DB says pending { id: 'T02', title: 'Second task', done: false }, // unchecked but DB says done ]); - const planPath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); + const planPath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); fs.writeFileSync(planPath, planContent); clearAllCaches(); @@ -427,7 +427,7 @@ test('── markdown-renderer: renderPlanCheckboxes bidirectional ──', asyn test('── markdown-renderer: renderPlanFromDb creates parse-compatible slice plan + task plan files ──', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -504,7 +504,7 @@ test('── markdown-renderer: renderPlanFromDb creates parse-compatible slice 
assert.ok(planArtifact !== null, 'slice plan artifact stored in DB'); assert.ok(planArtifact!.full_content.includes('## Tasks'), 'stored plan artifact contains task section'); - const taskPlanPath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T01-PLAN.md'); + const taskPlanPath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T01-PLAN.md'); const taskPlanContent = fs.readFileSync(taskPlanPath, 'utf-8'); const taskPlanFile = parseTaskPlanFile(taskPlanContent); assert.strictEqual(taskPlanFile.frontmatter.estimated_steps, 1, 'task plan frontmatter exposes estimated_steps'); @@ -526,7 +526,7 @@ test('── markdown-renderer: renderPlanFromDb creates parse-compatible slice test('── markdown-renderer: renderTaskPlanFromDb throws for missing task ──', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -556,7 +556,7 @@ test('── markdown-renderer: renderTaskPlanFromDb throws for missing task ─ test('── markdown-renderer: renderTaskSummary round-trip ──', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -581,7 +581,7 @@ test('── markdown-renderer: renderTaskSummary round-trip ──', async () = // Verify file exists on disk const summaryPath = path.join( - tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'tasks', 'T01-SUMMARY.md', + tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'tasks', 'T01-SUMMARY.md', ); assert.ok(fs.existsSync(summaryPath), 'T01-SUMMARY.md written to disk'); @@ -603,7 +603,7 @@ test('── markdown-renderer: renderTaskSummary round-trip ──', async () = test('── markdown-renderer: renderTaskSummary skips empty ──', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const 
dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -635,7 +635,7 @@ test('── markdown-renderer: renderTaskSummary skips empty ──', async () test('── markdown-renderer: renderSliceSummary round-trip ──', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -661,7 +661,7 @@ test('── markdown-renderer: renderSliceSummary round-trip ──', async () // Verify SUMMARY file const summaryPath = path.join( - tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-SUMMARY.md', + tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-SUMMARY.md', ); assert.ok(fs.existsSync(summaryPath), 'S01-SUMMARY.md written to disk'); @@ -670,7 +670,7 @@ test('── markdown-renderer: renderSliceSummary round-trip ──', async () // Verify UAT file const uatPath = path.join( - tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-UAT.md', + tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-UAT.md', ); assert.ok(fs.existsSync(uatPath), 'S01-UAT.md written to disk'); @@ -688,7 +688,7 @@ test('── markdown-renderer: renderSliceSummary round-trip ──', async () test('── markdown-renderer: renderAllFromDb produces all files ──', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -714,7 +714,7 @@ test('── markdown-renderer: renderAllFromDb produces all files ──', asyn { id: 'S02', title: 'Render', done: false }, ]); fs.writeFileSync( - path.join(tmpDir, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md'), + path.join(tmpDir, '.sf', 'milestones', 'M001', 'M001-ROADMAP.md'), roadmap1, ); @@ -722,7 +722,7 @@ test('── markdown-renderer: renderAllFromDb produces all files ──', asyn { id: 'S01', title: 'Future', done: false }, ]); fs.writeFileSync( - path.join(tmpDir, '.gsd', 
'milestones', 'M002', 'M002-ROADMAP.md'), + path.join(tmpDir, '.sf', 'milestones', 'M002', 'M002-ROADMAP.md'), roadmap2, ); @@ -730,7 +730,7 @@ test('── markdown-renderer: renderAllFromDb produces all files ──', asyn { id: 'T01', title: 'DB', done: false }, ]); fs.writeFileSync( - path.join(tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'), + path.join(tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'), plan1, ); @@ -738,7 +738,7 @@ test('── markdown-renderer: renderAllFromDb produces all files ──', asyn { id: 'T01', title: 'Renderer', done: false }, ]); fs.writeFileSync( - path.join(tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'S02-PLAN.md'), + path.join(tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S02', 'S02-PLAN.md'), plan2, ); @@ -746,7 +746,7 @@ test('── markdown-renderer: renderAllFromDb produces all files ──', asyn { id: 'T01', title: 'Future task', done: false }, ]); fs.writeFileSync( - path.join(tmpDir, '.gsd', 'milestones', 'M002', 'slices', 'S01', 'S01-PLAN.md'), + path.join(tmpDir, '.sf', 'milestones', 'M002', 'slices', 'S01', 'S01-PLAN.md'), plan3, ); @@ -759,7 +759,7 @@ test('── markdown-renderer: renderAllFromDb produces all files ──', asyn // Verify M001 roadmap has S01 checked const m1Roadmap = fs.readFileSync( - path.join(tmpDir, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md'), 'utf-8', + path.join(tmpDir, '.sf', 'milestones', 'M001', 'M001-ROADMAP.md'), 'utf-8', ); clearAllCaches(); const parsed1 = parseRoadmap(m1Roadmap); @@ -768,7 +768,7 @@ test('── markdown-renderer: renderAllFromDb produces all files ──', asyn // Verify M001/S01 plan has T01 checked const m1s1Plan = fs.readFileSync( - path.join(tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'), 'utf-8', + path.join(tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'), 'utf-8', ); clearAllCaches(); const parsedPlan = parsePlan(m1s1Plan); @@ -776,7 +776,7 @@ test('── markdown-renderer: renderAllFromDb 
produces all files ──', asyn // Verify task summary written const taskSummaryPath = path.join( - tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'tasks', 'T01-SUMMARY.md', + tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'tasks', 'T01-SUMMARY.md', ); assert.ok(fs.existsSync(taskSummaryPath), 'T01 summary written by renderAll'); } finally { @@ -791,7 +791,7 @@ test('── markdown-renderer: renderAllFromDb produces all files ──', asyn test('── markdown-renderer: graceful fallback reads from disk when artifact not in DB ──', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -805,7 +805,7 @@ test('── markdown-renderer: graceful fallback reads from disk when artifact const roadmapContent = makeRoadmapContent([ { id: 'S01', title: 'Core', done: false }, ]); - const roadmapPath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md'); + const roadmapPath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'M001-ROADMAP.md'); fs.writeFileSync(roadmapPath, roadmapContent); clearAllCaches(); @@ -850,7 +850,7 @@ test('── markdown-renderer: stderr warning on missing content ──', async test('── markdown-renderer: detectStaleRenders finds plan checkbox mismatch ──', () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -870,7 +870,7 @@ test('── markdown-renderer: detectStaleRenders finds plan checkbox mismatch { id: 'T01', title: 'First task', done: true }, { id: 'T02', title: 'Second task', done: false }, ]); - const planPath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); + const planPath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); fs.writeFileSync(planPath, planContent); clearAllCaches(); @@ -899,7 +899,7 @@ test('── 
markdown-renderer: detectStaleRenders finds plan checkbox mismatch test('── markdown-renderer: repairStaleRenders fixes plan and second detect returns empty ──', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -917,7 +917,7 @@ test('── markdown-renderer: repairStaleRenders fixes plan and second detect { id: 'T01', title: 'First task', done: false }, { id: 'T02', title: 'Second task', done: false }, ]); - const planPath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); + const planPath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); fs.writeFileSync(planPath, planContent); clearAllCaches(); @@ -950,7 +950,7 @@ test('── markdown-renderer: repairStaleRenders fixes plan and second detect test('── markdown-renderer: detectStaleRenders finds roadmap checkbox mismatch ──', () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -966,7 +966,7 @@ test('── markdown-renderer: detectStaleRenders finds roadmap checkbox mismat { id: 'S01', title: 'Core', done: false }, { id: 'S02', title: 'Render', done: false }, ]); - const roadmapPath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md'); + const roadmapPath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'M001-ROADMAP.md'); fs.writeFileSync(roadmapPath, roadmapContent); clearAllCaches(); @@ -988,7 +988,7 @@ test('── markdown-renderer: detectStaleRenders finds roadmap checkbox mismat test('── markdown-renderer: detectStaleRenders finds missing task summary ──', () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -1015,7 +1015,7 @@ test('── 
markdown-renderer: detectStaleRenders finds missing task summary const planContent = makePlanContent('S01', [ { id: 'T01', title: 'Task', done: true }, ]); - const planPath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); + const planPath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); fs.writeFileSync(planPath, planContent); clearAllCaches(); @@ -1035,7 +1035,7 @@ test('── markdown-renderer: detectStaleRenders finds missing task summary test('── markdown-renderer: repairStaleRenders writes missing task summary ──', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); @@ -1059,7 +1059,7 @@ test('── markdown-renderer: repairStaleRenders writes missing task summary const planContent = makePlanContent('S01', [ { id: 'T01', title: 'Task', done: true }, ]); - const planPath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); + const planPath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); fs.writeFileSync(planPath, planContent); clearAllCaches(); @@ -1069,7 +1069,7 @@ test('── markdown-renderer: repairStaleRenders writes missing task summary // Verify file written const summaryPath = path.join( - tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'tasks', 'T01-SUMMARY.md', + tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'tasks', 'T01-SUMMARY.md', ); assert.ok(fs.existsSync(summaryPath), 'T01-SUMMARY.md should exist after repair'); @@ -1090,7 +1090,7 @@ test('── markdown-renderer: repairStaleRenders writes missing task summary test('── markdown-renderer: repairStaleRenders idempotency — fully synced returns 0 ──', async () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); 
clearAllCaches(); @@ -1105,7 +1105,7 @@ test('── markdown-renderer: repairStaleRenders idempotency — fully synced const planContent = makePlanContent('S01', [ { id: 'T01', title: 'Task', done: true }, ]); - const planPath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); + const planPath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); fs.writeFileSync(planPath, planContent); clearAllCaches(); @@ -1124,7 +1124,7 @@ test('── markdown-renderer: repairStaleRenders idempotency — fully synced test('── markdown-renderer: detectStaleRenders finds missing slice summary and UAT ──', () => { const tmpDir = makeTmpDir(); - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); clearAllCaches(); diff --git a/src/resources/extensions/sf/tests/mcp-project-config.test.ts b/src/resources/extensions/sf/tests/mcp-project-config.test.ts index dff6fd45d..8b6befb67 100644 --- a/src/resources/extensions/sf/tests/mcp-project-config.test.ts +++ b/src/resources/extensions/sf/tests/mcp-project-config.test.ts @@ -11,7 +11,7 @@ import { test("ensureProjectWorkflowMcpConfig creates .mcp.json with the workflow server", () => { const projectRoot = mkdtempSync(join(tmpdir(), "sf-mcp-init-")); - mkdirSync(join(projectRoot, ".gsd"), { recursive: true }); + mkdirSync(join(projectRoot, ".sf"), { recursive: true }); try { const result = ensureProjectWorkflowMcpConfig(projectRoot); @@ -39,7 +39,7 @@ test("ensureProjectWorkflowMcpConfig creates .mcp.json with the workflow server" test("ensureProjectWorkflowMcpConfig preserves existing mcp servers", () => { const projectRoot = mkdtempSync(join(tmpdir(), "sf-mcp-init-")); - mkdirSync(join(projectRoot, ".gsd"), { recursive: true }); + mkdirSync(join(projectRoot, ".sf"), { recursive: true }); const configPath = join(projectRoot, ".mcp.json"); writeFileSync( @@ -74,7 +74,7 @@ test("ensureProjectWorkflowMcpConfig preserves 
existing mcp servers", () => { test("ensureProjectWorkflowMcpConfig is idempotent when config is already current", () => { const projectRoot = mkdtempSync(join(tmpdir(), "sf-mcp-init-")); - mkdirSync(join(projectRoot, ".gsd"), { recursive: true }); + mkdirSync(join(projectRoot, ".sf"), { recursive: true }); try { const first = ensureProjectWorkflowMcpConfig(projectRoot); diff --git a/src/resources/extensions/sf/tests/md-importer.test.ts b/src/resources/extensions/sf/tests/md-importer.test.ts index ba52a1a61..831618e12 100644 --- a/src/resources/extensions/sf/tests/md-importer.test.ts +++ b/src/resources/extensions/sf/tests/md-importer.test.ts @@ -28,7 +28,7 @@ const DECISIONS_MD = `# Decisions Register | # | When | Scope | Decision | Choice | Rationale | Revisable? | |---|------|-------|----------|--------|-----------|------------| | D001 | M001 | library | SQLite library | better-sqlite3 | Sync API | No | -| D002 | M001 | arch | DB location | .gsd/sf.db | Derived state | No | +| D002 | M001 | arch | DB location | .sf/sf.db | Derived state | No | | D010 | M001/S01 | library | Provider strategy (amends D001) | node:sqlite fallback | Zero deps | No | | D020 | M001/S02 | library | Importer approach (amends D010) | Direct parse | Simple | Yes | `; @@ -97,7 +97,7 @@ const REQUIREMENTS_MD = `# Requirements // ═══════════════════════════════════════════════════════════════════════════ function createFixtureTree(baseDir: string): void { - const sf = path.join(baseDir, '.gsd'); + const sf = path.join(baseDir, '.sf'); fs.mkdirSync(sf, { recursive: true }); fs.writeFileSync(path.join(sf, 'DECISIONS.md'), DECISIONS_MD); fs.writeFileSync(path.join(sf, 'REQUIREMENTS.md'), REQUIREMENTS_MD); @@ -194,7 +194,7 @@ test('md-importer: made_by column parsing (new 8-column format)', () => { | # | When | Scope | Decision | Choice | Rationale | Revisable? 
| Made By | |---|------|-------|----------|--------|-----------|------------|---------| | D001 | M001 | library | SQLite library | better-sqlite3 | Sync API | No | human | -| D002 | M001 | arch | DB location | .gsd/sf.db | Derived state | No | agent | +| D002 | M001 | arch | DB location | .sf/sf.db | Derived state | No | agent | | D003 | M002 | impl | Config format | JSON | Simple | Yes | collaborative | | D004 | M002 | impl | Cache strategy | LRU | Predictable | No | bogus | `; @@ -337,8 +337,8 @@ test('md-importer: idempotent re-import', () => { test('md-importer: missing file handling', () => { const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'sf-empty-test-')); - // Create empty .gsd/ with no files - fs.mkdirSync(path.join(tmpDir, '.gsd'), { recursive: true }); + // Create empty .sf/ with no files + fs.mkdirSync(path.join(tmpDir, '.sf'), { recursive: true }); try { openDatabase(':memory:'); @@ -389,7 +389,7 @@ test('md-importer: round-trip fidelity', () => { assert.deepStrictEqual(d002?.when_context, 'M001', 'D002 when_context round-trip'); assert.deepStrictEqual(d002?.scope, 'arch', 'D002 scope round-trip'); assert.deepStrictEqual(d002?.decision, 'DB location', 'D002 decision round-trip'); - assert.deepStrictEqual(d002?.choice, '.gsd/sf.db', 'D002 choice round-trip'); + assert.deepStrictEqual(d002?.choice, '.sf/sf.db', 'D002 choice round-trip'); assert.deepStrictEqual(d002?.rationale, 'Derived state', 'D002 rationale round-trip'); const r002 = getRequirementById('R002'); diff --git a/src/resources/extensions/sf/tests/measurement.test.ts b/src/resources/extensions/sf/tests/measurement.test.ts index 25785d10f..cecffa62d 100644 --- a/src/resources/extensions/sf/tests/measurement.test.ts +++ b/src/resources/extensions/sf/tests/measurement.test.ts @@ -433,7 +433,7 @@ describe("measurement: context reduction verification", () => { test("formatRoadmapExcerpt achieves ≥40% reduction", () => { const sliceId = 'S02'; - const excerptResult = 
formatRoadmapExcerpt(syntheticRoadmap, sliceId, '.gsd/milestones/M005/M005-ROADMAP.md'); + const excerptResult = formatRoadmapExcerpt(syntheticRoadmap, sliceId, '.sf/milestones/M005/M005-ROADMAP.md'); const fullSize = syntheticRoadmap.length; const excerptSize = excerptResult.length; diff --git a/src/resources/extensions/sf/tests/memory-leak-guards.test.ts b/src/resources/extensions/sf/tests/memory-leak-guards.test.ts index 1d97a1c2e..460f97c8f 100644 --- a/src/resources/extensions/sf/tests/memory-leak-guards.test.ts +++ b/src/resources/extensions/sf/tests/memory-leak-guards.test.ts @@ -35,7 +35,7 @@ test("clearActivityLogState resets dedup state so identical saves write again", // First save saveActivityLog(ctx, baseDir, "execute-task", "M001/S01/T01"); - const actDir = join(baseDir, ".gsd", "activity"); + const actDir = join(baseDir, ".sf", "activity"); assert.equal(readdirSync(actDir).length, 1, "first save creates one file"); // Same content, same unit — deduped @@ -68,7 +68,7 @@ test("saveActivityLog writes valid JSONL via streaming", () => { saveActivityLog(ctx, baseDir, "execute-task", "M002/S01/T01"); - const actDir = join(baseDir, ".gsd", "activity"); + const actDir = join(baseDir, ".sf", "activity"); const files = readdirSync(actDir); assert.equal(files.length, 1, "one file written"); diff --git a/src/resources/extensions/sf/tests/merge-conflict-stops-loop.test.ts b/src/resources/extensions/sf/tests/merge-conflict-stops-loop.test.ts index 1b6450ee7..9ff33a978 100644 --- a/src/resources/extensions/sf/tests/merge-conflict-stops-loop.test.ts +++ b/src/resources/extensions/sf/tests/merge-conflict-stops-loop.test.ts @@ -1,7 +1,7 @@ /** * merge-conflict-stops-loop.test.ts — #2330 * - * When a squash merge has real code conflicts (not just .gsd/ files), + * When a squash merge has real code conflicts (not just .sf/ files), * the merge retries forever because MergeConflictError is caught * silently in mergeAndExit. This test verifies that: * 1. 
worktree-resolver re-throws MergeConflictError for code conflicts diff --git a/src/resources/extensions/sf/tests/metrics.test.ts b/src/resources/extensions/sf/tests/metrics.test.ts index adb983147..a2850d862 100644 --- a/src/resources/extensions/sf/tests/metrics.test.ts +++ b/src/resources/extensions/sf/tests/metrics.test.ts @@ -201,7 +201,7 @@ test("old UnitMetrics without budget fields work with all aggregation functions" test("initMetrics creates ledger, snapshotUnitMetrics persists across resets", () => { const tmpBase = mkdtempSync(join(tmpdir(), "sf-metrics-test-")); - mkdirSync(join(tmpBase, ".gsd"), { recursive: true }); + mkdirSync(join(tmpBase, ".sf"), { recursive: true }); try { resetMetrics(); @@ -237,7 +237,7 @@ test("initMetrics creates ledger, snapshotUnitMetrics persists across resets", ( assert.equal(getLedger()!.units[0].id, "M001/S01/T01"); // Verify file content - const raw = readFileSync(join(tmpBase, ".gsd", "metrics.json"), "utf-8"); + const raw = readFileSync(join(tmpBase, ".sf", "metrics.json"), "utf-8"); const parsed: MetricsLedger = JSON.parse(raw); assert.equal(parsed.version, 1); assert.equal(parsed.units.length, 1); @@ -256,7 +256,7 @@ test("initMetrics creates ledger, snapshotUnitMetrics persists across resets", ( test("snapshotUnitMetrics deduplicates entries with same type+id+startedAt", () => { const tmpBase = mkdtempSync(join(tmpdir(), "sf-metrics-dedup-")); - mkdirSync(join(tmpBase, ".gsd"), { recursive: true }); + mkdirSync(join(tmpBase, ".sf"), { recursive: true }); try { initMetrics(tmpBase); const startedAt = Date.now() - 10000; @@ -301,7 +301,7 @@ test("snapshotUnitMetrics deduplicates entries with same type+id+startedAt", () test("snapshotUnitMetrics handles simulated idle-watchdog duplicate pattern", () => { const tmpBase = mkdtempSync(join(tmpdir(), "sf-metrics-watchdog-")); - mkdirSync(join(tmpBase, ".gsd"), { recursive: true }); + mkdirSync(join(tmpBase, ".sf"), { recursive: true }); try { initMetrics(tmpBase); const 
startedAt = Date.now() - 60000; @@ -326,7 +326,7 @@ test("snapshotUnitMetrics handles simulated idle-watchdog duplicate pattern", () assert.equal(getLedger()!.units.length, 1, "10 watchdog snapshots should produce 1 entry, not 10"); // Persist and verify - const raw = readFileSync(join(tmpBase, ".gsd", "metrics.json"), "utf-8"); + const raw = readFileSync(join(tmpBase, ".sf", "metrics.json"), "utf-8"); const parsed: MetricsLedger = JSON.parse(raw); assert.equal(parsed.units.length, 1); } finally { @@ -339,7 +339,7 @@ test("snapshotUnitMetrics handles simulated idle-watchdog duplicate pattern", () test("snapshotUnitMetrics counts toolCall blocks correctly (#1713)", () => { const tmpBase = mkdtempSync(join(tmpdir(), "sf-metrics-toolcall-")); - mkdirSync(join(tmpBase, ".gsd"), { recursive: true }); + mkdirSync(join(tmpBase, ".sf"), { recursive: true }); try { resetMetrics(); @@ -387,7 +387,7 @@ test("snapshotUnitMetrics counts toolCall blocks correctly (#1713)", () => { test("#1943 initMetrics deduplicates entries loaded from a corrupted disk ledger", () => { const tmpBase = mkdtempSync(join(tmpdir(), "sf-metrics-dedup-load-")); - mkdirSync(join(tmpBase, ".gsd"), { recursive: true }); + mkdirSync(join(tmpBase, ".sf"), { recursive: true }); try { resetMetrics(); @@ -407,7 +407,7 @@ test("#1943 initMetrics deduplicates entries loaded from a corrupted disk ledger ], }; writeFileSync( - join(tmpBase, ".gsd", "metrics.json"), + join(tmpBase, ".sf", "metrics.json"), JSON.stringify(corruptedLedger, null, 2), ); @@ -430,7 +430,7 @@ test("#1943 initMetrics deduplicates entries loaded from a corrupted disk ledger assert.equal(researchEntry!.cost, 1.65, "should keep the latest cost"); // The on-disk file should also be deduplicated - const diskRaw = readFileSync(join(tmpBase, ".gsd", "metrics.json"), "utf-8"); + const diskRaw = readFileSync(join(tmpBase, ".sf", "metrics.json"), "utf-8"); const diskLedger: MetricsLedger = JSON.parse(diskRaw); assert.equal(diskLedger.units.length, 
2, "disk should also have deduplicated entries"); } finally { @@ -477,11 +477,11 @@ test("#1943 getProjectTotals reports correct cost after dedup (no 35% inflation) // After loading through initMetrics (which should dedup), totals should be correct const tmpBase = mkdtempSync(join(tmpdir(), "sf-metrics-cost-inflation-")); - mkdirSync(join(tmpBase, ".gsd"), { recursive: true }); + mkdirSync(join(tmpBase, ".sf"), { recursive: true }); try { resetMetrics(); writeFileSync( - join(tmpBase, ".gsd", "metrics.json"), + join(tmpBase, ".sf", "metrics.json"), JSON.stringify({ version: 1, projectStartedAt: 1700000000000, units: duplicateUnits }, null, 2), ); initMetrics(tmpBase); diff --git a/src/resources/extensions/sf/tests/migrate-external-worktree.test.ts b/src/resources/extensions/sf/tests/migrate-external-worktree.test.ts index 6801bfa71..123c87a27 100644 --- a/src/resources/extensions/sf/tests/migrate-external-worktree.test.ts +++ b/src/resources/extensions/sf/tests/migrate-external-worktree.test.ts @@ -42,11 +42,11 @@ describe("migrate-external worktree guard (#2970)", () => { run('git commit -m "init"', base); // Create a worktree - worktreePath = join(base, ".gsd", "worktrees", "M001"); + worktreePath = join(base, ".sf", "worktrees", "M001"); run(`git worktree add -b milestone/M001 ${worktreePath}`, base); - // Populate worktree with a .gsd directory (simulating syncSfStateToWorktree) - const worktreeGsd = join(worktreePath, ".gsd"); + // Populate worktree with a .sf directory (simulating syncSfStateToWorktree) + const worktreeGsd = join(worktreePath, ".sf"); mkdirSync(worktreeGsd, { recursive: true }); writeFileSync(join(worktreeGsd, "PREFERENCES.md"), "# prefs\n", "utf-8"); }); @@ -60,23 +60,23 @@ describe("migrate-external worktree guard (#2970)", () => { }); test("migrateToExternalState skips when basePath is a git worktree", () => { - // The worktree has a real .gsd directory — migration would normally run. 
+ // The worktree has a real .sf directory — migration would normally run. // But since this is a worktree, it should be skipped. const result = migrateToExternalState(worktreePath); assert.equal(result.migrated, false, "should not migrate inside a worktree"); assert.equal(result.error, undefined, "should not report an error"); - // .gsd should still exist as a real directory (not renamed/removed) + // .sf should still exist as a real directory (not renamed/removed) assert.ok( - existsSync(join(worktreePath, ".gsd")), - ".gsd directory should still exist after skipped migration" + existsSync(join(worktreePath, ".sf")), + ".sf directory should still exist after skipped migration" ); - // .gsd.migrating should NOT exist + // .sf.migrating should NOT exist assert.ok( - !existsSync(join(worktreePath, ".gsd.migrating")), - ".gsd.migrating should not be created in a worktree" + !existsSync(join(worktreePath, ".sf.migrating")), + ".sf.migrating should not be created in a worktree" ); }); @@ -92,9 +92,9 @@ describe("migrate-external worktree guard (#2970)", () => { run("git add README.md", mainBase); run('git commit -m "init"', mainBase); - // Create a .gsd directory with content - mkdirSync(join(mainBase, ".gsd"), { recursive: true }); - writeFileSync(join(mainBase, ".gsd", "PREFERENCES.md"), "# prefs\n", "utf-8"); + // Create a .sf directory with content + mkdirSync(join(mainBase, ".sf"), { recursive: true }); + writeFileSync(join(mainBase, ".sf", "PREFERENCES.md"), "# prefs\n", "utf-8"); const result = migrateToExternalState(mainBase); assert.equal(result.migrated, true, "should migrate on main repo"); diff --git a/src/resources/extensions/sf/tests/migrate-hierarchy.test.ts b/src/resources/extensions/sf/tests/migrate-hierarchy.test.ts index efe53f7b6..f2a191867 100644 --- a/src/resources/extensions/sf/tests/migrate-hierarchy.test.ts +++ b/src/resources/extensions/sf/tests/migrate-hierarchy.test.ts @@ -25,12 +25,12 @@ import assert from 'node:assert/strict'; function 
createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-migrate-hier-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } function writeFile(base: string, relativePath: string, content: string): void { - const full = join(base, '.gsd', relativePath); + const full = join(base, '.sf', relativePath); mkdirSync(join(full, '..'), { recursive: true }); writeFileSync(full, content); } @@ -281,7 +281,7 @@ test('migrate-hier: ghost milestone skipped', () => { // M001: real milestone writeFile(base, 'milestones/M001/M001-ROADMAP.md', ROADMAP_2_SLICES); // M002: ghost — just an empty dir (no CONTEXT, ROADMAP, or SUMMARY) - mkdirSync(join(base, '.gsd', 'milestones', 'M002'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M002'), { recursive: true }); openDatabase(':memory:'); const counts = migrateHierarchyToDb(base); diff --git a/src/resources/extensions/sf/tests/migrate-writer-integration.test.ts b/src/resources/extensions/sf/tests/migrate-writer-integration.test.ts index 7b3cf707f..71acb6070 100644 --- a/src/resources/extensions/sf/tests/migrate-writer-integration.test.ts +++ b/src/resources/extensions/sf/tests/migrate-writer-integration.test.ts @@ -1,5 +1,5 @@ // Migration writer integration test -// Writes a complete .gsd tree to a temp dir, verifies file existence, +// Writes a complete .sf tree to a temp dir, verifies file existence, // parses key files, and asserts deriveState() returns coherent state. // Also tests generatePreview() for correct counts. 
@@ -140,7 +140,7 @@ test('Scenario 1: Incomplete project — write, parse, deriveState', async () => // (a) Key files exist console.log(' --- file existence ---'); - const sf = join(base, '.gsd'); + const sf = join(base, '.sf'); const m = join(sf, 'milestones', 'M001'); assert.ok(existsSync(join(m, 'M001-ROADMAP.md')), 'incomplete: M001-ROADMAP.md exists'); @@ -259,10 +259,10 @@ test('Scenario 2: Fully complete project — deriveState phase', async () => { await writeSFDirectory(project, base); // Null research should NOT produce a file - const m = join(base, '.gsd', 'milestones', 'M001'); + const m = join(base, '.sf', 'milestones', 'M001'); assert.ok(!existsSync(join(m, 'M001-RESEARCH.md')), 'complete: M001-RESEARCH.md NOT written (null)'); // No REQUIREMENTS.md since empty requirements - assert.ok(!existsSync(join(base, '.gsd', 'REQUIREMENTS.md')), 'complete: REQUIREMENTS.md NOT written (empty)'); + assert.ok(!existsSync(join(base, '.sf', 'REQUIREMENTS.md')), 'complete: REQUIREMENTS.md NOT written (empty)'); // Completed milestone should have VALIDATION and SUMMARY from migration (#819) assert.ok(existsSync(join(m, 'M001-VALIDATION.md')), 'complete: M001-VALIDATION.md written for completed milestone'); assert.ok(existsSync(join(m, 'M001-SUMMARY.md')), 'complete: M001-SUMMARY.md written for completed milestone'); diff --git a/src/resources/extensions/sf/tests/milestone-status-tool.test.ts b/src/resources/extensions/sf/tests/milestone-status-tool.test.ts index b7a85ec40..f250804e2 100644 --- a/src/resources/extensions/sf/tests/milestone-status-tool.test.ts +++ b/src/resources/extensions/sf/tests/milestone-status-tool.test.ts @@ -26,7 +26,7 @@ function makeMockPi() { function makeTmpBase(): string { const base = join(tmpdir(), `sf-query-tool-test-${randomUUID()}`); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); return base; } @@ -35,7 +35,7 @@ function cleanup(base: string): void { } function openTestDb(base: 
string): void { - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); } async function executeToolInDir(tool: any, params: Record<string, unknown>, dir: string) { @@ -182,7 +182,7 @@ test("sf_milestone_status returns not-found for missing milestone", async () => // ─── DB unavailable ─────────────────────────────────────────────────────────── test("sf_milestone_status handles missing DB gracefully", async () => { - // Create a directory without .gsd/ to ensure ensureDbOpen has nothing to open + // Create a directory without .sf/ to ensure ensureDbOpen has nothing to open const base = join(tmpdir(), `sf-no-db-${randomUUID()}`); mkdirSync(base, { recursive: true }); closeDatabase(); // ensure no prior DB is open diff --git a/src/resources/extensions/sf/tests/milestone-transition-state-rebuild.test.ts b/src/resources/extensions/sf/tests/milestone-transition-state-rebuild.test.ts index dd57c5424..8d6f100d0 100644 --- a/src/resources/extensions/sf/tests/milestone-transition-state-rebuild.test.ts +++ b/src/resources/extensions/sf/tests/milestone-transition-state-rebuild.test.ts @@ -101,8 +101,8 @@ test("auto.ts buildLoopDeps wires rebuildState", () => { test("completed-units.json is cleared on milestone transition (functional)", () => { const tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-cu-reset-"))); try { - // Create .gsd directory with a populated completed-units.json - const sfDir = join(tempDir, ".gsd"); + // Create .sf directory with a populated completed-units.json + const sfDir = join(tempDir, ".sf"); mkdirSync(sfDir, { recursive: true }); const completedKeysPath = join(sfDir, "completed-units.json"); diff --git a/src/resources/extensions/sf/tests/none-mode-gates.test.ts b/src/resources/extensions/sf/tests/none-mode-gates.test.ts index fcd16862a..b99366fad 100644 --- a/src/resources/extensions/sf/tests/none-mode-gates.test.ts +++ b/src/resources/extensions/sf/tests/none-mode-gates.test.ts @@ -8,7 +8,7 @@ * Uses the 
writeRunnerPreferences pattern from doctor-git.test.ts: * PROJECT_PREFERENCES_PATH is a module-level constant frozen at import * time, so process.chdir() won't redirect preference loading. We write - * prefs to the runner's cwd .gsd/PREFERENCES.md and clean up in finally. + * prefs to the runner's cwd .sf/PREFERENCES.md and clean up in finally. */ import { mkdirSync, writeFileSync, rmSync, existsSync } from "node:fs"; @@ -24,10 +24,10 @@ import assert from 'node:assert/strict'; // --- Preferences helpers (same pattern as doctor-git.test.ts K001) --- -const RUNNER_PREFS_PATH = join(process.cwd(), ".gsd", "PREFERENCES.md"); +const RUNNER_PREFS_PATH = join(process.cwd(), ".sf", "PREFERENCES.md"); function writeRunnerPreferences(isolation: "none" | "worktree" | "branch"): void { - mkdirSync(join(process.cwd(), ".gsd"), { recursive: true }); + mkdirSync(join(process.cwd(), ".sf"), { recursive: true }); writeFileSync(RUNNER_PREFS_PATH, `---\ngit:\n isolation: "${isolation}"\n---\n`); } @@ -74,11 +74,11 @@ try { // Worktree isolation requires explicit opt-in — default is "none" so SF // works out of the box without PREFERENCES.md (#2480). // Skip if global prefs exist — they override the default and this test -// cannot control ~/.gsd/PREFERENCES.md. +// cannot control ~/.sf/PREFERENCES.md. 
test('shouldUseWorktreeIsolation returns false for no prefs (default: none)', () => { - const globalPrefsExist = existsSync(join(homedir(), ".gsd", "PREFERENCES.md")) - || existsSync(join(homedir(), ".gsd", "PREFERENCES.md")); + const globalPrefsExist = existsSync(join(homedir(), ".sf", "PREFERENCES.md")) + || existsSync(join(homedir(), ".sf", "PREFERENCES.md")); if (!globalPrefsExist) { try { removeRunnerPreferences(); // ensure no prefs file @@ -93,8 +93,8 @@ test('shouldUseWorktreeIsolation returns false for no prefs (default: none)', () // Test 5: getIsolationMode returns "none" when no PREFERENCES.md exists (#2480) test('getIsolationMode returns "none" with no prefs (default)', () => { - const globalPrefsExist = existsSync(join(homedir(), ".gsd", "PREFERENCES.md")) - || existsSync(join(homedir(), ".gsd", "PREFERENCES.md")); + const globalPrefsExist = existsSync(join(homedir(), ".sf", "PREFERENCES.md")) + || existsSync(join(homedir(), ".sf", "PREFERENCES.md")); if (!globalPrefsExist) { try { removeRunnerPreferences(); diff --git a/src/resources/extensions/sf/tests/notification-store.test.ts b/src/resources/extensions/sf/tests/notification-store.test.ts index 8ce86ee9e..d3619bf90 100644 --- a/src/resources/extensions/sf/tests/notification-store.test.ts +++ b/src/resources/extensions/sf/tests/notification-store.test.ts @@ -25,7 +25,7 @@ describe("notification-store", () => { beforeEach(() => { tmp = mkdtempSync(join(tmpdir(), "sf-notif-test-")); - mkdirSync(join(tmp, ".gsd"), { recursive: true }); + mkdirSync(join(tmp, ".sf"), { recursive: true }); _resetNotificationStore(); }); @@ -38,7 +38,7 @@ describe("notification-store", () => { initNotificationStore(tmp); appendNotification("test message", "info"); - const filePath = join(tmp, ".gsd", "notifications.jsonl"); + const filePath = join(tmp, ".sf", "notifications.jsonl"); assert.ok(existsSync(filePath)); const content = readFileSync(filePath, "utf-8").trim(); @@ -221,7 +221,7 @@ describe("notification-store", 
() => { test("reinit switches to new project path", () => { const tmp2 = mkdtempSync(join(tmpdir(), "sf-notif-test2-")); - mkdirSync(join(tmp2, ".gsd"), { recursive: true }); + mkdirSync(join(tmp2, ".sf"), { recursive: true }); initNotificationStore(tmp); appendNotification("project1", "info"); @@ -270,7 +270,7 @@ describe("notification-store", () => { appendNotification("msg1", "info"); // Simulate another process holding the lock - const lockPath = join(tmp, ".gsd", "notifications.lock"); + const lockPath = join(tmp, ".sf", "notifications.lock"); writeFileSync(lockPath, String(Date.now()), "utf-8"); // markAllRead should still work (best-effort) but not delete the foreign lock @@ -287,7 +287,7 @@ describe("notification-store", () => { appendNotification("msg1", "info"); // Simulate another process holding the lock - const lockPath = join(tmp, ".gsd", "notifications.lock"); + const lockPath = join(tmp, ".sf", "notifications.lock"); writeFileSync(lockPath, String(Date.now()), "utf-8"); // clearNotifications should still work but not delete the foreign lock diff --git a/src/resources/extensions/sf/tests/notification-widget.test.ts b/src/resources/extensions/sf/tests/notification-widget.test.ts index b2ed490de..5d7743331 100644 --- a/src/resources/extensions/sf/tests/notification-widget.test.ts +++ b/src/resources/extensions/sf/tests/notification-widget.test.ts @@ -10,7 +10,7 @@ import { buildNotificationWidgetLines } from "../notification-widget.js"; test("buildNotificationWidgetLines shows unread count with shortcut pair", () => { const tmp = mkdtempSync(join(tmpdir(), "sf-notification-widget-")); try { - mkdirSync(join(tmp, ".gsd"), { recursive: true }); + mkdirSync(join(tmp, ".sf"), { recursive: true }); _resetNotificationStore(); initNotificationStore(tmp); appendNotification("Need attention", "warning"); diff --git a/src/resources/extensions/sf/tests/notifications-handler.test.ts b/src/resources/extensions/sf/tests/notifications-handler.test.ts index 
65cba8ba4..05d8f749f 100644 --- a/src/resources/extensions/sf/tests/notifications-handler.test.ts +++ b/src/resources/extensions/sf/tests/notifications-handler.test.ts @@ -17,7 +17,7 @@ function makeTempDir(prefix: string): string { `sf-notifications-handler-test-${prefix}-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`, ); mkdirSync(dir, { recursive: true }); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); return dir; } diff --git a/src/resources/extensions/sf/tests/orphaned-worktree-audit.test.ts b/src/resources/extensions/sf/tests/orphaned-worktree-audit.test.ts index 62f8ebcda..be2220862 100644 --- a/src/resources/extensions/sf/tests/orphaned-worktree-audit.test.ts +++ b/src/resources/extensions/sf/tests/orphaned-worktree-audit.test.ts @@ -13,7 +13,7 @@ function run(cmd: string, cwd: string): string { return execSync(cmd, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim(); } -/** Create a temp git repo with .gsd structure and DB. */ +/** Create a temp git repo with .sf structure and DB. 
*/ function createRepo(): string { const dir = realpathSync(mkdtempSync(join(tmpdir(), "orphan-audit-test-"))); run("git init", dir); @@ -25,8 +25,8 @@ function createRepo(): string { run("git commit -m init", dir); run("git branch -M main", dir); - // Create .gsd structure on disk (not tracked in git) - mkdirSync(join(dir, ".gsd", "milestones", "M001"), { recursive: true }); + // Create .sf structure on disk (not tracked in git) + mkdirSync(join(dir, ".sf", "milestones", "M001"), { recursive: true }); return dir; } @@ -36,7 +36,7 @@ describe("auditOrphanedMilestoneBranches", () => { beforeEach(() => { dir = createRepo(); - openDatabase(join(dir, ".gsd", "sf.db")); + openDatabase(join(dir, ".sf", "sf.db")); }); afterEach(() => { @@ -127,7 +127,7 @@ describe("auditOrphanedMilestoneBranches", () => { insertMilestone({ id: "M001", title: "Test", status: "complete" }); // Create orphaned worktree directory - const wtDir = join(dir, ".gsd", "worktrees", "M001"); + const wtDir = join(dir, ".sf", "worktrees", "M001"); mkdirSync(wtDir, { recursive: true }); writeFileSync(join(wtDir, "leftover.txt"), "orphaned file\n"); diff --git a/src/resources/extensions/sf/tests/overrides.test.ts b/src/resources/extensions/sf/tests/overrides.test.ts index 0d7b03598..8dd53b31c 100644 --- a/src/resources/extensions/sf/tests/overrides.test.ts +++ b/src/resources/extensions/sf/tests/overrides.test.ts @@ -13,7 +13,7 @@ const tempDirs: string[] = []; function makeTempDir(prefix: string): string { const dir = mkdtempSync(join(tmpdir(), `sf-overrides-test-${prefix}-`)); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); tempDirs.push(dir); return dir; } @@ -54,7 +54,7 @@ describe('overrides', () => { test('appendOverride: creates new file', async () => { const tmp = makeTempDir("append-new"); await appendOverride(tmp, "Use Postgres", "M001/S01/T01"); - const content = readFileSync(join(tmp, ".gsd", "OVERRIDES.md"), "utf-8"); + const content 
= readFileSync(join(tmp, ".sf", "OVERRIDES.md"), "utf-8"); assert.ok(content.includes("# SF Overrides"), "has header"); assert.ok(content.includes("**Change:** Use Postgres"), "has change"); assert.ok(content.includes("**Scope:** active"), "has active scope"); @@ -65,7 +65,7 @@ describe('overrides', () => { const tmp = makeTempDir("append-existing"); await appendOverride(tmp, "First override", "M001/S01/T01"); await appendOverride(tmp, "Second override", "M001/S02/T02"); - const content = readFileSync(join(tmp, ".gsd", "OVERRIDES.md"), "utf-8"); + const content = readFileSync(join(tmp, ".sf", "OVERRIDES.md"), "utf-8"); assert.ok(content.includes("**Change:** First override"), "has first override"); assert.ok(content.includes("**Change:** Second override"), "has second override"); const parsed = parseOverrides(content); @@ -81,7 +81,7 @@ describe('overrides', () => { test('loadActiveOverrides: filters to active only', async () => { const tmp = makeTempDir("load-filter"); const content = `# SF Overrides\n\n---\n\n## Override: 2026-03-14T10:00:00.000Z\n\n**Change:** Resolved change\n**Scope:** resolved\n**Applied-at:** M001/S01/T01\n\n---\n\n## Override: 2026-03-14T11:00:00.000Z\n\n**Change:** Active change\n**Scope:** active\n**Applied-at:** M001/S02/T01\n\n---\n`; - writeFileSync(join(tmp, ".gsd", "OVERRIDES.md"), content, "utf-8"); + writeFileSync(join(tmp, ".sf", "OVERRIDES.md"), content, "utf-8"); const result = await loadActiveOverrides(tmp); assert.deepStrictEqual(result.length, 1, "only one active override"); assert.deepStrictEqual(result[0].change, "Active change", "correct active change"); @@ -110,7 +110,7 @@ describe('overrides', () => { await resolveAllOverrides(tmp); active = await loadActiveOverrides(tmp); assert.deepStrictEqual(active.length, 0, "no active after resolve"); - const content = readFileSync(join(tmp, ".gsd", "OVERRIDES.md"), "utf-8"); + const content = readFileSync(join(tmp, ".sf", "OVERRIDES.md"), "utf-8"); const allOverrides = 
parseOverrides(content); assert.deepStrictEqual(allOverrides.length, 2, "still two overrides total"); assert.ok(allOverrides.every(o => o.scope === "resolved"), "all resolved"); diff --git a/src/resources/extensions/sf/tests/parallel-budget-atomicity.test.ts b/src/resources/extensions/sf/tests/parallel-budget-atomicity.test.ts index d5f789ee9..b6c49a518 100644 --- a/src/resources/extensions/sf/tests/parallel-budget-atomicity.test.ts +++ b/src/resources/extensions/sf/tests/parallel-budget-atomicity.test.ts @@ -42,7 +42,7 @@ import type { SFPreferences } from "../preferences.ts"; function makeTmpBase(): string { const base = join(tmpdir(), `sf-budget-test-${randomUUID()}`); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); return base; } @@ -78,7 +78,7 @@ function writeWorkerCost( cost, lastHeartbeat: Date.now(), startedAt: Date.now() - 60000, - worktreePath: join(base, ".gsd", "worktrees", milestoneId.toLowerCase()), + worktreePath: join(base, ".sf", "worktrees", milestoneId.toLowerCase()), }); } @@ -314,7 +314,7 @@ test("budget — refreshWorkerStatuses updates worker state from disk", async () cost: 2.5, lastHeartbeat: Date.now(), startedAt: Date.now() - 120000, - worktreePath: join(base, ".gsd", "worktrees", "m001"), + worktreePath: join(base, ".sf", "worktrees", "m001"), }); refreshWorkerStatuses(base); diff --git a/src/resources/extensions/sf/tests/parallel-commit-scope.test.ts b/src/resources/extensions/sf/tests/parallel-commit-scope.test.ts index 610e5ec7b..0f828c511 100644 --- a/src/resources/extensions/sf/tests/parallel-commit-scope.test.ts +++ b/src/resources/extensions/sf/tests/parallel-commit-scope.test.ts @@ -2,11 +2,11 @@ * parallel-commit-scope.test.ts — Regression test for #1991. * * Parallel workers must only commit files belonging to their locked milestone. 
- * When SF_MILESTONE_LOCK is set, smartStage() must exclude .gsd/milestones/<M>/ + * When SF_MILESTONE_LOCK is set, smartStage() must exclude .sf/milestones/<M>/ * directories for milestones other than the locked one. * * Without the fix, a worker for M033 can stage and commit fabricated artifacts - * under .gsd/milestones/M032/, causing cross-milestone pollution. + * under .sf/milestones/M032/, causing cross-milestone pollution. */ import { describe, test, beforeEach, afterEach } from "node:test"; @@ -77,10 +77,10 @@ describe("parallel commit scope (#1991)", () => { process.env.SF_PARALLEL_WORKER = "1"; // Create dirty files in BOTH milestones (simulates cross-milestone pollution) - createFile(repo, ".gsd/milestones/M032/M032-SUMMARY.md", "# M032 Summary\nFabricated by M033 worker"); - createFile(repo, ".gsd/milestones/M032/M032-VALIDATION.md", "# M032 Validation\nFabricated"); - createFile(repo, ".gsd/milestones/M032/slices/S01/S01-SUMMARY.md", "Fabricated S01 summary"); - createFile(repo, ".gsd/milestones/M033/slices/S01/tasks/T01-SUMMARY.md", "Legit T01 summary"); + createFile(repo, ".sf/milestones/M032/M032-SUMMARY.md", "# M032 Summary\nFabricated by M033 worker"); + createFile(repo, ".sf/milestones/M032/M032-VALIDATION.md", "# M032 Validation\nFabricated"); + createFile(repo, ".sf/milestones/M032/slices/S01/S01-SUMMARY.md", "Fabricated S01 summary"); + createFile(repo, ".sf/milestones/M033/slices/S01/tasks/T01-SUMMARY.md", "Legit T01 summary"); createFile(repo, "src/feature.ts", "export const x = 1;"); const svc = new GitServiceImpl(repo); @@ -91,10 +91,10 @@ describe("parallel commit scope (#1991)", () => { // Source files and own milestone files SHOULD be committed assert.ok(committed.includes("src/feature.ts"), "source files are committed"); - assert.ok(committed.includes(".gsd/milestones/M033/"), "own milestone files are committed"); + assert.ok(committed.includes(".sf/milestones/M033/"), "own milestone files are committed"); // Other milestone files 
MUST NOT be committed - assert.ok(!committed.includes(".gsd/milestones/M032/"), + assert.ok(!committed.includes(".sf/milestones/M032/"), "M032 files must NOT be committed by M033 worker — cross-milestone pollution (#1991)"); // Verify M032 files are still dirty (unstaged) in the working tree @@ -111,8 +111,8 @@ describe("parallel commit scope (#1991)", () => { delete process.env.SF_MILESTONE_LOCK; delete process.env.SF_PARALLEL_WORKER; - createFile(repo, ".gsd/milestones/M032/M032-SUMMARY.md", "# M032 Summary"); - createFile(repo, ".gsd/milestones/M033/slices/S01/tasks/T01-SUMMARY.md", "T01 summary"); + createFile(repo, ".sf/milestones/M032/M032-SUMMARY.md", "# M032 Summary"); + createFile(repo, ".sf/milestones/M033/slices/S01/tasks/T01-SUMMARY.md", "T01 summary"); createFile(repo, "src/feature.ts", "export const x = 1;"); const svc = new GitServiceImpl(repo); @@ -122,8 +122,8 @@ describe("parallel commit scope (#1991)", () => { const committed = gitRun(["show", "--name-only", "HEAD"], repo); // In solo mode, ALL milestone files should be committed - assert.ok(committed.includes(".gsd/milestones/M032/"), "M032 files committed in solo mode"); - assert.ok(committed.includes(".gsd/milestones/M033/"), "M033 files committed in solo mode"); + assert.ok(committed.includes(".sf/milestones/M032/"), "M032 files committed in solo mode"); + assert.ok(committed.includes(".sf/milestones/M033/"), "M033 files committed in solo mode"); assert.ok(committed.includes("src/feature.ts"), "source files committed in solo mode"); rmSync(repo, { recursive: true, force: true }); @@ -136,10 +136,10 @@ describe("parallel commit scope (#1991)", () => { process.env.SF_PARALLEL_WORKER = "1"; // Create files across many milestones - createFile(repo, ".gsd/milestones/M032/M032-SUMMARY.md", "foreign"); - createFile(repo, ".gsd/milestones/M033/M033-SUMMARY.md", "foreign"); - createFile(repo, ".gsd/milestones/M034/M034-SUMMARY.md", "foreign"); - createFile(repo, 
".gsd/milestones/M035/slices/S01/tasks/T01-SUMMARY.md", "own work"); + createFile(repo, ".sf/milestones/M032/M032-SUMMARY.md", "foreign"); + createFile(repo, ".sf/milestones/M033/M033-SUMMARY.md", "foreign"); + createFile(repo, ".sf/milestones/M034/M034-SUMMARY.md", "foreign"); + createFile(repo, ".sf/milestones/M035/slices/S01/tasks/T01-SUMMARY.md", "own work"); createFile(repo, "src/app.ts", "export const app = {};"); const svc = new GitServiceImpl(repo); @@ -148,11 +148,11 @@ describe("parallel commit scope (#1991)", () => { const committed = gitRun(["show", "--name-only", "HEAD"], repo); - assert.ok(committed.includes(".gsd/milestones/M035/"), "own milestone committed"); + assert.ok(committed.includes(".sf/milestones/M035/"), "own milestone committed"); assert.ok(committed.includes("src/app.ts"), "source files committed"); - assert.ok(!committed.includes(".gsd/milestones/M032/"), "M032 excluded"); - assert.ok(!committed.includes(".gsd/milestones/M033/"), "M033 excluded"); - assert.ok(!committed.includes(".gsd/milestones/M034/"), "M034 excluded"); + assert.ok(!committed.includes(".sf/milestones/M032/"), "M032 excluded"); + assert.ok(!committed.includes(".sf/milestones/M033/"), "M033 excluded"); + assert.ok(!committed.includes(".sf/milestones/M034/"), "M034 excluded"); rmSync(repo, { recursive: true, force: true }); }); diff --git a/src/resources/extensions/sf/tests/parallel-crash-recovery.test.ts b/src/resources/extensions/sf/tests/parallel-crash-recovery.test.ts index 3007da060..308fee87f 100644 --- a/src/resources/extensions/sf/tests/parallel-crash-recovery.test.ts +++ b/src/resources/extensions/sf/tests/parallel-crash-recovery.test.ts @@ -30,12 +30,12 @@ import { writeSessionStatus, readAllSessionStatuses, removeSessionStatus } from function makeTempDir(): string { const dir = mkdtempSync(join(tmpdir(), "sf-crash-recovery-")); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); return dir; } function 
stateFilePath(basePath: string): string { - return join(basePath, ".gsd", "orchestrator.json"); + return join(basePath, ".sf", "orchestrator.json"); } function writeStateFile(basePath: string, state: PersistedState): void { @@ -205,7 +205,7 @@ test('Test 6: orphan detection finds stale sessions', () => { const basePath = makeTempDir(); try { // Write a session status with a dead PID - mkdirSync(join(basePath, ".gsd", "parallel"), { recursive: true }); + mkdirSync(join(basePath, ".sf", "parallel"), { recursive: true }); writeSessionStatus(basePath, { milestoneId: "M001", pid: 99999999, diff --git a/src/resources/extensions/sf/tests/parallel-eligibility-ghost.test.ts b/src/resources/extensions/sf/tests/parallel-eligibility-ghost.test.ts index 570f193bc..2c98f318b 100644 --- a/src/resources/extensions/sf/tests/parallel-eligibility-ghost.test.ts +++ b/src/resources/extensions/sf/tests/parallel-eligibility-ghost.test.ts @@ -26,7 +26,7 @@ import { function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-parallel-elig-")); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); return base; } @@ -36,13 +36,13 @@ function writeMilestoneFile( filename: string, content: string, ): void { - const filePath = join(base, ".gsd", "milestones", milestoneId, filename); + const filePath = join(base, ".sf", "milestones", milestoneId, filename); mkdirSync(join(filePath, ".."), { recursive: true }); writeFileSync(filePath, content); } function makeMilestoneDir(base: string, milestoneId: string): void { - mkdirSync(join(base, ".gsd", "milestones", milestoneId), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", milestoneId), { recursive: true }); } function cleanup(base: string): void { @@ -76,7 +76,7 @@ describe("parallel-eligibility: ghost milestone ineligibility (#2501)", () => { // Create ghost milestone M017 — directory with only slices/, no 
CONTEXT/ROADMAP/SUMMARY makeMilestoneDir(base, "M017"); - mkdirSync(join(base, ".gsd", "milestones", "M017", "slices"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M017", "slices"), { recursive: true }); invalidateStateCache(); const result = await analyzeParallelEligibility(base); diff --git a/src/resources/extensions/sf/tests/parallel-orchestration.test.ts b/src/resources/extensions/sf/tests/parallel-orchestration.test.ts index e72fe2e8f..062a3d2ca 100644 --- a/src/resources/extensions/sf/tests/parallel-orchestration.test.ts +++ b/src/resources/extensions/sf/tests/parallel-orchestration.test.ts @@ -61,7 +61,7 @@ import type { WorkerInfo } from "../parallel-orchestrator.js"; function makeTmpBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-parallel-test-")); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); return base; } @@ -304,7 +304,7 @@ describe("parallel-orchestrator: lifecycle", () => { startedAt: Date.now(), configSnapshot: { max_workers: 2 }, }; - writeFileSync(join(base, ".gsd", "orchestrator.json"), JSON.stringify(persisted, null, 2), "utf-8"); + writeFileSync(join(base, ".sf", "orchestrator.json"), JSON.stringify(persisted, null, 2), "utf-8"); const workers = getWorkerStatuses(base); assert.equal(workers.length, 1); assert.equal(workers[0].milestoneId, "M001"); diff --git a/src/resources/extensions/sf/tests/parallel-orchestrator-zombie-cleanup.test.ts b/src/resources/extensions/sf/tests/parallel-orchestrator-zombie-cleanup.test.ts index 09a228a3c..4692d396d 100644 --- a/src/resources/extensions/sf/tests/parallel-orchestrator-zombie-cleanup.test.ts +++ b/src/resources/extensions/sf/tests/parallel-orchestrator-zombie-cleanup.test.ts @@ -27,7 +27,7 @@ import { function makeTmpBase(): string { const base = join(tmpdir(), `sf-test-zombie-${randomUUID()}`); - mkdirSync(join(base, ".gsd", "parallel"), { recursive: true }); + mkdirSync(join(base, ".sf", "parallel"), { 
recursive: true }); return base; } @@ -39,18 +39,18 @@ function cleanup(base: string): void { /** Write a fake orchestrator.json to simulate persisted state. */ function writePersistedState(basePath: string, data: PersistedState): void { - const dest = join(basePath, ".gsd", "orchestrator.json"); + const dest = join(basePath, ".sf", "orchestrator.json"); writeFileSync(dest, JSON.stringify(data, null, 2), "utf-8"); } -/** Write a fake session status file to .gsd/parallel/<milestoneId>.status.json */ +/** Write a fake session status file to .sf/parallel/<milestoneId>.status.json */ function writeSessionStatusFile( basePath: string, milestoneId: string, state: "running" | "paused" | "stopped" | "error", pid: number, ): void { - const dest = join(basePath, ".gsd", "parallel", `${milestoneId}.status.json`); + const dest = join(basePath, ".sf", "parallel", `${milestoneId}.status.json`); writeFileSync( dest, JSON.stringify({ @@ -272,6 +272,6 @@ test("#2736: restoreRuntimeState clears stale state when all workers are stopped assert.equal(getOrchestratorState(), null, "state should be null"); // Verify the state file was removed - const stateFile = join(base, ".gsd", "orchestrator.json"); + const stateFile = join(base, ".sf", "orchestrator.json"); assert.equal(existsSync(stateFile), false, "orchestrator.json should be removed"); }); diff --git a/src/resources/extensions/sf/tests/parallel-research-dispatch.test.ts b/src/resources/extensions/sf/tests/parallel-research-dispatch.test.ts index 37d7bb00e..a803b87c4 100644 --- a/src/resources/extensions/sf/tests/parallel-research-dispatch.test.ts +++ b/src/resources/extensions/sf/tests/parallel-research-dispatch.test.ts @@ -25,7 +25,7 @@ const tmpDirs: string[] = []; function makeTmpProject(): string { const base = mkdtempSync(join(tmpdir(), "parallel-research-")); tmpDirs.push(base); - const milestoneDir = join(base, ".gsd", "milestones", "M001"); + const milestoneDir = join(base, ".sf", "milestones", "M001"); 
mkdirSync(milestoneDir, { recursive: true }); writeFileSync( join(milestoneDir, "M001-ROADMAP.md"), diff --git a/src/resources/extensions/sf/tests/parallel-worker-lock-contention.test.ts b/src/resources/extensions/sf/tests/parallel-worker-lock-contention.test.ts index ebc888d79..7706ceec5 100644 --- a/src/resources/extensions/sf/tests/parallel-worker-lock-contention.test.ts +++ b/src/resources/extensions/sf/tests/parallel-worker-lock-contention.test.ts @@ -77,21 +77,21 @@ describe("parallel-worker-lock-contention (#2184)", () => { // ─── Bug 1b: effectiveLockTarget returns per-milestone directory ───────── test("Bug 1b: effectiveLockTarget returns sfDir without parallel env", () => { delete process.env.SF_PARALLEL_WORKER; - const sfDir = "/tmp/test/.gsd"; + const sfDir = "/tmp/test/.sf"; assert.equal(effectiveLockTarget(sfDir), sfDir); }); test("Bug 1b: effectiveLockTarget returns parallel/<MID> in parallel mode", () => { process.env.SF_PARALLEL_WORKER = "1"; process.env.SF_MILESTONE_LOCK = "M003"; - const sfDir = "/tmp/test/.gsd"; + const sfDir = "/tmp/test/.sf"; assert.equal(effectiveLockTarget(sfDir), join(sfDir, "parallel", "M003")); }); // ─── Bug 1c: Two parallel workers acquire independent locks ────────────── test("Bug 1c: parallel workers use per-milestone lock files, not shared auto.lock", () => { const base = mkdtempSync(join(tmpdir(), "sf-parallel-lock-")); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); try { // Simulate worker for M001 @@ -128,7 +128,7 @@ describe("parallel-worker-lock-contention (#2184)", () => { // ─── Bug 1d: crash-recovery uses per-milestone lock file ───────────────── test("Bug 1d: crash-recovery writeLock/readCrashLock uses per-milestone lock in parallel mode", () => { const base = mkdtempSync(join(tmpdir(), "sf-parallel-crash-")); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); try { 
process.env.SF_PARALLEL_WORKER = "1"; @@ -154,7 +154,7 @@ describe("parallel-worker-lock-contention (#2184)", () => { }); // ─── Bug 3: syncProjectRootToWorktree skips same-path symlinks ─────────── - test("Bug 3: syncProjectRootToWorktree skips when .gsd resolves to same path (symlink)", () => { + test("Bug 3: syncProjectRootToWorktree skips when .sf resolves to same path (symlink)", () => { const base = mkdtempSync(join(tmpdir(), "sf-symlink-sync-")); const externalGsd = join(base, "external-sf"); const projectRoot = join(base, "project"); @@ -171,9 +171,9 @@ describe("parallel-worker-lock-contention (#2184)", () => { "# Roadmap", ); - // Symlink both project and worktree .gsd to the same external directory - symlinkSync(externalGsd, join(projectRoot, ".gsd")); - symlinkSync(externalGsd, join(worktreePath, ".gsd")); + // Symlink both project and worktree .sf to the same external directory + symlinkSync(externalGsd, join(projectRoot, ".sf")); + symlinkSync(externalGsd, join(worktreePath, ".sf")); try { // This should NOT throw ERR_FS_CP_EINVAL — it should skip silently @@ -199,16 +199,16 @@ describe("parallel-worker-lock-contention (#2184)", () => { }); // ─── Bug 3b: sync still works when paths are different ─────────────────── - test("Bug 3b: syncProjectRootToWorktree copies when .gsd paths are different", () => { + test("Bug 3b: syncProjectRootToWorktree copies when .sf paths are different", () => { const base = mkdtempSync(join(tmpdir(), "sf-diff-sync-")); const projectRoot = join(base, "project"); const worktreePath = join(base, "worktree"); - mkdirSync(join(projectRoot, ".gsd", "milestones", "M001"), { recursive: true }); - mkdirSync(join(worktreePath, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(projectRoot, ".sf", "milestones", "M001"), { recursive: true }); + mkdirSync(join(worktreePath, ".sf", "milestones"), { recursive: true }); writeFileSync( - join(projectRoot, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), + join(projectRoot, 
".sf", "milestones", "M001", "M001-ROADMAP.md"), "# Roadmap content", ); @@ -216,7 +216,7 @@ describe("parallel-worker-lock-contention (#2184)", () => { syncProjectRootToWorktree(projectRoot, worktreePath, "M001"); // The roadmap should have been copied - const copied = join(worktreePath, ".gsd", "milestones", "M001", "M001-ROADMAP.md"); + const copied = join(worktreePath, ".sf", "milestones", "M001", "M001-ROADMAP.md"); assert.ok(existsSync(copied), "milestone roadmap copied to worktree"); assert.equal(readFileSync(copied, "utf-8"), "# Roadmap content"); } finally { diff --git a/src/resources/extensions/sf/tests/parallel-worker-monitoring.test.ts b/src/resources/extensions/sf/tests/parallel-worker-monitoring.test.ts index a7de63d6c..6c85a82ae 100644 --- a/src/resources/extensions/sf/tests/parallel-worker-monitoring.test.ts +++ b/src/resources/extensions/sf/tests/parallel-worker-monitoring.test.ts @@ -145,8 +145,8 @@ describe("parallel-worker-monitoring", () => { it("refreshWorkerStatuses restores persisted workers from disk", () => { const base = mkdtempSync(join(tmpdir(), "sf-parallel-monitoring-")); try { - mkdirSync(join(base, ".gsd"), { recursive: true }); - writeFileSync(join(base, ".gsd", "orchestrator.json"), JSON.stringify({ + mkdirSync(join(base, ".sf"), { recursive: true }); + writeFileSync(join(base, ".sf", "orchestrator.json"), JSON.stringify({ active: true, workers: [ { @@ -176,8 +176,8 @@ describe("parallel-worker-monitoring", () => { it("refreshWorkerStatuses restores persisted workers from live session status files", () => { const base = mkdtempSync(join(tmpdir(), "sf-parallel-stderr-")); try { - mkdirSync(join(base, ".gsd", "parallel"), { recursive: true }); - writeFileSync(join(base, ".gsd", "parallel", "M009.status.json"), JSON.stringify({ + mkdirSync(join(base, ".sf", "parallel"), { recursive: true }); + writeFileSync(join(base, ".sf", "parallel", "M009.status.json"), JSON.stringify({ milestoneId: "M009", pid: process.pid, state: "running", 
diff --git a/src/resources/extensions/sf/tests/park-db-sync.test.ts b/src/resources/extensions/sf/tests/park-db-sync.test.ts index e5b270a3a..93ba84ba0 100644 --- a/src/resources/extensions/sf/tests/park-db-sync.test.ts +++ b/src/resources/extensions/sf/tests/park-db-sync.test.ts @@ -21,9 +21,9 @@ import { function createBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-park-db-")); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-CONTEXT.md"), + join(base, ".sf", "milestones", "M001", "M001-CONTEXT.md"), "# M001\n\nContext.", ); return base; diff --git a/src/resources/extensions/sf/tests/park-edge-cases.test.ts b/src/resources/extensions/sf/tests/park-edge-cases.test.ts index 65ea6d97e..49825b796 100644 --- a/src/resources/extensions/sf/tests/park-edge-cases.test.ts +++ b/src/resources/extensions/sf/tests/park-edge-cases.test.ts @@ -25,12 +25,12 @@ import { parkMilestone, unparkMilestone, discardMilestone } from '../milestone-a function createFixture(): string { const b = mkdtempSync(join(tmpdir(), 'sf-edge-')); - mkdirSync(join(b, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(b, '.sf', 'milestones'), { recursive: true }); return b; } function createM(b: string, mid: string, opts?: { roadmap?: boolean; summary?: boolean; dependsOn?: string[] }): void { - const d = join(b, '.gsd', 'milestones', mid); + const d = join(b, '.sf', 'milestones', mid); mkdirSync(d, { recursive: true }); if (opts?.dependsOn) { writeFileSync(join(d, `${mid}-CONTEXT.md`), `---\ndepends_on: [${opts.dependsOn.join(', ')}]\n---\n# ${mid}`, 'utf-8'); @@ -154,7 +154,7 @@ test('EDGE 6: Queue order after discard', async () => { createM(b, 'M002', { roadmap: true }); createM(b, 'M003', { roadmap: true }); writeFileSync( - join(b, '.gsd', 'QUEUE-ORDER.json'), + join(b, '.sf', 'QUEUE-ORDER.json'), 
JSON.stringify({ order: ['M003', 'M001', 'M002'], updatedAt: new Date().toISOString() }), 'utf-8', ); @@ -170,7 +170,7 @@ test('EDGE 6: Queue order after discard', async () => { assert.deepStrictEqual(s.activeMilestone?.id, 'M001', 'M001 active after M003 discarded'); // Verify queue order file was updated - const order = JSON.parse(readFileSync(join(b, '.gsd', 'QUEUE-ORDER.json'), 'utf-8')); + const order = JSON.parse(readFileSync(join(b, '.sf', 'QUEUE-ORDER.json'), 'utf-8')); assert.ok(!order.order.includes('M003'), 'M003 removed from QUEUE-ORDER.json'); } finally { cleanup(b); diff --git a/src/resources/extensions/sf/tests/park-milestone.test.ts b/src/resources/extensions/sf/tests/park-milestone.test.ts index bd852d06e..131ab6888 100644 --- a/src/resources/extensions/sf/tests/park-milestone.test.ts +++ b/src/resources/extensions/sf/tests/park-milestone.test.ts @@ -26,12 +26,12 @@ import { createWorktree } from "../worktree-manager.ts"; function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-park-test-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } function createMilestone(base: string, mid: string, opts?: { withRoadmap?: boolean; withSummary?: boolean; dependsOn?: string[] }): void { - const mDir = join(base, '.gsd', 'milestones', mid); + const mDir = join(base, '.sf', 'milestones', mid); mkdirSync(mDir, { recursive: true }); if (opts?.dependsOn) { @@ -86,7 +86,7 @@ function run(cmd: string, cwd: string): string { function initGitRepo(base: string): void { writeFileSync(join(base, "README.md"), "# test\n", "utf-8"); - writeFileSync(join(base, ".gsd", "STATE.md"), "# State\n", "utf-8"); + writeFileSync(join(base, ".sf", "STATE.md"), "# State\n", "utf-8"); run("git init", base); run("git config user.email test@test.com", base); run("git config user.name Test", base); @@ -289,7 +289,7 @@ test('discardMilestone removes directory', 
async () => { createMilestone(base, 'M001', { withRoadmap: true }); clearCaches(); - const mDir = join(base, '.gsd', 'milestones', 'M001'); + const mDir = join(base, '.sf', 'milestones', 'M001'); assert.ok(existsSync(mDir), 'milestone dir exists before discard'); const success = discardMilestone(base, 'M001'); @@ -312,7 +312,7 @@ test('discardMilestone updates queue order', () => { clearCaches(); // Write a queue order that includes M001 - const queuePath = join(base, '.gsd', 'QUEUE-ORDER.json'); + const queuePath = join(base, '.sf', 'QUEUE-ORDER.json'); writeFileSync(queuePath, JSON.stringify({ order: ['M001', 'M002'], updatedAt: new Date().toISOString() }), 'utf-8'); discardMilestone(base, 'M001'); @@ -333,7 +333,7 @@ test('discardMilestone removes DB rows, worktree, and milestone branch', () => { initGitRepo(base); clearCaches(); - assert.ok(openDatabase(join(base, '.gsd', 'sf.db')), 'database opens'); + assert.ok(openDatabase(join(base, '.sf', 'sf.db')), 'database opens'); insertMilestone({ id: 'M001', title: 'Discard me', status: 'active' }); insertSlice({ milestoneId: 'M001', id: 'S01', title: 'Only slice', status: 'pending' }); insertTask({ milestoneId: 'M001', sliceId: 'S01', id: 'T01', title: 'Only task', status: 'pending' }); diff --git a/src/resources/extensions/sf/tests/phase-anchor.test.ts b/src/resources/extensions/sf/tests/phase-anchor.test.ts index 5c7db3d9f..1332ac414 100644 --- a/src/resources/extensions/sf/tests/phase-anchor.test.ts +++ b/src/resources/extensions/sf/tests/phase-anchor.test.ts @@ -9,7 +9,7 @@ import type { PhaseAnchor } from "../phase-anchor.js"; function makeTempBase(): string { const tmp = mkdtempSync(join(tmpdir(), "sf-anchor-test-")); - mkdirSync(join(tmp, ".gsd", "milestones", "M001", "anchors"), { recursive: true }); + mkdirSync(join(tmp, ".sf", "milestones", "M001", "anchors"), { recursive: true }); return tmp; } @@ -26,7 +26,7 @@ test("writePhaseAnchor creates anchor file in correct location", () => { nextSteps: ["Plan the 
implementation slices"], }; writePhaseAnchor(base, "M001", anchor); - assert.ok(existsSync(join(base, ".gsd", "milestones", "M001", "anchors", "discuss.json"))); + assert.ok(existsSync(join(base, ".sf", "milestones", "M001", "anchors", "discuss.json"))); } finally { rmSync(base, { recursive: true, force: true }); } diff --git a/src/resources/extensions/sf/tests/plan-milestone-artifact-verification.test.ts b/src/resources/extensions/sf/tests/plan-milestone-artifact-verification.test.ts index 8d32d3b47..229c84846 100644 --- a/src/resources/extensions/sf/tests/plan-milestone-artifact-verification.test.ts +++ b/src/resources/extensions/sf/tests/plan-milestone-artifact-verification.test.ts @@ -8,12 +8,12 @@ import { verifyExpectedArtifact } from "../auto-recovery.ts"; function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-plan-milestone-artifact-")); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); return base; } function writeRoadmap(base: string, milestoneId: string, content: string): void { - const milestoneDir = join(base, ".gsd", "milestones", milestoneId); + const milestoneDir = join(base, ".sf", "milestones", milestoneId); mkdirSync(milestoneDir, { recursive: true }); writeFileSync(join(milestoneDir, `${milestoneId}-ROADMAP.md`), content, "utf-8"); } diff --git a/src/resources/extensions/sf/tests/plan-milestone-queue-context.test.ts b/src/resources/extensions/sf/tests/plan-milestone-queue-context.test.ts index 7100b52b2..abccd935e 100644 --- a/src/resources/extensions/sf/tests/plan-milestone-queue-context.test.ts +++ b/src/resources/extensions/sf/tests/plan-milestone-queue-context.test.ts @@ -8,7 +8,7 @@ import { buildPlanMilestonePrompt } from "../auto-prompts.ts"; function createBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-plan-queue-")); - mkdirSync(join(base, ".gsd", "milestones", "M010"), { recursive: true }); + 
mkdirSync(join(base, ".sf", "milestones", "M010"), { recursive: true }); return base; } @@ -21,7 +21,7 @@ describe("plan-milestone queue context", () => { const base = createBase(); try { writeFileSync( - join(base, ".gsd", "QUEUE.md"), + join(base, ".sf", "QUEUE.md"), [ "# Queue", "", @@ -38,7 +38,7 @@ describe("plan-milestone queue context", () => { const prompt = await buildPlanMilestonePrompt("M010", "M010", base); - assert.match(prompt, /Source: `\.gsd\/QUEUE\.md`/); + assert.match(prompt, /Source: `\.sf\/QUEUE\.md`/); assert.match(prompt, /Analytics Dashboard — Interactivity, Intelligence & Demo Readiness/); assert.match(prompt, /Ship a polished analytics dashboard/); } finally { diff --git a/src/resources/extensions/sf/tests/plan-milestone.test.ts b/src/resources/extensions/sf/tests/plan-milestone.test.ts index 84a779458..c7fe52782 100644 --- a/src/resources/extensions/sf/tests/plan-milestone.test.ts +++ b/src/resources/extensions/sf/tests/plan-milestone.test.ts @@ -10,7 +10,7 @@ import { parseRoadmap } from '../parsers-legacy.ts'; function makeTmpBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-plan-milestone-')); - mkdirSync(join(base, '.gsd', 'milestones', 'M001'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M001'), { recursive: true }); return base; } @@ -69,7 +69,7 @@ function validParams() { test('handlePlanMilestone writes milestone and slice planning state and renders roadmap', async () => { const base = makeTmpBase(); - const dbPath = join(base, '.gsd', 'sf.db'); + const dbPath = join(base, '.sf', 'sf.db'); openDatabase(dbPath); try { @@ -88,7 +88,7 @@ test('handlePlanMilestone writes milestone and slice planning state and renders assert.equal(slices[0]?.goal, 'Wire the handler.'); assert.equal(slices[1]?.depends[0], 'S01'); - const roadmapPath = join(base, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md'); + const roadmapPath = join(base, '.sf', 'milestones', 'M001', 'M001-ROADMAP.md'); 
assert.ok(existsSync(roadmapPath), 'roadmap should be rendered to disk'); const roadmap = readFileSync(roadmapPath, 'utf-8'); assert.match(roadmap, /# M001: DB-backed planning/); @@ -104,7 +104,7 @@ test('handlePlanMilestone writes milestone and slice planning state and renders test('handlePlanMilestone rejects invalid payloads', async () => { const base = makeTmpBase(); - const dbPath = join(base, '.gsd', 'sf.db'); + const dbPath = join(base, '.sf', 'sf.db'); openDatabase(dbPath); try { @@ -119,18 +119,18 @@ test('handlePlanMilestone rejects invalid payloads', async () => { test('handlePlanMilestone surfaces render failures and does not clear parse-visible state on failure', async () => { const base = makeTmpBase(); - const dbPath = join(base, '.gsd', 'sf.db'); + const dbPath = join(base, '.sf', 'sf.db'); openDatabase(dbPath); try { - const fallbackRoadmapPath = join(base, '.gsd', 'milestones', 'MISSING', 'MISSING-ROADMAP.md'); + const fallbackRoadmapPath = join(base, '.sf', 'milestones', 'MISSING', 'MISSING-ROADMAP.md'); mkdirSync(fallbackRoadmapPath, { recursive: true }); const result = await handlePlanMilestone({ ...validParams(), milestoneId: 'MISSING' }, base); assert.ok('error' in result); assert.match(result.error, /render failed:/); - const existingRoadmapPath = join(base, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md'); + const existingRoadmapPath = join(base, '.sf', 'milestones', 'M001', 'M001-ROADMAP.md'); writeFileSync(existingRoadmapPath, '# M001: Cached roadmap\n\n**Vision:** old value\n\n## Slices\n\n', 'utf-8'); const cachedAfter = parseRoadmap(readFileSync(existingRoadmapPath, 'utf-8')); assert.equal(cachedAfter.vision, 'old value'); @@ -141,11 +141,11 @@ test('handlePlanMilestone surfaces render failures and does not clear parse-visi test('handlePlanMilestone clears parse-visible roadmap state after successful render', async () => { const base = makeTmpBase(); - const dbPath = join(base, '.gsd', 'sf.db'); + const dbPath = join(base, '.sf', 
'sf.db'); openDatabase(dbPath); try { - const roadmapPath = join(base, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md'); + const roadmapPath = join(base, '.sf', 'milestones', 'M001', 'M001-ROADMAP.md'); writeFileSync(roadmapPath, '# M001: Cached roadmap\n\n**Vision:** old value\n\n## Slices\n\n', 'utf-8'); const cachedBefore = parseRoadmap(readFileSync(roadmapPath, 'utf-8')); @@ -165,7 +165,7 @@ test('handlePlanMilestone clears parse-visible roadmap state after successful re test('handlePlanMilestone reruns idempotently and updates existing planning state', async () => { const base = makeTmpBase(); - const dbPath = join(base, '.gsd', 'sf.db'); + const dbPath = join(base, '.sf', 'sf.db'); openDatabase(dbPath); try { @@ -200,7 +200,7 @@ test('handlePlanMilestone reruns idempotently and updates existing planning stat test('handlePlanMilestone preserves completed slice status on re-plan (#2558)', async () => { const base = makeTmpBase(); - const dbPath = join(base, '.gsd', 'sf.db'); + const dbPath = join(base, '.sf', 'sf.db'); openDatabase(dbPath); try { @@ -230,7 +230,7 @@ test('handlePlanMilestone preserves completed slice status on re-plan (#2558)', test('plan-milestone re-plan preserves completed status and updates slice fields (#2558)', async () => { const base = makeTmpBase(); - const dbPath = join(base, '.gsd', 'sf.db'); + const dbPath = join(base, '.sf', 'sf.db'); openDatabase(dbPath); try { @@ -271,7 +271,7 @@ test('plan-milestone re-plan preserves completed status and updates slice fields test('handlePlanMilestone promotes pre-existing queued milestone to active (#3022)', async () => { const base = makeTmpBase(); - const dbPath = join(base, '.gsd', 'sf.db'); + const dbPath = join(base, '.sf', 'sf.db'); openDatabase(dbPath); try { diff --git a/src/resources/extensions/sf/tests/plan-slice-prompt.test.ts b/src/resources/extensions/sf/tests/plan-slice-prompt.test.ts index 67dcb25f7..0c8ff5684 100644 --- 
a/src/resources/extensions/sf/tests/plan-slice-prompt.test.ts +++ b/src/resources/extensions/sf/tests/plan-slice-prompt.test.ts @@ -19,13 +19,13 @@ function loadPrompt(name: string, vars: Record<string, string> = {}): string { const BASE_VARS = { workingDirectory: "/tmp/test-project", milestoneId: "M001", sliceId: "S01", sliceTitle: "Test Slice", - slicePath: ".gsd/milestones/M001/slices/S01", - roadmapPath: ".gsd/milestones/M001/M001-ROADMAP.md", - researchPath: ".gsd/milestones/M001/slices/S01/S01-RESEARCH.md", - outputPath: "/tmp/test-project/.gsd/milestones/M001/slices/S01/S01-PLAN.md", + slicePath: ".sf/milestones/M001/slices/S01", + roadmapPath: ".sf/milestones/M001/M001-ROADMAP.md", + researchPath: ".sf/milestones/M001/slices/S01/S01-RESEARCH.md", + outputPath: "/tmp/test-project/.sf/milestones/M001/slices/S01/S01-PLAN.md", inlinedContext: "--- test inlined context ---", dependencySummaries: "", executorContextConstraints: "", - sourceFilePaths: "- **Requirements**: `.gsd/REQUIREMENTS.md`", + sourceFilePaths: "- **Requirements**: `.sf/REQUIREMENTS.md`", skillActivation: "Load the relevant skills.", }; @@ -42,7 +42,7 @@ function promptUsesSkillActivation(name: string): boolean { } test("plan-slice prompt: commit instruction says do not commit (external state)", () => { - const result = loadPrompt("plan-slice", { ...BASE_VARS, commitInstruction: "Do not commit planning artifacts — .gsd/ is managed externally." }); + const result = loadPrompt("plan-slice", { ...BASE_VARS, commitInstruction: "Do not commit planning artifacts — .sf/ is managed externally." 
}); assert.ok(result.includes("Do not commit planning artifacts")); assert.ok(!result.includes("{{commitInstruction}}")); }); @@ -100,14 +100,14 @@ test("skillActivation default leaves no unresolved placeholder", () => { sliceTitle: "Test Slice", taskId: "T01", taskTitle: "Implement feature", - planPath: ".gsd/milestones/M001/slices/S01/S01-PLAN.md", - taskPlanPath: ".gsd/milestones/M001/slices/S01/tasks/T01-PLAN.md", + planPath: ".sf/milestones/M001/slices/S01/S01-PLAN.md", + taskPlanPath: ".sf/milestones/M001/slices/S01/tasks/T01-PLAN.md", taskPlanInline: "Task plan", slicePlanExcerpt: "Slice excerpt", carryForwardSection: "Carry forward", resumeSection: "Resume", priorTaskLines: "- (no prior tasks)", - taskSummaryPath: "/tmp/test-project/.gsd/milestones/M001/slices/S01/tasks/T01-SUMMARY.md", + taskSummaryPath: "/tmp/test-project/.sf/milestones/M001/slices/S01/tasks/T01-SUMMARY.md", inlinedTemplates: "Template", verificationBudget: "~10K chars", overridesSection: "", @@ -125,14 +125,14 @@ test("custom skillActivation is substituted into execute-task", () => { sliceTitle: "Test Slice", taskId: "T01", taskTitle: "Implement feature", - planPath: ".gsd/milestones/M001/slices/S01/S01-PLAN.md", - taskPlanPath: ".gsd/milestones/M001/slices/S01/tasks/T01-PLAN.md", + planPath: ".sf/milestones/M001/slices/S01/S01-PLAN.md", + taskPlanPath: ".sf/milestones/M001/slices/S01/tasks/T01-PLAN.md", taskPlanInline: "Task plan", slicePlanExcerpt: "Slice excerpt", carryForwardSection: "Carry forward", resumeSection: "Resume", priorTaskLines: "- (no prior tasks)", - taskSummaryPath: "/tmp/test-project/.gsd/milestones/M001/slices/S01/tasks/T01-SUMMARY.md", + taskSummaryPath: "/tmp/test-project/.sf/milestones/M001/slices/S01/tasks/T01-SUMMARY.md", inlinedTemplates: "Template", verificationBudget: "~10K chars", overridesSection: "", @@ -173,9 +173,9 @@ test("research-milestone prompt substitutes skillActivation", () => { workingDirectory: "/tmp/test-project", milestoneId: "M001", 
milestoneTitle: "Test Milestone", - milestonePath: ".gsd/milestones/M001", - contextPath: ".gsd/milestones/M001/M001-CONTEXT.md", - outputPath: "/tmp/test-project/.gsd/milestones/M001/M001-RESEARCH.md", + milestonePath: ".sf/milestones/M001", + contextPath: ".sf/milestones/M001/M001-CONTEXT.md", + outputPath: "/tmp/test-project/.sf/milestones/M001/M001-RESEARCH.md", inlinedContext: "Context", skillDiscoveryMode: "manual", skillDiscoveryInstructions: " Discover skills manually.", @@ -191,9 +191,9 @@ test("research-milestone prompt references sf_summary_save, not direct write", ( workingDirectory: "/tmp/test-project", milestoneId: "M001", milestoneTitle: "Test Milestone", - milestonePath: ".gsd/milestones/M001", - contextPath: ".gsd/milestones/M001/M001-CONTEXT.md", - outputPath: "/tmp/test-project/.gsd/milestones/M001/M001-RESEARCH.md", + milestonePath: ".sf/milestones/M001", + contextPath: ".sf/milestones/M001/M001-CONTEXT.md", + outputPath: "/tmp/test-project/.sf/milestones/M001/M001-RESEARCH.md", inlinedContext: "Context", skillDiscoveryMode: "manual", skillDiscoveryInstructions: " Discover skills manually.", @@ -220,11 +220,11 @@ test("research-slice prompt substitutes skillActivation", () => { milestoneId: "M001", sliceId: "S01", sliceTitle: "Test Slice", - slicePath: ".gsd/milestones/M001/slices/S01", - roadmapPath: ".gsd/milestones/M001/M001-ROADMAP.md", - contextPath: ".gsd/milestones/M001/M001-CONTEXT.md", - milestoneResearchPath: ".gsd/milestones/M001/M001-RESEARCH.md", - outputPath: "/tmp/test-project/.gsd/milestones/M001/slices/S01/S01-RESEARCH.md", + slicePath: ".sf/milestones/M001/slices/S01", + roadmapPath: ".sf/milestones/M001/M001-ROADMAP.md", + contextPath: ".sf/milestones/M001/M001-CONTEXT.md", + milestoneResearchPath: ".sf/milestones/M001/M001-RESEARCH.md", + outputPath: "/tmp/test-project/.sf/milestones/M001/slices/S01/S01-RESEARCH.md", inlinedContext: "Context", dependencySummaries: "", skillDiscoveryMode: "manual", @@ -241,12 +241,12 @@ 
test("plan-milestone prompt substitutes skillActivation", () => { workingDirectory: "/tmp/test-project", milestoneId: "M001", milestoneTitle: "Test Milestone", - milestonePath: ".gsd/milestones/M001", - contextPath: ".gsd/milestones/M001/M001-CONTEXT.md", - researchPath: ".gsd/milestones/M001/M001-RESEARCH.md", - researchOutputPath: "/tmp/test-project/.gsd/milestones/M001/M001-RESEARCH.md", - outputPath: "/tmp/test-project/.gsd/milestones/M001/M001-ROADMAP.md", - secretsOutputPath: "/tmp/test-project/.gsd/milestones/M001/M001-SECRETS.md", + milestonePath: ".sf/milestones/M001", + contextPath: ".sf/milestones/M001/M001-CONTEXT.md", + researchPath: ".sf/milestones/M001/M001-RESEARCH.md", + researchOutputPath: "/tmp/test-project/.sf/milestones/M001/M001-RESEARCH.md", + outputPath: "/tmp/test-project/.sf/milestones/M001/M001-ROADMAP.md", + secretsOutputPath: "/tmp/test-project/.sf/milestones/M001/M001-SECRETS.md", inlinedContext: "Context", sourceFilePaths: "- source", skillDiscoveryMode: "manual", @@ -262,7 +262,7 @@ test("guided plan milestone prompt substitutes skillActivation", () => { const result = loadPrompt("guided-plan-milestone", { milestoneId: "M001", milestoneTitle: "Test Milestone", - secretsOutputPath: ".gsd/milestones/M001/M001-SECRETS.md", + secretsOutputPath: ".sf/milestones/M001/M001-SECRETS.md", inlinedTemplates: "Templates", skillActivation: "Load guided planning skills first.", }); diff --git a/src/resources/extensions/sf/tests/plan-slice.test.ts b/src/resources/extensions/sf/tests/plan-slice.test.ts index 381b126d1..acdd14883 100644 --- a/src/resources/extensions/sf/tests/plan-slice.test.ts +++ b/src/resources/extensions/sf/tests/plan-slice.test.ts @@ -11,7 +11,7 @@ import { parseTaskPlanFile } from '../files.ts'; function makeTmpBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-plan-slice-')); - mkdirSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks'), { recursive: true }); + mkdirSync(join(base, '.sf', 
'milestones', 'M001', 'slices', 'S02', 'tasks'), { recursive: true }); return base; } @@ -63,7 +63,7 @@ function validParams() { test('handlePlanSlice writes slice/task planning state and renders plan artifacts', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedParentSlice(); @@ -82,14 +82,14 @@ test('handlePlanSlice writes slice/task planning state and renders plan artifact assert.equal(tasks[0]?.description, 'Implement the slice planning handler.'); assert.equal(tasks[1]?.estimate, '30m'); - const planPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'S02-PLAN.md'); + const planPath = join(base, '.sf', 'milestones', 'M001', 'slices', 'S02', 'S02-PLAN.md'); assert.ok(existsSync(planPath), 'slice plan should be rendered to disk'); const parsedPlan = parsePlan(readFileSync(planPath, 'utf-8')); assert.equal(parsedPlan.goal, 'Persist slice planning through the DB.'); assert.equal(parsedPlan.tasks.length, 2); assert.equal(parsedPlan.tasks[0]?.id, 'T01'); - const taskPlanPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T01-PLAN.md'); + const taskPlanPath = join(base, '.sf', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T01-PLAN.md'); assert.ok(existsSync(taskPlanPath), 'task plan should be rendered to disk'); const taskPlan = parseTaskPlanFile(readFileSync(taskPlanPath, 'utf-8')); assert.deepEqual(taskPlan.frontmatter.skills_used, []); @@ -100,7 +100,7 @@ test('handlePlanSlice writes slice/task planning state and renders plan artifact test('handlePlanSlice rejects invalid payloads', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedParentSlice(); @@ -114,7 +114,7 @@ test('handlePlanSlice rejects invalid payloads', async () => { test('handlePlanSlice rejects missing parent slice', async () => { const base = makeTmpBase(); - openDatabase(join(base, 
'.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { insertMilestone({ id: 'M001', title: 'Milestone', status: 'active' }); @@ -128,11 +128,11 @@ test('handlePlanSlice rejects missing parent slice', async () => { test('handlePlanSlice surfaces render failures without changing parse-visible task-plan state for the failing task', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedParentSlice(); - const failingTaskPlanPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T01-PLAN.md'); + const failingTaskPlanPath = join(base, '.sf', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T01-PLAN.md'); writeFileSync(failingTaskPlanPath, '---\nestimated_steps: 1\nestimated_files: 1\nskills_used: []\n---\n\n# T01: Cached task\n', 'utf-8'); rmSync(failingTaskPlanPath, { force: true }); mkdirSync(failingTaskPlanPath, { recursive: true }); @@ -150,11 +150,11 @@ test('handlePlanSlice surfaces render failures without changing parse-visible ta test('handlePlanSlice reruns idempotently and refreshes parse-visible state', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedParentSlice(); - writeFileSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'S02-PLAN.md'), '# S02: Cached\n\n**Goal:** old value\n\n## Tasks\n\n- [ ] **T01: Cached task**\n', 'utf-8'); + writeFileSync(join(base, '.sf', 'milestones', 'M001', 'slices', 'S02', 'S02-PLAN.md'), '# S02: Cached\n\n**Goal:** old value\n\n## Tasks\n\n- [ ] **T01: Cached task**\n', 'utf-8'); const first = await handlePlanSlice(validParams(), base); assert.ok(!('error' in first)); @@ -169,7 +169,7 @@ test('handlePlanSlice reruns idempotently and refreshes parse-visible state', as }, base); assert.ok(!('error' in second)); - const parsedAfter = parsePlan(readFileSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 
'S02', 'S02-PLAN.md'), 'utf-8')); + const parsedAfter = parsePlan(readFileSync(join(base, '.sf', 'milestones', 'M001', 'slices', 'S02', 'S02-PLAN.md'), 'utf-8')); assert.equal(parsedAfter.goal, 'Updated goal from rerun.'); const task = getTask('M001', 'S02', 'T01'); assert.equal(task?.description, 'Updated slice handler description.'); diff --git a/src/resources/extensions/sf/tests/plan-task.test.ts b/src/resources/extensions/sf/tests/plan-task.test.ts index 5df6ef4bd..199868e55 100644 --- a/src/resources/extensions/sf/tests/plan-task.test.ts +++ b/src/resources/extensions/sf/tests/plan-task.test.ts @@ -10,7 +10,7 @@ import { parseTaskPlanFile } from '../files.ts'; function makeTmpBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-plan-task-')); - mkdirSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M001', 'slices', 'S02', 'tasks'), { recursive: true }); return base; } @@ -42,7 +42,7 @@ function validParams() { test('handlePlanTask writes planning state and renders task plan', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedParent(); @@ -55,7 +55,7 @@ test('handlePlanTask writes planning state and renders task plan', async () => { assert.equal(task?.description, 'Implement the DB-backed task planning handler.'); assert.equal(task?.estimate, '30m'); - const taskPlanPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T02-PLAN.md'); + const taskPlanPath = join(base, '.sf', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T02-PLAN.md'); assert.ok(existsSync(taskPlanPath), 'task plan should be rendered to disk'); const taskPlan = parseTaskPlanFile(readFileSync(taskPlanPath, 'utf-8')); assert.equal(taskPlan.frontmatter.estimated_files, 1); @@ -67,7 +67,7 @@ test('handlePlanTask writes planning state and renders task plan', async () => { test('handlePlanTask 
rejects invalid payloads', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedParent(); @@ -81,7 +81,7 @@ test('handlePlanTask rejects invalid payloads', async () => { test('handlePlanTask rejects missing parent slice', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { insertMilestone({ id: 'M001', title: 'Milestone', status: 'active' }); @@ -95,12 +95,12 @@ test('handlePlanTask rejects missing parent slice', async () => { test('handlePlanTask surfaces render failures without changing parse-visible task plan state', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedParent(); insertTask({ id: 'T02', sliceId: 'S02', milestoneId: 'M001', title: 'Cached task', status: 'pending' }); - const taskPlanPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T02-PLAN.md'); + const taskPlanPath = join(base, '.sf', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T02-PLAN.md'); writeFileSync(taskPlanPath, '---\nestimated_steps: 1\nestimated_files: 1\nskills_used: []\n---\n\n# T02: Cached task\n', 'utf-8'); rmSync(taskPlanPath, { force: true }); mkdirSync(taskPlanPath, { recursive: true }); @@ -115,11 +115,11 @@ test('handlePlanTask surfaces render failures without changing parse-visible tas test('handlePlanTask reruns idempotently and refreshes parse-visible state', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedParent(); - const taskPlanPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T02-PLAN.md'); + const taskPlanPath = join(base, '.sf', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T02-PLAN.md'); writeFileSync(taskPlanPath, '---\nestimated_steps: 1\nestimated_files: 
1\nskills_used: []\n---\n\n# T02: Cached task\n', 'utf-8'); const first = await handlePlanTask(validParams(), base); diff --git a/src/resources/extensions/sf/tests/planning-crossval.test.ts b/src/resources/extensions/sf/tests/planning-crossval.test.ts index b180ccd82..250f42f58 100644 --- a/src/resources/extensions/sf/tests/planning-crossval.test.ts +++ b/src/resources/extensions/sf/tests/planning-crossval.test.ts @@ -30,15 +30,15 @@ const { assertEq, assertTrue, report } = createTestContext(); function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-planning-crossval-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } /** Scaffold the minimal directory structure the renderers need on disk. */ function scaffoldDirs(base: string, milestoneId: string, sliceIds: string[]): void { - mkdirSync(join(base, '.gsd', 'milestones', milestoneId), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', milestoneId), { recursive: true }); for (const sid of sliceIds) { - mkdirSync(join(base, '.gsd', 'milestones', milestoneId, 'slices', sid, 'tasks'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', milestoneId, 'slices', sid, 'tasks'), { recursive: true }); } } @@ -53,7 +53,7 @@ function cleanup(base: string): void { console.log('\n=== planning-crossval Test 1: ROADMAP round-trip parity ==='); { const base = createFixtureBase(); - const dbPath = join(base, '.gsd', 'sf.db'); + const dbPath = join(base, '.sf', 'sf.db'); openDatabase(dbPath); try { scaffoldDirs(base, 'M001', ['S01', 'S02', 'S03', 'S04']); @@ -120,7 +120,7 @@ console.log('\n=== planning-crossval Test 1: ROADMAP round-trip parity ==='); console.log('\n=== planning-crossval Test 2: PLAN round-trip parity ==='); { const base = createFixtureBase(); - const dbPath = join(base, '.gsd', 'sf.db'); + const dbPath = join(base, '.sf', 'sf.db'); openDatabase(dbPath); try { 
scaffoldDirs(base, 'M001', ['S01']); @@ -248,7 +248,7 @@ console.log('\n=== planning-crossval Test 2: PLAN round-trip parity ==='); console.log('\n=== planning-crossval Test 3: Sequence ordering parity ==='); { const base = createFixtureBase(); - const dbPath = join(base, '.gsd', 'sf.db'); + const dbPath = join(base, '.sf', 'sf.db'); openDatabase(dbPath); try { scaffoldDirs(base, 'M001', ['S01', 'S02', 'S03', 'S04']); diff --git a/src/resources/extensions/sf/tests/post-exec-retry-bypass.test.ts b/src/resources/extensions/sf/tests/post-exec-retry-bypass.test.ts index b55b6de76..e62ec808e 100644 --- a/src/resources/extensions/sf/tests/post-exec-retry-bypass.test.ts +++ b/src/resources/extensions/sf/tests/post-exec-retry-bypass.test.ts @@ -64,7 +64,7 @@ function setupTestEnvironment(): void { tempDir = join(tmpdir(), `post-exec-retry-test-${Date.now()}-${Math.random().toString(36).slice(2)}`); mkdirSync(tempDir, { recursive: true }); - const sfDir = join(tempDir, ".gsd"); + const sfDir = join(tempDir, ".sf"); mkdirSync(sfDir, { recursive: true }); const milestonesDir = join(sfDir, "milestones", "M001", "slices", "S01", "tasks"); @@ -103,7 +103,7 @@ ${yamlLines.join("\n")} # SF Preferences `; - writeFileSync(join(tempDir, ".gsd", "PREFERENCES.md"), prefsContent); + writeFileSync(join(tempDir, ".sf", "PREFERENCES.md"), prefsContent); invalidateAllCaches(); _clearGsdRootCache(); } diff --git a/src/resources/extensions/sf/tests/post-mutation-hook.test.ts b/src/resources/extensions/sf/tests/post-mutation-hook.test.ts index 4aa99f4dc..75af5e132 100644 --- a/src/resources/extensions/sf/tests/post-mutation-hook.test.ts +++ b/src/resources/extensions/sf/tests/post-mutation-hook.test.ts @@ -22,7 +22,7 @@ function cleanupDir(dirPath: string): void { /** Create a minimal project directory with a PLAN.md for complete-task to find. 
*/ function createProject(basePath: string): void { - const sliceDir = path.join(basePath, '.gsd', 'milestones', 'M001', 'slices', 'S01'); + const sliceDir = path.join(basePath, '.sf', 'milestones', 'M001', 'slices', 'S01'); const tasksDir = path.join(sliceDir, 'tasks'); fs.mkdirSync(tasksDir, { recursive: true }); fs.writeFileSync(path.join(sliceDir, 'S01-PLAN.md'), `# S01: Test Slice @@ -70,7 +70,7 @@ test('post-mutation-hook: event-log.jsonl exists after handleCompleteTask', asyn const result = await handleCompleteTask(makeCompleteTaskParams(), base); assert.ok(!('error' in result), `handler should succeed, got: ${JSON.stringify(result)}`); - const logPath = path.join(base, '.gsd', 'event-log.jsonl'); + const logPath = path.join(base, '.sf', 'event-log.jsonl'); assert.ok(fs.existsSync(logPath), 'event-log.jsonl should exist after handler completes'); } finally { closeDatabase(); @@ -87,7 +87,7 @@ test('post-mutation-hook: event log contains complete-task event with correct pa try { await handleCompleteTask(makeCompleteTaskParams(), base); - const logPath = path.join(base, '.gsd', 'event-log.jsonl'); + const logPath = path.join(base, '.sf', 'event-log.jsonl'); const events = readEvents(logPath); assert.ok(events.length > 0, 'event log should have at least one event'); @@ -115,7 +115,7 @@ test('post-mutation-hook: state-manifest.json exists after handleCompleteTask', const result = await handleCompleteTask(makeCompleteTaskParams(), base); assert.ok(!('error' in result), `handler should succeed, got: ${JSON.stringify(result)}`); - const manifestPath = path.join(base, '.gsd', 'state-manifest.json'); + const manifestPath = path.join(base, '.sf', 'state-manifest.json'); assert.ok(fs.existsSync(manifestPath), 'state-manifest.json should exist after handler completes'); } finally { closeDatabase(); @@ -150,14 +150,14 @@ test('post-mutation-hook: manifest has version 1 and includes completed task', a // ─── Post-mutation hook: non-fatal on hook failure 
─────────────────────── test('post-mutation-hook: handler still returns success even if projections dir is missing', async () => { - // basePath with NO .gsd directory — projections will fail to find milestones + // basePath with NO .sf directory — projections will fail to find milestones // but handler should still return a result (not throw) const base = tempDir(); const dbPath = path.join(base, 'test.db'); openDatabase(dbPath); // Create tasks dir but NO plan file (projections will soft-fail) - const tasksDir = path.join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'tasks'); + const tasksDir = path.join(base, '.sf', 'milestones', 'M001', 'slices', 'S01', 'tasks'); fs.mkdirSync(tasksDir, { recursive: true }); try { diff --git a/src/resources/extensions/sf/tests/post-unit-hooks.test.ts b/src/resources/extensions/sf/tests/post-unit-hooks.test.ts index 0ee529129..ac8f556df 100644 --- a/src/resources/extensions/sf/tests/post-unit-hooks.test.ts +++ b/src/resources/extensions/sf/tests/post-unit-hooks.test.ts @@ -25,7 +25,7 @@ import { function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-hook-test-")); - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); return base; } @@ -44,7 +44,7 @@ test('resolveHookArtifactPath', () => { const taskPath = resolveHookArtifactPath(base, "M001/S01/T01", "REVIEW-PASS.md"); assert.deepStrictEqual( taskPath, - join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks", "T01-REVIEW-PASS.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks", "T01-REVIEW-PASS.md"), "task-level artifact path", ); @@ -52,7 +52,7 @@ test('resolveHookArtifactPath', () => { const slicePath = resolveHookArtifactPath(base, "M001/S01", "REVIEW-PASS.md"); assert.deepStrictEqual( slicePath, - join(base, ".gsd", "milestones", "M001", "slices", "S01", 
"REVIEW-PASS.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "REVIEW-PASS.md"), "slice-level artifact path", ); @@ -60,7 +60,7 @@ test('resolveHookArtifactPath', () => { const milestonePath = resolveHookArtifactPath(base, "M001", "REVIEW-PASS.md"); assert.deepStrictEqual( milestonePath, - join(base, ".gsd", "milestones", "M001", "REVIEW-PASS.md"), + join(base, ".sf", "milestones", "M001", "REVIEW-PASS.md"), "milestone-level artifact path", ); }); @@ -164,7 +164,7 @@ test('State persistence: persist and restore', () => { // Persist empty state persistHookState(base); - const filePath = join(base, ".gsd", "hook-state.json"); + const filePath = join(base, ".sf", "hook-state.json"); assert.ok(existsSync(filePath), "hook-state.json created"); const content = JSON.parse(readFileSync(filePath, "utf-8")); @@ -181,7 +181,7 @@ test('State persistence: restore from disk', () => { resetHookState(); // Write a state file with some cycle counts - const stateFile = join(base, ".gsd", "hook-state.json"); + const stateFile = join(base, ".sf", "hook-state.json"); writeFileSync(stateFile, JSON.stringify({ cycleCounts: { "review/execute-task/M001/S01/T01": 2, @@ -209,7 +209,7 @@ test('State persistence: clear', () => { resetHookState(); // Write then clear - const stateFile = join(base, ".gsd", "hook-state.json"); + const stateFile = join(base, ".sf", "hook-state.json"); writeFileSync(stateFile, JSON.stringify({ cycleCounts: { "review/execute-task/M001/S01/T01": 3 }, savedAt: new Date().toISOString(), @@ -240,7 +240,7 @@ test('State persistence: restore handles corrupt file', () => { const base = createFixtureBase(); try { resetHookState(); - writeFileSync(join(base, ".gsd", "hook-state.json"), "not json", "utf-8"); + writeFileSync(join(base, ".sf", "hook-state.json"), "not json", "utf-8"); // Should not throw restoreHookState(base); assert.deepStrictEqual(getActiveHook(), null, "no active hook after corrupt restore"); diff --git 
a/src/resources/extensions/sf/tests/pre-execution-fail-closed.test.ts b/src/resources/extensions/sf/tests/pre-execution-fail-closed.test.ts index 40923208a..9f0f3a971 100644 --- a/src/resources/extensions/sf/tests/pre-execution-fail-closed.test.ts +++ b/src/resources/extensions/sf/tests/pre-execution-fail-closed.test.ts @@ -79,7 +79,7 @@ function setupTestEnvironment(): void { tempDir = join(tmpdir(), `pre-exec-fail-closed-test-${Date.now()}-${Math.random().toString(36).slice(2)}`); mkdirSync(tempDir, { recursive: true }); - const sfDir = join(tempDir, ".gsd"); + const sfDir = join(tempDir, ".sf"); mkdirSync(sfDir, { recursive: true }); const milestonesDir = join(sfDir, "milestones", "M001", "slices", "S01", "tasks"); @@ -118,7 +118,7 @@ ${yamlLines.join("\n")} # SF Preferences `; - writeFileSync(join(tempDir, ".gsd", "PREFERENCES.md"), prefsContent); + writeFileSync(join(tempDir, ".sf", "PREFERENCES.md"), prefsContent); invalidateAllCaches(); _clearGsdRootCache(); } diff --git a/src/resources/extensions/sf/tests/pre-execution-pause-wiring.test.ts b/src/resources/extensions/sf/tests/pre-execution-pause-wiring.test.ts index 2022cb899..2416825fb 100644 --- a/src/resources/extensions/sf/tests/pre-execution-pause-wiring.test.ts +++ b/src/resources/extensions/sf/tests/pre-execution-pause-wiring.test.ts @@ -101,8 +101,8 @@ function setupTestEnvironment(): void { tempDir = join(tmpdir(), `pre-exec-pause-test-${Date.now()}-${Math.random().toString(36).slice(2)}`); mkdirSync(tempDir, { recursive: true }); - // Create .gsd directory structure - const sfDir = join(tempDir, ".gsd"); + // Create .sf directory structure + const sfDir = join(tempDir, ".sf"); mkdirSync(sfDir, { recursive: true }); // Create milestones directory structure @@ -112,7 +112,7 @@ function setupTestEnvironment(): void { // Change cwd so loadEffectiveSFPreferences finds our PREFERENCES.md process.chdir(tempDir); - // Clear sfRoot cache so it finds the new .gsd directory + // Clear sfRoot cache so it finds 
the new .sf directory _clearGsdRootCache(); // Initialize DB @@ -156,7 +156,7 @@ ${yamlLines.join("\n")} # SF Preferences `; - writeFileSync(join(tempDir, ".gsd", "PREFERENCES.md"), prefsContent); + writeFileSync(join(tempDir, ".sf", "PREFERENCES.md"), prefsContent); // Invalidate caches so the new preferences file is found invalidateAllCaches(); _clearGsdRootCache(); diff --git a/src/resources/extensions/sf/tests/preferences-formatting.test.ts b/src/resources/extensions/sf/tests/preferences-formatting.test.ts index f14a7a16e..e4d959ebe 100644 --- a/src/resources/extensions/sf/tests/preferences-formatting.test.ts +++ b/src/resources/extensions/sf/tests/preferences-formatting.test.ts @@ -77,11 +77,11 @@ describe("formatSkillRef", () => { const resolutions = makeResolutions([ ["lint-fix", { method: "project-skill", - resolvedPath: "/repo/.gsd/skills/lint-fix/SKILL.md", + resolvedPath: "/repo/.sf/skills/lint-fix/SKILL.md", }], ]); const result = formatSkillRef("lint-fix", resolutions); assert.match(result, /lint-fix/); - assert.match(result, /\.gsd\/skills\/lint-fix\/SKILL\.md/); + assert.match(result, /\.sf\/skills\/lint-fix\/SKILL\.md/); }); }); diff --git a/src/resources/extensions/sf/tests/preferences-worktree-sync.test.ts b/src/resources/extensions/sf/tests/preferences-worktree-sync.test.ts index 229a874a2..9747b35fc 100644 --- a/src/resources/extensions/sf/tests/preferences-worktree-sync.test.ts +++ b/src/resources/extensions/sf/tests/preferences-worktree-sync.test.ts @@ -61,8 +61,8 @@ test("syncSfStateToWorktree copies canonical PREFERENCES.md", async () => { // Functional test: create a mock source and destination, call the sync const srcBase = mkdtempSync(join(tmpdir(), "sf-wt-prefs-src-")); const dstBase = mkdtempSync(join(tmpdir(), "sf-wt-prefs-dst-")); - const srcGsd = join(srcBase, ".gsd"); - const dstGsd = join(dstBase, ".gsd"); + const srcGsd = join(srcBase, ".sf"); + const dstGsd = join(dstBase, ".sf"); mkdirSync(srcGsd, { recursive: true }); 
mkdirSync(dstGsd, { recursive: true }); @@ -97,8 +97,8 @@ test("syncSfStateToWorktree copies canonical PREFERENCES.md", async () => { test("syncSfStateToWorktree falls back to legacy lowercase preferences.md", async () => { const srcBase = mkdtempSync(join(tmpdir(), "sf-wt-prefs-legacy-src-")); const dstBase = mkdtempSync(join(tmpdir(), "sf-wt-prefs-legacy-dst-")); - const srcGsd = join(srcBase, ".gsd"); - const dstGsd = join(dstBase, ".gsd"); + const srcGsd = join(srcBase, ".sf"); + const dstGsd = join(dstBase, ".sf"); mkdirSync(srcGsd, { recursive: true }); mkdirSync(dstGsd, { recursive: true }); diff --git a/src/resources/extensions/sf/tests/preferences.test.ts b/src/resources/extensions/sf/tests/preferences.test.ts index 04ca2f962..6e78beea5 100644 --- a/src/resources/extensions/sf/tests/preferences.test.ts +++ b/src/resources/extensions/sf/tests/preferences.test.ts @@ -559,7 +559,7 @@ test("loadEffectiveSFPreferences preserves experimental prefs across global+proj const tempGsdHome = mkdtempSync(join(tmpdir(), "sf-prefs-home-")); try { - mkdirSync(join(tempProject, ".gsd"), { recursive: true }); + mkdirSync(join(tempProject, ".sf"), { recursive: true }); writeFileSync( join(tempGsdHome, "preferences.md"), @@ -574,7 +574,7 @@ test("loadEffectiveSFPreferences preserves experimental prefs across global+proj ); writeFileSync( - join(tempProject, ".gsd", "PREFERENCES.md"), + join(tempProject, ".sf", "PREFERENCES.md"), [ "---", "version: 1", diff --git a/src/resources/extensions/sf/tests/preflight-context-draft-filter.test.ts b/src/resources/extensions/sf/tests/preflight-context-draft-filter.test.ts index 9960c23e4..296b8e2f2 100644 --- a/src/resources/extensions/sf/tests/preflight-context-draft-filter.test.ts +++ b/src/resources/extensions/sf/tests/preflight-context-draft-filter.test.ts @@ -31,7 +31,7 @@ describe("pre-flight CONTEXT-DRAFT filter (#2473)", () => { beforeEach(() => { tmpBase = mkdtempSync(join(tmpdir(), "sf-preflight-draft-")); - sf = join(tmpBase, 
".gsd"); + sf = join(tmpBase, ".sf"); // Create milestone directories with CONTEXT-DRAFT files for (const id of ["M001", "M002", "M003"]) { diff --git a/src/resources/extensions/sf/tests/project-relocation-recovery.test.ts b/src/resources/extensions/sf/tests/project-relocation-recovery.test.ts index b73331dd5..21952a12f 100644 --- a/src/resources/extensions/sf/tests/project-relocation-recovery.test.ts +++ b/src/resources/extensions/sf/tests/project-relocation-recovery.test.ts @@ -6,7 +6,7 @@ * should be based solely on the remote — making moves transparent. * * For local-only repos (no remote), ensureGsdSymlink should detect - * orphaned state directories with a matching .gsd-id marker and + * orphaned state directories with a matching .sf-id marker and * recover them automatically. */ @@ -186,25 +186,25 @@ describe("project-relocation-recovery (#2750)", () => { rmSync(repoB, { recursive: true, force: true }); }); - // ── Local-only repos: .gsd-id marker provides recovery ──────────────── + // ── Local-only repos: .sf-id marker provides recovery ──────────────── - test("ensureGsdSymlink writes a .gsd-id marker in the project root", () => { + test("ensureGsdSymlink writes a .sf-id marker in the project root", () => { const repo = realpathSync(mkdtempSync(join(tmpdir(), "sf-reloc-marker-"))); initRepo(repo); ensureGsdSymlink(repo); - const markerPath = join(repo, ".gsd-id"); - assert.ok(existsSync(markerPath), ".gsd-id marker must be written by ensureGsdSymlink"); + const markerPath = join(repo, ".sf-id"); + assert.ok(existsSync(markerPath), ".sf-id marker must be written by ensureGsdSymlink"); const markerId = readFileSync(markerPath, "utf-8").trim(); const computedId = repoIdentity(repo); - assert.strictEqual(markerId, computedId, ".gsd-id must contain the repo identity hash"); + assert.strictEqual(markerId, computedId, ".sf-id must contain the repo identity hash"); rmSync(repo, { recursive: true, force: true }); }); - test("local-only repo recovers state via 
.gsd-id marker after move", () => { + test("local-only repo recovers state via .sf-id marker after move", () => { const repoA = realpathSync(mkdtempSync(join(tmpdir(), "sf-reloc-local-a-"))); initRepo(repoA); // No remote — identity includes gitRoot @@ -235,11 +235,11 @@ describe("project-relocation-recovery (#2750)", () => { "local-only repo identity changes with move (expected)", ); - // But ensureGsdSymlink should detect .gsd-id marker and recover + // But ensureGsdSymlink should detect .sf-id marker and recover const externalB = ensureGsdSymlink(repoB); assert.ok( existsSync(join(externalB, "milestones", "M001.md")), - "local-only repo must recover state via .gsd-id marker after move", + "local-only repo must recover state via .sf-id marker after move", ); rmSync(repoB, { recursive: true, force: true }); diff --git a/src/resources/extensions/sf/tests/prompt-budget-enforcement.test.ts b/src/resources/extensions/sf/tests/prompt-budget-enforcement.test.ts index e4dd59a3c..660bc5cc4 100644 --- a/src/resources/extensions/sf/tests/prompt-budget-enforcement.test.ts +++ b/src/resources/extensions/sf/tests/prompt-budget-enforcement.test.ts @@ -43,7 +43,7 @@ function setupDependencyFixture( deps: string[], summaries: Record<string, string>, ): void { - const msDir = join(base, ".gsd", "milestones", mid); + const msDir = join(base, ".sf", "milestones", mid); mkdirSync(msDir, { recursive: true }); // Build roadmap content — sid depends on deps @@ -156,7 +156,7 @@ describe("prompt-budget: inlineDependencySummaries truncation", () => { }); it("returns no-dependencies marker when slice has no deps", async () => { - const msDir = join(base, ".gsd", "milestones", "M001"); + const msDir = join(base, ".sf", "milestones", "M001"); mkdirSync(msDir, { recursive: true }); const roadmap = "# Roadmap\n\n## Slices\n\n- [ ] **S01: Solo** `risk:low` `depends:[]`\n"; writeFileSync(join(msDir, "M001-ROADMAP.md"), roadmap); diff --git a/src/resources/extensions/sf/tests/prompt-db.test.ts 
b/src/resources/extensions/sf/tests/prompt-db.test.ts index 4a6a91162..ebef78e8b 100644 --- a/src/resources/extensions/sf/tests/prompt-db.test.ts +++ b/src/resources/extensions/sf/tests/prompt-db.test.ts @@ -63,7 +63,7 @@ console.log('\n=== prompt-db: scoped decisions from DB ==='); assert.match(formatted, /\| # \| When \| Scope/, 'formatted decisions have table header'); // Verify the expected wrapper format that inlineDecisionsFromDb would produce - const wrapped = `### Decisions\nSource: \`.gsd/DECISIONS.md\`\n\n${formatted}`; + const wrapped = `### Decisions\nSource: \`.sf/DECISIONS.md\`\n\n${formatted}`; assert.match(wrapped, /^### Decisions/, 'wrapped decisions start with ### Decisions'); assert.match(wrapped, /Source:.*DECISIONS\.md/, 'wrapped decisions have source path'); @@ -116,7 +116,7 @@ console.log('\n=== prompt-db: scoped requirements from DB ==='); assert.doesNotMatch(formatted, /### R003/, 'formatted requirements exclude R003'); // Verify the expected wrapper format that inlineRequirementsFromDb would produce - const wrapped = `### Requirements\nSource: \`.gsd/REQUIREMENTS.md\`\n\n${formatted}`; + const wrapped = `### Requirements\nSource: \`.sf/REQUIREMENTS.md\`\n\n${formatted}`; assert.match(wrapped, /^### Requirements/, 'wrapped requirements start with ### Requirements'); assert.match(wrapped, /Source:.*REQUIREMENTS\.md/, 'wrapped requirements have source path'); @@ -144,7 +144,7 @@ console.log('\n=== prompt-db: project content from DB ==='); assert.deepStrictEqual(content, '# Test Project\n\nThis is the project description.', 'queryProject returns content'); // Verify the expected wrapper format that inlineProjectFromDb would produce - const wrapped = `### Project\nSource: \`.gsd/PROJECT.md\`\n\n${content}`; + const wrapped = `### Project\nSource: \`.sf/PROJECT.md\`\n\n${content}`; assert.match(wrapped, /^### Project/, 'wrapped project starts with ### Project'); assert.match(wrapped, /Source:.*PROJECT\.md/, 'wrapped project has source path'); 
assert.match(wrapped, /# Test Project/, 'wrapped project includes content'); @@ -293,21 +293,21 @@ console.log('\n=== prompt-db: DB helpers wrapper format matches expected pattern const decisions = queryDecisions({ milestoneId: 'M001' }); assert.ok(decisions.length === 1, 'got 1 decision for M001'); const dFormatted = formatDecisionsForPrompt(decisions); - const dWrapped = `### Decisions\nSource: \`.gsd/DECISIONS.md\`\n\n${dFormatted}`; - assert.match(dWrapped, /^### Decisions\nSource: `.gsd\/DECISIONS\.md`\n\n\| #/, 'decisions wrapper format correct'); + const dWrapped = `### Decisions\nSource: \`.sf/DECISIONS.md\`\n\n${dFormatted}`; + assert.match(dWrapped, /^### Decisions\nSource: `.sf\/DECISIONS\.md`\n\n\| #/, 'decisions wrapper format correct'); // Simulate what inlineRequirementsFromDb does const reqs = queryRequirements({ sliceId: 'S01' }); assert.ok(reqs.length === 1, 'got 1 requirement for S01'); const rFormatted = formatRequirementsForPrompt(reqs); - const rWrapped = `### Requirements\nSource: \`.gsd/REQUIREMENTS.md\`\n\n${rFormatted}`; - assert.match(rWrapped, /^### Requirements\nSource: `.gsd\/REQUIREMENTS\.md`\n\n### R001/, 'requirements wrapper format correct'); + const rWrapped = `### Requirements\nSource: \`.sf/REQUIREMENTS.md\`\n\n${rFormatted}`; + assert.match(rWrapped, /^### Requirements\nSource: `.sf\/REQUIREMENTS\.md`\n\n### R001/, 'requirements wrapper format correct'); // Simulate what inlineProjectFromDb does const project = queryProject(); assert.ok(project !== null, 'project content exists'); - const pWrapped = `### Project\nSource: \`.gsd/PROJECT.md\`\n\n${project}`; - assert.match(pWrapped, /^### Project\nSource: `.gsd\/PROJECT\.md`\n\n# Project Name/, 'project wrapper format correct'); + const pWrapped = `### Project\nSource: \`.sf/PROJECT.md\`\n\n${project}`; + assert.match(pWrapped, /^### Project\nSource: `.sf\/PROJECT\.md`\n\n# Project Name/, 'project wrapper format correct'); closeDatabase(); } @@ -324,9 +324,9 @@ import { 
migrateFromMarkdown } from '../md-importer.ts'; describe('prompt-db', () => { test('prompt-db: re-import updates DB when source markdown changes', () => { - // Create a temp dir simulating a project with .gsd/DECISIONS.md + // Create a temp dir simulating a project with .sf/DECISIONS.md const tmpDir = mkdtempSync(join(tmpdir(), 'prompt-db-reimport-')); - const sfDir = join(tmpDir, '.gsd'); + const sfDir = join(tmpDir, '.sf'); mkdirSync(sfDir, { recursive: true }); // Write initial DECISIONS.md with 2 decisions diff --git a/src/resources/extensions/sf/tests/prompt-ordering.test.ts b/src/resources/extensions/sf/tests/prompt-ordering.test.ts index d32e3fbff..ff155e420 100644 --- a/src/resources/extensions/sf/tests/prompt-ordering.test.ts +++ b/src/resources/extensions/sf/tests/prompt-ordering.test.ts @@ -246,7 +246,7 @@ describe("real-world prompt reordering", () => { "T1.1 completed: scaffolded auth module.", "", "## Backing Source Artifacts", - "- Slice plan: `.gsd/slices/S1.md`", + "- Slice plan: `.sf/slices/S1.md`", "", "## Output Template", "Use standard task summary format.", diff --git a/src/resources/extensions/sf/tests/queue-draft-detection.test.ts b/src/resources/extensions/sf/tests/queue-draft-detection.test.ts index 870f5369d..8eafed92b 100644 --- a/src/resources/extensions/sf/tests/queue-draft-detection.test.ts +++ b/src/resources/extensions/sf/tests/queue-draft-detection.test.ts @@ -10,7 +10,7 @@ import { buildExistingMilestonesContext } from "../guided-flow.js"; describe('queue-draft-detection', () => { test('draft and context milestone detection', async () => { const tmpBase = mkdtempSync(join(tmpdir(), "sf-queue-draft-test-")); - const sf = join(tmpBase, ".gsd"); + const sf = join(tmpBase, ".sf"); try { // M001: has only CONTEXT-DRAFT.md (draft milestone) diff --git a/src/resources/extensions/sf/tests/queue-execution-guard.test.ts b/src/resources/extensions/sf/tests/queue-execution-guard.test.ts index b4ccf66a5..6ed669172 100644 --- 
a/src/resources/extensions/sf/tests/queue-execution-guard.test.ts +++ b/src/resources/extensions/sf/tests/queue-execution-guard.test.ts @@ -3,13 +3,13 @@ * * When queue phase is active, the agent should only create milestones — * not execute work. This guard blocks write/edit/bash tool calls that - * target source code (non-.gsd/ paths) during queue mode. + * target source code (non-.sf/ paths) during queue mode. * * Exercises shouldBlockQueueExecution() — a pure function that checks: * (a) queuePhaseActive false → pass (not in queue mode) * (b) toolName is read-only (read, grep, find, ls) → pass * (c) toolName is ask_user_questions → pass (discussion tool) - * (d) write/edit to .gsd/ path → pass (planning artifacts) + * (d) write/edit to .sf/ path → pass (planning artifacts) * (e) write/edit to source path → block * (f) bash command → block (could execute work) * (g) registered SF tools (sf_milestone_generate_id, sf_summary_save) → pass @@ -55,23 +55,23 @@ test('queue-guard: allows discussion and planning tools during queue mode', () = assert.strictEqual(r3.block, false, 'sf_summary_save should pass'); }); -// ─── Scenario 4: Write to .gsd/ paths passes (planning artifacts) ── +// ─── Scenario 4: Write to .sf/ paths passes (planning artifacts) ── -test('queue-guard: allows writes to .gsd/ paths during queue mode', () => { - const r1 = shouldBlockQueueExecution('write', '.gsd/milestones/M001/M001-CONTEXT.md', true); - assert.strictEqual(r1.block, false, 'write to .gsd/ should pass'); +test('queue-guard: allows writes to .sf/ paths during queue mode', () => { + const r1 = shouldBlockQueueExecution('write', '.sf/milestones/M001/M001-CONTEXT.md', true); + assert.strictEqual(r1.block, false, 'write to .sf/ should pass'); - const r2 = shouldBlockQueueExecution('write', '/project/.gsd/PROJECT.md', true); - assert.strictEqual(r2.block, false, 'write to .gsd/PROJECT.md should pass'); + const r2 = shouldBlockQueueExecution('write', '/project/.sf/PROJECT.md', true); + 
assert.strictEqual(r2.block, false, 'write to .sf/PROJECT.md should pass'); - const r3 = shouldBlockQueueExecution('edit', '.gsd/QUEUE.md', true); - assert.strictEqual(r3.block, false, 'edit to .gsd/QUEUE.md should pass'); + const r3 = shouldBlockQueueExecution('edit', '.sf/QUEUE.md', true); + assert.strictEqual(r3.block, false, 'edit to .sf/QUEUE.md should pass'); - const r4 = shouldBlockQueueExecution('write', '.gsd/REQUIREMENTS.md', true); - assert.strictEqual(r4.block, false, 'write to .gsd/REQUIREMENTS.md should pass'); + const r4 = shouldBlockQueueExecution('write', '.sf/REQUIREMENTS.md', true); + assert.strictEqual(r4.block, false, 'write to .sf/REQUIREMENTS.md should pass'); - const r5 = shouldBlockQueueExecution('write', '.gsd/DECISIONS.md', true); - assert.strictEqual(r5.block, false, 'write to .gsd/DECISIONS.md should pass'); + const r5 = shouldBlockQueueExecution('write', '.sf/DECISIONS.md', true); + assert.strictEqual(r5.block, false, 'write to .sf/DECISIONS.md should pass'); }); // ─── Scenario 5: Write/edit to source code paths blocked ── @@ -134,11 +134,11 @@ test('queue-guard: allows read-only bash commands during queue mode', () => { assert.strictEqual(r9.block, false, 'gh issue view should pass'); }); -// ─── Scenario 8: mkdir for .gsd/ milestone directories passes ── +// ─── Scenario 8: mkdir for .sf/ milestone directories passes ── -test('queue-guard: allows mkdir for .gsd/ milestone directories', () => { - const r1 = shouldBlockQueueExecution('bash', 'mkdir -p .gsd/milestones/M010/slices', true); - assert.strictEqual(r1.block, false, 'mkdir -p .gsd/ should pass'); +test('queue-guard: allows mkdir for .sf/ milestone directories', () => { + const r1 = shouldBlockQueueExecution('bash', 'mkdir -p .sf/milestones/M010/slices', true); + assert.strictEqual(r1.block, false, 'mkdir -p .sf/ should pass'); }); // ─── Scenario 9: Web search and library tools pass ── diff --git a/src/resources/extensions/sf/tests/queue-order.test.ts 
b/src/resources/extensions/sf/tests/queue-order.test.ts index 3e1b789b3..3b341f6cf 100644 --- a/src/resources/extensions/sf/tests/queue-order.test.ts +++ b/src/resources/extensions/sf/tests/queue-order.test.ts @@ -15,7 +15,7 @@ import { function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-queue-order-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); return base; } @@ -77,7 +77,7 @@ test('test block at line 76', () => { assert.deepStrictEqual(loaded, ['M003', 'M001', 'M002'], 'round-trip preserves order'); // Verify file contains updatedAt - const raw = JSON.parse(readFileSync(join(base, '.gsd', 'QUEUE-ORDER.json'), 'utf-8')); + const raw = JSON.parse(readFileSync(join(base, '.sf', 'QUEUE-ORDER.json'), 'utf-8')); assert.ok(typeof raw.updatedAt === 'string' && raw.updatedAt.length > 0, 'file contains updatedAt'); cleanup(base); @@ -86,7 +86,7 @@ test('test block at line 76', () => { // Load returns null on corrupt JSON test('test block at line 90', () => { const base = createFixtureBase(); - writeFileSync(join(base, '.gsd', 'QUEUE-ORDER.json'), 'not json'); + writeFileSync(join(base, '.sf', 'QUEUE-ORDER.json'), 'not json'); assert.deepStrictEqual(loadQueueOrder(base), null, 'returns null on corrupt JSON'); cleanup(base); }); @@ -94,7 +94,7 @@ test('test block at line 90', () => { // Load returns null when order field is not an array test('test block at line 98', () => { const base = createFixtureBase(); - writeFileSync(join(base, '.gsd', 'QUEUE-ORDER.json'), '{"order": "invalid"}'); + writeFileSync(join(base, '.sf', 'QUEUE-ORDER.json'), '{"order": "invalid"}'); assert.deepStrictEqual(loadQueueOrder(base), null, 'returns null when order is not array'); cleanup(base); }); @@ -115,7 +115,7 @@ test('pruneQueueOrder', () => { test('test block at line 121', () => { const base = createFixtureBase(); pruneQueueOrder(base, ['M001']); // should not throw - 
assert.ok(!existsSync(join(base, '.gsd', 'QUEUE-ORDER.json')), 'prune does not create file'); + assert.ok(!existsSync(join(base, '.sf', 'QUEUE-ORDER.json')), 'prune does not create file'); cleanup(base); }); diff --git a/src/resources/extensions/sf/tests/reactive-executor.test.ts b/src/resources/extensions/sf/tests/reactive-executor.test.ts index a47a91849..d6a1b2216 100644 --- a/src/resources/extensions/sf/tests/reactive-executor.test.ts +++ b/src/resources/extensions/sf/tests/reactive-executor.test.ts @@ -76,7 +76,7 @@ test("reactive dispatch requires enabled config and multiple ready tasks", async // Build a minimal filesystem with a slice plan and task plans const repo = mkdtempSync(join(tmpdir(), "sf-reactive-dispatch-")); try { - const sf = join(repo, ".gsd", "milestones", "M001", "slices", "S01"); + const sf = join(repo, ".sf", "milestones", "M001", "slices", "S01"); mkdirSync(join(sf, "tasks"), { recursive: true }); // Slice plan with 3 tasks @@ -179,7 +179,7 @@ test("reactive dispatch requires enabled config and multiple ready tasks", async test("reactive dispatch falls back when graph is ambiguous (task without IO)", async () => { const repo = mkdtempSync(join(tmpdir(), "sf-reactive-ambiguous-")); try { - const sf = join(repo, ".gsd", "milestones", "M001", "slices", "S01"); + const sf = join(repo, ".sf", "milestones", "M001", "slices", "S01"); mkdirSync(join(sf, "tasks"), { recursive: true }); writeFileSync( @@ -219,7 +219,7 @@ test("reactive dispatch falls back when graph is ambiguous (task without IO)", a test("single ready task falls through to sequential", async () => { const repo = mkdtempSync(join(tmpdir(), "sf-reactive-single-")); try { - const sf = join(repo, ".gsd", "milestones", "M001", "slices", "S01"); + const sf = join(repo, ".sf", "milestones", "M001", "slices", "S01"); mkdirSync(join(sf, "tasks"), { recursive: true }); writeFileSync( @@ -262,7 +262,7 @@ test("single ready task falls through to sequential", async () => { 
test("saveReactiveState and loadReactiveState round-trip", () => { const repo = mkdtempSync(join(tmpdir(), "sf-reactive-state-")); - mkdirSync(join(repo, ".gsd", "runtime"), { recursive: true }); + mkdirSync(join(repo, ".sf", "runtime"), { recursive: true }); try { const state: ReactiveExecutionState = { sliceId: "S01", @@ -282,7 +282,7 @@ test("saveReactiveState and loadReactiveState round-trip", () => { test("clearReactiveState removes the file", () => { const repo = mkdtempSync(join(tmpdir(), "sf-reactive-clear-")); - mkdirSync(join(repo, ".gsd", "runtime"), { recursive: true }); + mkdirSync(join(repo, ".sf", "runtime"), { recursive: true }); try { const state: ReactiveExecutionState = { sliceId: "S01", @@ -293,10 +293,10 @@ test("clearReactiveState removes the file", () => { }; saveReactiveState(repo, "M001", "S01", state); - assert.ok(existsSync(join(repo, ".gsd", "runtime", "M001-S01-reactive.json"))); + assert.ok(existsSync(join(repo, ".sf", "runtime", "M001-S01-reactive.json"))); clearReactiveState(repo, "M001", "S01"); - assert.ok(!existsSync(join(repo, ".gsd", "runtime", "M001-S01-reactive.json"))); + assert.ok(!existsSync(join(repo, ".sf", "runtime", "M001-S01-reactive.json"))); } finally { rmSync(repo, { recursive: true, force: true }); } @@ -304,7 +304,7 @@ test("clearReactiveState removes the file", () => { test("loadReactiveState returns null when no file exists", () => { const repo = mkdtempSync(join(tmpdir(), "sf-reactive-nofile-")); - mkdirSync(join(repo, ".gsd", "runtime"), { recursive: true }); + mkdirSync(join(repo, ".sf", "runtime"), { recursive: true }); try { const loaded = loadReactiveState(repo, "M001", "S01"); assert.equal(loaded, null); @@ -316,9 +316,9 @@ test("loadReactiveState returns null when no file exists", () => { test("completed tasks are not re-dispatched on next iteration", async () => { const repo = mkdtempSync(join(tmpdir(), "sf-reactive-reentry-")); try { - const sf = join(repo, ".gsd", "milestones", "M001", "slices", 
"S01"); + const sf = join(repo, ".sf", "milestones", "M001", "slices", "S01"); mkdirSync(join(sf, "tasks"), { recursive: true }); - mkdirSync(join(repo, ".gsd", "runtime"), { recursive: true }); + mkdirSync(join(repo, ".sf", "runtime"), { recursive: true }); writeFileSync( join(sf, "S01-PLAN.md"), @@ -375,7 +375,7 @@ test("verifyExpectedArtifact: reactive-execute passes when all dispatched summar const { verifyExpectedArtifact } = await import("../auto-recovery.ts"); const repo = mkdtempSync(join(tmpdir(), "sf-reactive-verify-pass-")); try { - const tasksDir = join(repo, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); + const tasksDir = join(repo, ".sf", "milestones", "M001", "slices", "S01", "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(tasksDir, "T02-SUMMARY.md"), "---\nid: T02\n---\n# T02: Done\n"); writeFileSync(join(tasksDir, "T03-SUMMARY.md"), "---\nid: T03\n---\n# T03: Done\n"); @@ -391,7 +391,7 @@ test("verifyExpectedArtifact: reactive-execute fails when a dispatched summary i const { verifyExpectedArtifact } = await import("../auto-recovery.ts"); const repo = mkdtempSync(join(tmpdir(), "sf-reactive-verify-fail-")); try { - const tasksDir = join(repo, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); + const tasksDir = join(repo, ".sf", "milestones", "M001", "slices", "S01", "tasks"); mkdirSync(tasksDir, { recursive: true }); // Only T02 has a summary, T03 does not writeFileSync(join(tasksDir, "T02-SUMMARY.md"), "---\nid: T02\n---\n# T02: Done\n"); @@ -407,7 +407,7 @@ test("verifyExpectedArtifact: reactive-execute fails even with pre-existing summ const { verifyExpectedArtifact } = await import("../auto-recovery.ts"); const repo = mkdtempSync(join(tmpdir(), "sf-reactive-verify-preexisting-")); try { - const tasksDir = join(repo, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); + const tasksDir = join(repo, ".sf", "milestones", "M001", "slices", "S01", "tasks"); mkdirSync(tasksDir, { recursive: true }); // T01 
summary exists from before, but T02 and T03 were dispatched writeFileSync(join(tasksDir, "T01-SUMMARY.md"), "---\nid: T01\n---\n# T01: Prior\n"); @@ -423,7 +423,7 @@ test("verifyExpectedArtifact: reactive-execute legacy format (no batch IDs) fall const { verifyExpectedArtifact } = await import("../auto-recovery.ts"); const repo = mkdtempSync(join(tmpdir(), "sf-reactive-verify-legacy-")); try { - const tasksDir = join(repo, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); + const tasksDir = join(repo, ".sf", "milestones", "M001", "slices", "S01", "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(tasksDir, "T01-SUMMARY.md"), "---\nid: T01\n---\n# T01\n"); @@ -457,7 +457,7 @@ test("getDependencyTaskSummaryPaths returns only dependency summaries", async () const { getDependencyTaskSummaryPaths } = await import("../auto-prompts.ts"); const repo = mkdtempSync(join(tmpdir(), "sf-reactive-depcarry-")); try { - const tasksDir = join(repo, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); + const tasksDir = join(repo, ".sf", "milestones", "M001", "slices", "S01", "tasks"); mkdirSync(tasksDir, { recursive: true }); // T01, T02, T03 all have summaries writeFileSync(join(tasksDir, "T01-SUMMARY.md"), "---\nid: T01\n---\n# T01\n"); @@ -479,7 +479,7 @@ test("getDependencyTaskSummaryPaths falls back to order-based for root tasks", a const { getDependencyTaskSummaryPaths } = await import("../auto-prompts.ts"); const repo = mkdtempSync(join(tmpdir(), "sf-reactive-depcarry-root-")); try { - const tasksDir = join(repo, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); + const tasksDir = join(repo, ".sf", "milestones", "M001", "slices", "S01", "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(tasksDir, "T01-SUMMARY.md"), "---\nid: T01\n---\n# T01\n"); @@ -496,7 +496,7 @@ test("getDependencyTaskSummaryPaths handles missing dependency summaries gracefu const { getDependencyTaskSummaryPaths } = await 
import("../auto-prompts.ts"); const repo = mkdtempSync(join(tmpdir(), "sf-reactive-depcarry-missing-")); try { - const tasksDir = join(repo, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); + const tasksDir = join(repo, ".sf", "milestones", "M001", "slices", "S01", "tasks"); mkdirSync(tasksDir, { recursive: true }); // Only T01 has a summary, T02 does not writeFileSync(join(tasksDir, "T01-SUMMARY.md"), "---\nid: T01\n---\n# T01\n"); diff --git a/src/resources/extensions/sf/tests/reassess-detection.test.ts b/src/resources/extensions/sf/tests/reassess-detection.test.ts index 19dadddb1..b6ad0be96 100644 --- a/src/resources/extensions/sf/tests/reassess-detection.test.ts +++ b/src/resources/extensions/sf/tests/reassess-detection.test.ts @@ -11,8 +11,8 @@ import type { SFState } from "../types.ts"; function makeTmpBase(): string { const base = join(tmpdir(), `sf-test-reassess-${randomUUID()}`); - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S02", "tasks"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S02", "tasks"), { recursive: true }); return base; } @@ -21,19 +21,19 @@ function cleanup(base: string): void { } function writeRoadmap(base: string, content: string): void { - writeFileSync(join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), content); + writeFileSync(join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), content); } function writeSummary(base: string, sid: string): void { writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", sid, `${sid}-SUMMARY.md`), + join(base, ".sf", "milestones", "M001", "slices", sid, `${sid}-SUMMARY.md`), `---\nid: ${sid}\n---\n# ${sid} Summary\nDone.`, ); } function writeAssessment(base: string, sid: string): void { writeFileSync( - 
join(base, ".gsd", "milestones", "M001", "slices", sid, `${sid}-ASSESSMENT.md`), + join(base, ".sf", "milestones", "M001", "slices", sid, `${sid}-ASSESSMENT.md`), `# ${sid} Assessment\nNo changes needed.`, ); } diff --git a/src/resources/extensions/sf/tests/reassess-handler.test.ts b/src/resources/extensions/sf/tests/reassess-handler.test.ts index 40be4ed0d..1f091ad7d 100644 --- a/src/resources/extensions/sf/tests/reassess-handler.test.ts +++ b/src/resources/extensions/sf/tests/reassess-handler.test.ts @@ -19,9 +19,9 @@ import { handleReassessRoadmap } from '../tools/reassess-roadmap.ts'; function makeTmpBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-reassess-')); - mkdirSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01'), { recursive: true }); - mkdirSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02'), { recursive: true }); - mkdirSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S03'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M001', 'slices', 'S01'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M001', 'slices', 'S02'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M001', 'slices', 'S03'), { recursive: true }); return base; } @@ -75,7 +75,7 @@ function validReassessParams() { test('handleReassessRoadmap rejects invalid payloads (missing milestoneId)', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedMilestoneWithSlices(); @@ -90,7 +90,7 @@ test('handleReassessRoadmap rejects invalid payloads (missing milestoneId)', asy test('handleReassessRoadmap rejects missing milestone', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { // No milestone seeded @@ -104,7 +104,7 @@ test('handleReassessRoadmap rejects missing milestone', async () => { test('handleReassessRoadmap rejects 
structural violation: modifying a completed slice', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedMilestoneWithSlices({ s01Status: 'complete', s02Status: 'pending', s03Status: 'pending' }); @@ -128,7 +128,7 @@ test('handleReassessRoadmap rejects structural violation: modifying a completed test('handleReassessRoadmap rejects structural violation: removing a completed slice', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedMilestoneWithSlices({ s01Status: 'complete', s02Status: 'pending', s03Status: 'pending' }); @@ -152,7 +152,7 @@ test('handleReassessRoadmap rejects structural violation: removing a completed s test('handleReassessRoadmap succeeds when modifying only pending slices', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedMilestoneWithSlices({ s01Status: 'complete', s02Status: 'pending', s03Status: 'pending' }); @@ -162,7 +162,7 @@ test('handleReassessRoadmap succeeds when modifying only pending slices', async assert.ok(!('error' in result), `unexpected error: ${'error' in result ? 
result.error : ''}`); // Verify assessments row exists in DB - const assessmentPath = join('.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-ASSESSMENT.md'); + const assessmentPath = join('.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-ASSESSMENT.md'); const assessment = getAssessment(assessmentPath); assert.ok(assessment, 'assessment row should exist in DB'); assert.equal(assessment['milestone_id'], 'M001'); @@ -193,13 +193,13 @@ test('handleReassessRoadmap succeeds when modifying only pending slices', async assert.equal(s01?.status, 'complete'); // Verify ROADMAP.md re-rendered on disk - const roadmapPath = join(base, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md'); + const roadmapPath = join(base, '.sf', 'milestones', 'M001', 'M001-ROADMAP.md'); assert.ok(existsSync(roadmapPath), 'ROADMAP.md should be rendered to disk'); const roadmapContent = readFileSync(roadmapPath, 'utf-8'); assert.ok(roadmapContent.includes('Updated Slice Two'), 'ROADMAP.md should contain updated S02 title'); // Verify ASSESSMENT.md exists on disk - const assessmentDiskPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-ASSESSMENT.md'); + const assessmentDiskPath = join(base, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-ASSESSMENT.md'); assert.ok(existsSync(assessmentDiskPath), 'ASSESSMENT.md should be rendered to disk'); const assessmentContent = readFileSync(assessmentDiskPath, 'utf-8'); assert.ok(assessmentContent.includes('confirmed'), 'ASSESSMENT.md should contain verdict'); @@ -211,7 +211,7 @@ test('handleReassessRoadmap succeeds when modifying only pending slices', async test('handleReassessRoadmap cache invalidation: getMilestoneSlices reflects mutations', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedMilestoneWithSlices({ s01Status: 'complete', s02Status: 'pending', s03Status: 'pending' }); @@ -242,7 +242,7 @@ test('handleReassessRoadmap cache invalidation: 
getMilestoneSlices reflects muta test('handleReassessRoadmap is idempotent: calling twice with same params succeeds', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedMilestoneWithSlices({ s01Status: 'complete', s02Status: 'pending', s03Status: 'pending' }); @@ -267,7 +267,7 @@ test('handleReassessRoadmap is idempotent: calling twice with same params succee test('handleReassessRoadmap rejects slice with status "done" (alias for complete)', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedMilestoneWithSlices({ s01Status: 'done', s02Status: 'pending', s03Status: 'pending' }); @@ -291,7 +291,7 @@ test('handleReassessRoadmap rejects slice with status "done" (alias for complete test('handleReassessRoadmap returns structured error payloads with actionable messages', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedMilestoneWithSlices({ s01Status: 'complete', s02Status: 'complete', s03Status: 'pending' }); @@ -329,7 +329,7 @@ test('handleReassessRoadmap returns structured error payloads with actionable me test('handleReassessRoadmap invalidates stale milestone-validation when roadmap changes (#2957)', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { // Seed: M001 with S01-S04 all complete, plus a stale VALIDATION with needs-remediation @@ -340,7 +340,7 @@ test('handleReassessRoadmap invalidates stale milestone-validation when roadmap insertSlice({ id: 'S04', milestoneId: 'M001', title: 'Slice Four', status: 'complete', demo: 'Demo' }); // Insert milestone-validation assessment with needs-remediation verdict (stale) - const validationPath = join('.gsd', 'milestones', 'M001', 'M001-VALIDATION.md'); + const 
validationPath = join('.sf', 'milestones', 'M001', 'M001-VALIDATION.md'); insertAssessment({ path: validationPath, milestoneId: 'M001', @@ -395,7 +395,7 @@ test('handleReassessRoadmap invalidates stale milestone-validation when roadmap test('handleReassessRoadmap does NOT invalidate validation when no roadmap structural changes (#2957)', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { // Seed: M001 with slices, plus a validation with pass verdict @@ -404,7 +404,7 @@ test('handleReassessRoadmap does NOT invalidate validation when no roadmap struc insertSlice({ id: 'S02', milestoneId: 'M001', title: 'Slice Two', status: 'pending', demo: 'Demo' }); // Insert milestone-validation assessment with pass verdict - const validationPath = join('.gsd', 'milestones', 'M001', 'M001-VALIDATION.md'); + const validationPath = join('.sf', 'milestones', 'M001', 'M001-VALIDATION.md'); insertAssessment({ path: validationPath, milestoneId: 'M001', diff --git a/src/resources/extensions/sf/tests/reassess-prompt.test.ts b/src/resources/extensions/sf/tests/reassess-prompt.test.ts index deaed43b9..35e238653 100644 --- a/src/resources/extensions/sf/tests/reassess-prompt.test.ts +++ b/src/resources/extensions/sf/tests/reassess-prompt.test.ts @@ -3,7 +3,7 @@ import assert from 'node:assert/strict'; import { readFileSync } from "node:fs"; import { join, dirname } from "node:path"; import { fileURLToPath } from "node:url"; -// loadPrompt reads from ~/.gsd/agent/extensions/sf/prompts/ (main checkout). +// loadPrompt reads from ~/.sf/agent/extensions/sf/prompts/ (main checkout). // In a worktree the file may not exist there yet, so we resolve prompts // relative to this test file's location (the worktree copy). 
const __dirname = dirname(fileURLToPath(import.meta.url)); @@ -34,8 +34,8 @@ test('reassess-roadmap prompt loads and substitutes', () => { workingDirectory: "/tmp/test-project", milestoneId: "M099", completedSliceId: "S03", - assessmentPath: ".gsd/milestones/M099/slices/S03/S03-ASSESSMENT.md", - roadmapPath: ".gsd/milestones/M099/M099-ROADMAP.md", + assessmentPath: ".sf/milestones/M099/slices/S03/S03-ASSESSMENT.md", + roadmapPath: ".sf/milestones/M099/M099-ROADMAP.md", inlinedContext: "--- test inlined context block ---", }; @@ -55,8 +55,8 @@ test('reassess-roadmap prompt loads and substitutes', () => { // Verify all test variables were substituted into the output assert.ok(result.includes("M099"), "prompt contains milestoneId 'M099'"); assert.ok(result.includes("S03"), "prompt contains completedSliceId 'S03'"); - assert.ok(result.includes(".gsd/milestones/M099/slices/S03/S03-ASSESSMENT.md"), "prompt contains assessmentPath"); - assert.ok(result.includes(".gsd/milestones/M099/M099-ROADMAP.md"), "prompt contains roadmapPath"); + assert.ok(result.includes(".sf/milestones/M099/slices/S03/S03-ASSESSMENT.md"), "prompt contains assessmentPath"); + assert.ok(result.includes(".sf/milestones/M099/M099-ROADMAP.md"), "prompt contains roadmapPath"); assert.ok(result.includes("--- test inlined context block ---"), "prompt contains inlinedContext"); // Verify no un-substituted variables remain @@ -73,8 +73,8 @@ test('reassess-roadmap contains coverage-check instruction', () => { workingDirectory: "/tmp/test-project", milestoneId: "M001", completedSliceId: "S01", - assessmentPath: ".gsd/milestones/M001/slices/S01/S01-ASSESSMENT.md", - roadmapPath: ".gsd/milestones/M001/M001-ROADMAP.md", + assessmentPath: ".sf/milestones/M001/slices/S01/S01-ASSESSMENT.md", + roadmapPath: ".sf/milestones/M001/M001-ROADMAP.md", inlinedContext: "context", }); @@ -112,8 +112,8 @@ test('coverage-check requires at-least-one semantics', () => { workingDirectory: "/tmp/test-project", milestoneId: "M001", 
completedSliceId: "S01", - assessmentPath: ".gsd/milestones/M001/slices/S01/S01-ASSESSMENT.md", - roadmapPath: ".gsd/milestones/M001/M001-ROADMAP.md", + assessmentPath: ".sf/milestones/M001/slices/S01/S01-ASSESSMENT.md", + roadmapPath: ".sf/milestones/M001/M001-ROADMAP.md", inlinedContext: "context", }); diff --git a/src/resources/extensions/sf/tests/reconciliation-edge-cases.test.ts b/src/resources/extensions/sf/tests/reconciliation-edge-cases.test.ts index f33ebe600..08ffb21ff 100644 --- a/src/resources/extensions/sf/tests/reconciliation-edge-cases.test.ts +++ b/src/resources/extensions/sf/tests/reconciliation-edge-cases.test.ts @@ -141,8 +141,8 @@ describe("reconciliation-edge-cases", () => { // appendEvent — filesystem creation test("appendEvent creates event log if directory does not exist", () => { const base = tempDir(); - // Remove the .gsd directory if it somehow exists — appendEvent should create it. - const sfDir = path.join(base, ".gsd"); + // Remove the .sf directory if it somehow exists — appendEvent should create it. 
+ const sfDir = path.join(base, ".sf"); if (fs.existsSync(sfDir)) fs.rmSync(sfDir, { recursive: true, force: true }); appendEvent(base, { @@ -152,7 +152,7 @@ describe("reconciliation-edge-cases", () => { actor: "agent", }); - const logPath = path.join(base, ".gsd", "event-log.jsonl"); + const logPath = path.join(base, ".sf", "event-log.jsonl"); assert.ok(fs.existsSync(logPath), "event-log.jsonl should be created by appendEvent"); const events = readEvents(logPath); diff --git a/src/resources/extensions/sf/tests/recovery-attempts-reset.test.ts b/src/resources/extensions/sf/tests/recovery-attempts-reset.test.ts index c5912133d..9f00e73fd 100644 --- a/src/resources/extensions/sf/tests/recovery-attempts-reset.test.ts +++ b/src/resources/extensions/sf/tests/recovery-attempts-reset.test.ts @@ -26,7 +26,7 @@ const { assertEq, assertTrue, report } = createTestContext(); // ═══ Setup ════════════════════════════════════════════════════════════════════ const base = mkdtempSync(join(tmpdir(), "sf-recovery-reset-test-")); -mkdirSync(join(base, ".gsd", "runtime", "units"), { recursive: true }); +mkdirSync(join(base, ".sf", "runtime", "units"), { recursive: true }); try { // ═══ #2322: recoveryAttempts should reset on re-dispatch ═══════════════════ diff --git a/src/resources/extensions/sf/tests/regex-hardening.test.ts b/src/resources/extensions/sf/tests/regex-hardening.test.ts index 386652820..17801680b 100644 --- a/src/resources/extensions/sf/tests/regex-hardening.test.ts +++ b/src/resources/extensions/sf/tests/regex-hardening.test.ts @@ -173,11 +173,11 @@ async function main(): Promise<void> { // Classic format matches assertTrue(CONTEXT_RE.test('M001-CONTEXT.md'), 'context matches M001-CONTEXT.md'); - assertTrue(CONTEXT_RE.test('.gsd/milestones/M001/M001-CONTEXT.md'), 'context matches full path classic format'); + assertTrue(CONTEXT_RE.test('.sf/milestones/M001/M001-CONTEXT.md'), 'context matches full path classic format'); // Unique format matches 
assertTrue(CONTEXT_RE.test('M001-abc123-CONTEXT.md'), 'context matches M001-abc123-CONTEXT.md'); - assertTrue(CONTEXT_RE.test('.gsd/milestones/M001-abc123/M001-abc123-CONTEXT.md'), 'context matches full path unique format'); + assertTrue(CONTEXT_RE.test('.sf/milestones/M001-abc123/M001-abc123-CONTEXT.md'), 'context matches full path unique format'); // Rejects assertTrue(!CONTEXT_RE.test('M001-ROADMAP.md'), 'context rejects M001-ROADMAP.md'); diff --git a/src/resources/extensions/sf/tests/register-shortcuts.test.ts b/src/resources/extensions/sf/tests/register-shortcuts.test.ts index 758b41aa7..49912c660 100644 --- a/src/resources/extensions/sf/tests/register-shortcuts.test.ts +++ b/src/resources/extensions/sf/tests/register-shortcuts.test.ts @@ -25,8 +25,8 @@ function cleanup(dir: string): void { test("dashboard shortcut resolves the project root instead of the current worktree path", async (t) => { const projectRoot = makeTempDir("project"); - const worktreeRoot = join(projectRoot, ".gsd", "worktrees", "M001"); - mkdirSync(join(projectRoot, ".gsd"), { recursive: true }); + const worktreeRoot = join(projectRoot, ".sf", "worktrees", "M001"); + mkdirSync(join(projectRoot, ".sf"), { recursive: true }); mkdirSync(worktreeRoot, { recursive: true }); const originalCwd = process.cwd(); @@ -68,7 +68,7 @@ test("dashboard shortcut resolves the project root instead of the current worktr }); assert.ok(customCalls > 0, "shortcut opens the dashboard overlay when project root is resolved"); - assert.equal(notices.length, 0, "shortcut does not fall back to the missing-.gsd warning"); + assert.equal(notices.length, 0, "shortcut does not fall back to the missing-.sf warning"); assert.equal(shortcuts.length, 5, "all SF shortcuts are still registered"); const keys = shortcuts.map((shortcut) => shortcut.key); assert.ok(keys.includes("ctrl+alt+g"), "primary dashboard shortcut is registered"); @@ -82,8 +82,8 @@ test("dashboard shortcut resolves the project root instead of the current 
worktr test("parallel shortcut passes resolved project root into overlay", async (t) => { const base = makeTempDir("parallel-root"); - const worktreeRoot = join(base, ".gsd", "worktrees", "M001"); - mkdirSync(join(base, ".gsd", "parallel"), { recursive: true }); + const worktreeRoot = join(base, ".sf", "worktrees", "M001"); + mkdirSync(join(base, ".sf", "parallel"), { recursive: true }); mkdirSync(worktreeRoot, { recursive: true }); const originalCwd = process.cwd(); diff --git a/src/resources/extensions/sf/tests/remediation-completion-guard.test.ts b/src/resources/extensions/sf/tests/remediation-completion-guard.test.ts index 87a3b2ce8..ccf443f86 100644 --- a/src/resources/extensions/sf/tests/remediation-completion-guard.test.ts +++ b/src/resources/extensions/sf/tests/remediation-completion-guard.test.ts @@ -23,12 +23,12 @@ test("completing-milestone dispatch rule exists", () => { test("completing-milestone blocks when VALIDATION verdict is needs-remediation (#2675)", async () => { const base = mkdtempSync(join(tmpdir(), "sf-remediation-")); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); try { // Write a VALIDATION file with needs-remediation verdict writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-VALIDATION.md"), + join(base, ".sf", "milestones", "M001", "M001-VALIDATION.md"), [ "---", "verdict: needs-remediation", @@ -68,12 +68,12 @@ test("completing-milestone blocks when VALIDATION verdict is needs-remediation ( test("completing-milestone proceeds normally when VALIDATION verdict is pass (#2675 guard)", async () => { const base = mkdtempSync(join(tmpdir(), "sf-remediation-")); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); try { // Write a VALIDATION file with pass verdict writeFileSync( - join(base, ".gsd", "milestones", "M001", 
"M001-VALIDATION.md"), + join(base, ".sf", "milestones", "M001", "M001-VALIDATION.md"), [ "---", "verdict: pass", diff --git a/src/resources/extensions/sf/tests/remote-status.test.ts b/src/resources/extensions/sf/tests/remote-status.test.ts index 07cf0c73a..f1b27f814 100644 --- a/src/resources/extensions/sf/tests/remote-status.test.ts +++ b/src/resources/extensions/sf/tests/remote-status.test.ts @@ -11,7 +11,7 @@ function withTempHome(fn: (tempHome: string) => void | Promise<void>) { const savedHome = process.env.HOME; const savedUserProfile = process.env.USERPROFILE; const tempHome = join(tmpdir(), `sf-remote-status-${Date.now()}-${Math.random().toString(36).slice(2)}`); - mkdirSync(join(tempHome, ".gsd", "runtime", "remote-questions"), { recursive: true }); + mkdirSync(join(tempHome, ".sf", "runtime", "remote-questions"), { recursive: true }); process.env.HOME = tempHome; process.env.USERPROFILE = tempHome; try { diff --git a/src/resources/extensions/sf/tests/reopen-slice.test.ts b/src/resources/extensions/sf/tests/reopen-slice.test.ts index 05dd67e7a..316e93d69 100644 --- a/src/resources/extensions/sf/tests/reopen-slice.test.ts +++ b/src/resources/extensions/sf/tests/reopen-slice.test.ts @@ -20,7 +20,7 @@ import { handleReopenSlice } from '../tools/reopen-slice.ts'; function makeTmpBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-reopen-slice-')); - mkdirSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'tasks'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M001', 'slices', 'S01', 'tasks'), { recursive: true }); return base; } @@ -40,7 +40,7 @@ function seedCompleteSlice(): void { test('handleReopenSlice: resets a complete slice to in_progress and all tasks to pending', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedCompleteSlice(); @@ -68,7 +68,7 @@ test('handleReopenSlice: resets a complete slice to in_progress and all 
tasks to test('handleReopenSlice: works with a single task', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { insertMilestone({ id: 'M001', title: 'Test', status: 'active' }); insertSlice({ id: 'S01', milestoneId: 'M001', status: 'complete' }); @@ -87,7 +87,7 @@ test('handleReopenSlice: works with a single task', async () => { test('handleReopenSlice: rejects empty sliceId', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { const result = await handleReopenSlice({ milestoneId: 'M001', sliceId: '' }, base); assert.ok('error' in result); @@ -99,7 +99,7 @@ test('handleReopenSlice: rejects empty sliceId', async () => { test('handleReopenSlice: rejects non-existent milestone', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { const result = await handleReopenSlice({ milestoneId: 'M999', sliceId: 'S01' }, base); assert.ok('error' in result); @@ -111,7 +111,7 @@ test('handleReopenSlice: rejects non-existent milestone', async () => { test('handleReopenSlice: rejects slice in a closed milestone', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { insertMilestone({ id: 'M001', title: 'Done', status: 'complete' }); insertSlice({ id: 'S01', milestoneId: 'M001', status: 'complete' }); @@ -127,7 +127,7 @@ test('handleReopenSlice: rejects slice in a closed milestone', async () => { test('handleReopenSlice: rejects reopening a slice that is not complete', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { insertMilestone({ id: 'M001', title: 'Active', status: 'active' }); insertSlice({ id: 'S01', milestoneId: 'M001', status: 'in_progress' }); @@ -142,7 
+142,7 @@ test('handleReopenSlice: rejects reopening a slice that is not complete', async test('handleReopenSlice: rejects non-existent slice', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { insertMilestone({ id: 'M001', title: 'Active', status: 'active' }); diff --git a/src/resources/extensions/sf/tests/reopen-task.test.ts b/src/resources/extensions/sf/tests/reopen-task.test.ts index 169bda5a6..cb0588146 100644 --- a/src/resources/extensions/sf/tests/reopen-task.test.ts +++ b/src/resources/extensions/sf/tests/reopen-task.test.ts @@ -19,7 +19,7 @@ import { handleReopenTask } from '../tools/reopen-task.ts'; function makeTmpBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-reopen-task-')); - mkdirSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'tasks'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M001', 'slices', 'S01', 'tasks'), { recursive: true }); return base; } @@ -39,7 +39,7 @@ function seedCompleteTask(): void { test('handleReopenTask: resets a complete task to pending', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedCompleteTask(); @@ -63,7 +63,7 @@ test('handleReopenTask: resets a complete task to pending', async () => { test('handleReopenTask: does not affect other tasks in the slice', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedCompleteTask(); @@ -81,7 +81,7 @@ test('handleReopenTask: does not affect other tasks in the slice', async () => { test('handleReopenTask: rejects empty taskId', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { const result = await handleReopenTask({ milestoneId: 'M001', sliceId: 'S01', taskId: '' }, base); 
assert.ok('error' in result); @@ -93,7 +93,7 @@ test('handleReopenTask: rejects empty taskId', async () => { test('handleReopenTask: rejects non-existent milestone', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { const result = await handleReopenTask({ milestoneId: 'M999', sliceId: 'S01', taskId: 'T01' }, base); assert.ok('error' in result); @@ -105,7 +105,7 @@ test('handleReopenTask: rejects non-existent milestone', async () => { test('handleReopenTask: rejects task in a closed milestone', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { insertMilestone({ id: 'M001', title: 'Done', status: 'complete' }); insertSlice({ id: 'S01', milestoneId: 'M001', status: 'complete' }); @@ -121,7 +121,7 @@ test('handleReopenTask: rejects task in a closed milestone', async () => { test('handleReopenTask: rejects task inside a closed slice', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { insertMilestone({ id: 'M001', title: 'Active', status: 'active' }); insertSlice({ id: 'S01', milestoneId: 'M001', status: 'complete' }); @@ -137,7 +137,7 @@ test('handleReopenTask: rejects task inside a closed slice', async () => { test('handleReopenTask: rejects reopening a task that is not complete', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedCompleteTask(); @@ -151,7 +151,7 @@ test('handleReopenTask: rejects reopening a task that is not complete', async () test('handleReopenTask: rejects non-existent task', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { insertMilestone({ id: 'M001', title: 'Active', status: 'active' }); insertSlice({ id: 'S01', 
milestoneId: 'M001', status: 'in_progress' }); diff --git a/src/resources/extensions/sf/tests/replan-handler.test.ts b/src/resources/extensions/sf/tests/replan-handler.test.ts index 2c7d20259..a24f2583a 100644 --- a/src/resources/extensions/sf/tests/replan-handler.test.ts +++ b/src/resources/extensions/sf/tests/replan-handler.test.ts @@ -21,7 +21,7 @@ import { parsePlan } from '../parsers-legacy.ts'; function makeTmpBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-replan-')); - mkdirSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'tasks'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones', 'M001', 'slices', 'S01', 'tasks'), { recursive: true }); return base; } @@ -98,7 +98,7 @@ function validReplanParams() { test('handleReplanSlice rejects invalid payloads (missing milestoneId)', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedSliceWithTasks(); @@ -113,7 +113,7 @@ test('handleReplanSlice rejects invalid payloads (missing milestoneId)', async ( test('handleReplanSlice rejects structural violation: updating a completed task', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedSliceWithTasks({ t01Status: 'complete', t02Status: 'pending' }); @@ -145,7 +145,7 @@ test('handleReplanSlice rejects structural violation: updating a completed task' test('handleReplanSlice rejects structural violation: removing a completed task', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedSliceWithTasks({ t01Status: 'complete', t02Status: 'pending' }); @@ -166,7 +166,7 @@ test('handleReplanSlice rejects structural violation: removing a completed task' test('handleReplanSlice succeeds when modifying only incomplete tasks', async () => { const base = makeTmpBase(); - 
openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedSliceWithTasks({ t01Status: 'complete', t02Status: 'pending', t03Status: 'pending' }); @@ -230,11 +230,11 @@ test('handleReplanSlice succeeds when modifying only incomplete tasks', async () assert.equal(t01?.status, 'complete'); // Verify rendered PLAN.md exists on disk - const planPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); + const planPath = join(base, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); assert.ok(existsSync(planPath), 'PLAN.md should be rendered to disk'); // Verify REPLAN.md exists on disk - const replanPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-REPLAN.md'); + const replanPath = join(base, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-REPLAN.md'); assert.ok(existsSync(replanPath), 'REPLAN.md should be rendered to disk'); const replanContent = readFileSync(replanPath, 'utf-8'); assert.ok(replanContent.includes('Blocker Description'), 'REPLAN.md should contain blocker section'); @@ -246,7 +246,7 @@ test('handleReplanSlice succeeds when modifying only incomplete tasks', async () test('handleReplanSlice cache invalidation: re-parsing PLAN.md reflects mutations', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedSliceWithTasks({ t01Status: 'complete', t02Status: 'pending', t03Status: 'pending' }); @@ -272,7 +272,7 @@ test('handleReplanSlice cache invalidation: re-parsing PLAN.md reflects mutation assert.ok(!('error' in result), `unexpected error: ${'error' in result ? 
result.error : ''}`); // Re-parse PLAN.md from disk to verify cache invalidation worked - const planPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); + const planPath = join(base, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md'); const content = readFileSync(planPath, 'utf-8'); const parsed = parsePlan(content); @@ -295,7 +295,7 @@ test('handleReplanSlice cache invalidation: re-parsing PLAN.md reflects mutation test('handleReplanSlice is idempotent: calling twice with same params succeeds', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedSliceWithTasks({ t01Status: 'complete', t02Status: 'pending', t03Status: 'pending' }); @@ -333,7 +333,7 @@ test('handleReplanSlice is idempotent: calling twice with same params succeeds', test('handleReplanSlice returns missing parent slice error', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { insertMilestone({ id: 'M001', title: 'Milestone', status: 'active' }); @@ -349,7 +349,7 @@ test('handleReplanSlice returns missing parent slice error', async () => { test('handleReplanSlice rejects task with status "done" (alias for complete)', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedSliceWithTasks({ t01Status: 'done', t02Status: 'pending' }); @@ -381,7 +381,7 @@ test('handleReplanSlice rejects task with status "done" (alias for complete)', a test('handleReplanSlice returns structured error payloads with actionable messages', async () => { const base = makeTmpBase(); - openDatabase(join(base, '.gsd', 'sf.db')); + openDatabase(join(base, '.sf', 'sf.db')); try { seedSliceWithTasks({ t01Status: 'complete', t02Status: 'complete', t03Status: 'pending' }); diff --git a/src/resources/extensions/sf/tests/replan-slice.test.ts 
b/src/resources/extensions/sf/tests/replan-slice.test.ts index 31bffa1e7..4299927bd 100644 --- a/src/resources/extensions/sf/tests/replan-slice.test.ts +++ b/src/resources/extensions/sf/tests/replan-slice.test.ts @@ -28,37 +28,37 @@ function loadPromptFromWorktree(name: string, vars: Record<string, string> = {}) function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-replan-test-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } function writeRoadmap(base: string, mid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid); + const dir = join(base, '.sf', 'milestones', mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-ROADMAP.md`), content); } function writePlan(base: string, mid: string, sid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid, 'slices', sid); + const dir = join(base, '.sf', 'milestones', mid, 'slices', sid); mkdirSync(join(dir, 'tasks'), { recursive: true }); writeFileSync(join(dir, "tasks", "T01-PLAN.md"), "# T01 Plan\n"); writeFileSync(join(dir, `${sid}-PLAN.md`), content); } function writeTaskSummary(base: string, mid: string, sid: string, tid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid, 'slices', sid, 'tasks'); + const dir = join(base, '.sf', 'milestones', mid, 'slices', sid, 'tasks'); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${tid}-SUMMARY.md`), content); } function writeReplanFile(base: string, mid: string, sid: string, content: string): void { - const dir = join(base, '.gsd', 'milestones', mid, 'slices', sid); + const dir = join(base, '.sf', 'milestones', mid, 'slices', sid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${sid}-REPLAN.md`), content); } function writeReplanTrigger(base: string, mid: string, sid: string, content: string): void { - 
const dir = join(base, '.gsd', 'milestones', mid, 'slices', sid); + const dir = join(base, '.sf', 'milestones', mid, 'slices', sid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${sid}-REPLAN-TRIGGER.md`), content); } @@ -372,15 +372,15 @@ console.log('\n=== prompt: replan-slice template loads and substitutes variables milestoneId: 'M001', sliceId: 'S01', sliceTitle: 'Test Slice', - slicePath: '.gsd/milestones/M001/slices/S01', - planPath: '.gsd/milestones/M001/slices/S01/S01-PLAN.md', + slicePath: '.sf/milestones/M001/slices/S01', + planPath: '.sf/milestones/M001/slices/S01/S01-PLAN.md', inlinedContext: '## Inlined Context\n\nTest context here.', }); assert.ok(prompt.includes('M001'), 'prompt contains milestoneId'); assert.ok(prompt.includes('S01'), 'prompt contains sliceId'); assert.ok(prompt.includes('Test Slice'), 'prompt contains sliceTitle'); - assert.ok(prompt.includes('.gsd/milestones/M001/slices/S01/S01-PLAN.md'), 'prompt contains planPath'); + assert.ok(prompt.includes('.sf/milestones/M001/slices/S01/S01-PLAN.md'), 'prompt contains planPath'); assert.ok(prompt.includes('Test context here'), 'prompt contains inlined context'); } @@ -391,10 +391,10 @@ console.log('\n=== prompt: replan-slice contains preserve-completed-tasks instru milestoneId: 'M001', sliceId: 'S01', sliceTitle: 'Test Slice', - slicePath: '.gsd/milestones/M001/slices/S01', - planPath: '.gsd/milestones/M001/slices/S01/S01-PLAN.md', + slicePath: '.sf/milestones/M001/slices/S01', + planPath: '.sf/milestones/M001/slices/S01/S01-PLAN.md', blockerTaskId: 'T01', - replanPath: '.gsd/milestones/M001/slices/S01/S01-REPLAN.md', + replanPath: '.sf/milestones/M001/slices/S01/S01-REPLAN.md', inlinedContext: '', }); @@ -438,8 +438,8 @@ console.log('\n=== display: replan-slice prompt template has correct unit header milestoneId: 'M001', sliceId: 'S01', sliceTitle: 'Test Slice', - slicePath: '.gsd/milestones/M001/slices/S01', - planPath: '.gsd/milestones/M001/slices/S01/S01-PLAN.md', + 
slicePath: '.sf/milestones/M001/slices/S01', + planPath: '.sf/milestones/M001/slices/S01/S01-PLAN.md', blockerTaskId: 'T01', inlinedContext: '', }); diff --git a/src/resources/extensions/sf/tests/repo-identity-worktree.test.ts b/src/resources/extensions/sf/tests/repo-identity-worktree.test.ts index 45300d863..07caa8287 100644 --- a/src/resources/extensions/sf/tests/repo-identity-worktree.test.ts +++ b/src/resources/extensions/sf/tests/repo-identity-worktree.test.ts @@ -40,7 +40,7 @@ describe('repo-identity-worktree', () => { run("git add README.md", base); run('git commit -m "chore: init"', base); - worktreePath = join(base, ".gsd", "worktrees", "M001"); + worktreePath = join(base, ".sf", "worktrees", "M001"); run(`git worktree add -b milestone/M001 ${worktreePath}`, base); expectedExternalState = externalGsdRoot(base); @@ -55,32 +55,32 @@ describe('repo-identity-worktree', () => { test('ensureGsdSymlink points worktree at main repo external state dir', () => { const mainState = ensureGsdSymlink(base); - assert.deepStrictEqual(mainState, realpathSync(join(base, ".gsd")), "ensureGsdSymlink(base) returns the current main repo .gsd target"); + assert.deepStrictEqual(mainState, realpathSync(join(base, ".sf")), "ensureGsdSymlink(base) returns the current main repo .sf target"); const worktreeState = ensureGsdSymlink(worktreePath); assert.deepStrictEqual(worktreeState, expectedExternalState, "worktree symlink target matches main repo external state dir"); - assert.ok(existsSync(join(worktreePath, ".gsd")), "worktree .gsd exists"); - assert.ok(lstatSync(join(worktreePath, ".gsd")).isSymbolicLink(), "worktree .gsd is a symlink"); - assert.deepStrictEqual(realpathSync(join(worktreePath, ".gsd")), realpathSync(expectedExternalState), "worktree .gsd symlink resolves to main repo external state dir"); + assert.ok(existsSync(join(worktreePath, ".sf")), "worktree .sf exists"); + assert.ok(lstatSync(join(worktreePath, ".sf")).isSymbolicLink(), "worktree .sf is a symlink"); + 
assert.deepStrictEqual(realpathSync(join(worktreePath, ".sf")), realpathSync(expectedExternalState), "worktree .sf symlink resolves to main repo external state dir"); }); test('ensureGsdSymlink heals stale worktree symlinks', () => { const staleState = join(stateDir, "projects", "stale-worktree-state"); mkdirSync(staleState, { recursive: true }); - rmSync(join(worktreePath, ".gsd"), { recursive: true, force: true }); - symlinkSync(staleState, join(worktreePath, ".gsd"), "junction"); + rmSync(join(worktreePath, ".sf"), { recursive: true, force: true }); + symlinkSync(staleState, join(worktreePath, ".sf"), "junction"); const healedState = ensureGsdSymlink(worktreePath); assert.deepStrictEqual(healedState, expectedExternalState, "stale worktree symlink is repaired to canonical external state dir"); - assert.deepStrictEqual(realpathSync(join(worktreePath, ".gsd")), realpathSync(expectedExternalState), "healed worktree symlink resolves to canonical external state dir"); + assert.deepStrictEqual(realpathSync(join(worktreePath, ".sf")), realpathSync(expectedExternalState), "healed worktree symlink resolves to canonical external state dir"); }); -test('ensureGsdSymlink preserves worktree .gsd directories', () => { - rmSync(join(worktreePath, ".gsd"), { recursive: true, force: true }); - mkdirSync(join(worktreePath, ".gsd", "milestones"), { recursive: true }); - writeFileSync(join(worktreePath, ".gsd", "milestones", "stale.txt"), "stale\n", "utf-8"); +test('ensureGsdSymlink preserves worktree .sf directories', () => { + rmSync(join(worktreePath, ".sf"), { recursive: true, force: true }); + mkdirSync(join(worktreePath, ".sf", "milestones"), { recursive: true }); + writeFileSync(join(worktreePath, ".sf", "milestones", "stale.txt"), "stale\n", "utf-8"); const preservedDirState = ensureGsdSymlink(worktreePath); - assert.deepStrictEqual(preservedDirState, join(worktreePath, ".gsd"), "worktree .gsd directory is left in place for sync-based refresh"); - 
assert.ok(lstatSync(join(worktreePath, ".gsd")).isDirectory(), "worktree .gsd directory remains a directory"); - assert.ok(existsSync(join(worktreePath, ".gsd", "milestones", "stale.txt")), "existing worktree .gsd directory contents remain available for sync logic"); + assert.deepStrictEqual(preservedDirState, join(worktreePath, ".sf"), "worktree .sf directory is left in place for sync-based refresh"); + assert.ok(lstatSync(join(worktreePath, ".sf")).isDirectory(), "worktree .sf directory remains a directory"); + assert.ok(existsSync(join(worktreePath, ".sf", "milestones", "stale.txt")), "existing worktree .sf directory contents remain available for sync logic"); }); test('SF_PROJECT_ID overrides computed repo hash', () => { @@ -132,7 +132,7 @@ test('ensureGsdSymlink refreshes repo-meta gitRoot after repo move with fixed pr delete process.env.SF_PROJECT_ID; }); -test('isInheritedRepo detects subdirectory of parent repo without .gsd (#1639)', () => { +test('isInheritedRepo detects subdirectory of parent repo without .sf (#1639)', () => { const parentRepo = realpathSync(mkdtempSync(join(tmpdir(), "sf-inherited-parent-"))); run("git init -b main", parentRepo); run('git config user.name "Pi Test"', parentRepo); @@ -143,10 +143,10 @@ test('isInheritedRepo detects subdirectory of parent repo without .gsd (#1639)', const subdir = join(parentRepo, "newproject"); mkdirSync(subdir, { recursive: true }); - assert.ok(isInheritedRepo(subdir), "subdirectory of parent repo without .gsd is inherited"); + assert.ok(isInheritedRepo(subdir), "subdirectory of parent repo without .sf is inherited"); - mkdirSync(join(parentRepo, ".gsd"), { recursive: true }); - assert.ok(!isInheritedRepo(subdir), "subdirectory of parent repo WITH .gsd is NOT inherited"); + mkdirSync(join(parentRepo, ".sf"), { recursive: true }); + assert.ok(!isInheritedRepo(subdir), "subdirectory of parent repo WITH .sf is NOT inherited"); assert.ok(!isInheritedRepo(parentRepo), "git root is not inherited"); @@ -184,7 
+184,7 @@ test('subdirectory of parent repo gets unique identity after git init (#1639)', rmSync(parentRepo, { recursive: true, force: true }); }); -test('ensureGsdSymlink from subdirectory does not create .gsd in subdir when git-root .gsd exists (#2380)', () => { +test('ensureGsdSymlink from subdirectory does not create .sf in subdir when git-root .sf exists (#2380)', () => { const repo = realpathSync(mkdtempSync(join(tmpdir(), "sf-subdir-symlink-"))); run("git init -b main", repo); run('git config user.name "Pi Test"', repo); @@ -194,24 +194,24 @@ test('ensureGsdSymlink from subdirectory does not create .gsd in subdir when git run("git add README.md", repo); run('git commit -m "init"', repo); - // Set up .gsd symlink at the git root (normal project initialisation) + // Set up .sf symlink at the git root (normal project initialisation) ensureGsdSymlink(repo); - assert.ok(existsSync(join(repo, ".gsd")), "root .gsd exists after ensureGsdSymlink"); - assert.ok(lstatSync(join(repo, ".gsd")).isSymbolicLink(), "root .gsd is a symlink"); + assert.ok(existsSync(join(repo, ".sf")), "root .sf exists after ensureGsdSymlink"); + assert.ok(lstatSync(join(repo, ".sf")).isSymbolicLink(), "root .sf is a symlink"); // Create a subdirectory and call ensureGsdSymlink from there const subdir = join(repo, "src", "lib"); mkdirSync(subdir, { recursive: true }); ensureGsdSymlink(subdir); - // ensureGsdSymlink should NOT create a .gsd in the subdirectory - // because the git root already has a valid .gsd symlink. - assert.ok(!existsSync(join(subdir, ".gsd")), "no .gsd created in subdirectory when git-root .gsd exists (#2380)"); - assert.ok(!existsSync(join(repo, "src", ".gsd")), "no .gsd created in intermediate directory"); + // ensureGsdSymlink should NOT create a .sf in the subdirectory + // because the git root already has a valid .sf symlink. 
+ assert.ok(!existsSync(join(subdir, ".sf")), "no .sf created in subdirectory when git-root .sf exists (#2380)"); + assert.ok(!existsSync(join(repo, "src", ".sf")), "no .sf created in intermediate directory"); - // The root .gsd should still be intact - assert.ok(existsSync(join(repo, ".gsd")), "root .gsd still exists"); - assert.ok(lstatSync(join(repo, ".gsd")).isSymbolicLink(), "root .gsd is still a symlink"); + // The root .sf should still be intact + assert.ok(existsSync(join(repo, ".sf")), "root .sf still exists"); + assert.ok(lstatSync(join(repo, ".sf")).isSymbolicLink(), "root .sf is still a symlink"); rmSync(repo, { recursive: true, force: true }); }); diff --git a/src/resources/extensions/sf/tests/requirements.test.ts b/src/resources/extensions/sf/tests/requirements.test.ts index 5ffec7fb4..58e2c0090 100644 --- a/src/resources/extensions/sf/tests/requirements.test.ts +++ b/src/resources/extensions/sf/tests/requirements.test.ts @@ -42,7 +42,7 @@ describe('requirements', () => { }); const base = mkdtempSync(join(tmpdir(), "sf-requirements-test-")); - const sf = join(base, ".gsd"); + const sf = join(base, ".sf"); const mDir = join(sf, "milestones", "M001"); const sDir = join(mDir, "slices", "S01"); const tDir = join(sDir, "tasks"); diff --git a/src/resources/extensions/sf/tests/resource-loader-import-path.test.ts b/src/resources/extensions/sf/tests/resource-loader-import-path.test.ts index 19cf77643..cbd59657a 100644 --- a/src/resources/extensions/sf/tests/resource-loader-import-path.test.ts +++ b/src/resources/extensions/sf/tests/resource-loader-import-path.test.ts @@ -1,6 +1,6 @@ // SF2 — Regression test for broken resource-loader import path // Ensures auto.ts imports resource-loader via package resolution, not a -// relative path that breaks when deployed to ~/.gsd/agent/extensions/sf/. +// relative path that breaks when deployed to ~/.sf/agent/extensions/sf/. 
import { describe, test } from "node:test"; import assert from "node:assert/strict"; @@ -12,13 +12,13 @@ const autoSrc = readFileSync(join(import.meta.dirname, "..", "auto.ts"), "utf-8" describe("resource-loader import path", () => { test("must not use relative import reaching above extensions/", () => { // The old broken pattern: import("../../../" + "resource-loader.js") - // This resolves to ~/.gsd/resource-loader.js from deployed location, which + // This resolves to ~/.sf/resource-loader.js from deployed location, which // doesn't exist. Regression introduced in #3899. const brokenPattern = /import\(\s*["']\.\.\/\.\.\/\.\..*resource-loader/; assert.ok( !brokenPattern.test(autoSrc), "auto.ts must not import resource-loader via relative path above extensions/ — " + - "breaks when deployed to ~/.gsd/agent/extensions/sf/ (see #3899)", + "breaks when deployed to ~/.sf/agent/extensions/sf/ (see #3899)", ); }); diff --git a/src/resources/extensions/sf/tests/retry-diagnostic-reasoning.test.ts b/src/resources/extensions/sf/tests/retry-diagnostic-reasoning.test.ts index e163bf4f2..e0b4ba419 100644 --- a/src/resources/extensions/sf/tests/retry-diagnostic-reasoning.test.ts +++ b/src/resources/extensions/sf/tests/retry-diagnostic-reasoning.test.ts @@ -82,7 +82,7 @@ describe("retry diagnostic excludes lastReasoning (#2195)", () => { test("getDeepDiagnostic output does NOT contain lastReasoning", () => { // Create a temporary activity directory with a JSONL file const tempBase = mkdtempSync(join(tmpdir(), "sf-diag-test-")); - const sfDir = join(tempBase, ".gsd"); + const sfDir = join(tempBase, ".sf"); const activityDir = join(sfDir, "activity"); mkdirSync(activityDir, { recursive: true }); @@ -123,7 +123,7 @@ describe("retry diagnostic excludes lastReasoning (#2195)", () => { test("getDeepDiagnostic still includes errors and file operations", () => { const tempBase = mkdtempSync(join(tmpdir(), "sf-diag-test-")); - const sfDir = join(tempBase, ".gsd"); + const sfDir = 
join(tempBase, ".sf"); const activityDir = join(sfDir, "activity"); mkdirSync(activityDir, { recursive: true }); diff --git a/src/resources/extensions/sf/tests/retry-state-reset.test.ts b/src/resources/extensions/sf/tests/retry-state-reset.test.ts index 0d4d211c7..40310ae19 100644 --- a/src/resources/extensions/sf/tests/retry-state-reset.test.ts +++ b/src/resources/extensions/sf/tests/retry-state-reset.test.ts @@ -23,12 +23,12 @@ import { parseUnitId } from "../unit-id.ts"; function createRetryFixture(): { base: string; cleanup: () => void } { const base = mkdtempSync(join(tmpdir(), "sf-retry-reset-")); - // Create the .gsd structure for M001/S01/T01 - const milestonesTasksDir = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); + // Create the .sf structure for M001/S01/T01 + const milestonesTasksDir = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"); mkdirSync(milestonesTasksDir, { recursive: true }); // Write a PLAN.md with T01 checked [x] (as doctor would do) - const planFile = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); + const planFile = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); writeFileSync(planFile, [ "# S01: Test Slice", "", @@ -46,7 +46,7 @@ function createRetryFixture(): { base: string; cleanup: () => void } { // Write completed-units.json with T01 writeFileSync( - join(base, ".gsd", "completed-units.json"), + join(base, ".sf", "completed-units.json"), JSON.stringify(["execute-task/M001/S01/T01"]), "utf-8", ); @@ -79,7 +79,7 @@ test('consumeRetryTrigger: returns null when no retry pending', () => { test('Retry reset step 1: uncheck [x] → [ ] in PLAN.md', () => { const { base, cleanup } = createRetryFixture(); try { - const planFile = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); + const planFile = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); // Precondition: T01 is checked const before = 
readFileSync(planFile, "utf-8"); @@ -107,7 +107,7 @@ test('Retry reset step 1: uncheck [x] → [ ] in PLAN.md', () => { test('Retry reset step 2: delete SUMMARY.md', () => { const { base, cleanup } = createRetryFixture(); try { - const summaryFile = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks", "T01-SUMMARY.md"); + const summaryFile = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks", "T01-SUMMARY.md"); // Precondition: SUMMARY exists assert.ok(existsSync(summaryFile), "precondition: SUMMARY.md exists"); @@ -141,7 +141,7 @@ test('Retry reset step 3: remove from completedUnits', () => { assert.deepStrictEqual(filtered[0].id, "M001/S01/T02", "T02 still in completedUnits"); // Flush to completed-units.json - const completedKeysPath = join(base, ".gsd", "completed-units.json"); + const completedKeysPath = join(base, ".sf", "completed-units.json"); const keys = filtered.map(u => `${u.type}/${u.id}`); writeFileSync(completedKeysPath, JSON.stringify(keys, null, 2), "utf-8"); @@ -199,7 +199,7 @@ test('Full retry reset: all steps combined', () => { } // Step 2: Delete SUMMARY (in milestones path) - const tasksDir = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); + const tasksDir = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"); const summaryFile = join(tasksDir, `${tid}-SUMMARY.md`); if (existsSync(summaryFile)) { unlinkSync(summaryFile); @@ -209,7 +209,7 @@ test('Full retry reset: all steps combined', () => { completedUnits = completedUnits.filter( u => !(u.type === trigger.unitType && u.id === trigger.unitId), ); - const completedKeysPath = join(base, ".gsd", "completed-units.json"); + const completedKeysPath = join(base, ".sf", "completed-units.json"); writeFileSync(completedKeysPath, JSON.stringify( completedUnits.map(u => `${u.type}/${u.id}`), null, 2, @@ -224,7 +224,7 @@ test('Full retry reset: all steps combined', () => { // ── Verify all state is reset ── // PLAN.md: T01 unchecked - const 
planFile = join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); + const planFile = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); const planContent = readFileSync(planFile, "utf-8"); assert.ok(planContent.includes("- [ ] **T01:"), "after reset: T01 unchecked in PLAN"); assert.ok(!planContent.includes("- [x] **T01:"), "after reset: T01 not checked in PLAN"); @@ -250,9 +250,9 @@ test('Retry reset: idempotent when artifacts already missing', () => { const base = mkdtempSync(join(tmpdir(), "sf-retry-idempotent-")); try { // Create minimal structure — NO summary, NO retry artifact, NO plan - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); writeFileSync( - join(base, ".gsd", "completed-units.json"), + join(base, ".sf", "completed-units.json"), JSON.stringify([]), "utf-8", ); @@ -271,7 +271,7 @@ test('Retry reset: idempotent when artifacts already missing', () => { assert.ok(!uncheckResult, "uncheck returns false when no PLAN exists"); // Summary does not exist — no crash - const summaryFile = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks", `${tid}-SUMMARY.md`); + const summaryFile = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks", `${tid}-SUMMARY.md`); assert.ok(!existsSync(summaryFile), "no summary to delete — safe"); // Retry artifact does not exist — no crash @@ -297,7 +297,7 @@ test('resolveHookArtifactPath: correct path for retry artifacts', () => { const path = resolveHookArtifactPath(base, "M001/S01/T01", "NEEDS-REWORK.md"); assert.deepStrictEqual( path, - join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks", "T01-NEEDS-REWORK.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks", "T01-NEEDS-REWORK.md"), "retry artifact path resolves to task directory with task prefix", ); }); diff --git 
a/src/resources/extensions/sf/tests/rewrite-count-persist.test.ts b/src/resources/extensions/sf/tests/rewrite-count-persist.test.ts index d664c8c99..e408f73fe 100644 --- a/src/resources/extensions/sf/tests/rewrite-count-persist.test.ts +++ b/src/resources/extensions/sf/tests/rewrite-count-persist.test.ts @@ -7,7 +7,7 @@ * dispatch rule to fire indefinitely, never tripping the MAX_REWRITE_ATTEMPTS * circuit breaker. * - * The fix persists the counter to `.gsd/runtime/rewrite-count.json`. + * The fix persists the counter to `.sf/runtime/rewrite-count.json`. */ import { describe, test, beforeEach, afterEach } from "node:test"; import assert from "node:assert/strict"; @@ -22,8 +22,8 @@ describe("rewrite-docs circuit breaker persistence (#2203)", () => { beforeEach(() => { tempBase = mkdtempSync(join(tmpdir(), "sf-rewrite-test-")); - // Create .gsd/ directory so sfRoot resolves to it - mkdirSync(join(tempBase, ".gsd", "runtime"), { recursive: true }); + // Create .sf/ directory so sfRoot resolves to it + mkdirSync(join(tempBase, ".sf", "runtime"), { recursive: true }); }); afterEach(() => { @@ -63,16 +63,16 @@ describe("rewrite-docs circuit breaker persistence (#2203)", () => { }); test("getRewriteCount handles corrupt JSON gracefully", () => { - const filePath = join(tempBase, ".gsd", "runtime", "rewrite-count.json"); + const filePath = join(tempBase, ".sf", "runtime", "rewrite-count.json"); // writeFileSync is imported at the top of this file writeFileSync(filePath, "not json{{{"); const count = getRewriteCount(tempBase); assert.equal(count, 0, "corrupt file should return 0"); }); - test("rewrite-count.json is written to .gsd/runtime/", () => { + test("rewrite-count.json is written to .sf/runtime/", () => { setRewriteCount(tempBase, 1); - const filePath = join(tempBase, ".gsd", "runtime", "rewrite-count.json"); + const filePath = join(tempBase, ".sf", "runtime", "rewrite-count.json"); assert.ok(existsSync(filePath), "rewrite-count.json should exist"); const content = 
JSON.parse(readFileSync(filePath, "utf-8")); diff --git a/src/resources/extensions/sf/tests/rogue-file-detection.test.ts b/src/resources/extensions/sf/tests/rogue-file-detection.test.ts index b4f37523e..5b5750f33 100644 --- a/src/resources/extensions/sf/tests/rogue-file-detection.test.ts +++ b/src/resources/extensions/sf/tests/rogue-file-detection.test.ts @@ -20,10 +20,10 @@ function createTmpBase(): string { } /** - * Create a minimal .gsd/ directory structure with a task summary file. + * Create a minimal .sf/ directory structure with a task summary file. */ function createTaskSummaryOnDisk(basePath: string, mid: string, sid: string, tid: string): string { - const tasksDir = join(basePath, ".gsd", "milestones", mid, "slices", sid, "tasks"); + const tasksDir = join(basePath, ".sf", "milestones", mid, "slices", sid, "tasks"); mkdirSync(tasksDir, { recursive: true }); const summaryFile = join(tasksDir, `${tid}-SUMMARY.md`); writeFileSync(summaryFile, `---\nid: ${tid}\nparent: ${sid}\nmilestone: ${mid}\n---\n# ${tid}: Test\n`, "utf-8"); @@ -31,10 +31,10 @@ function createTaskSummaryOnDisk(basePath: string, mid: string, sid: string, tid } /** - * Create a minimal .gsd/ directory structure with a slice summary file. + * Create a minimal .sf/ directory structure with a slice summary file. 
*/ function createSliceSummaryOnDisk(basePath: string, mid: string, sid: string): string { - const sliceDir = join(basePath, ".gsd", "milestones", mid, "slices", sid); + const sliceDir = join(basePath, ".sf", "milestones", mid, "slices", sid); mkdirSync(sliceDir, { recursive: true }); const summaryFile = join(sliceDir, `${sid}-SUMMARY.md`); writeFileSync(summaryFile, `---\nid: ${sid}\nmilestone: ${mid}\n---\n# ${sid}: Test Slice\n`, "utf-8"); @@ -42,7 +42,7 @@ function createSliceSummaryOnDisk(basePath: string, mid: string, sid: string): s } function createRoadmapOnDisk(basePath: string, mid: string): string { - const milestoneDir = join(basePath, ".gsd", "milestones", mid); + const milestoneDir = join(basePath, ".sf", "milestones", mid); mkdirSync(milestoneDir, { recursive: true }); const roadmapFile = join(milestoneDir, `${mid}-ROADMAP.md`); writeFileSync(roadmapFile, `# ${mid}: Test Roadmap\n`, "utf-8"); @@ -50,7 +50,7 @@ function createRoadmapOnDisk(basePath: string, mid: string): string { } function createSlicePlanOnDisk(basePath: string, mid: string, sid: string): string { - const sliceDir = join(basePath, ".gsd", "milestones", mid, "slices", sid); + const sliceDir = join(basePath, ".sf", "milestones", mid, "slices", sid); mkdirSync(sliceDir, { recursive: true }); const planFile = join(sliceDir, `${sid}-PLAN.md`); writeFileSync(planFile, `# ${sid}: Test Plan\n`, "utf-8"); @@ -62,8 +62,8 @@ function createSlicePlanOnDisk(basePath: string, mid: string, sid: string): stri test("rogue detection: task summary on disk, no DB row → detected as rogue", () => { const basePath = createTmpBase(); - const dbPath = join(basePath, ".gsd", "sf.db"); - mkdirSync(join(basePath, ".gsd"), { recursive: true }); + const dbPath = join(basePath, ".sf", "sf.db"); + mkdirSync(join(basePath, ".sf"), { recursive: true }); try { openDatabase(dbPath); @@ -85,8 +85,8 @@ test("rogue detection: task summary on disk, no DB row → detected as rogue", ( test("rogue detection: task summary on 
disk, DB row with status 'complete' → NOT rogue", () => { const basePath = createTmpBase(); - const dbPath = join(basePath, ".gsd", "sf.db"); - mkdirSync(join(basePath, ".gsd"), { recursive: true }); + const dbPath = join(basePath, ".sf", "sf.db"); + mkdirSync(join(basePath, ".sf"), { recursive: true }); try { openDatabase(dbPath); @@ -117,8 +117,8 @@ test("rogue detection: task summary on disk, DB row with status 'complete' → N test("rogue detection: no summary file on disk → NOT rogue regardless of DB state", () => { const basePath = createTmpBase(); - const dbPath = join(basePath, ".gsd", "sf.db"); - mkdirSync(join(basePath, ".gsd"), { recursive: true }); + const dbPath = join(basePath, ".sf", "sf.db"); + mkdirSync(join(basePath, ".sf"), { recursive: true }); try { openDatabase(dbPath); @@ -151,8 +151,8 @@ test("rogue detection: DB not available → returns empty array (graceful degrad test("rogue detection: slice summary on disk, no DB row → auto-remediated (not rogue)", () => { const basePath = createTmpBase(); - const dbPath = join(basePath, ".gsd", "sf.db"); - mkdirSync(join(basePath, ".gsd"), { recursive: true }); + const dbPath = join(basePath, ".sf", "sf.db"); + mkdirSync(join(basePath, ".sf"), { recursive: true }); try { openDatabase(dbPath); @@ -172,8 +172,8 @@ test("rogue detection: slice summary on disk, no DB row → auto-remediated (not test("rogue detection: slice summary on disk, DB row with status 'complete' → NOT rogue", () => { const basePath = createTmpBase(); - const dbPath = join(basePath, ".gsd", "sf.db"); - mkdirSync(join(basePath, ".gsd"), { recursive: true }); + const dbPath = join(basePath, ".sf", "sf.db"); + mkdirSync(join(basePath, ".sf"), { recursive: true }); try { openDatabase(dbPath); @@ -202,8 +202,8 @@ test("rogue detection: slice summary on disk, DB row with status 'complete' → test("rogue detection: plan milestone roadmap on disk, no milestone planning row → detected as rogue", () => { const basePath = createTmpBase(); - const 
dbPath = join(basePath, ".gsd", "sf.db"); - mkdirSync(join(basePath, ".gsd"), { recursive: true }); + const dbPath = join(basePath, ".sf", "sf.db"); + mkdirSync(join(basePath, ".sf"), { recursive: true }); try { openDatabase(dbPath); @@ -224,8 +224,8 @@ test("rogue detection: plan milestone roadmap on disk, no milestone planning row test("rogue detection: plan milestone roadmap on disk, DB milestone planning row exists → NOT rogue", () => { const basePath = createTmpBase(); - const dbPath = join(basePath, ".gsd", "sf.db"); - mkdirSync(join(basePath, ".gsd"), { recursive: true }); + const dbPath = join(basePath, ".sf", "sf.db"); + mkdirSync(join(basePath, ".sf"), { recursive: true }); try { openDatabase(dbPath); @@ -248,8 +248,8 @@ test("rogue detection: plan milestone roadmap on disk, DB milestone planning row test("rogue detection: slice plan on disk, no slice planning row → detected as rogue", () => { const basePath = createTmpBase(); - const dbPath = join(basePath, ".gsd", "sf.db"); - mkdirSync(join(basePath, ".gsd"), { recursive: true }); + const dbPath = join(basePath, ".sf", "sf.db"); + mkdirSync(join(basePath, ".sf"), { recursive: true }); try { openDatabase(dbPath); @@ -270,8 +270,8 @@ test("rogue detection: slice plan on disk, no slice planning row → detected as test("rogue detection: slice plan on disk, DB slice planning row exists → NOT rogue", () => { const basePath = createTmpBase(); - const dbPath = join(basePath, ".gsd", "sf.db"); - mkdirSync(join(basePath, ".gsd"), { recursive: true }); + const dbPath = join(basePath, ".sf", "sf.db"); + mkdirSync(join(basePath, ".sf"), { recursive: true }); try { openDatabase(dbPath); diff --git a/src/resources/extensions/sf/tests/routing-history.test.ts b/src/resources/extensions/sf/tests/routing-history.test.ts index 1dc4e64c1..045f32a99 100644 --- a/src/resources/extensions/sf/tests/routing-history.test.ts +++ b/src/resources/extensions/sf/tests/routing-history.test.ts @@ -18,7 +18,7 @@ import { function 
makeTmpDir(): string { const dir = join(tmpdir(), `sf-routing-test-${Date.now()}-${Math.random().toString(36).slice(2)}`); - mkdirSync(join(dir, ".gsd"), { recursive: true }); + mkdirSync(join(dir, ".sf"), { recursive: true }); return dir; } diff --git a/src/resources/extensions/sf/tests/run-manager.test.ts b/src/resources/extensions/sf/tests/run-manager.test.ts index a86431547..8a4f98d46 100644 --- a/src/resources/extensions/sf/tests/run-manager.test.ts +++ b/src/resources/extensions/sf/tests/run-manager.test.ts @@ -45,7 +45,7 @@ function writeDefinition( name: string, content: string, ): void { - const defsDir = join(basePath, ".gsd", "workflow-defs"); + const defsDir = join(basePath, ".sf", "workflow-defs"); mkdirSync(defsDir, { recursive: true }); writeFileSync(join(defsDir, `${name}.yaml`), content, "utf-8"); } @@ -114,7 +114,7 @@ describe("createRun", () => { assert.ok(!existsSync(join(runDir, "PARAMS.json")), "PARAMS.json should not exist without overrides"); // Run directory path matches convention - assert.ok(runDir.includes(join(".gsd", "workflow-runs", "test-workflow")), "path should follow convention"); + assert.ok(runDir.includes(join(".sf", "workflow-runs", "test-workflow")), "path should follow convention"); }); it("writes PARAMS.json and substituted prompts when overrides provided", () => { @@ -215,7 +215,7 @@ describe("listRuns", () => { const run1 = createRun(base, "test-workflow"); // Ensure different timestamp by creating run dir manually with earlier timestamp - const earlyDir = join(base, ".gsd", "workflow-runs", "test-workflow", "2020-01-01T00-00-00"); + const earlyDir = join(base, ".sf", "workflow-runs", "test-workflow", "2020-01-01T00-00-00"); mkdirSync(earlyDir, { recursive: true }); // Copy GRAPH.yaml to make it a valid run const graphContent = readFileSync(join(run1, "GRAPH.yaml"), "utf-8"); diff --git a/src/resources/extensions/sf/tests/secure-env-collect.test.ts b/src/resources/extensions/sf/tests/secure-env-collect.test.ts index 
3249880dc..9a3c20637 100644 --- a/src/resources/extensions/sf/tests/secure-env-collect.test.ts +++ b/src/resources/extensions/sf/tests/secure-env-collect.test.ts @@ -230,7 +230,7 @@ function makeManifest(entries: Array<{ key: string; status?: string; formatHint? async function writeManifestFile(dir: string, manifest: any): Promise<string> { const { formatSecretsManifest } = await loadFilesExports(); - const milestoneDir = join(dir, ".gsd", "milestones", "M001"); + const milestoneDir = join(dir, ".sf", "milestones", "M001"); mkdirSync(milestoneDir, { recursive: true }); const filePath = join(milestoneDir, "M001-SECRETS.md"); writeFileSync(filePath, formatSecretsManifest(manifest)); diff --git a/src/resources/extensions/sf/tests/session-lock-multipath.test.ts b/src/resources/extensions/sf/tests/session-lock-multipath.test.ts index 680495312..3340fc48b 100644 --- a/src/resources/extensions/sf/tests/session-lock-multipath.test.ts +++ b/src/resources/extensions/sf/tests/session-lock-multipath.test.ts @@ -30,7 +30,7 @@ describe('session-lock-multipath', async () => { console.log('\n=== 1. Lock dir registry tracks sfDir on acquisition ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-multipath-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { const result = acquireSessionLock(base); @@ -54,9 +54,9 @@ describe('session-lock-multipath', async () => { console.log('\n=== 2. Release cleans lock files at all registered paths ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-multipath-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); - // Simulate a secondary lock dir (e.g. worktree .gsd/ or projects registry) + // Simulate a secondary lock dir (e.g. 
worktree .sf/ or projects registry) const secondaryDir = join(base, 'secondary-sf'); mkdirSync(secondaryDir, { recursive: true }); @@ -86,7 +86,7 @@ describe('session-lock-multipath', async () => { assert.ok(!existsSync(primaryLockFile), 'primary auto.lock removed after release'); const primaryLockDir = sfRoot(base) + '.lock'; - assert.ok(!existsSync(primaryLockDir), 'primary .gsd.lock/ removed after release'); + assert.ok(!existsSync(primaryLockDir), 'primary .sf.lock/ removed after release'); } finally { rmSync(base, { recursive: true, force: true }); } @@ -96,7 +96,7 @@ describe('session-lock-multipath', async () => { console.log('\n=== 3. Re-entrant acquisition registers path once ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-multipath-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { acquireSessionLock(base); @@ -119,8 +119,8 @@ describe('session-lock-multipath', async () => { { const base1 = mkdtempSync(join(tmpdir(), 'sf-multipath-a-')); const base2 = mkdtempSync(join(tmpdir(), 'sf-multipath-b-')); - mkdirSync(join(base1, '.gsd'), { recursive: true }); - mkdirSync(join(base2, '.gsd'), { recursive: true }); + mkdirSync(join(base1, '.sf'), { recursive: true }); + mkdirSync(join(base2, '.sf'), { recursive: true }); try { const r1 = acquireSessionLock(base1); @@ -147,7 +147,7 @@ describe('session-lock-multipath', async () => { console.log('\n=== 5. 
Full acquire/release cycle cleans all artifacts ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-multipath-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { acquireSessionLock(base); @@ -157,7 +157,7 @@ describe('session-lock-multipath', async () => { const lockFile = join(sfRoot(base), 'auto.lock'); const lockDir = sfRoot(base) + '.lock'; assert.ok(!existsSync(lockFile), 'auto.lock cleaned'); - assert.ok(!existsSync(lockDir), '.gsd.lock/ cleaned'); + assert.ok(!existsSync(lockDir), '.sf.lock/ cleaned'); assert.deepStrictEqual(_getRegisteredLockDirs().length, 0, 'registry empty'); } finally { rmSync(base, { recursive: true, force: true }); diff --git a/src/resources/extensions/sf/tests/session-lock-regression.test.ts b/src/resources/extensions/sf/tests/session-lock-regression.test.ts index 559c561da..49c0e3685 100644 --- a/src/resources/extensions/sf/tests/session-lock-regression.test.ts +++ b/src/resources/extensions/sf/tests/session-lock-regression.test.ts @@ -3,7 +3,7 @@ * * Regression coverage for: * #1257 False-positive "Session lock lost" during auto-mode - * #1245 Stranded .gsd.lock/ directory preventing new sessions + * #1245 Stranded .sf.lock/ directory preventing new sessions * #1251 Same root cause as #1245 * * Tests the acquire → validate → release lifecycle and edge cases @@ -47,7 +47,7 @@ describe('session-lock-regression', async () => { console.log('\n=== 1. 
acquire → validate → release lifecycle ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-session-lock-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { const result = acquireSessionLock(base); @@ -64,9 +64,9 @@ describe('session-lock-regression', async () => { const lockFile = join(sfRoot(base), 'auto.lock'); assert.ok(!existsSync(lockFile), 'lock file removed after release'); - // The .gsd.lock/ directory should be cleaned up + // The .sf.lock/ directory should be cleaned up const lockDir = sfRoot(base) + '.lock'; - assert.ok(!existsSync(lockDir), '.gsd.lock/ directory removed after release (#1245)'); + assert.ok(!existsSync(lockDir), '.sf.lock/ directory removed after release (#1245)'); } finally { rmSync(base, { recursive: true, force: true }); } @@ -76,7 +76,7 @@ describe('session-lock-regression', async () => { console.log('\n=== 2. double release does not throw ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-session-lock-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { acquireSessionLock(base); @@ -98,7 +98,7 @@ describe('session-lock-regression', async () => { console.log('\n=== 3. updateSessionLock writes metadata ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-session-lock-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { acquireSessionLock(base); @@ -124,7 +124,7 @@ describe('session-lock-regression', async () => { console.log('\n=== 4. stale lock from dead PID → re-acquirable ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-session-lock-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { // Write a lock file with a definitely-dead PID @@ -152,7 +152,7 @@ describe('session-lock-regression', async () => { console.log('\n=== 5. 
readSessionLockData with no lock → null ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-session-lock-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { const data = readSessionLockData(base); @@ -166,7 +166,7 @@ describe('session-lock-regression', async () => { console.log('\n=== 6. validateSessionLock after own acquisition → true ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-session-lock-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { acquireSessionLock(base); @@ -187,7 +187,7 @@ describe('session-lock-regression', async () => { console.log('\n=== 7. corrupt lock file → null ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-session-lock-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { const lockFile = join(sfRoot(base), 'auto.lock'); @@ -204,7 +204,7 @@ describe('session-lock-regression', async () => { console.log('\n=== 7b. missing lock metadata → structured reason ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-session-lock-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { const status = getSessionLockStatus(base); @@ -220,7 +220,7 @@ describe('session-lock-regression', async () => { console.log('\n=== 7c. foreign PID in lock file → structured reason ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-session-lock-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { const foreignPid = process.pid + 1000; @@ -247,7 +247,7 @@ describe('session-lock-regression', async () => { console.log('\n=== 8. 
acquire after release → re-acquirable ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-session-lock-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { const r1 = acquireSessionLock(base); @@ -266,7 +266,7 @@ describe('session-lock-regression', async () => { console.log('\n=== 9. re-entrant acquire without explicit release ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-session-lock-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { const r1 = acquireSessionLock(base); @@ -288,7 +288,7 @@ describe('session-lock-regression', async () => { console.log('\n=== 10. re-entrant acquire refreshes lock artifacts ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-session-lock-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { const r1 = acquireSessionLock(base); @@ -296,18 +296,18 @@ describe('session-lock-regression', async () => { const lockDir = sfRoot(base) + '.lock'; if (properLockfileAvailable) { - assert.ok(existsSync(lockDir), '.gsd.lock/ exists after first acquisition'); + assert.ok(existsSync(lockDir), '.sf.lock/ exists after first acquisition'); } const r2 = acquireSessionLock(base); assert.ok(r2.acquired, 'second acquisition succeeds'); if (properLockfileAvailable) { - assert.ok(existsSync(lockDir), '.gsd.lock/ exists after re-entrant acquisition'); + assert.ok(existsSync(lockDir), '.sf.lock/ exists after re-entrant acquisition'); } assert.ok(validateSessionLock(base), 'lock remains valid after re-entrant acquisition'); releaseSessionLock(base); - assert.ok(!existsSync(lockDir), '.gsd.lock/ is removed after release'); + assert.ok(!existsSync(lockDir), '.sf.lock/ is removed after release'); } finally { rmSync(base, { recursive: true, force: true }); } diff --git a/src/resources/extensions/sf/tests/session-lock-transient-read.test.ts 
b/src/resources/extensions/sf/tests/session-lock-transient-read.test.ts index 2746e0db2..f7f6d11a4 100644 --- a/src/resources/extensions/sf/tests/session-lock-transient-read.test.ts +++ b/src/resources/extensions/sf/tests/session-lock-transient-read.test.ts @@ -36,7 +36,7 @@ async function main(): Promise<void> { console.log('\n=== 1. readExistingLockDataWithRetry reads file normally ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-transient-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { const lockFile = join(sfRoot(base), 'auto.lock'); @@ -63,7 +63,7 @@ async function main(): Promise<void> { console.log('\n=== 2. readExistingLockDataWithRetry returns null for missing file ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-transient-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { const lockFile = join(sfRoot(base), 'auto.lock'); @@ -79,7 +79,7 @@ async function main(): Promise<void> { console.log('\n=== 3. readExistingLockDataWithRetry recovers after transient unavailability ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-transient-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { const lockFile = join(sfRoot(base), 'auto.lock'); @@ -116,7 +116,7 @@ async function main(): Promise<void> { console.log('\n=== 4. readExistingLockDataWithRetry recovers from transient permission error ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-transient-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { const lockFile = join(sfRoot(base), 'auto.lock'); @@ -153,7 +153,7 @@ async function main(): Promise<void> { console.log('\n=== 5. 
getSessionLockStatus tolerates transient lock file unavailability ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-transient-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { const result = acquireSessionLock(base); @@ -193,7 +193,7 @@ async function main(): Promise<void> { console.log('\n=== 6. Default retry params: function works with defaults ==='); { const base = mkdtempSync(join(tmpdir(), 'sf-transient-')); - mkdirSync(join(base, '.gsd'), { recursive: true }); + mkdirSync(join(base, '.sf'), { recursive: true }); try { const lockFile = join(sfRoot(base), 'auto.lock'); diff --git a/src/resources/extensions/sf/tests/sf-recover.test.ts b/src/resources/extensions/sf/tests/sf-recover.test.ts index 59a912281..f7631026d 100644 --- a/src/resources/extensions/sf/tests/sf-recover.test.ts +++ b/src/resources/extensions/sf/tests/sf-recover.test.ts @@ -28,12 +28,12 @@ import { deriveStateFromDb, invalidateStateCache } from '../state.ts'; function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), 'sf-recover-')); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } function writeFile(base: string, relativePath: string, content: string): void { - const full = join(base, '.gsd', relativePath); + const full = join(base, '.sf', relativePath); mkdirSync(join(full, '..'), { recursive: true }); writeFileSync(full, content); } diff --git a/src/resources/extensions/sf/tests/sf-tools.test.ts b/src/resources/extensions/sf/tests/sf-tools.test.ts index 36147344a..725954508 100644 --- a/src/resources/extensions/sf/tests/sf-tools.test.ts +++ b/src/resources/extensions/sf/tests/sf-tools.test.ts @@ -34,7 +34,7 @@ import type { Requirement } from '../types.ts'; function makeTmpDir(): string { const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'sf-tools-')); - fs.mkdirSync(path.join(dir, '.gsd'), { recursive: 
true }); + fs.mkdirSync(path.join(dir, '.sf'), { recursive: true }); return dir; } @@ -54,7 +54,7 @@ describe('sf-tools', () => { test('sf_decision_save', async () => { const tmpDir = makeTmpDir(); try { - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); assert.ok(isDbAvailable(), 'DB should be available after open'); @@ -81,7 +81,7 @@ describe('sf-tools', () => { assert.deepStrictEqual(row!.choice, 'SQLite', 'Decision choice should match'); // Verify DECISIONS.md was generated - const mdPath = path.join(tmpDir, '.gsd', 'DECISIONS.md'); + const mdPath = path.join(tmpDir, '.sf', 'DECISIONS.md'); assert.ok(fs.existsSync(mdPath), 'DECISIONS.md should be created'); const mdContent = fs.readFileSync(mdPath, 'utf-8'); assert.ok(mdContent.includes('D001'), 'DECISIONS.md should contain D001'); @@ -119,7 +119,7 @@ describe('sf-tools', () => { test('sf_requirement_update', async () => { const tmpDir = makeTmpDir(); try { - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); // Seed a requirement @@ -156,7 +156,7 @@ describe('sf-tools', () => { assert.deepStrictEqual(updated!.primary_owner, 'S03', 'Primary owner should be preserved'); // Verify REQUIREMENTS.md was generated - const mdPath = path.join(tmpDir, '.gsd', 'REQUIREMENTS.md'); + const mdPath = path.join(tmpDir, '.sf', 'REQUIREMENTS.md'); assert.ok(fs.existsSync(mdPath), 'REQUIREMENTS.md should be created'); const mdContent = fs.readFileSync(mdPath, 'utf-8'); assert.ok(mdContent.includes('R001'), 'REQUIREMENTS.md should contain R001'); @@ -177,7 +177,7 @@ describe('sf-tools', () => { test('sf_summary_save', async () => { const tmpDir = makeTmpDir(); try { - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); // (c) Summary tool creates artifact row @@ -204,7 +204,7 @@ describe('sf-tools', () => { 
assert.deepStrictEqual(rows[0]['slice_id'] as string, 'S01', 'Slice ID should match'); // Verify file was written to disk - const filePath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-SUMMARY.md'); + const filePath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-SUMMARY.md'); assert.ok(fs.existsSync(filePath), 'Summary file should be written to disk'); const fileContent = fs.readFileSync(filePath, 'utf-8'); assert.ok(fileContent.includes('S01 Summary'), 'File should contain summary content'); @@ -220,7 +220,7 @@ describe('sf-tools', () => { tmpDir, ); - const mFilePath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'M001-CONTEXT.md'); + const mFilePath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'M001-CONTEXT.md'); assert.ok(fs.existsSync(mFilePath), 'Milestone-level artifact file should be created'); // Test task-level artifact @@ -236,7 +236,7 @@ describe('sf-tools', () => { tmpDir, ); - const tFilePath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'tasks', 'T01-SUMMARY.md'); + const tFilePath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'slices', 'S01', 'tasks', 'T01-SUMMARY.md'); assert.ok(fs.existsSync(tFilePath), 'Task-level artifact file should be created'); closeDatabase(); @@ -248,7 +248,7 @@ describe('sf-tools', () => { test('sf_summary_save supports CONTEXT-DRAFT persistence', async () => { const tmpDir = makeTmpDir(); try { - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); await saveArtifactToDb( @@ -261,7 +261,7 @@ describe('sf-tools', () => { tmpDir, ); - const draftPath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'M001-CONTEXT-DRAFT.md'); + const draftPath = path.join(tmpDir, '.sf', 'milestones', 'M001', 'M001-CONTEXT-DRAFT.md'); assert.ok(fs.existsSync(draftPath), 'Draft context file should be created'); const draftContent = fs.readFileSync(draftPath, 'utf-8'); 
assert.ok(draftContent.includes('Draft Context'), 'Draft context file should contain draft content'); @@ -296,7 +296,7 @@ describe('sf-tools', () => { test('sf_requirement_save creates new requirement', async () => { const tmpDir = makeTmpDir(); try { - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); // (a) saveRequirementToDb creates a new requirement with auto-assigned ID @@ -321,7 +321,7 @@ describe('sf-tools', () => { assert.deepStrictEqual(row!.status, 'active', 'Status should match'); // Verify REQUIREMENTS.md was generated - const mdPath = path.join(tmpDir, '.gsd', 'REQUIREMENTS.md'); + const mdPath = path.join(tmpDir, '.sf', 'REQUIREMENTS.md'); assert.ok(fs.existsSync(mdPath), 'REQUIREMENTS.md should be created'); const mdContent = fs.readFileSync(mdPath, 'utf-8'); assert.ok(mdContent.includes('R001'), 'REQUIREMENTS.md should contain R001'); @@ -349,7 +349,7 @@ describe('sf-tools', () => { test('nextRequirementId computes correct next ID', async () => { const tmpDir = makeTmpDir(); try { - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); // No requirements yet @@ -384,7 +384,7 @@ describe('sf-tools', () => { test('sf_requirement_update upserts when requirement not in DB', async () => { const tmpDir = makeTmpDir(); try { - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); // Requirement R025 does NOT exist in DB — simulates the bug scenario @@ -402,7 +402,7 @@ describe('sf-tools', () => { assert.deepStrictEqual(created!.validation, 'Integration tests pass', 'Validation should be set'); // Verify REQUIREMENTS.md was generated - const mdPath = path.join(tmpDir, '.gsd', 'REQUIREMENTS.md'); + const mdPath = path.join(tmpDir, '.sf', 'REQUIREMENTS.md'); assert.ok(fs.existsSync(mdPath), 'REQUIREMENTS.md should be created'); closeDatabase(); 
@@ -414,7 +414,7 @@ describe('sf-tools', () => { test('Tool result format', async () => { const tmpDir = makeTmpDir(); try { - const dbPath = path.join(tmpDir, '.gsd', 'sf.db'); + const dbPath = path.join(tmpDir, '.sf', 'sf.db'); openDatabase(dbPath); // Verify result follows AgentToolResult interface: {content: [{type: "text", text}], details} diff --git a/src/resources/extensions/sf/tests/sfroot-worktree-detection.test.ts b/src/resources/extensions/sf/tests/sfroot-worktree-detection.test.ts index e010fbaf4..6a3b82076 100644 --- a/src/resources/extensions/sf/tests/sfroot-worktree-detection.test.ts +++ b/src/resources/extensions/sf/tests/sfroot-worktree-detection.test.ts @@ -1,15 +1,15 @@ /** * sfroot-worktree-detection.test.ts — Regression test for #2594. * - * sfRoot() must return the worktree's own .gsd directory when the basePath - * is inside a .gsd/worktrees/<name>/ structure, not walk up to the project - * root's .gsd via the git-root probe. + * sfRoot() must return the worktree's own .sf directory when the basePath + * is inside a .sf/worktrees/<name>/ structure, not walk up to the project + * root's .sf via the git-root probe. * - * The bug: when a git worktree lives at /project/.gsd/worktrees/M008/, + * The bug: when a git worktree lives at /project/.sf/worktrees/M008/, * probeGsdRoot() runs `git rev-parse --show-toplevel` which can return the * main project root (not the worktree root) depending on git version and - * worktree setup. The walk-up then finds /project/.gsd and returns that - * instead of the worktree's own .gsd path. + * worktree setup. The walk-up then finds /project/.sf and returns that + * instead of the worktree's own .sf path. */ import { describe, test, beforeEach, afterEach } from "node:test"; @@ -31,7 +31,7 @@ describe("sfRoot() worktree detection (#2594)", () => { // Create a temporary project with a git repo to simulate real conditions. // realpathSync handles macOS /tmp -> /private/tmp. 
projectRoot = realpathSync(mkdtempSync(join(tmpdir(), "sfroot-wt-"))); - projectGsd = join(projectRoot, ".gsd"); + projectGsd = join(projectRoot, ".sf"); mkdirSync(projectGsd, { recursive: true }); // Initialize a git repo in the project root so git rev-parse works @@ -61,52 +61,52 @@ describe("sfRoot() worktree detection (#2594)", () => { rmSync(projectRoot, { recursive: true, force: true }); }); - test("returns worktree .gsd when basePath is a worktree with its own .gsd (fast path)", () => { + test("returns worktree .sf when basePath is a worktree with its own .sf (fast path)", () => { // Simulates a worktree that already had copyPlanningArtifacts() run, - // so it has its own .gsd/ directory. + // so it has its own .sf/ directory. const worktreeBase = join(projectGsd, "worktrees", "M008"); - const worktreeGsd = join(worktreeBase, ".gsd"); + const worktreeGsd = join(worktreeBase, ".sf"); mkdirSync(worktreeGsd, { recursive: true }); const result = sfRoot(worktreeBase); assert.equal( result, worktreeGsd, - `Expected worktree .gsd (${worktreeGsd}), got ${result}. ` + - "sfRoot() should use the fast path for an existing worktree .gsd.", + `Expected worktree .sf (${worktreeGsd}), got ${result}. ` + + "sfRoot() should use the fast path for an existing worktree .sf.", ); }); - test("returns worktree .gsd path (not project root .gsd) when worktree .gsd does not exist yet", () => { - // This is the core #2594 bug: the worktree directory exists but its .gsd + test("returns worktree .sf path (not project root .sf) when worktree .sf does not exist yet", () => { + // This is the core #2594 bug: the worktree directory exists but its .sf // subdirectory hasn't been created yet. Without the fix, probeGsdRoot() - // walks up from the worktree path, finds /project/.gsd, and returns it. - // With the fix, it detects the .gsd/worktrees/<name>/ pattern and returns - // the worktree-local .gsd path as the creation fallback. 
+ // walks up from the worktree path, finds /project/.sf, and returns it. + // With the fix, it detects the .sf/worktrees/<name>/ pattern and returns + // the worktree-local .sf path as the creation fallback. const worktreeBase = join(projectGsd, "worktrees", "M008"); mkdirSync(worktreeBase, { recursive: true }); - // NOTE: no .gsd/ inside worktreeBase + // NOTE: no .sf/ inside worktreeBase const result = sfRoot(worktreeBase); - const expected = join(worktreeBase, ".gsd"); + const expected = join(worktreeBase, ".sf"); - // Without the fix, this returns projectGsd (/project/.gsd) because the + // Without the fix, this returns projectGsd (/project/.sf) because the // walk-up from worktreeBase finds it. With the fix, it returns the // worktree-local path. assert.notEqual( result, projectGsd, - "sfRoot() must NOT return the project root .gsd when basePath is inside .gsd/worktrees/", + "sfRoot() must NOT return the project root .sf when basePath is inside .sf/worktrees/", ); assert.equal( result, expected, - `Expected worktree-local .gsd (${expected}), got ${result}.`, + `Expected worktree-local .sf (${expected}), got ${result}.`, ); }); - test("returns worktree .gsd when basePath is a real git worktree inside .gsd/worktrees/", () => { - // Create a real git worktree at .gsd/worktrees/M010 + test("returns worktree .sf when basePath is a real git worktree inside .sf/worktrees/", () => { + // Create a real git worktree at .sf/worktrees/M010 const worktreeName = "M010"; const worktreeBase = join(projectGsd, "worktrees", worktreeName); @@ -123,19 +123,19 @@ describe("sfRoot() worktree detection (#2594)", () => { return; } - // The real git worktree exists at worktreeBase but has NO .gsd/ subdir yet + // The real git worktree exists at worktreeBase but has NO .sf/ subdir yet const sfResult = sfRoot(worktreeBase); - const expected = join(worktreeBase, ".gsd"); + const expected = join(worktreeBase, ".sf"); assert.notEqual( sfResult, projectGsd, - "sfRoot() must NOT escape to 
project root .gsd from inside a git worktree", + "sfRoot() must NOT escape to project root .sf from inside a git worktree", ); assert.equal( sfResult, expected, - `Expected worktree-local .gsd (${expected}), got ${sfResult}`, + `Expected worktree-local .sf (${expected}), got ${sfResult}`, ); // Cleanup worktree @@ -145,12 +145,12 @@ describe("sfRoot() worktree detection (#2594)", () => { }); }); - test("still returns project .gsd for normal (non-worktree) basePath", () => { + test("still returns project .sf for normal (non-worktree) basePath", () => { const result = sfRoot(projectRoot); assert.equal(result, projectGsd); }); - test("still returns project .gsd for a subdirectory of the project", () => { + test("still returns project .sf for a subdirectory of the project", () => { const subdir = join(projectRoot, "src", "lib"); mkdirSync(subdir, { recursive: true }); @@ -158,7 +158,7 @@ describe("sfRoot() worktree detection (#2594)", () => { assert.equal( result, projectGsd, - "Non-worktree subdirectories should still resolve to project .gsd", + "Non-worktree subdirectories should still resolve to project .sf", ); }); }); diff --git a/src/resources/extensions/sf/tests/shared-wal.test.ts b/src/resources/extensions/sf/tests/shared-wal.test.ts index bf2c7edd9..50918a2e0 100644 --- a/src/resources/extensions/sf/tests/shared-wal.test.ts +++ b/src/resources/extensions/sf/tests/shared-wal.test.ts @@ -35,9 +35,9 @@ describe('shared-wal', async () => { console.log('\n=== shared-wal: resolve worktree path to project root DB ==='); { const projectRoot = '/home/user/myproject'; - const worktreePath = join(projectRoot, '.gsd', 'worktrees', 'M001'); + const worktreePath = join(projectRoot, '.sf', 'worktrees', 'M001'); const result = resolveProjectRootDbPath(worktreePath); - assert.deepStrictEqual(result, join(projectRoot, '.gsd', 'sf.db'), + assert.deepStrictEqual(result, join(projectRoot, '.sf', 'sf.db'), 'worktree path resolves to project root DB'); } @@ -46,7 +46,7 @@ 
describe('shared-wal', async () => { { const projectRoot = '/home/user/myproject'; const result = resolveProjectRootDbPath(projectRoot); - assert.deepStrictEqual(result, join(projectRoot, '.gsd', 'sf.db'), + assert.deepStrictEqual(result, join(projectRoot, '.sf', 'sf.db'), 'project root path stays at project root DB'); } @@ -54,26 +54,26 @@ describe('shared-wal', async () => { console.log('\n=== shared-wal: resolve nested worktree subdir ==='); { const projectRoot = '/home/user/myproject'; - const nestedPath = join(projectRoot, '.gsd', 'worktrees', 'M002', 'src', 'lib'); + const nestedPath = join(projectRoot, '.sf', 'worktrees', 'M002', 'src', 'lib'); const result = resolveProjectRootDbPath(nestedPath); - assert.deepStrictEqual(result, join(projectRoot, '.gsd', 'sf.db'), + assert.deepStrictEqual(result, join(projectRoot, '.sf', 'sf.db'), 'nested worktree subdir resolves to project root DB'); } // ─── Test (d): resolve with forward slashes (cross-platform) ────────── console.log('\n=== shared-wal: resolve forward-slash path ==='); { - const result = resolveProjectRootDbPath('/proj/.gsd/worktrees/M001'); - assert.deepStrictEqual(result, join('/proj', '.gsd', 'sf.db'), + const result = resolveProjectRootDbPath('/proj/.sf/worktrees/M001'); + assert.deepStrictEqual(result, join('/proj', '.sf', 'sf.db'), 'forward-slash worktree path resolves correctly'); } // ─── Test (e1): external-state worktree resolves to project state DB (#2952) ─── console.log('\n=== shared-wal: resolve external-state worktree path (#2952) ==='); { - // External-state layout: ~/.gsd/projects/<hash>/worktrees/<MID> - // Should resolve to: ~/.gsd/projects/<hash>/sf.db - const stateRoot = '/home/user/.gsd/projects/a1b2c3d4'; + // External-state layout: ~/.sf/projects/<hash>/worktrees/<MID> + // Should resolve to: ~/.sf/projects/<hash>/sf.db + const stateRoot = '/home/user/.sf/projects/a1b2c3d4'; const worktreePath = join(stateRoot, 'worktrees', 'M002'); const result = 
resolveProjectRootDbPath(worktreePath); assert.deepStrictEqual(result, join(stateRoot, 'sf.db'), @@ -83,7 +83,7 @@ describe('shared-wal', async () => { // ─── Test (e2): external-state worktree nested subdir (#2952) ───────── console.log('\n=== shared-wal: resolve external-state worktree nested subdir (#2952) ==='); { - const stateRoot = '/home/user/.gsd/projects/deadbeef42'; + const stateRoot = '/home/user/.sf/projects/deadbeef42'; const nestedPath = join(stateRoot, 'worktrees', 'M003', 'src', 'lib'); const result = resolveProjectRootDbPath(nestedPath); assert.deepStrictEqual(result, join(stateRoot, 'sf.db'), @@ -93,8 +93,8 @@ describe('shared-wal', async () => { // ─── Test (e3): external-state worktree with forward slashes (#2952) ── console.log('\n=== shared-wal: resolve external-state worktree forward-slash (#2952) ==='); { - const result = resolveProjectRootDbPath('/Users/dev/.gsd/projects/cafe0123/worktrees/M001'); - assert.deepStrictEqual(result, join('/Users/dev/.gsd/projects/cafe0123', 'sf.db'), + const result = resolveProjectRootDbPath('/Users/dev/.sf/projects/cafe0123/worktrees/M001'); + assert.deepStrictEqual(result, join('/Users/dev/.sf/projects/cafe0123', 'sf.db'), 'external-state forward-slash worktree path resolves correctly (#2952)'); } diff --git a/src/resources/extensions/sf/tests/single-writer-invariant.test.ts b/src/resources/extensions/sf/tests/single-writer-invariant.test.ts index fdaf78f6c..6966765ab 100644 --- a/src/resources/extensions/sf/tests/single-writer-invariant.test.ts +++ b/src/resources/extensions/sf/tests/single-writer-invariant.test.ts @@ -1,4 +1,4 @@ -// Structural invariant: sf-db.ts is the single writer for .gsd/sf.db. +// Structural invariant: sf-db.ts is the single writer for .sf/sf.db. 
// // No file under src/resources/extensions/sf/ may issue raw write SQL // (INSERT/UPDATE/DELETE/REPLACE) or raw transaction control (BEGIN/COMMIT/ @@ -7,7 +7,7 @@ // // Allowlist: // - sf-db.ts itself — the single writer -// - unit-ownership.ts — manages a separate .gsd/unit-claims.db for +// - unit-ownership.ts — manages a separate .sf/unit-claims.db for // cross-worktree claim races; intentionally outside this invariant // - tests/** — fixtures and direct DB inspection are fair game // diff --git a/src/resources/extensions/sf/tests/slice-disk-reconcile.test.ts b/src/resources/extensions/sf/tests/slice-disk-reconcile.test.ts index 957497e8e..3c3055c19 100644 --- a/src/resources/extensions/sf/tests/slice-disk-reconcile.test.ts +++ b/src/resources/extensions/sf/tests/slice-disk-reconcile.test.ts @@ -30,12 +30,12 @@ const { assertEq, assertTrue, report } = createTestContext(); function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-slice-reconcile-")); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); return base; } function writeFile(base: string, relativePath: string, content: string): void { - const full = join(base, ".gsd", relativePath); + const full = join(base, ".sf", relativePath); mkdirSync(join(full, ".."), { recursive: true }); writeFileSync(full, content); } @@ -73,7 +73,7 @@ async function testMissingSlicesCauseBlock(): Promise<void> { console.log("\n--- Test: missing DB slices cause permanent block (pre-fix) ---"); const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); try { openDatabase(dbPath); @@ -151,7 +151,7 @@ async function testSliceReconciliationIdempotent(): Promise<void> { console.log("\n--- Test: slice reconciliation is idempotent ---"); const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); 
try { openDatabase(dbPath); @@ -193,7 +193,7 @@ async function testNoRoadmapSkipsReconciliation(): Promise<void> { console.log("\n--- Test: no ROADMAP file skips slice reconciliation ---"); const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); try { openDatabase(dbPath); diff --git a/src/resources/extensions/sf/tests/slice-parallel-conflict.test.ts b/src/resources/extensions/sf/tests/slice-parallel-conflict.test.ts index 8886b4753..8bac21907 100644 --- a/src/resources/extensions/sf/tests/slice-parallel-conflict.test.ts +++ b/src/resources/extensions/sf/tests/slice-parallel-conflict.test.ts @@ -16,12 +16,12 @@ import { hasFileConflict } from "../slice-parallel-conflict.js"; function makeTmpBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-slice-conflict-test-")); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); return base; } function writeSlicePlan(base: string, mid: string, sid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid, sid); + const dir = join(base, ".sf", "milestones", mid, sid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, "PLAN.md"), content, "utf-8"); } diff --git a/src/resources/extensions/sf/tests/smart-entry-complete.test.ts b/src/resources/extensions/sf/tests/smart-entry-complete.test.ts index 10ee5c638..ce9468972 100644 --- a/src/resources/extensions/sf/tests/smart-entry-complete.test.ts +++ b/src/resources/extensions/sf/tests/smart-entry-complete.test.ts @@ -10,7 +10,7 @@ test("deriveState reports the last completed milestone when all milestone slices const base = mkdtempSync(join(tmpdir(), "sf-smart-entry-complete-")); try { - const milestoneDir = join(base, ".gsd", "milestones", "M001"); + const milestoneDir = join(base, ".sf", "milestones", "M001"); mkdirSync(milestoneDir, { recursive: true }); writeFileSync( diff --git 
a/src/resources/extensions/sf/tests/smart-entry-draft.test.ts b/src/resources/extensions/sf/tests/smart-entry-draft.test.ts index 037a9804b..d96a63345 100644 --- a/src/resources/extensions/sf/tests/smart-entry-draft.test.ts +++ b/src/resources/extensions/sf/tests/smart-entry-draft.test.ts @@ -20,7 +20,7 @@ function assert(condition: boolean, message: string): void { // ─── Fixture: milestone with only CONTEXT-DRAFT.md ────────────────────── const tmpBase = mkdtempSync(join(tmpdir(), "sf-smart-entry-draft-test-")); -const sf = join(tmpBase, ".gsd"); +const sf = join(tmpBase, ".sf"); mkdirSync(join(sf, "milestones", "M001"), { recursive: true }); diff --git a/src/resources/extensions/sf/tests/stale-queued-milestone.test.ts b/src/resources/extensions/sf/tests/stale-queued-milestone.test.ts index 6eb0ca270..a2ac536f8 100644 --- a/src/resources/extensions/sf/tests/stale-queued-milestone.test.ts +++ b/src/resources/extensions/sf/tests/stale-queued-milestone.test.ts @@ -24,12 +24,12 @@ import { function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-stale-milestone-")); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); return base; } function writeFile(base: string, relativePath: string, content: string): void { - const full = join(base, ".gsd", relativePath); + const full = join(base, ".sf", relativePath); mkdirSync(join(full, ".."), { recursive: true }); writeFileSync(full, content); } diff --git a/src/resources/extensions/sf/tests/stale-worktree-cwd.test.ts b/src/resources/extensions/sf/tests/stale-worktree-cwd.test.ts index a7a0356e9..1621af43e 100644 --- a/src/resources/extensions/sf/tests/stale-worktree-cwd.test.ts +++ b/src/resources/extensions/sf/tests/stale-worktree-cwd.test.ts @@ -1,7 +1,7 @@ /** * stale-worktree-cwd.test.ts — Tests for #608 fix. 
* - * Verifies that when process.cwd() is inside a stale .gsd/worktrees/ path, + * Verifies that when process.cwd() is inside a stale .sf/worktrees/ path, * startAuto escapes back to the project root before proceeding. */ @@ -39,11 +39,11 @@ function createTempRepo(): string { // ─── escapeStaleWorktree is called by startAuto, test the detection logic ──── test("detects stale worktree path and extracts project root", () => { - // Simulate the path pattern: /project/.gsd/worktrees/M004/... + // Simulate the path pattern: /project/.sf/worktrees/M004/... const projectRoot = "/Users/test/myproject"; - const stalePath = `${projectRoot}${sep}.gsd${sep}worktrees${sep}M004`; + const stalePath = `${projectRoot}${sep}.sf${sep}worktrees${sep}M004`; - const marker = `${sep}.gsd${sep}worktrees${sep}`; + const marker = `${sep}.sf${sep}worktrees${sep}`; const idx = stalePath.indexOf(marker); assert.ok(idx !== -1, "marker found in stale path"); @@ -52,7 +52,7 @@ test("detects stale worktree path and extracts project root", () => { test("does not trigger on normal project path", () => { const normalPath = "/Users/test/myproject"; - const marker = `${sep}.gsd${sep}worktrees${sep}`; + const marker = `${sep}.sf${sep}worktrees${sep}`; const idx = normalPath.indexOf(marker); assert.equal(idx, -1, "marker not found in normal path"); @@ -75,7 +75,7 @@ test("mergeMilestoneToMain restores cwd to project root", () => { tempDir = createTempRepo(); // Create milestone planning artifacts - const msDir = join(tempDir, ".gsd", "milestones", "M050"); + const msDir = join(tempDir, ".sf", "milestones", "M050"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "CONTEXT.md"), "# M050 Context\n"); const roadmap = [ @@ -125,17 +125,17 @@ test("process.cwd() inside removed worktree is recoverable", () => { try { tempDir = createTempRepo(); - // Create a .gsd/worktrees/M099 directory to simulate stale state - const staleWtDir = join(tempDir, ".gsd", "worktrees", "M099"); + // Create a 
.sf/worktrees/M099 directory to simulate stale state + const staleWtDir = join(tempDir, ".sf", "worktrees", "M099"); mkdirSync(staleWtDir, { recursive: true }); // Enter the stale directory process.chdir(staleWtDir); const cwdBefore = process.cwd(); - assert.ok(cwdBefore.includes(`${sep}.gsd${sep}worktrees${sep}`), "cwd is inside worktree dir"); + assert.ok(cwdBefore.includes(`${sep}.sf${sep}worktrees${sep}`), "cwd is inside worktree dir"); // Simulate escapeStaleWorktree logic - const marker = `${sep}.gsd${sep}worktrees${sep}`; + const marker = `${sep}.sf${sep}worktrees${sep}`; const idx = cwdBefore.indexOf(marker); assert.ok(idx !== -1, "marker found"); diff --git a/src/resources/extensions/sf/tests/stalled-tool-recovery.test.ts b/src/resources/extensions/sf/tests/stalled-tool-recovery.test.ts index 0968859fb..c79b9cfe9 100644 --- a/src/resources/extensions/sf/tests/stalled-tool-recovery.test.ts +++ b/src/resources/extensions/sf/tests/stalled-tool-recovery.test.ts @@ -47,7 +47,7 @@ function makeMockPi() { const pi = makeMockPi(); // Simulate the bug: buildRecoveryContext returns {} (empty object). - // basePath is undefined, which causes join(undefined, ".gsd") to throw. + // basePath is undefined, which causes join(undefined, ".sf") to throw. 
const emptyRctx = {} as RecoveryContext; let crashed = false; @@ -68,8 +68,8 @@ function makeMockPi() { { console.log("\n=== #1855: recoverTimedOutUnit succeeds with valid RecoveryContext ==="); const base = mkdtempSync(join(tmpdir(), "sf-stalled-tool-test-")); - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); - mkdirSync(join(base, ".gsd", "runtime", "units"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); + mkdirSync(join(base, ".sf", "runtime", "units"), { recursive: true }); try { const ctx = makeMockCtx(); diff --git a/src/resources/extensions/sf/tests/stash-pop-sf-conflict.test.ts b/src/resources/extensions/sf/tests/stash-pop-sf-conflict.test.ts index 71bd36690..1643fbf29 100644 --- a/src/resources/extensions/sf/tests/stash-pop-sf-conflict.test.ts +++ b/src/resources/extensions/sf/tests/stash-pop-sf-conflict.test.ts @@ -1,9 +1,9 @@ /** * stash-pop-sf-conflict.test.ts — Regression test for #2766. * - * When a squash merge stash-pops and hits conflicts on .gsd/ state files, + * When a squash merge stash-pops and hits conflicts on .sf/ state files, * the UU entries block every subsequent merge. This test verifies that - * mergeMilestoneToMain auto-resolves .gsd/ conflicts by accepting HEAD + * mergeMilestoneToMain auto-resolves .sf/ conflicts by accepting HEAD * and drops the stash, leaving the repo in a clean state. 
*/ @@ -47,8 +47,8 @@ function createTempRepo(): string { run("git config user.email test@test.com", dir); run("git config user.name Test", dir); writeFileSync(join(dir, "README.md"), "# test\n"); - mkdirSync(join(dir, ".gsd"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "STATE.md"), "version: 1\n"); + mkdirSync(join(dir, ".sf"), { recursive: true }); + writeFileSync(join(dir, ".sf", "STATE.md"), "version: 1\n"); run("git add .", dir); run("git commit -m init", dir); run("git branch -M main", dir); @@ -60,7 +60,7 @@ function makeRoadmap(milestoneId: string, title: string, slices: Array<{ id: str return `# ${milestoneId}: ${title}\n\n## Slices\n${sliceLines}\n`; } -test("#2766: stash pop conflict on .gsd/ files is auto-resolved", () => { +test("#2766: stash pop conflict on .sf/ files is auto-resolved", () => { const repo = createTempRepo(); try { const wtPath = createAutoWorktree(repo, "M300"); @@ -72,25 +72,25 @@ test("#2766: stash pop conflict on .gsd/ files is auto-resolved", () => { run(`git checkout -b "${sliceBranch}"`, wtPath); writeFileSync(join(wtPath, "feature.ts"), "export const feature = true;\n"); - // Modify .gsd/STATE.md on the milestone branch (diverges from main) - writeFileSync(join(wtPath, ".gsd", "STATE.md"), "version: 2-milestone\n"); + // Modify .sf/STATE.md on the milestone branch (diverges from main) + writeFileSync(join(wtPath, ".sf", "STATE.md"), "version: 2-milestone\n"); run("git add .", wtPath); run('git commit -m "add feature and update state"', wtPath); run("git checkout milestone/M300", wtPath); run(`git merge --no-ff "${sliceBranch}" -m "merge S01: feature"`, wtPath); - // Dirty .gsd/STATE.md in the main repo (stash will conflict on pop) - writeFileSync(join(repo, ".gsd", "STATE.md"), "version: 2-main-dirty\n"); + // Dirty .sf/STATE.md in the main repo (stash will conflict on pop) + writeFileSync(join(repo, ".sf", "STATE.md"), "version: 2-main-dirty\n"); const roadmap = makeRoadmap("M300", "Stash pop conflict test", [ { 
id: "S01", title: "Feature" }, ]); - // mergeMilestoneToMain should succeed — .gsd/ conflict auto-resolved + // mergeMilestoneToMain should succeed — .sf/ conflict auto-resolved const result = mergeMilestoneToMain(repo, "M300", roadmap); assert.ok( result.commitMessage.includes("SF-Milestone: M300"), - "merge succeeds despite stash pop conflict on .gsd/ file", + "merge succeeds despite stash pop conflict on .sf/ file", ); assert.ok(existsSync(join(repo, "feature.ts")), "milestone code merged to main"); @@ -104,13 +104,13 @@ test("#2766: stash pop conflict on .gsd/ files is auto-resolved", () => { // Stash should be dropped (no remaining stash entries) let stashList = ""; try { stashList = run("git stash list", repo); } catch { /* empty stash */ } - assert.strictEqual(stashList, "", "stash is empty after .gsd/ conflict auto-resolution"); + assert.strictEqual(stashList, "", "stash is empty after .sf/ conflict auto-resolution"); } finally { try { rmSync(repo, { recursive: true, force: true, maxRetries: 3, retryDelay: 100 }); } catch { /* cleanup best-effort */ } } }); -test("#2766: stash pop conflict on non-.gsd files preserves stash for manual resolution", () => { +test("#2766: stash pop conflict on non-.sf files preserves stash for manual resolution", () => { const repo = createTempRepo(); try { const wtPath = createAutoWorktree(repo, "M301"); @@ -127,7 +127,7 @@ test("#2766: stash pop conflict on non-.gsd files preserves stash for manual res run(`git merge --no-ff "${sliceBranch}" -m "merge S01: readme"`, wtPath); // Dirty README.md in the main repo — this will conflict on stash pop - // and is NOT a .gsd/ file, so it should be left for manual resolution + // and is NOT a .sf/ file, so it should be left for manual resolution writeFileSync(join(repo, "README.md"), "# locally modified\n"); const roadmap = makeRoadmap("M301", "Non-sf stash conflict", [ @@ -138,7 +138,7 @@ test("#2766: stash pop conflict on non-.gsd files preserves stash for manual res const result = 
mergeMilestoneToMain(repo, "M301", roadmap); assert.ok( result.commitMessage.includes("SF-Milestone: M301"), - "merge succeeds even with non-.gsd stash pop conflict", + "merge succeeds even with non-.sf stash pop conflict", ); } finally { try { rmSync(repo, { recursive: true, force: true, maxRetries: 3, retryDelay: 100 }); } catch { /* cleanup best-effort */ } diff --git a/src/resources/extensions/sf/tests/stash-queued-context-files.test.ts b/src/resources/extensions/sf/tests/stash-queued-context-files.test.ts index c76dd792a..d39a81cf7 100644 --- a/src/resources/extensions/sf/tests/stash-queued-context-files.test.ts +++ b/src/resources/extensions/sf/tests/stash-queued-context-files.test.ts @@ -2,12 +2,12 @@ * stash-queued-context-files.test.ts — Regression test for #2505. * * When mergeMilestoneToMain runs `git stash push --include-untracked`, - * untracked `.gsd/milestones/M<queued>/` directories created by `/sf queue` + * untracked `.sf/milestones/M<queued>/` directories created by `/sf queue` * are swept into the stash. If stash pop fails (conflict on tracked files), * the queued milestone CONTEXT files are permanently lost. * * The fix: drop `--include-untracked` from the stash push, since the stash - * only needs to handle tracked dirty files. Untracked `.gsd/` files are + * only needs to handle tracked dirty files. Untracked `.sf/` files are * already handled separately by clearProjectRootStateFiles. */ @@ -63,13 +63,13 @@ function createTempRepo(): string { run("git config user.email test@test.com", dir); run("git config user.name Test", dir); writeFileSync(join(dir, "README.md"), "# test\n"); - mkdirSync(join(dir, ".gsd"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "STATE.md"), "version: 1\n"); - // In projects with tracked .gsd/ files (hasGitTrackedGsdFiles=true), - // .gsd is NOT added to .gitignore. 
This means untracked files under - // .gsd/ are visible to --include-untracked and get swept into the + mkdirSync(join(dir, ".sf"), { recursive: true }); + writeFileSync(join(dir, ".sf", "STATE.md"), "version: 1\n"); + // In projects with tracked .sf/ files (hasGitTrackedGsdFiles=true), + // .sf is NOT added to .gitignore. This means untracked files under + // .sf/ are visible to --include-untracked and get swept into the // stash, destroying queued milestone CONTEXT files (#2505). - run("git add -f .gsd/STATE.md", dir); + run("git add -f .sf/STATE.md", dir); run("git add .", dir); run("git commit -m init", dir); run("git branch -M main", dir); @@ -99,14 +99,14 @@ test("#2505: git stash --include-untracked sweeps queued CONTEXT files (demonstr run("git config user.email test@test.com", dir); run("git config user.name Test", dir); writeFileSync(join(dir, "README.md"), "# test\n"); - mkdirSync(join(dir, ".gsd"), { recursive: true }); - writeFileSync(join(dir, ".gsd", "STATE.md"), "version: 1\n"); - run("git add -f .gsd/STATE.md", dir); + mkdirSync(join(dir, ".sf"), { recursive: true }); + writeFileSync(join(dir, ".sf", "STATE.md"), "version: 1\n"); + run("git add -f .sf/STATE.md", dir); run("git add .", dir); run("git commit -m init", dir); // Create queued milestone CONTEXT files (untracked, not gitignored) - const m013Dir = join(dir, ".gsd", "milestones", "M013"); + const m013Dir = join(dir, ".sf", "milestones", "M013"); mkdirSync(m013Dir, { recursive: true }); writeFileSync( join(m013Dir, "M013-CONTEXT.md"), @@ -118,7 +118,7 @@ test("#2505: git stash --include-untracked sweeps queued CONTEXT files (demonstr // Verify the CONTEXT file is untracked const status = run("git status --porcelain", dir); - assert.ok(status.includes("?? .gsd/milestones/"), "precondition: M013 dir is untracked"); + assert.ok(status.includes("?? 
.sf/milestones/"), "precondition: M013 dir is untracked"); // Stash WITH --include-untracked (the bug) run('git stash push --include-untracked -m "test stash"', dir); @@ -168,10 +168,10 @@ test("#2505: mergeMilestoneToMain preserves queued CONTEXT files (not swept into run(`git merge --no-ff "${sliceBranch}" -m "merge S01"`, wtPath); // Simulate `/sf queue` creating queued milestone CONTEXT files at the - // project root. These are untracked, and in repos with tracked .gsd/ + // project root. These are untracked, and in repos with tracked .sf/ // files they are NOT gitignored. - const m013Dir = join(repo, ".gsd", "milestones", "M013"); - const m014Dir = join(repo, ".gsd", "milestones", "M014"); + const m013Dir = join(repo, ".sf", "milestones", "M013"); + const m014Dir = join(repo, ".sf", "milestones", "M014"); mkdirSync(m013Dir, { recursive: true }); mkdirSync(m014Dir, { recursive: true }); writeFileSync( @@ -189,7 +189,7 @@ test("#2505: mergeMilestoneToMain preserves queued CONTEXT files (not swept into // Verify M013 is untracked (precondition) const statusBefore = run("git status --porcelain", repo); assert.ok( - statusBefore.includes("?? .gsd/milestones/"), + statusBefore.includes("?? 
.sf/milestones/"), "M013 directory is untracked before merge (precondition)", ); @@ -274,7 +274,7 @@ test("#2505: back-to-back merges preserve queued CONTEXT files", () => { run(`git merge --no-ff "${slice1}" -m "merge S01"`, wt1); // Create queued milestone CONTEXT file - const m013Dir = join(repo, ".gsd", "milestones", "M013"); + const m013Dir = join(repo, ".sf", "milestones", "M013"); mkdirSync(m013Dir, { recursive: true }); writeFileSync( join(m013Dir, "M013-CONTEXT.md"), diff --git a/src/resources/extensions/sf/tests/state-corruption-2945.test.ts b/src/resources/extensions/sf/tests/state-corruption-2945.test.ts index 6e9807426..5ab7efd58 100644 --- a/src/resources/extensions/sf/tests/state-corruption-2945.test.ts +++ b/src/resources/extensions/sf/tests/state-corruption-2945.test.ts @@ -46,7 +46,7 @@ function cleanupDb(dbPath: string): void { function createTempProject(): { basePath: string } { const basePath = mkdtempSync(join(tmpdir(), "sf-2945-project-")); - mkdirSync(join(basePath, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(basePath, ".sf", "milestones", "M001"), { recursive: true }); return { basePath }; } diff --git a/src/resources/extensions/sf/tests/state-derivation-parity.test.ts b/src/resources/extensions/sf/tests/state-derivation-parity.test.ts index d49e207b2..f022c8b6e 100644 --- a/src/resources/extensions/sf/tests/state-derivation-parity.test.ts +++ b/src/resources/extensions/sf/tests/state-derivation-parity.test.ts @@ -12,7 +12,7 @@ import { deriveState, isGhostMilestone, invalidateStateCache } from "../state.ts function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-parity-test-")); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); return base; } @@ -21,13 +21,13 @@ function cleanup(base: string): void { } function writeMilestoneFile(base: string, mid: string, suffix: string, content: string): void { - const 
dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-${suffix}.md`), content); } function writeMilestoneValidation(base: string, mid: string, verdict: string = "pass"): void { - const dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync( join(dir, `${mid}-VALIDATION.md`), @@ -55,7 +55,7 @@ describe("state-derivation-parity", () => { test("ghost milestone with only META.json is correctly detected", () => { const base = createFixtureBase(); try { - const dir = join(base, ".gsd", "milestones", "M001"); + const dir = join(base, ".sf", "milestones", "M001"); mkdirSync(dir, { recursive: true }); // Write only META.json — no CONTEXT, CONTEXT-DRAFT, ROADMAP, or SUMMARY writeFileSync(join(dir, "META.json"), JSON.stringify({ id: "M001", createdAt: new Date().toISOString() })); @@ -106,7 +106,7 @@ describe("state-derivation-parity", () => { const base = createFixtureBase(); try { // Provide a milestone with a ROADMAP that has a single incomplete slice - const dir = join(base, ".gsd", "milestones", "M001"); + const dir = join(base, ".sf", "milestones", "M001"); mkdirSync(dir, { recursive: true }); writeFileSync( join(dir, "M001-ROADMAP.md"), @@ -155,7 +155,7 @@ describe("state-derivation-parity", () => { const base = createFixtureBase(); try { // M001: ghost — just an empty directory - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); // M002: has CONTEXT-DRAFT — should become active writeMilestoneFile( @@ -187,7 +187,7 @@ describe("state-derivation-parity", () => { test("isGhostMilestone returns true for milestone directory with no files", () => { const base = createFixtureBase(); try { - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + 
mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); // No files at all in the directory assert.ok( isGhostMilestone(base, "M001"), @@ -231,8 +231,8 @@ describe("state-derivation-parity", () => { const base = createFixtureBase(); try { // M001 and M002: ghosts - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); - mkdirSync(join(base, ".gsd", "milestones", "M002"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M002"), { recursive: true }); // M003: has CONTEXT-DRAFT — first real milestone writeMilestoneFile(base, "M003", "CONTEXT-DRAFT", "# M003 Draft\n\nFirst substantive milestone."); diff --git a/src/resources/extensions/sf/tests/state-machine-full-walkthrough.test.ts b/src/resources/extensions/sf/tests/state-machine-full-walkthrough.test.ts index f960fa1ee..65fe52cc2 100644 --- a/src/resources/extensions/sf/tests/state-machine-full-walkthrough.test.ts +++ b/src/resources/extensions/sf/tests/state-machine-full-walkthrough.test.ts @@ -34,7 +34,7 @@ const tempDirs: string[] = []; function createFixtureBase(): string { const base = mkdtempSync(join(tmpdir(), "sf-walkthrough-")); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); tempDirs.push(base); return base; } @@ -49,25 +49,25 @@ afterEach(() => { }); function writeContext(base: string, mid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-CONTEXT.md`), content); } function writeContextDraft(base: string, mid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-CONTEXT-DRAFT.md`), 
content); } function writeRoadmap(base: string, mid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-ROADMAP.md`), content); } function writePlan(base: string, mid: string, sid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid, "slices", sid); + const dir = join(base, ".sf", "milestones", mid, "slices", sid); const tasksDir = join(dir, "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(dir, `${sid}-PLAN.md`), content); @@ -80,7 +80,7 @@ function writePlan(base: string, mid: string, sid: string, content: string): voi } function writeTaskSummary(base: string, mid: string, sid: string, tid: string): void { - const tasksDir = join(base, ".gsd", "milestones", mid, "slices", sid, "tasks"); + const tasksDir = join(base, ".sf", "milestones", mid, "slices", sid, "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(tasksDir, `${tid}-SUMMARY.md`), [ `# ${tid} Summary`, @@ -90,7 +90,7 @@ function writeTaskSummary(base: string, mid: string, sid: string, tid: string): } function writeTaskSummaryWithBlocker(base: string, mid: string, sid: string, tid: string): void { - const tasksDir = join(base, ".gsd", "milestones", mid, "slices", sid, "tasks"); + const tasksDir = join(base, ".sf", "milestones", mid, "slices", sid, "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(tasksDir, `${tid}-SUMMARY.md`), [ "---", @@ -104,19 +104,19 @@ function writeTaskSummaryWithBlocker(base: string, mid: string, sid: string, tid } function writeSliceSummary(base: string, mid: string, sid: string): void { - const dir = join(base, ".gsd", "milestones", mid, "slices", sid); + const dir = join(base, ".sf", "milestones", mid, "slices", sid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${sid}-SUMMARY.md`), `# ${sid} Summary\n\nSlice 
done.\n`); } function writeMilestoneSummary(base: string, mid: string): void { - const dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-SUMMARY.md`), `# ${mid} Summary\n\nMilestone complete.\n`); } function writeMilestoneValidation(base: string, mid: string, verdict: string = "pass"): void { - const dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-VALIDATION.md`), [ "---", @@ -130,19 +130,19 @@ function writeMilestoneValidation(base: string, mid: string, verdict: string = " } function writeReplanTrigger(base: string, mid: string, sid: string): void { - const dir = join(base, ".gsd", "milestones", mid, "slices", sid); + const dir = join(base, ".sf", "milestones", mid, "slices", sid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${sid}-REPLAN-TRIGGER.md`), "Triage replan triggered.\n"); } function writeReplan(base: string, mid: string, sid: string): void { - const dir = join(base, ".gsd", "milestones", mid, "slices", sid); + const dir = join(base, ".sf", "milestones", mid, "slices", sid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${sid}-REPLAN.md`), "# Replan\n\nReplan completed.\n"); } function writeContinue(base: string, mid: string, sid: string): void { - const dir = join(base, ".gsd", "milestones", mid, "slices", sid); + const dir = join(base, ".sf", "milestones", mid, "slices", sid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${sid}-CONTINUE.md`), [ "---", @@ -367,7 +367,7 @@ describe("state-machine-full-walkthrough", () => { const base = createFixtureBase(); writeRoadmap(base, "M001", standardRoadmap()); // Plan file with no task entries - const dir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const dir = join(base, ".sf", "milestones", "M001", "slices", 
"S01"); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, "S01-PLAN.md"), [ "# S01: First Slice", @@ -389,7 +389,7 @@ describe("state-machine-full-walkthrough", () => { const base = createFixtureBase(); writeRoadmap(base, "M001", standardRoadmap()); // Write plan file WITH tasks but WITHOUT stub T##-PLAN.md files - const dir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const dir = join(base, ".sf", "milestones", "M001", "slices", "S01"); mkdirSync(join(dir, "tasks"), { recursive: true }); writeFileSync(join(dir, "S01-PLAN.md"), standardPlan()); // Intentionally do NOT create T01-PLAN.md or T02-PLAN.md @@ -419,7 +419,7 @@ describe("state-machine-full-walkthrough", () => { describe("Phase 6: evaluating-gates", () => { test("DB path: pending quality gates → evaluating-gates", async () => { const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); // Set up milestone + slice + task in DB @@ -445,7 +445,7 @@ describe("state-machine-full-walkthrough", () => { test("DB path: no pending gates → NOT evaluating-gates", async () => { const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001", title: "M001: Test", status: "active" }); @@ -566,7 +566,7 @@ describe("state-machine-full-walkthrough", () => { "### T02: Second Task", "Second task.", ].join("\n"); - const dir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const dir = join(base, ".sf", "milestones", "M001", "slices", "S01"); const tasksDir = join(dir, "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(dir, "S01-PLAN.md"), planContent); @@ -620,7 +620,7 @@ describe("state-machine-full-walkthrough", () => { const base = createFixtureBase(); writeRoadmap(base, "M001", doneSliceRoadmap()); // Write a validation file with no parseable verdict - const dir = 
join(base, ".gsd", "milestones", "M001"); + const dir = join(base, ".sf", "milestones", "M001"); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, "M001-VALIDATION.md"), "Just some text with no frontmatter verdict."); invalidateStateCache(); @@ -840,7 +840,7 @@ describe("state-machine-full-walkthrough", () => { describe("Reconciliation", () => { test("DB: task with SUMMARY on disk but DB says pending → reconciliation fixes status (#2514)", async () => { const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001", title: "M001: Test", status: "active" }); @@ -885,7 +885,7 @@ describe("state-machine-full-walkthrough", () => { test("ghost milestone (empty dir) → NOT in registry", async () => { const base = createFixtureBase(); // Create empty milestone dir (ghost — no CONTEXT, ROADMAP, SUMMARY) - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); // Create a real milestone too writeContext(base, "M002", "# M002: Real\n\nContext."); invalidateStateCache(); @@ -902,7 +902,7 @@ describe("state-machine-full-walkthrough", () => { test("ghost milestone detection helper", () => { const base = createFixtureBase(); // Ghost: empty dir - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); clearPathCache(); assert.equal(isGhostMilestone(base, "M001"), true, "empty dir is ghost"); @@ -920,7 +920,7 @@ describe("state-machine-full-walkthrough", () => { describe("Cross-validation: DB vs filesystem", () => { test("executing scenario produces same phase on both paths", async () => { const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001", title: 
"M001: Test", status: "active" }); @@ -946,7 +946,7 @@ describe("state-machine-full-walkthrough", () => { test("summarizing scenario produces same phase on both paths", async () => { const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001", title: "M001: Test", status: "active" }); @@ -1041,7 +1041,7 @@ describe("state-machine-full-walkthrough", () => { describe("Recovery: DB has slice but no task rows (partial migration)", () => { test("DB tasks empty but PLAN on disk has tasks → reconciles to executing", async () => { const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001", title: "M001: Test", status: "active" }); @@ -1064,7 +1064,7 @@ describe("state-machine-full-walkthrough", () => { describe("Failure: partial SUMMARY reconciliation", () => { test("only one task has SUMMARY, other still pending → executing next task", async () => { const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001", title: "M001: Test", status: "active" }); @@ -1092,7 +1092,7 @@ describe("state-machine-full-walkthrough", () => { writeRoadmap(base, "M001", standardRoadmap()); writePlan(base, "M001", "S01", standardPlan()); // Write 0-byte SUMMARY — existsSync returns true for empty files - const tasksDir = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); + const tasksDir = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(tasksDir, "T01-SUMMARY.md"), ""); @@ -1111,7 +1111,7 @@ describe("state-machine-full-walkthrough", () => { test("0-byte VALIDATION file → stays in validating-milestone", async () => { const base = createFixtureBase(); 
writeRoadmap(base, "M001", doneSliceRoadmap()); - const dir = join(base, ".gsd", "milestones", "M001"); + const dir = join(base, ".sf", "milestones", "M001"); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, "M001-VALIDATION.md"), ""); @@ -1125,7 +1125,7 @@ describe("state-machine-full-walkthrough", () => { test("0-byte PLAN file → planning phase", async () => { const base = createFixtureBase(); writeRoadmap(base, "M001", standardRoadmap()); - const dir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const dir = join(base, ".sf", "milestones", "M001", "slices", "S01"); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, "S01-PLAN.md"), ""); @@ -1139,7 +1139,7 @@ describe("state-machine-full-walkthrough", () => { describe("Failure: DB/filesystem divergence", () => { test("DB says slice complete, no milestone VALIDATION → validating-milestone", async () => { const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001", title: "M001: Test", status: "active" }); @@ -1156,7 +1156,7 @@ describe("state-machine-full-walkthrough", () => { test("DB says task complete but SUMMARY missing → no crash, advances to next", async () => { const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001", title: "M001: Test", status: "active" }); @@ -1176,7 +1176,7 @@ describe("state-machine-full-walkthrough", () => { test("milestone in DB but directory missing from disk → no crash", async () => { const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001", title: "M001: Test", status: "active" }); @@ -1192,7 +1192,7 @@ describe("state-machine-full-walkthrough", () => { test("VALIDATION with broken frontmatter → 
stays in validating", async () => { const base = createFixtureBase(); writeRoadmap(base, "M001", doneSliceRoadmap()); - const dir = join(base, ".gsd", "milestones", "M001"); + const dir = join(base, ".sf", "milestones", "M001"); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, "M001-VALIDATION.md"), [ "---", @@ -1233,7 +1233,7 @@ describe("state-machine-full-walkthrough", () => { describe("Failure: missing task plan files in DB path", () => { test("DB has tasks but no T##-PLAN.md files → planning phase", async () => { const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001", title: "M001: Test", status: "active" }); @@ -1241,7 +1241,7 @@ describe("state-machine-full-walkthrough", () => { insertTask({ id: "T01", sliceId: "S01", milestoneId: "M001", title: "T01: Task", status: "pending" }); writeRoadmap(base, "M001", standardRoadmap()); - const dir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const dir = join(base, ".sf", "milestones", "M001", "slices", "S01"); mkdirSync(join(dir, "tasks"), { recursive: true }); writeFileSync(join(dir, "S01-PLAN.md"), standardPlan()); // NO T01-PLAN.md @@ -1316,7 +1316,7 @@ describe("state-machine-full-walkthrough", () => { describe("Failure at needs-discussion: CONTEXT-DRAFT is empty", () => { test("0-byte CONTEXT-DRAFT → should still trigger needs-discussion", async () => { const base = createFixtureBase(); - const dir = join(base, ".gsd", "milestones", "M001"); + const dir = join(base, ".sf", "milestones", "M001"); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, "M001-CONTEXT-DRAFT.md"), ""); invalidateStateCache(); @@ -1368,7 +1368,7 @@ describe("state-machine-full-walkthrough", () => { test("PLAN exists but tasks section is garbage → zero tasks → planning", async () => { const base = createFixtureBase(); writeRoadmap(base, "M001", standardRoadmap()); - const dir = 
join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const dir = join(base, ".sf", "milestones", "M001", "slices", "S01"); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, "S01-PLAN.md"), [ "# S01: Slice", @@ -1391,7 +1391,7 @@ describe("state-machine-full-walkthrough", () => { test("T01-PLAN.md exists but is 0-byte → still enters executing", async () => { const base = createFixtureBase(); writeRoadmap(base, "M001", standardRoadmap()); - const dir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const dir = join(base, ".sf", "milestones", "M001", "slices", "S01"); const tasksDir = join(dir, "tasks"); mkdirSync(tasksDir, { recursive: true }); writeFileSync(join(dir, "S01-PLAN.md"), standardPlan()); @@ -1412,7 +1412,7 @@ describe("state-machine-full-walkthrough", () => { describe("Failure at executing: DB has task but wrong status string", () => { test("task with unexpected status string → not treated as closed", async () => { const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001", title: "M001: Test", status: "active" }); @@ -1569,7 +1569,7 @@ describe("state-machine-full-walkthrough", () => { describe("Failure: multiple reconciliation in single derivation", () => { test("DB has 3 stale tasks, all with SUMMARY on disk → all reconciled in one pass", async () => { const base = createFixtureBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001", title: "M001: Test", status: "active" }); diff --git a/src/resources/extensions/sf/tests/steer-worktree-path.test.ts b/src/resources/extensions/sf/tests/steer-worktree-path.test.ts index 0718ace90..910b7d310 100644 --- a/src/resources/extensions/sf/tests/steer-worktree-path.test.ts +++ b/src/resources/extensions/sf/tests/steer-worktree-path.test.ts @@ -1,6 +1,6 @@ // SF Extension - 
Steer Worktree Path Resolution Test -// Regression test for #3476: /sf steer must write overrides to the worktree .gsd/, -// not the project root .gsd/, when a worktree is active. +// Regression test for #3476: /sf steer must write overrides to the worktree .sf/, +// not the project root .sf/, when a worktree is active. import { describe, test, beforeEach, afterEach } from "node:test"; import assert from "node:assert/strict"; @@ -16,33 +16,33 @@ describe("steer worktree path resolution (#3476)", () => { beforeEach(() => { projectRoot = mkdtempSync(join(tmpdir(), "sf-steer-wt-")); - mkdirSync(join(projectRoot, ".gsd"), { recursive: true }); + mkdirSync(join(projectRoot, ".sf"), { recursive: true }); - // Simulate a worktree with its own .gsd directory - worktreePath = join(projectRoot, ".gsd", "worktrees", "M001"); - mkdirSync(join(worktreePath, ".gsd"), { recursive: true }); + // Simulate a worktree with its own .sf directory + worktreePath = join(projectRoot, ".sf", "worktrees", "M001"); + mkdirSync(join(worktreePath, ".sf"), { recursive: true }); }); afterEach(() => { rmSync(projectRoot, { recursive: true, force: true }); }); - test("appendOverride writes to worktree .gsd/ when worktree path is used", async () => { + test("appendOverride writes to worktree .sf/ when worktree path is used", async () => { await appendOverride(worktreePath, "Use Postgres instead of SQLite", "M001/S01/T01"); - // Override should be in the worktree .gsd/ - const wtOverrides = join(worktreePath, ".gsd", "OVERRIDES.md"); - assert.ok(existsSync(wtOverrides), "override file exists in worktree .gsd/"); + // Override should be in the worktree .sf/ + const wtOverrides = join(worktreePath, ".sf", "OVERRIDES.md"); + assert.ok(existsSync(wtOverrides), "override file exists in worktree .sf/"); const content = readFileSync(wtOverrides, "utf-8"); assert.ok(content.includes("Use Postgres instead of SQLite"), "override content is correct"); - // Override should NOT be in the project root .gsd/ - 
const rootOverrides = join(projectRoot, ".gsd", "OVERRIDES.md"); - assert.ok(!existsSync(rootOverrides), "no override file in project root .gsd/"); + // Override should NOT be in the project root .sf/ + const rootOverrides = join(projectRoot, ".sf", "OVERRIDES.md"); + assert.ok(!existsSync(rootOverrides), "no override file in project root .sf/"); }); - test("loadActiveOverrides reads from worktree .gsd/ when worktree path is used", async () => { + test("loadActiveOverrides reads from worktree .sf/ when worktree path is used", async () => { await appendOverride(worktreePath, "Switch to JWT auth", "M001/S02/T01"); // Loading from worktree should find the override @@ -58,8 +58,8 @@ describe("steer worktree path resolution (#3476)", () => { test("appendOverride falls back to project root when no worktree exists", async () => { await appendOverride(projectRoot, "Use Redis cache", "M001/S01/T01"); - const rootOverrides = join(projectRoot, ".gsd", "OVERRIDES.md"); - assert.ok(existsSync(rootOverrides), "override file exists in project root .gsd/"); + const rootOverrides = join(projectRoot, ".sf", "OVERRIDES.md"); + assert.ok(existsSync(rootOverrides), "override file exists in project root .sf/"); const content = readFileSync(rootOverrides, "utf-8"); assert.ok(content.includes("Use Redis cache"), "override content is correct"); @@ -83,8 +83,8 @@ describe("steer worktree path resolution (#3476)", () => { await appendOverride(targetPath, "Should go to project root", "M001/S01/T01"); - const rootOverrides = join(projectRoot, ".gsd", "OVERRIDES.md"); - const wtOverrides = join(worktreePath, ".gsd", "OVERRIDES.md"); + const rootOverrides = join(projectRoot, ".sf", "OVERRIDES.md"); + const wtOverrides = join(worktreePath, ".sf", "OVERRIDES.md"); assert.ok(existsSync(rootOverrides), "override written to project root"); assert.ok(!existsSync(wtOverrides), "override NOT written to inactive worktree"); @@ -102,7 +102,7 @@ describe("steer worktree path resolution (#3476)", () => { // 
Without a valid .git file, falls back to project root await appendOverride(targetPath, "Falls back without .git", "M001/S01/T01"); - const rootOverrides = join(projectRoot, ".gsd", "OVERRIDES.md"); + const rootOverrides = join(projectRoot, ".sf", "OVERRIDES.md"); assert.ok(existsSync(rootOverrides), "override written to project root (no valid .git in worktree)"); }); }); diff --git a/src/resources/extensions/sf/tests/stop-auto-remote.test.ts b/src/resources/extensions/sf/tests/stop-auto-remote.test.ts index f01321b0f..383b1033d 100644 --- a/src/resources/extensions/sf/tests/stop-auto-remote.test.ts +++ b/src/resources/extensions/sf/tests/stop-auto-remote.test.ts @@ -17,7 +17,7 @@ import { stopAutoRemote } from "../auto.ts"; function makeTmpBase(): string { const base = join(tmpdir(), `sf-test-${randomUUID()}`); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); return base; } @@ -68,7 +68,7 @@ test("stopAutoRemote cleans up stale lock (dead PID) and returns found:false", ( // Overwrite PID to a dead one const lock = readCrashLock(base)!; const staleData = { ...lock, pid: 999999999 }; - writeFileSync(join(base, ".gsd", "auto.lock"), JSON.stringify(staleData, null, 2), "utf-8"); + writeFileSync(join(base, ".sf", "auto.lock"), JSON.stringify(staleData, null, 2), "utf-8"); const result = stopAutoRemote(base); assert.equal(result.found, false, "stale lock should not be found as running"); @@ -112,7 +112,7 @@ test("stopAutoRemote sends SIGTERM to a live process and returns found:true", { unitId: "M001/S01/T01", unitStartedAt: new Date().toISOString(), }; - writeFileSync(join(base, ".gsd", "auto.lock"), JSON.stringify(lockData, null, 2), "utf-8"); + writeFileSync(join(base, ".sf", "auto.lock"), JSON.stringify(lockData, null, 2), "utf-8"); const exitPromise = waitForChildExit(child); const result = stopAutoRemote(base); @@ -137,8 +137,8 @@ test("stopAutoRemote sends SIGTERM to a live process and returns found:true", { 
test("lock file should be discoverable at project root, not worktree path", () => { const projectRoot = makeTmpBase(); - const worktreePath = join(projectRoot, ".gsd", "worktrees", "M001"); - mkdirSync(join(worktreePath, ".gsd"), { recursive: true }); + const worktreePath = join(projectRoot, ".sf", "worktrees", "M001"); + mkdirSync(join(worktreePath, ".sf"), { recursive: true }); try { // Simulate: auto-mode writes lock to project root (the fix) diff --git a/src/resources/extensions/sf/tests/stop-backtrack.test.ts b/src/resources/extensions/sf/tests/stop-backtrack.test.ts index 8773ed236..ec2225904 100644 --- a/src/resources/extensions/sf/tests/stop-backtrack.test.ts +++ b/src/resources/extensions/sf/tests/stop-backtrack.test.ts @@ -40,7 +40,7 @@ function makeTempDir(prefix: string): string { } function setupGsdDir(tmp: string): void { - mkdirSync(join(tmp, ".gsd"), { recursive: true }); + mkdirSync(join(tmp, ".sf"), { recursive: true }); } // ─── Classification Types ───────────────────────────────────────────────────── @@ -108,7 +108,7 @@ test("revertExecutorResolvedCaptures reverts captures resolved without classific const id = appendCapture(tmp, "stop everything"); // Simulate an executor writing Status: resolved directly (no classification) - const capPath = join(tmp, ".gsd", "CAPTURES.md"); + const capPath = join(tmp, ".sf", "CAPTURES.md"); let content = readFileSync(capPath, "utf-8"); content = content.replace("**Status:** pending", "**Status:** resolved"); writeFileSync(capPath, content, "utf-8"); @@ -144,7 +144,7 @@ test("executeBacktrack writes trigger and regression markers", () => { setupGsdDir(tmp); // Create target milestone directory - mkdirSync(join(tmp, ".gsd", "milestones", "M003"), { recursive: true }); + mkdirSync(join(tmp, ".sf", "milestones", "M003"), { recursive: true }); const targetMid = executeBacktrack(tmp, "M005", { id: "CAP-test123", @@ -159,14 +159,14 @@ test("executeBacktrack writes trigger and regression markers", () => { 
assert.equal(targetMid, "M003"); // Check trigger file exists - const triggerPath = join(tmp, ".gsd", "BACKTRACK-TRIGGER.md"); + const triggerPath = join(tmp, ".sf", "BACKTRACK-TRIGGER.md"); assert.ok(existsSync(triggerPath)); const triggerContent = readFileSync(triggerPath, "utf-8"); assert.ok(triggerContent.includes("M005")); assert.ok(triggerContent.includes("M003")); // Check regression marker exists on target milestone - const regressionPath = join(tmp, ".gsd", "milestones", "M003", "M003-REGRESSION.md"); + const regressionPath = join(tmp, ".sf", "milestones", "M003", "M003-REGRESSION.md"); assert.ok(existsSync(regressionPath)); const regressionContent = readFileSync(regressionPath, "utf-8"); assert.ok(regressionContent.includes("M005")); @@ -178,7 +178,7 @@ test("executeBacktrack writes trigger and regression markers", () => { test("readBacktrackTrigger parses trigger file", () => { const tmp = makeTempDir("read-bt"); setupGsdDir(tmp); - mkdirSync(join(tmp, ".gsd", "milestones", "M003"), { recursive: true }); + mkdirSync(join(tmp, ".sf", "milestones", "M003"), { recursive: true }); executeBacktrack(tmp, "M005", { id: "CAP-abc", diff --git a/src/resources/extensions/sf/tests/stuck-detection-coverage.test.ts b/src/resources/extensions/sf/tests/stuck-detection-coverage.test.ts index 2316ee41b..bcb947718 100644 --- a/src/resources/extensions/sf/tests/stuck-detection-coverage.test.ts +++ b/src/resources/extensions/sf/tests/stuck-detection-coverage.test.ts @@ -127,9 +127,9 @@ test("Rule 3: A-A-A-A triggers Rule 2 not Rule 3", () => { test("Rule 4: same ENOENT path in two entries triggers stuck", () => { const result = detectStuck([ - { key: "A", error: "ENOENT: no such file or directory, access '/home/user/.gsd/agent/skills/debug-like-expert/SKILL.md'" }, + { key: "A", error: "ENOENT: no such file or directory, access '/home/user/.sf/agent/skills/debug-like-expert/SKILL.md'" }, { key: "B" }, - { key: "A", error: "ENOENT: no such file or directory, access 
'/home/user/.gsd/agent/skills/debug-like-expert/SKILL.md'" }, + { key: "A", error: "ENOENT: no such file or directory, access '/home/user/.sf/agent/skills/debug-like-expert/SKILL.md'" }, ]); assert.notEqual(result, null); assert.equal(result!.stuck, true); diff --git a/src/resources/extensions/sf/tests/subagent-agent-discovery.test.ts b/src/resources/extensions/sf/tests/subagent-agent-discovery.test.ts index 9504ca783..8f4c8c555 100644 --- a/src/resources/extensions/sf/tests/subagent-agent-discovery.test.ts +++ b/src/resources/extensions/sf/tests/subagent-agent-discovery.test.ts @@ -12,7 +12,7 @@ function makeProjectRoot(t: test.TestContext): string { return root; } -function writeAgent(root: string, configDirName: ".gsd" | ".pi", name = "ping"): string { +function writeAgent(root: string, configDirName: ".sf" | ".pi", name = "ping"): string { const agentsDir = join(root, configDirName, "agents"); mkdirSync(agentsDir, { recursive: true }); writeFileSync( @@ -22,9 +22,9 @@ function writeAgent(root: string, configDirName: ".gsd" | ".pi", name = "ping"): return agentsDir; } -test("discoverAgents finds project agents in .gsd/agents", (t) => { +test("discoverAgents finds project agents in .sf/agents", (t) => { const root = makeProjectRoot(t); - const agentsDir = writeAgent(root, ".gsd"); + const agentsDir = writeAgent(root, ".sf"); const discovery = discoverAgents(root, "project"); @@ -45,7 +45,7 @@ test("discoverAgents falls back to legacy .pi/agents when needed", (t) => { test("discoverAgents accepts tools frontmatter as a YAML list", (t) => { const root = makeProjectRoot(t); - const agentsDir = join(root, ".gsd", "agents"); + const agentsDir = join(root, ".sf", "agents"); mkdirSync(agentsDir, { recursive: true }); writeFileSync( join(agentsDir, "reviewer.md"), @@ -70,7 +70,7 @@ test("discoverAgents accepts tools frontmatter as a YAML list", (t) => { test("discoverAgents still accepts comma-separated tools frontmatter", (t) => { const root = makeProjectRoot(t); - 
const agentsDir = join(root, ".gsd", "agents"); + const agentsDir = join(root, ".sf", "agents"); mkdirSync(agentsDir, { recursive: true }); writeFileSync( join(agentsDir, "reviewer.md"), diff --git a/src/resources/extensions/sf/tests/subagent-model-dispatch.test.ts b/src/resources/extensions/sf/tests/subagent-model-dispatch.test.ts index fff08ad5b..068409f51 100644 --- a/src/resources/extensions/sf/tests/subagent-model-dispatch.test.ts +++ b/src/resources/extensions/sf/tests/subagent-model-dispatch.test.ts @@ -164,7 +164,7 @@ test("buildReactiveExecutePrompt: output contains model string when subagentMode const repo = mkdtempSync(join(tmpdir(), "sf-subagent-model-reactive-")); t.after(() => rmSync(repo, { recursive: true, force: true })); - const sf = join(repo, ".gsd", "milestones", "M001", "slices", "S01"); + const sf = join(repo, ".sf", "milestones", "M001", "slices", "S01"); mkdirSync(join(sf, "tasks"), { recursive: true }); writeFileSync( @@ -217,7 +217,7 @@ test("buildReactiveExecutePrompt: no model instruction when subagentModel omitte const repo = mkdtempSync(join(tmpdir(), "sf-subagent-model-none-")); t.after(() => rmSync(repo, { recursive: true, force: true })); - const sf = join(repo, ".gsd", "milestones", "M001", "slices", "S01"); + const sf = join(repo, ".sf", "milestones", "M001", "slices", "S01"); mkdirSync(join(sf, "tasks"), { recursive: true }); writeFileSync( diff --git a/src/resources/extensions/sf/tests/symlink-extension-discovery.test.ts b/src/resources/extensions/sf/tests/symlink-extension-discovery.test.ts index 6386d90ba..05a1ec35d 100644 --- a/src/resources/extensions/sf/tests/symlink-extension-discovery.test.ts +++ b/src/resources/extensions/sf/tests/symlink-extension-discovery.test.ts @@ -1,7 +1,7 @@ // Regression test for: discoverManifests() skips symlinked extension directories // // The bug: Dirent.isDirectory() returns false for symlinks, so extensions installed -// as directory symlinks under ~/.gsd/agent/extensions/ were invisible 
to all +// as directory symlinks under ~/.sf/agent/extensions/ were invisible to all // management commands (list, enable, disable, info). // // The fix: check `entry.isDirectory() || entry.isSymbolicLink()`, matching the diff --git a/src/resources/extensions/sf/tests/symlink-numbered-variants.test.ts b/src/resources/extensions/sf/tests/symlink-numbered-variants.test.ts index 199d92649..43ffcb0f0 100644 --- a/src/resources/extensions/sf/tests/symlink-numbered-variants.test.ts +++ b/src/resources/extensions/sf/tests/symlink-numbered-variants.test.ts @@ -1,9 +1,9 @@ /** * Tests for macOS numbered symlink variant cleanup (#2205). * - * macOS can rename `.gsd` to `.gsd 2`, `.gsd 3`, etc. when a directory + * macOS can rename `.sf` to `.sf 2`, `.sf 3`, etc. when a directory * already exists at the target path. ensureGsdSymlink() must detect and - * remove these numbered variants so the real `.gsd` symlink is always + * remove these numbered variants so the real `.sf` symlink is always * the one in use. 
*/ @@ -50,90 +50,90 @@ describe('symlink-numbered-variants', async () => { const externalPath = externalGsdRoot(base); // ── Test: numbered variant directories are cleaned up ────────────── - console.log("\n=== ensureGsdSymlink removes numbered .gsd variants (#2205) ==="); + console.log("\n=== ensureGsdSymlink removes numbered .sf variants (#2205) ==="); { - // Simulate macOS creating numbered variants: ".gsd 2", ".gsd 3" - mkdirSync(join(base, ".gsd 2"), { recursive: true }); - mkdirSync(join(base, ".gsd 3"), { recursive: true }); - mkdirSync(join(base, ".gsd 4"), { recursive: true }); + // Simulate macOS creating numbered variants: ".sf 2", ".sf 3" + mkdirSync(join(base, ".sf 2"), { recursive: true }); + mkdirSync(join(base, ".sf 3"), { recursive: true }); + mkdirSync(join(base, ".sf 4"), { recursive: true }); const result = ensureGsdSymlink(base); assert.deepStrictEqual(result, externalPath, "ensureGsdSymlink returns external path"); - assert.ok(existsSync(join(base, ".gsd")), ".gsd exists after ensureGsdSymlink"); - assert.ok(lstatSync(join(base, ".gsd")).isSymbolicLink(), ".gsd is a symlink"); + assert.ok(existsSync(join(base, ".sf")), ".sf exists after ensureGsdSymlink"); + assert.ok(lstatSync(join(base, ".sf")).isSymbolicLink(), ".sf is a symlink"); // The numbered variants must have been removed - assert.ok(!existsSync(join(base, ".gsd 2")), '".gsd 2" directory was cleaned up'); - assert.ok(!existsSync(join(base, ".gsd 3")), '".gsd 3" directory was cleaned up'); - assert.ok(!existsSync(join(base, ".gsd 4")), '".gsd 4" directory was cleaned up'); + assert.ok(!existsSync(join(base, ".sf 2")), '".sf 2" directory was cleaned up'); + assert.ok(!existsSync(join(base, ".sf 3")), '".sf 3" directory was cleaned up'); + assert.ok(!existsSync(join(base, ".sf 4")), '".sf 4" directory was cleaned up'); } // ── Test: numbered variant symlinks are cleaned up ───────────────── console.log("\n=== ensureGsdSymlink removes numbered symlink variants ==="); { // Clean slate - 
rmSync(join(base, ".gsd"), { recursive: true, force: true }); + rmSync(join(base, ".sf"), { recursive: true, force: true }); - // Simulate: ".gsd 2" is a symlink to the correct target (the real .gsd) - // and ".gsd" doesn't exist — this is the actual macOS scenario + // Simulate: ".sf 2" is a symlink to the correct target (the real .sf) + // and ".sf" doesn't exist — this is the actual macOS scenario const staleTarget = join(stateDir, "projects", "stale-target"); mkdirSync(staleTarget, { recursive: true }); - symlinkSync(externalPath, join(base, ".gsd 2"), "junction"); - symlinkSync(staleTarget, join(base, ".gsd 3"), "junction"); + symlinkSync(externalPath, join(base, ".sf 2"), "junction"); + symlinkSync(staleTarget, join(base, ".sf 3"), "junction"); const result = ensureGsdSymlink(base); assert.deepStrictEqual(result, externalPath, "ensureGsdSymlink returns external path when variants exist"); - assert.ok(existsSync(join(base, ".gsd")), ".gsd exists"); - assert.ok(lstatSync(join(base, ".gsd")).isSymbolicLink(), ".gsd is a symlink"); + assert.ok(existsSync(join(base, ".sf")), ".sf exists"); + assert.ok(lstatSync(join(base, ".sf")).isSymbolicLink(), ".sf is a symlink"); - assert.ok(!existsSync(join(base, ".gsd 2")), '".gsd 2" symlink variant was cleaned up'); - assert.ok(!existsSync(join(base, ".gsd 3")), '".gsd 3" symlink variant was cleaned up'); + assert.ok(!existsSync(join(base, ".sf 2")), '".sf 2" symlink variant was cleaned up'); + assert.ok(!existsSync(join(base, ".sf 3")), '".sf 3" symlink variant was cleaned up'); } - // ── Test: real .gsd directory blocks symlink, but variants still cleaned ── - console.log("\n=== ensureGsdSymlink cleans variants even when .gsd is a real directory ==="); + // ── Test: real .sf directory blocks symlink, but variants still cleaned ── + console.log("\n=== ensureGsdSymlink cleans variants even when .sf is a real directory ==="); { // Clean slate - rmSync(join(base, ".gsd"), { recursive: true, force: true }); + 
rmSync(join(base, ".sf"), { recursive: true, force: true }); - // .gsd is a real directory (git-tracked) and numbered variants exist - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); - writeFileSync(join(base, ".gsd", "milestones", "M001.md"), "# M001\n", "utf-8"); - mkdirSync(join(base, ".gsd 2"), { recursive: true }); - mkdirSync(join(base, ".gsd 3"), { recursive: true }); + // .sf is a real directory (git-tracked) and numbered variants exist + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); + writeFileSync(join(base, ".sf", "milestones", "M001.md"), "# M001\n", "utf-8"); + mkdirSync(join(base, ".sf 2"), { recursive: true }); + mkdirSync(join(base, ".sf 3"), { recursive: true }); const result = ensureGsdSymlink(base); - // When .gsd is a real directory, ensureGsdSymlink preserves it - assert.deepStrictEqual(result, join(base, ".gsd"), "real .gsd directory preserved"); - assert.ok(lstatSync(join(base, ".gsd")).isDirectory(), ".gsd remains a directory"); + // When .sf is a real directory, ensureGsdSymlink preserves it + assert.deepStrictEqual(result, join(base, ".sf"), "real .sf directory preserved"); + assert.ok(lstatSync(join(base, ".sf")).isDirectory(), ".sf remains a directory"); // But the numbered variants should still be cleaned up - assert.ok(!existsSync(join(base, ".gsd 2")), '".gsd 2" cleaned even when .gsd is a directory'); - assert.ok(!existsSync(join(base, ".gsd 3")), '".gsd 3" cleaned even when .gsd is a directory'); + assert.ok(!existsSync(join(base, ".sf 2")), '".sf 2" cleaned even when .sf is a directory'); + assert.ok(!existsSync(join(base, ".sf 3")), '".sf 3" cleaned even when .sf is a directory'); } // ── Test: only numeric-suffixed variants are removed ─────────────── - console.log("\n=== ensureGsdSymlink only removes .gsd + space + digit variants ==="); + console.log("\n=== ensureGsdSymlink only removes .sf + space + digit variants ==="); { - rmSync(join(base, ".gsd"), { recursive: true, force: true }); + 
rmSync(join(base, ".sf"), { recursive: true, force: true }); // These should NOT be touched - mkdirSync(join(base, ".gsd-backup"), { recursive: true }); + mkdirSync(join(base, ".sf-backup"), { recursive: true }); mkdirSync(join(base, ".sf_old"), { recursive: true }); // These SHOULD be removed (macOS collision pattern) - mkdirSync(join(base, ".gsd 2"), { recursive: true }); - mkdirSync(join(base, ".gsd 10"), { recursive: true }); + mkdirSync(join(base, ".sf 2"), { recursive: true }); + mkdirSync(join(base, ".sf 10"), { recursive: true }); ensureGsdSymlink(base); - assert.ok(existsSync(join(base, ".gsd-backup")), ".gsd-backup is NOT removed"); + assert.ok(existsSync(join(base, ".sf-backup")), ".sf-backup is NOT removed"); assert.ok(existsSync(join(base, ".sf_old")), ".sf_old is NOT removed"); - assert.ok(!existsSync(join(base, ".gsd 2")), '".gsd 2" removed'); - assert.ok(!existsSync(join(base, ".gsd 10")), '".gsd 10" removed'); + assert.ok(!existsSync(join(base, ".sf 2")), '".sf 2" removed'); + assert.ok(!existsSync(join(base, ".sf 10")), '".sf 10" removed'); // Cleanup non-variant dirs - rmSync(join(base, ".gsd-backup"), { recursive: true, force: true }); + rmSync(join(base, ".sf-backup"), { recursive: true, force: true }); rmSync(join(base, ".sf_old"), { recursive: true, force: true }); } diff --git a/src/resources/extensions/sf/tests/sync-lock.test.ts b/src/resources/extensions/sf/tests/sync-lock.test.ts index 31b9dade1..07f0c0546 100644 --- a/src/resources/extensions/sf/tests/sync-lock.test.ts +++ b/src/resources/extensions/sf/tests/sync-lock.test.ts @@ -20,7 +20,7 @@ function cleanupDir(dirPath: string): void { test('sync-lock: acquireSyncLock returns { acquired: true } when no lock exists', () => { const base = tempDir(); - fs.mkdirSync(path.join(base, '.gsd'), { recursive: true }); + fs.mkdirSync(path.join(base, '.sf'), { recursive: true }); try { const result = acquireSyncLock(base); assert.strictEqual(result.acquired, true); @@ -29,12 +29,12 @@ 
test('sync-lock: acquireSyncLock returns { acquired: true } when no lock exists' } }); -test('sync-lock: acquireSyncLock creates lock file at .gsd/sync.lock', () => { +test('sync-lock: acquireSyncLock creates lock file at .sf/sync.lock', () => { const base = tempDir(); - fs.mkdirSync(path.join(base, '.gsd'), { recursive: true }); + fs.mkdirSync(path.join(base, '.sf'), { recursive: true }); try { acquireSyncLock(base); - const lockPath = path.join(base, '.gsd', 'sync.lock'); + const lockPath = path.join(base, '.sf', 'sync.lock'); assert.ok(fs.existsSync(lockPath), 'sync.lock should exist after acquire'); } finally { cleanupDir(base); @@ -43,10 +43,10 @@ test('sync-lock: acquireSyncLock creates lock file at .gsd/sync.lock', () => { test('sync-lock: lock file contains pid and acquired_at fields', () => { const base = tempDir(); - fs.mkdirSync(path.join(base, '.gsd'), { recursive: true }); + fs.mkdirSync(path.join(base, '.sf'), { recursive: true }); try { acquireSyncLock(base); - const lockPath = path.join(base, '.gsd', 'sync.lock'); + const lockPath = path.join(base, '.sf', 'sync.lock'); const content = JSON.parse(fs.readFileSync(lockPath, 'utf-8')); assert.strictEqual(typeof content.pid, 'number'); assert.strictEqual(typeof content.acquired_at, 'string'); @@ -59,10 +59,10 @@ test('sync-lock: lock file contains pid and acquired_at fields', () => { test('sync-lock: releaseSyncLock removes lock file', () => { const base = tempDir(); - fs.mkdirSync(path.join(base, '.gsd'), { recursive: true }); + fs.mkdirSync(path.join(base, '.sf'), { recursive: true }); try { acquireSyncLock(base); - const lockPath = path.join(base, '.gsd', 'sync.lock'); + const lockPath = path.join(base, '.sf', 'sync.lock'); assert.ok(fs.existsSync(lockPath), 'lock file should exist before release'); releaseSyncLock(base); assert.ok(!fs.existsSync(lockPath), 'lock file should not exist after release'); @@ -73,7 +73,7 @@ test('sync-lock: releaseSyncLock removes lock file', () => { test('sync-lock: 
releaseSyncLock is a no-op when no lock file exists', () => { const base = tempDir(); - fs.mkdirSync(path.join(base, '.gsd'), { recursive: true }); + fs.mkdirSync(path.join(base, '.sf'), { recursive: true }); try { // Should not throw releaseSyncLock(base); @@ -86,7 +86,7 @@ test('sync-lock: releaseSyncLock is a no-op when no lock file exists', () => { test('sync-lock: can re-acquire after release', () => { const base = tempDir(); - fs.mkdirSync(path.join(base, '.gsd'), { recursive: true }); + fs.mkdirSync(path.join(base, '.sf'), { recursive: true }); try { const r1 = acquireSyncLock(base); assert.strictEqual(r1.acquired, true, 'first acquire should succeed'); @@ -103,8 +103,8 @@ test('sync-lock: can re-acquire after release', () => { test('sync-lock: overrides stale lock file (mtime backdated)', (t) => { const base = tempDir(); - fs.mkdirSync(path.join(base, '.gsd'), { recursive: true }); - const lockPath = path.join(base, '.gsd', 'sync.lock'); + fs.mkdirSync(path.join(base, '.sf'), { recursive: true }); + const lockPath = path.join(base, '.sf', 'sync.lock'); try { // Write a lock file with a very old mtime (simulating staleness) fs.writeFileSync(lockPath, JSON.stringify({ pid: 99999, acquired_at: new Date(0).toISOString() })); diff --git a/src/resources/extensions/sf/tests/test-utils.ts b/src/resources/extensions/sf/tests/test-utils.ts index 481026869..57a83c615 100644 --- a/src/resources/extensions/sf/tests/test-utils.ts +++ b/src/resources/extensions/sf/tests/test-utils.ts @@ -112,7 +112,7 @@ export function safeReadFile(filePath: string): string | null { /** * Create a minimal SF milestone structure in a temp directory. 
* - * @param base - Base directory (should have .gsd/ or be a temp repo) + * @param base - Base directory (should have .sf/ or be a temp repo) * @param mid - Milestone ID (e.g., "M001") * @param options - What to create */ @@ -132,7 +132,7 @@ export function writeMilestoneFixture( }>; } = {}, ): void { - const milestoneDir = join(base, ".gsd", "milestones", mid); + const milestoneDir = join(base, ".sf", "milestones", mid); mkdirSync(milestoneDir, { recursive: true }); if (options.roadmap) { diff --git a/src/resources/extensions/sf/tests/triage-resolution.test.ts b/src/resources/extensions/sf/tests/triage-resolution.test.ts index aa1494b43..ee82581e7 100644 --- a/src/resources/extensions/sf/tests/triage-resolution.test.ts +++ b/src/resources/extensions/sf/tests/triage-resolution.test.ts @@ -22,7 +22,7 @@ function makeTempDir(prefix: string): string { } function setupPlanFile(tmp: string, mid: string, sid: string, content: string): string { - const planDir = join(tmp, ".gsd", "milestones", mid, "slices", sid); + const planDir = join(tmp, ".sf", "milestones", mid, "slices", sid); mkdirSync(planDir, { recursive: true }); const planPath = join(planDir, `${sid}-PLAN.md`); writeFileSync(planPath, content, "utf-8"); @@ -118,7 +118,7 @@ test("resolution: executeReplan writes REPLAN-TRIGGER.md", () => { assert.strictEqual(result, true); const triggerPath = join( - tmp, ".gsd", "milestones", "M001", "slices", "S01", "S01-REPLAN-TRIGGER.md", + tmp, ".sf", "milestones", "M001", "slices", "S01", "S01-REPLAN-TRIGGER.md", ); assert.ok(existsSync(triggerPath), "trigger file should exist"); @@ -249,7 +249,7 @@ test("resolution: markCaptureExecuted is idempotent", () => { markCaptureExecuted(tmp, id); markCaptureExecuted(tmp, id); // call again — should not duplicate - const filePath = join(tmp, ".gsd", "CAPTURES.md"); + const filePath = join(tmp, ".sf", "CAPTURES.md"); const content = readFileSync(filePath, "utf-8"); const executedMatches = content.match(/\*\*Executed:\*\*/g); 
assert.strictEqual(executedMatches?.length, 1, "should have exactly one Executed field"); @@ -306,7 +306,7 @@ test("resolution: executeTriageResolutions executes inject captures", () => { assert.strictEqual(result.quickTasks.length, 0); // Verify tasks were added to plan - const planPath = join(tmp, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); + const planPath = join(tmp, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"); const planContent = readFileSync(planPath, "utf-8"); assert.ok(planContent.includes("**T04:"), "should have T04"); assert.ok(planContent.includes("**T05:"), "should have T05"); @@ -335,7 +335,7 @@ test("resolution: executeTriageResolutions executes replan captures", () => { // Verify trigger file was written const triggerPath = join( - tmp, ".gsd", "milestones", "M001", "slices", "S01", "S01-REPLAN-TRIGGER.md", + tmp, ".sf", "milestones", "M001", "slices", "S01", "S01-REPLAN-TRIGGER.md", ); assert.ok(existsSync(triggerPath), "replan trigger should exist"); @@ -429,7 +429,7 @@ test("resolution: executeTriageResolutions returns empty result when no actionab test("resolution: ensureDeferMilestoneDir creates milestone directory with CONTEXT-DRAFT.md", () => { const tmp = makeTempDir("res-defer-create"); try { - mkdirSync(join(tmp, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(tmp, ".sf", "milestones"), { recursive: true }); const captures = [ { id: "CAP-aaa111", text: "add performance monitoring", timestamp: "2026-03-15T20:00:00Z", status: "resolved" as const, classification: "defer" as const }, @@ -439,7 +439,7 @@ test("resolution: ensureDeferMilestoneDir creates milestone directory with CONTE const created = ensureDeferMilestoneDir(tmp, "M005", captures); assert.strictEqual(created, true, "should return true"); - const msDir = join(tmp, ".gsd", "milestones", "M005"); + const msDir = join(tmp, ".sf", "milestones", "M005"); assert.ok(existsSync(msDir), "milestone directory should exist"); const draftPath = 
join(msDir, "M005-CONTEXT-DRAFT.md"); @@ -458,7 +458,7 @@ test("resolution: ensureDeferMilestoneDir creates milestone directory with CONTE test("resolution: ensureDeferMilestoneDir returns true without overwriting existing directory", () => { const tmp = makeTempDir("res-defer-exists"); try { - const msDir = join(tmp, ".gsd", "milestones", "M003"); + const msDir = join(tmp, ".sf", "milestones", "M003"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "M003-CONTEXT.md"), "# M003: Existing\n", "utf-8"); @@ -474,7 +474,7 @@ test("resolution: ensureDeferMilestoneDir returns true without overwriting exist test("resolution: ensureDeferMilestoneDir rejects invalid milestone IDs", () => { const tmp = makeTempDir("res-defer-invalid"); try { - mkdirSync(join(tmp, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(tmp, ".sf", "milestones"), { recursive: true }); assert.strictEqual(ensureDeferMilestoneDir(tmp, "S03", []), false, "should reject slice IDs"); assert.strictEqual(ensureDeferMilestoneDir(tmp, "not-a-milestone", []), false, "should reject arbitrary strings"); assert.strictEqual(ensureDeferMilestoneDir(tmp, "", []), false, "should reject empty string"); @@ -486,14 +486,14 @@ test("resolution: ensureDeferMilestoneDir rejects invalid milestone IDs", () => test("resolution: ensureDeferMilestoneDir handles unique milestone IDs (M005-abc123)", () => { const tmp = makeTempDir("res-defer-unique"); try { - mkdirSync(join(tmp, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(tmp, ".sf", "milestones"), { recursive: true }); const created = ensureDeferMilestoneDir(tmp, "M005-abc123", [ { id: "CAP-ccc333", text: "future work", timestamp: "2026-03-15T20:00:00Z", status: "resolved" as const, classification: "defer" as const }, ]); assert.strictEqual(created, true); - const msDir = join(tmp, ".gsd", "milestones", "M005-abc123"); + const msDir = join(tmp, ".sf", "milestones", "M005-abc123"); assert.ok(existsSync(msDir), "milestone directory 
should exist"); assert.ok( existsSync(join(msDir, "M005-abc123-CONTEXT-DRAFT.md")), @@ -509,7 +509,7 @@ test("resolution: ensureDeferMilestoneDir handles unique milestone IDs (M005-abc test("resolution: executeTriageResolutions creates milestone dir for deferred captures", () => { const tmp = makeTempDir("res-exec-defer"); try { - mkdirSync(join(tmp, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(tmp, ".sf", "milestones"), { recursive: true }); const id1 = appendCapture(tmp, "add caching layer"); const id2 = appendCapture(tmp, "optimize queries"); @@ -520,11 +520,11 @@ test("resolution: executeTriageResolutions creates milestone dir for deferred ca assert.strictEqual(result.deferredMilestones, 1, "should create 1 milestone"); assert.ok( - existsSync(join(tmp, ".gsd", "milestones", "M005")), + existsSync(join(tmp, ".sf", "milestones", "M005")), "M005 directory should exist", ); assert.ok( - existsSync(join(tmp, ".gsd", "milestones", "M005", "M005-CONTEXT-DRAFT.md")), + existsSync(join(tmp, ".sf", "milestones", "M005", "M005-CONTEXT-DRAFT.md")), "CONTEXT-DRAFT.md should exist", ); @@ -534,7 +534,7 @@ test("resolution: executeTriageResolutions creates milestone dir for deferred ca assert.strictEqual(all[1].executed, true, "second defer should be marked executed"); // Verify the draft content includes both captures - const draft = readFileSync(join(tmp, ".gsd", "milestones", "M005", "M005-CONTEXT-DRAFT.md"), "utf-8"); + const draft = readFileSync(join(tmp, ".sf", "milestones", "M005", "M005-CONTEXT-DRAFT.md"), "utf-8"); assert.ok(draft.includes("add caching layer"), "should include first capture text"); assert.ok(draft.includes("optimize queries"), "should include second capture text"); } finally { @@ -546,7 +546,7 @@ test("resolution: executeTriageResolutions skips defer when milestone already ex const tmp = makeTempDir("res-exec-defer-exists"); try { // Pre-create M005 - const msDir = join(tmp, ".gsd", "milestones", "M005"); + const msDir = join(tmp, 
".sf", "milestones", "M005"); mkdirSync(msDir, { recursive: true }); writeFileSync(join(msDir, "M005-CONTEXT.md"), "# M005: Already Planned\n", "utf-8"); diff --git a/src/resources/extensions/sf/tests/uat-stuck-loop-orphaned-worktree.test.ts b/src/resources/extensions/sf/tests/uat-stuck-loop-orphaned-worktree.test.ts index a552cf0ef..c7a12bf51 100644 --- a/src/resources/extensions/sf/tests/uat-stuck-loop-orphaned-worktree.test.ts +++ b/src/resources/extensions/sf/tests/uat-stuck-loop-orphaned-worktree.test.ts @@ -51,7 +51,7 @@ function makeBaseRepo(): string { git(["config", "user.name", "Test"], base); git(["config", "user.email", "test@test.com"], base); writeFileSync(join(base, "README.md"), "# test\n"); - mkdirSync(join(base, ".gsd", "milestones", "M011"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M011"), { recursive: true }); git(["add", "."], base); git(["commit", "-m", "init"], base); return base; @@ -66,10 +66,10 @@ describe("#2821 Bug 1 — ASSESSMENT file force-synced on resume", () => { beforeEach(() => { mainBase = mkdtempSync(join(tmpdir(), "sf-2821-main-")); wtBase = mkdtempSync(join(tmpdir(), "sf-2821-wt-")); - mkdirSync(join(mainBase, ".gsd", "milestones", "M011", "slices", "S01"), { + mkdirSync(join(mainBase, ".sf", "milestones", "M011", "slices", "S01"), { recursive: true, }); - mkdirSync(join(wtBase, ".gsd", "milestones", "M011", "slices", "S01"), { + mkdirSync(join(wtBase, ".sf", "milestones", "M011", "slices", "S01"), { recursive: true, }); }); @@ -83,7 +83,7 @@ describe("#2821 Bug 1 — ASSESSMENT file force-synced on resume", () => { // Project root has ASSESSMENT with a PASS verdict (written by run-uat, synced by post-unit) const prAssessment = join( mainBase, - ".gsd", + ".sf", "milestones", "M011", "slices", @@ -98,7 +98,7 @@ describe("#2821 Bug 1 — ASSESSMENT file force-synced on resume", () => { // Worktree has a stale ASSESSMENT with FAIL verdict (from the initial run-uat execution) const wtAssessment = join( 
wtBase, - ".gsd", + ".sf", "milestones", "M011", "slices", @@ -124,7 +124,7 @@ describe("#2821 Bug 1 — ASSESSMENT file force-synced on resume", () => { // Project root has ASSESSMENT with verdict const prAssessment = join( mainBase, - ".gsd", + ".sf", "milestones", "M011", "slices", @@ -143,7 +143,7 @@ describe("#2821 Bug 1 — ASSESSMENT file force-synced on resume", () => { const wtAssessment = join( wtBase, - ".gsd", + ".sf", "milestones", "M011", "slices", @@ -165,7 +165,7 @@ describe("#2821 Bug 1 — ASSESSMENT file force-synced on resume", () => { // Project root has ASSESSMENT without verdict (incomplete) const prAssessment = join( mainBase, - ".gsd", + ".sf", "milestones", "M011", "slices", @@ -177,7 +177,7 @@ describe("#2821 Bug 1 — ASSESSMENT file force-synced on resume", () => { // Worktree has ASSESSMENT with verdict:fail const wtAssessment = join( wtBase, - ".gsd", + ".sf", "milestones", "M011", "slices", @@ -220,13 +220,13 @@ describe("#2821 Bug 2 — removeWorktree cleans up despite untracked files", () // Simulate run-uat writing untracked files (S01-UAT-RESULT.md, ASSESSMENT) mkdirSync( - join(info.path, ".gsd", "milestones", "M011", "slices", "S01"), + join(info.path, ".sf", "milestones", "M011", "slices", "S01"), { recursive: true }, ); writeFileSync( join( info.path, - ".gsd", + ".sf", "milestones", "M011", "slices", @@ -238,7 +238,7 @@ describe("#2821 Bug 2 — removeWorktree cleans up despite untracked files", () writeFileSync( join( info.path, - ".gsd", + ".sf", "milestones", "M011", "slices", diff --git a/src/resources/extensions/sf/tests/undo.test.ts b/src/resources/extensions/sf/tests/undo.test.ts index 9de96b719..e512ed220 100644 --- a/src/resources/extensions/sf/tests/undo.test.ts +++ b/src/resources/extensions/sf/tests/undo.test.ts @@ -31,15 +31,15 @@ function makeTempDir(prefix: string): string { test("handleUndo without --force only warns and leaves completed units intact", async () => { const base = makeTempDir("sf-undo-confirm"); try { - 
mkdirSync(join(base, ".gsd"), { recursive: true }); - mkdirSync(join(base, ".gsd", "activity"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); + mkdirSync(join(base, ".sf", "activity"), { recursive: true }); writeFileSync( - join(base, ".gsd", "completed-units.json"), + join(base, ".sf", "completed-units.json"), JSON.stringify(["execute-task/M001/S01/T01"]), "utf-8", ); writeFileSync( - join(base, ".gsd", "activity", "001-execute-task-M001-S01-T01.jsonl"), + join(base, ".sf", "activity", "001-execute-task-M001-S01-T01.jsonl"), "", "utf-8", ); @@ -59,7 +59,7 @@ test("handleUndo without --force only warns and leaves completed units intact", assert.equal(notifications[0]?.level, "warning"); assert.match(notifications[0]?.message ?? "", /Run \/sf undo --force to confirm\./); assert.deepEqual( - JSON.parse(readFileSync(join(base, ".gsd", "completed-units.json"), "utf-8")), + JSON.parse(readFileSync(join(base, ".sf", "completed-units.json"), "utf-8")), ["execute-task/M001/S01/T01"], ); } finally { @@ -70,7 +70,7 @@ test("handleUndo without --force only warns and leaves completed units intact", test("uncheckTaskInPlan flips a checked task back to unchecked", () => { const base = makeTempDir("sf-undo-plan"); try { - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); mkdirSync(sliceDir, { recursive: true }); const planFile = join(sliceDir, "S01-PLAN.md"); writeFileSync( @@ -94,7 +94,7 @@ test("uncheckTaskInPlan flips a checked task back to unchecked", () => { test("findCommitsForUnit reads the newest matching activity log and dedupes SHAs", () => { const base = makeTempDir("sf-undo-activity"); try { - const activityDir = join(base, ".gsd", "activity"); + const activityDir = join(base, ".sf", "activity"); mkdirSync(activityDir, { recursive: true }); writeFileSync( @@ -170,7 +170,7 @@ function makeCtx(): { notifications: Array<{ message: string; 
level: string }>; function setupTaskFixture(base: string): void { // Create milestone/slice/task directory structure - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); mkdirSync(tasksDir, { recursive: true }); @@ -247,12 +247,12 @@ test("handleUndoTask with --force resets task and re-renders plan", async () => assert.equal(task?.status, "pending"); // Summary file deleted - const summaryPath = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks", "T01-SUMMARY.md"); + const summaryPath = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks", "T01-SUMMARY.md"); assert.equal(existsSync(summaryPath), false); // Plan checkbox unchecked const planContent = readFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), "utf-8", ); assert.match(planContent, /\[ \] \*\*T01:/); @@ -289,9 +289,9 @@ test("handleUndoTask accepts partial ID (T01) and resolves from state", async () setupTaskFixture(base); // Create STATE.md so deriveState can resolve the active milestone/slice - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); writeFileSync( - join(base, ".gsd", "STATE.md"), + join(base, ".sf", "STATE.md"), [ "# SF State", "", @@ -318,7 +318,7 @@ test("handleUndoTask accepts partial ID (T01) and resolves from state", async () // ─── handleResetSlice tests ────────────────────────────────────────────────── function setupSliceFixture(base: string): void { - const mDir = join(base, ".gsd", "milestones", "M001"); + const mDir = join(base, ".sf", "milestones", "M001"); const sliceDir = join(mDir, "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); mkdirSync(tasksDir, { recursive: true }); @@ -415,12 +415,12 @@ test("handleResetSlice with --force resets 
slice and all tasks", async () => { assert.equal(t2?.status, "pending"); // Task summaries deleted - const tasksDir = join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"); + const tasksDir = join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"); assert.equal(existsSync(join(tasksDir, "T01-SUMMARY.md")), false); assert.equal(existsSync(join(tasksDir, "T02-SUMMARY.md")), false); // Slice summary and UAT deleted - const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const sliceDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); assert.equal(existsSync(join(sliceDir, "S01-SUMMARY.md")), false); assert.equal(existsSync(join(sliceDir, "S01-UAT.md")), false); @@ -431,7 +431,7 @@ test("handleResetSlice with --force resets slice and all tasks", async () => { // Roadmap checkbox unchecked const roadmapContent = readFileSync( - join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), + join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "utf-8", ); assert.match(roadmapContent, /\[ \] \*\*S01:/); diff --git a/src/resources/extensions/sf/tests/unit-runtime.test.ts b/src/resources/extensions/sf/tests/unit-runtime.test.ts index d7f578878..854c1095b 100644 --- a/src/resources/extensions/sf/tests/unit-runtime.test.ts +++ b/src/resources/extensions/sf/tests/unit-runtime.test.ts @@ -13,11 +13,11 @@ import { test } from 'node:test'; import assert from 'node:assert/strict'; const base = mkdtempSync(join(tmpdir(), "sf-unit-runtime-test-")); -const tasksDir = join(base, ".gsd", "milestones", "M100", "slices", "S02", "tasks"); +const tasksDir = join(base, ".sf", "milestones", "M100", "slices", "S02", "tasks"); mkdirSync(tasksDir, { recursive: true }); -writeFileSync(join(base, ".gsd", "STATE.md"), "## Next Action\nExecute T09 for S02: do the thing\n", "utf-8"); +writeFileSync(join(base, ".sf", "STATE.md"), "## Next Action\nExecute T09 for S02: do the thing\n", "utf-8"); writeFileSync( - join(base, ".gsd", 
"milestones", "M100", "slices", "S02", "S02-PLAN.md"), + join(base, ".sf", "milestones", "M100", "slices", "S02", "S02-PLAN.md"), "# S02: Test Slice\n\n## Tasks\n\n- [ ] **T09: Do the thing** `est:10m`\n Description.\n", "utf-8", ); @@ -44,11 +44,11 @@ console.log("\n=== execute-task durability inspection ==="); writeFileSync(join(tasksDir, "T09-SUMMARY.md"), "# done\n", "utf-8"); writeFileSync( - join(base, ".gsd", "milestones", "M100", "slices", "S02", "S02-PLAN.md"), + join(base, ".sf", "milestones", "M100", "slices", "S02", "S02-PLAN.md"), "# S02: Test Slice\n\n## Tasks\n\n- [x] **T09: Do the thing** `est:10m`\n Description.\n", "utf-8", ); - writeFileSync(join(base, ".gsd", "STATE.md"), "## Next Action\nExecute T10 for S02: next thing\n", "utf-8"); + writeFileSync(join(base, ".sf", "STATE.md"), "## Next Action\nExecute T10 for S02: next thing\n", "utf-8"); clearPathCache(); status = await inspectExecuteTaskDurability(base, "M100/S02/T09"); @@ -78,7 +78,7 @@ console.log("\n=== hook unit type sanitization (slash in unitType) ==="); assert.deepStrictEqual(loaded!.phase, "dispatched", "hook phase correct"); // Verify the file is in the units dir, not in a subdirectory - const unitsDir = join(base, ".gsd", "runtime", "units"); + const unitsDir = join(base, ".sf", "runtime", "units"); const files = readdirSync(unitsDir); const hookFile = files.find((f: string) => f.includes("hook-code-review")); assert.ok(hookFile !== undefined, "hook file exists with sanitized name"); @@ -96,12 +96,12 @@ const mhBase = mkdtempSync(join(tmpdir(), "sf-unit-runtime-mh-test-")); console.log("\n=== must-haves: all mentioned in summary ==="); { - const tasksDir2 = join(mhBase, ".gsd", "milestones", "M200", "slices", "S01", "tasks"); + const tasksDir2 = join(mhBase, ".sf", "milestones", "M200", "slices", "S01", "tasks"); mkdirSync(tasksDir2, { recursive: true }); // Slice plan with T01 checked writeFileSync( - join(mhBase, ".gsd", "milestones", "M200", "slices", "S01", "S01-PLAN.md"), + 
join(mhBase, ".sf", "milestones", "M200", "slices", "S01", "S01-PLAN.md"), "# S01: Test\n\n## Tasks\n\n- [x] **T01: Build parser** `est:10m`\n Build the parser.\n", "utf-8", ); @@ -118,7 +118,7 @@ console.log("\n=== must-haves: all mentioned in summary ==="); "utf-8", ); // STATE.md with next action advanced past T01 - writeFileSync(join(mhBase, ".gsd", "STATE.md"), "## Next Action\nExecute T02 for S01: next thing\n", "utf-8"); + writeFileSync(join(mhBase, ".sf", "STATE.md"), "## Next Action\nExecute T02 for S01: next thing\n", "utf-8"); const status = await inspectExecuteTaskDurability(mhBase, "M200/S01/T01"); assert.ok(status !== null, "mh-all: status exists"); @@ -132,11 +132,11 @@ console.log("\n=== must-haves: all mentioned in summary ==="); console.log("\n=== must-haves: partially mentioned in summary ==="); { - const tasksDir3 = join(mhBase, ".gsd", "milestones", "M200", "slices", "S02", "tasks"); + const tasksDir3 = join(mhBase, ".sf", "milestones", "M200", "slices", "S02", "tasks"); mkdirSync(tasksDir3, { recursive: true }); writeFileSync( - join(mhBase, ".gsd", "milestones", "M200", "slices", "S02", "S02-PLAN.md"), + join(mhBase, ".sf", "milestones", "M200", "slices", "S02", "S02-PLAN.md"), "# S02: Test\n\n## Tasks\n\n- [x] **T01: Build thing** `est:10m`\n Build.\n", "utf-8", ); @@ -152,7 +152,7 @@ console.log("\n=== must-haves: partially mentioned in summary ==="); "# T01: Build thing\n\nAdded computeScore function with full test coverage.\n", "utf-8", ); - writeFileSync(join(mhBase, ".gsd", "STATE.md"), "## Next Action\nExecute T02 for S02: next thing\n", "utf-8"); + writeFileSync(join(mhBase, ".sf", "STATE.md"), "## Next Action\nExecute T02 for S02: next thing\n", "utf-8"); clearPathCache(); const status = await inspectExecuteTaskDurability(mhBase, "M200/S02/T01"); @@ -166,11 +166,11 @@ console.log("\n=== must-haves: partially mentioned in summary ==="); console.log("\n=== must-haves: no task plan file ==="); { - const tasksDir4 = join(mhBase, ".gsd", 
"milestones", "M200", "slices", "S03", "tasks"); + const tasksDir4 = join(mhBase, ".sf", "milestones", "M200", "slices", "S03", "tasks"); mkdirSync(tasksDir4, { recursive: true }); writeFileSync( - join(mhBase, ".gsd", "milestones", "M200", "slices", "S03", "S03-PLAN.md"), + join(mhBase, ".sf", "milestones", "M200", "slices", "S03", "S03-PLAN.md"), "# S03: Test\n\n## Tasks\n\n- [x] **T01: Quick fix** `est:5m`\n Fix.\n", "utf-8", ); @@ -180,7 +180,7 @@ console.log("\n=== must-haves: no task plan file ==="); "# T01: Quick fix\n\nFixed the thing.\n", "utf-8", ); - writeFileSync(join(mhBase, ".gsd", "STATE.md"), "## Next Action\nExecute T02 for S03: next thing\n", "utf-8"); + writeFileSync(join(mhBase, ".sf", "STATE.md"), "## Next Action\nExecute T02 for S03: next thing\n", "utf-8"); clearPathCache(); const status = await inspectExecuteTaskDurability(mhBase, "M200/S03/T01"); @@ -191,11 +191,11 @@ console.log("\n=== must-haves: no task plan file ==="); console.log("\n=== must-haves: present but no summary file ==="); { - const tasksDir5 = join(mhBase, ".gsd", "milestones", "M200", "slices", "S04", "tasks"); + const tasksDir5 = join(mhBase, ".sf", "milestones", "M200", "slices", "S04", "tasks"); mkdirSync(tasksDir5, { recursive: true }); writeFileSync( - join(mhBase, ".gsd", "milestones", "M200", "slices", "S04", "S04-PLAN.md"), + join(mhBase, ".sf", "milestones", "M200", "slices", "S04", "S04-PLAN.md"), "# S04: Test\n\n## Tasks\n\n- [ ] **T01: Build parser** `est:10m`\n Build.\n", "utf-8", ); @@ -205,7 +205,7 @@ console.log("\n=== must-haves: present but no summary file ==="); "# T01: Build parser\n\n## Must-Haves\n\n- [ ] `parseData` function exported\n- [ ] Error handling covers edge cases\n\n## Steps\n\n1. 
Do stuff\n", "utf-8", ); - writeFileSync(join(mhBase, ".gsd", "STATE.md"), "## Next Action\nExecute T01 for S04: build parser\n", "utf-8"); + writeFileSync(join(mhBase, ".sf", "STATE.md"), "## Next Action\nExecute T01 for S04: build parser\n", "utf-8"); clearPathCache(); const status = await inspectExecuteTaskDurability(mhBase, "M200/S04/T01"); @@ -217,11 +217,11 @@ console.log("\n=== must-haves: present but no summary file ==="); console.log("\n=== must-haves: substring matching (no backtick tokens) ==="); { - const tasksDir6 = join(mhBase, ".gsd", "milestones", "M200", "slices", "S05", "tasks"); + const tasksDir6 = join(mhBase, ".sf", "milestones", "M200", "slices", "S05", "tasks"); mkdirSync(tasksDir6, { recursive: true }); writeFileSync( - join(mhBase, ".gsd", "milestones", "M200", "slices", "S05", "S05-PLAN.md"), + join(mhBase, ".sf", "milestones", "M200", "slices", "S05", "S05-PLAN.md"), "# S05: Test\n\n## Tasks\n\n- [x] **T01: Add diagnostics** `est:10m`\n Add.\n", "utf-8", ); @@ -237,7 +237,7 @@ console.log("\n=== must-haves: substring matching (no backtick tokens) ==="); "# T01: Add diagnostics\n\nImplemented heuristic matching for must-have items. 
Recovery diagnostic string now includes gap counts.\n", "utf-8", ); - writeFileSync(join(mhBase, ".gsd", "STATE.md"), "## Next Action\nExecute T02 for S05: next thing\n", "utf-8"); + writeFileSync(join(mhBase, ".sf", "STATE.md"), "## Next Action\nExecute T02 for S05: next thing\n", "utf-8"); clearPathCache(); const status = await inspectExecuteTaskDurability(mhBase, "M200/S05/T01"); diff --git a/src/resources/extensions/sf/tests/uok-audit-unified.test.ts b/src/resources/extensions/sf/tests/uok-audit-unified.test.ts index bb7f2b3d5..ca8b0d585 100644 --- a/src/resources/extensions/sf/tests/uok-audit-unified.test.ts +++ b/src/resources/extensions/sf/tests/uok-audit-unified.test.ts @@ -10,7 +10,7 @@ import { setLogBasePath, logWarning } from "../workflow-logger.ts"; import { setAuditEnvelopeEnabled } from "../uok/audit-toggle.ts"; function readAuditEvents(basePath: string): Array<Record<string, unknown>> { - const file = join(basePath, ".gsd", "audit", "events.jsonl"); + const file = join(basePath, ".sf", "audit", "events.jsonl"); if (!existsSync(file)) return []; const raw = readFileSync(file, "utf-8"); return raw diff --git a/src/resources/extensions/sf/tests/uok-model-policy.test.ts b/src/resources/extensions/sf/tests/uok-model-policy.test.ts index fb8f50f20..ffbcc21b1 100644 --- a/src/resources/extensions/sf/tests/uok-model-policy.test.ts +++ b/src/resources/extensions/sf/tests/uok-model-policy.test.ts @@ -32,7 +32,7 @@ test("uok model policy builds requirement vectors from unit metadata", () => { test("uok model policy enforces provider/api/tool constraints and emits decision audit events", () => { const basePath = mkdtempSync(join(tmpdir(), "sf-uok-model-policy-")); try { - mkdirSync(join(basePath, ".gsd"), { recursive: true }); + mkdirSync(join(basePath, ".sf"), { recursive: true }); registerToolCompatibility("screenshot", { producesImages: true }); const result = applyModelPolicyFilter( @@ -69,7 +69,7 @@ test("uok model policy enforces provider/api/tool 
constraints and emits decision assert.equal(result.decisions[3]?.allowed, false); assert.match(result.decisions[3]?.reason ?? "", /provider denied by policy/); - const auditLogPath = join(basePath, ".gsd", "audit", "events.jsonl"); + const auditLogPath = join(basePath, ".sf", "audit", "events.jsonl"); const auditLines = readFileSync(auditLogPath, "utf-8") .trim() .split("\n") diff --git a/src/resources/extensions/sf/tests/uok-plan-v2-wiring.test.ts b/src/resources/extensions/sf/tests/uok-plan-v2-wiring.test.ts index 34af78b93..769c6075c 100644 --- a/src/resources/extensions/sf/tests/uok-plan-v2-wiring.test.ts +++ b/src/resources/extensions/sf/tests/uok-plan-v2-wiring.test.ts @@ -24,19 +24,19 @@ const tempDirs = new Set<string>(); function createBasePath(): string { const basePath = mkdtempSync(join(tmpdir(), "sf-uok-planv2-")); - mkdirSync(join(basePath, ".gsd", "milestones", MILESTONE_ID), { recursive: true }); + mkdirSync(join(basePath, ".sf", "milestones", MILESTONE_ID), { recursive: true }); tempDirs.add(basePath); return basePath; } function writeMilestoneFile(basePath: string, suffix: string, content: string): void { - const milestoneDir = join(basePath, ".gsd", "milestones", MILESTONE_ID); + const milestoneDir = join(basePath, ".sf", "milestones", MILESTONE_ID); mkdirSync(milestoneDir, { recursive: true }); writeFileSync(join(milestoneDir, `${MILESTONE_ID}-${suffix}.md`), `${content}\n`, "utf-8"); } function writeSliceFile(basePath: string, suffix: string, content: string): void { - const sliceDir = join(basePath, ".gsd", "milestones", MILESTONE_ID, "slices", SLICE_ID); + const sliceDir = join(basePath, ".sf", "milestones", MILESTONE_ID, "slices", SLICE_ID); mkdirSync(sliceDir, { recursive: true }); writeFileSync(join(sliceDir, `${SLICE_ID}-${suffix}.md`), `${content}\n`, "utf-8"); } diff --git a/src/resources/extensions/sf/tests/vacuous-truth-slices.test.ts b/src/resources/extensions/sf/tests/vacuous-truth-slices.test.ts index 6bd668106..7649951e2 100644 
--- a/src/resources/extensions/sf/tests/vacuous-truth-slices.test.ts +++ b/src/resources/extensions/sf/tests/vacuous-truth-slices.test.ts @@ -23,13 +23,13 @@ import { test("deriveStateFromDb does NOT skip to validating when slice array is empty (#2667)", async () => { const base = mkdtempSync(join(tmpdir(), "sf-vacuous-truth-")); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); try { // Set up a milestone with a roadmap that references slices, // but the DB has NO slice rows (simulating a worktree DB wipe) writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), + join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), [ "# M001: Test Milestone", "", @@ -73,11 +73,11 @@ test("deriveStateFromDb does NOT skip to validating when slice array is empty (# test("deriveStateFromDb correctly reaches validating when all slices are done (#2667 guard)", async () => { const base = mkdtempSync(join(tmpdir(), "sf-vacuous-truth-")); - mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01"), { recursive: true }); try { writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), + join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), [ "# M001: Test Milestone", "", @@ -90,7 +90,7 @@ test("deriveStateFromDb correctly reaches validating when all slices are done (# // Write a slice summary so the filesystem recognizes it as complete writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"), "# S01 Summary\n\nDone.", ); diff --git a/src/resources/extensions/sf/tests/validate-directory.test.ts b/src/resources/extensions/sf/tests/validate-directory.test.ts index ece832f51..0cc44394e 100644 --- 
a/src/resources/extensions/sf/tests/validate-directory.test.ts +++ b/src/resources/extensions/sf/tests/validate-directory.test.ts @@ -130,8 +130,8 @@ test("validateDirectory: SF worktree path nested under home is NOT blocked (#131 const fakeHome = makeTempDir("fake-home"); process.env.HOME = fakeHome; process.env.USERPROFILE = fakeHome; - const worktreePath = join(homedir(), ".gsd", "worktrees", "M001"); - const worktreeRoot = join(fakeHome, ".gsd", "worktrees", "M001"); + const worktreePath = join(homedir(), ".sf", "worktrees", "M001"); + const worktreeRoot = join(fakeHome, ".sf", "worktrees", "M001"); mkdirSync(worktreePath, { recursive: true }); try { // The worktree CWD itself is a valid location — it must pass. diff --git a/src/resources/extensions/sf/tests/validate-milestone-stuck-guard.test.ts b/src/resources/extensions/sf/tests/validate-milestone-stuck-guard.test.ts index e7153794c..48515ce5b 100644 --- a/src/resources/extensions/sf/tests/validate-milestone-stuck-guard.test.ts +++ b/src/resources/extensions/sf/tests/validate-milestone-stuck-guard.test.ts @@ -54,13 +54,13 @@ function setupTestEnvironment(): void { tempDir = join(tmpdir(), `validate-milestone-guard-${Date.now()}-${Math.random().toString(36).slice(2)}`); mkdirSync(tempDir, { recursive: true }); - const milestoneDir = join(tempDir, ".gsd", "milestones", "M001"); + const milestoneDir = join(tempDir, ".sf", "milestones", "M001"); mkdirSync(milestoneDir, { recursive: true }); process.chdir(tempDir); _clearGsdRootCache(); - dbPath = join(tempDir, ".gsd", "sf.db"); + dbPath = join(tempDir, ".sf", "sf.db"); openDatabase(dbPath); invalidateAllCaches(); } @@ -72,7 +72,7 @@ function cleanupTestEnvironment(): void { } function writeValidationFile(verdict: string): void { - const path = join(tempDir, ".gsd", "milestones", "M001", "M001-VALIDATION.md"); + const path = join(tempDir, ".sf", "milestones", "M001", "M001-VALIDATION.md"); const content = `--- verdict: ${verdict} remediation_round: 1 diff --git 
a/src/resources/extensions/sf/tests/validate-milestone-write-order.test.ts b/src/resources/extensions/sf/tests/validate-milestone-write-order.test.ts index 2e4760300..64f1dc42d 100644 --- a/src/resources/extensions/sf/tests/validate-milestone-write-order.test.ts +++ b/src/resources/extensions/sf/tests/validate-milestone-write-order.test.ts @@ -12,7 +12,7 @@ import { clearParseCache } from "../files.js"; function makeTmpBase(): string { const base = join(tmpdir(), `sf-val-handler-${randomUUID()}`); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); return base; } @@ -42,7 +42,7 @@ describe("handleValidateMilestone write ordering (#2725)", () => { it("writes DB row and disk file on success", async () => { base = makeTmpBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001" }); insertSlice({ id: "S01", milestoneId: "M001" }); @@ -59,7 +59,7 @@ describe("handleValidateMilestone write ordering (#2725)", () => { assert.equal(row!.status, "pass"); // Disk file exists - const filePath = join(base, ".gsd", "milestones", "M001", "M001-VALIDATION.md"); + const filePath = join(base, ".sf", "milestones", "M001", "M001-VALIDATION.md"); assert.ok(existsSync(filePath), "VALIDATION.md should exist on disk"); const validationMd = readFileSync(filePath, "utf-8"); assert.match(validationMd, /## Verification Class Compliance/); @@ -69,7 +69,7 @@ describe("handleValidateMilestone write ordering (#2725)", () => { it("omits verification class section when no verification classes are supplied", async () => { base = makeTmpBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001" }); insertSlice({ id: "S01", milestoneId: "M001" }); @@ -80,14 +80,14 @@ describe("handleValidateMilestone write ordering (#2725)", 
() => { ); assert.ok(!("error" in result), `unexpected error: ${"error" in result ? result.error : ""}`); - const filePath = join(base, ".gsd", "milestones", "M001", "M001-VALIDATION.md"); + const filePath = join(base, ".sf", "milestones", "M001", "M001-VALIDATION.md"); const validationMd = readFileSync(filePath, "utf-8"); assert.doesNotMatch(validationMd, /## Verification Class Compliance/); }); it("rolls back DB row when disk write fails", async () => { base = makeTmpBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001" }); insertSlice({ id: "S01", milestoneId: "M001" }); @@ -95,7 +95,7 @@ describe("handleValidateMilestone write ordering (#2725)", () => { // Force disk write failure by replacing the milestone directory with a // regular file. saveFile() will fail because it cannot write inside a // non-directory. This works cross-platform (chmod is ignored on Windows). - const milestoneDir = join(base, ".gsd", "milestones", "M001"); + const milestoneDir = join(base, ".sf", "milestones", "M001"); rmSync(milestoneDir, { recursive: true, force: true }); writeFileSync(milestoneDir, "not-a-directory"); @@ -115,7 +115,7 @@ describe("handleValidateMilestone write ordering (#2725)", () => { it("persists milestone validation gate_runs rows when UOK gates are enabled", async () => { base = makeTmpBase(); - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); openDatabase(dbPath); insertMilestone({ id: "M001" }); insertSlice({ id: "S01", milestoneId: "M001" }); diff --git a/src/resources/extensions/sf/tests/validate-milestone.test.ts b/src/resources/extensions/sf/tests/validate-milestone.test.ts index d3919b87b..772f0ea7a 100644 --- a/src/resources/extensions/sf/tests/validate-milestone.test.ts +++ b/src/resources/extensions/sf/tests/validate-milestone.test.ts @@ -19,7 +19,7 @@ import { closeDatabase, insertMilestone, insertSlice, openDatabase 
} from "../sf function makeTmpBase(): string { const base = join(tmpdir(), `sf-val-test-${randomUUID()}`); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); return base; } @@ -31,42 +31,42 @@ function cleanup(base: string): void { } function openTestDb(base: string): void { - const dbPath = join(base, ".gsd", "sf.db"); + const dbPath = join(base, ".sf", "sf.db"); assert.equal(openDatabase(dbPath), true, "test DB should open"); } function writeRoadmap(base: string, mid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-ROADMAP.md`), content); } function writeMilestoneSummary(base: string, mid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-SUMMARY.md`), content); } function writeValidation(base: string, mid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid); + const dir = join(base, ".sf", "milestones", mid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${mid}-VALIDATION.md`), content); } function writeSlicePlan(base: string, mid: string, sid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid, "slices", sid); + const dir = join(base, ".sf", "milestones", mid, "slices", sid); mkdirSync(join(dir, "tasks"), { recursive: true }); writeFileSync(join(dir, `${sid}-PLAN.md`), content); } function writeSliceSummary(base: string, mid: string, sid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid, "slices", sid); + const dir = join(base, ".sf", "milestones", mid, "slices", sid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${sid}-SUMMARY.md`), 
content); } function writeSliceAssessment(base: string, mid: string, sid: string, content: string): void { - const dir = join(base, ".gsd", "milestones", mid, "slices", sid); + const dir = join(base, ".sf", "milestones", mid, "slices", sid); mkdirSync(dir, { recursive: true }); writeFileSync(join(dir, `${sid}-ASSESSMENT.md`), content); } @@ -151,7 +151,7 @@ test("deriveState returns validating-milestone when all slices done and no VALID try { writeRoadmap(base, "M001", ALL_DONE_ROADMAP); // Write CONTEXT so milestone has a title - const dir = join(base, ".gsd", "milestones", "M001"); + const dir = join(base, ".sf", "milestones", "M001"); writeFileSync(join(dir, "M001-CONTEXT.md"), CONTEXT_FILE); const state = await deriveState(base); @@ -210,7 +210,7 @@ test("buildValidateMilestonePrompt inlines ASSESSMENT evidence instead of UAT sp const base = makeTmpBase(); try { writeRoadmap(base, "M001", ALL_DONE_ROADMAP); - const dir = join(base, ".gsd", "milestones", "M001"); + const dir = join(base, ".sf", "milestones", "M001"); writeFileSync(join(dir, "M001-CONTEXT.md"), CONTEXT_FILE); writeSliceSummary(base, "M001", "S01", "# S01 Summary\nDelivered."); writeFileSync(join(dir, "slices", "S01", "S01-UAT.md"), "# UAT Spec\nDo the thing.\n"); @@ -372,7 +372,7 @@ test("dispatch rule skips when skip_milestone_validation preference is set", asy assert.equal(result.action, "skip"); // Verify the VALIDATION file was written - const validationPath = join(base, ".gsd", "milestones", "M001", "M001-VALIDATION.md"); + const validationPath = join(base, ".sf", "milestones", "M001", "M001-VALIDATION.md"); assert.ok(existsSync(validationPath), "VALIDATION file should be written on skip"); } finally { cleanup(base); @@ -384,7 +384,7 @@ test("dispatch rule skips when skip_milestone_validation preference is set", asy test("resolveExpectedArtifactPath returns VALIDATION path for validate-milestone", () => { const base = makeTmpBase(); try { - mkdirSync(join(base, ".gsd", "milestones", "M001"), 
{ recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); const result = resolveExpectedArtifactPath("validate-milestone", "M001", base); assert.ok(result); assert.ok(result!.includes("VALIDATION")); @@ -409,7 +409,7 @@ test("verifyExpectedArtifact passes when VALIDATION.md exists", () => { test("verifyExpectedArtifact fails when VALIDATION.md is missing", () => { const base = makeTmpBase(); try { - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); clearPathCache(); clearParseCache(); const result = verifyExpectedArtifact("validate-milestone", "M001", base); diff --git a/src/resources/extensions/sf/tests/wave5-consistency-regressions.test.ts b/src/resources/extensions/sf/tests/wave5-consistency-regressions.test.ts index 1c0726b12..dd7c98c64 100644 --- a/src/resources/extensions/sf/tests/wave5-consistency-regressions.test.ts +++ b/src/resources/extensions/sf/tests/wave5-consistency-regressions.test.ts @@ -96,7 +96,7 @@ describe("WorkflowEvent v field", () => { actor: "system", }); - const logPath = join(tmp, ".gsd", "event-log.jsonl"); + const logPath = join(tmp, ".sf", "event-log.jsonl"); const line = readFileSync(logPath, "utf-8").trim(); const event = JSON.parse(line); assert.strictEqual(event.v, 2, "New events should have v:2"); diff --git a/src/resources/extensions/sf/tests/windows-path-normalization.test.ts b/src/resources/extensions/sf/tests/windows-path-normalization.test.ts index 419c1cf7a..05c2ecfdc 100644 --- a/src/resources/extensions/sf/tests/windows-path-normalization.test.ts +++ b/src/resources/extensions/sf/tests/windows-path-normalization.test.ts @@ -70,11 +70,11 @@ assert.deepStrictEqual( console.log("\n=== cd command construction with normalised paths ==="); -const windowsCwd = "C:\\Users\\user\\project\\.gsd\\worktrees\\M001"; +const windowsCwd = "C:\\Users\\user\\project\\.sf\\worktrees\\M001"; const cdCommand 
= `cd ${bashPath(windowsCwd)}`; assert.deepStrictEqual( cdCommand, - "cd 'C:/Users/user/project/.gsd/worktrees/M001'", + "cd 'C:/Users/user/project/.sf/worktrees/M001'", "cd command uses forward slashes for Windows worktree path", ); @@ -88,10 +88,10 @@ assert.ok( console.log("\n=== teardown orphan warning path formatting ==="); -const windowsWtDir = "C:\\Users\\user\\project\\.gsd\\worktrees\\M001"; +const windowsWtDir = "C:\\Users\\user\\project\\.sf\\worktrees\\M001"; const helpCommand = `rm -rf "${windowsWtDir.replaceAll("\\", "/")}"`; assert.deepStrictEqual( helpCommand, - 'rm -rf "C:/Users/user/project/.gsd/worktrees/M001"', + 'rm -rf "C:/Users/user/project/.sf/worktrees/M001"', "orphan cleanup help command uses forward slashes", ); diff --git a/src/resources/extensions/sf/tests/workflow-events.test.ts b/src/resources/extensions/sf/tests/workflow-events.test.ts index 2a786ef04..a5c867e41 100644 --- a/src/resources/extensions/sf/tests/workflow-events.test.ts +++ b/src/resources/extensions/sf/tests/workflow-events.test.ts @@ -28,11 +28,11 @@ function makeEvent(cmd: string, params: Record<string, unknown> = {}): Omit<Work // ─── appendEvent ───────────────────────────────────────────────────────── -test('workflow-events: appendEvent creates .gsd dir and event-log.jsonl', () => { +test('workflow-events: appendEvent creates .sf dir and event-log.jsonl', () => { const base = tempDir(); try { appendEvent(base, makeEvent('complete-task', { milestoneId: 'M001', taskId: 'T01' })); - assert.ok(fs.existsSync(path.join(base, '.gsd', 'event-log.jsonl'))); + assert.ok(fs.existsSync(path.join(base, '.sf', 'event-log.jsonl'))); } finally { cleanupDir(base); } @@ -42,7 +42,7 @@ test('workflow-events: appendEvent writes valid JSON line', () => { const base = tempDir(); try { appendEvent(base, makeEvent('complete-task', { milestoneId: 'M001', taskId: 'T01' })); - const content = fs.readFileSync(path.join(base, '.gsd', 'event-log.jsonl'), 'utf-8'); + const content = 
fs.readFileSync(path.join(base, '.sf', 'event-log.jsonl'), 'utf-8'); const lines = content.trim().split('\n'); assert.strictEqual(lines.length, 1); const parsed = JSON.parse(lines[0]!) as WorkflowEvent; @@ -60,7 +60,7 @@ test('workflow-events: appendEvent appends multiple events', () => { try { appendEvent(base, makeEvent('complete-task', { taskId: 'T01' })); appendEvent(base, makeEvent('complete-slice', { sliceId: 'S01' })); - const events = readEvents(path.join(base, '.gsd', 'event-log.jsonl')); + const events = readEvents(path.join(base, '.sf', 'event-log.jsonl')); assert.strictEqual(events.length, 2); assert.strictEqual(events[0]!.cmd, 'complete-task'); assert.strictEqual(events[1]!.cmd, 'complete-slice'); @@ -74,7 +74,7 @@ test('workflow-events: same cmd+params → same hash (deterministic)', () => { try { appendEvent(base, makeEvent('plan-task', { milestoneId: 'M001', sliceId: 'S01' })); appendEvent(base, makeEvent('plan-task', { milestoneId: 'M001', sliceId: 'S01' })); - const events = readEvents(path.join(base, '.gsd', 'event-log.jsonl')); + const events = readEvents(path.join(base, '.sf', 'event-log.jsonl')); assert.strictEqual(events[0]!.hash, events[1]!.hash, 'identical cmd+params produce identical hash'); } finally { cleanupDir(base); @@ -86,7 +86,7 @@ test('workflow-events: different params → different hash', () => { try { appendEvent(base, makeEvent('complete-task', { taskId: 'T01' })); appendEvent(base, makeEvent('complete-task', { taskId: 'T02' })); - const events = readEvents(path.join(base, '.gsd', 'event-log.jsonl')); + const events = readEvents(path.join(base, '.sf', 'event-log.jsonl')); assert.notStrictEqual(events[0]!.hash, events[1]!.hash, 'different params produce different hash'); } finally { cleanupDir(base); @@ -103,8 +103,8 @@ test('workflow-events: readEvents returns [] for non-existent file', () => { test('workflow-events: readEvents skips corrupted lines', () => { const base = tempDir(); try { - fs.mkdirSync(path.join(base, '.gsd'), { 
recursive: true }); - const logPath = path.join(base, '.gsd', 'event-log.jsonl'); + fs.mkdirSync(path.join(base, '.sf'), { recursive: true }); + const logPath = path.join(base, '.sf', 'event-log.jsonl'); // Write a valid line, a corrupted line, and another valid line fs.writeFileSync(logPath, '{"cmd":"complete-task","params":{},"ts":"2026-01-01T00:00:00Z","hash":"abcd1234abcd1234","actor":"agent"}\n' + @@ -178,13 +178,13 @@ test('workflow-events: compactMilestoneEvents archives milestone events', () => assert.strictEqual(result.archived, 2, 'should archive 2 M001 events'); // Archive file should exist - const archivePath = path.join(base, '.gsd', 'event-log-M001.jsonl.archived'); + const archivePath = path.join(base, '.sf', 'event-log-M001.jsonl.archived'); assert.ok(fs.existsSync(archivePath), 'archive file should exist'); const archived = readEvents(archivePath); assert.strictEqual(archived.length, 2, 'archive file should have 2 events'); // Active log should retain only M002 event - const active = readEvents(path.join(base, '.gsd', 'event-log.jsonl')); + const active = readEvents(path.join(base, '.sf', 'event-log.jsonl')); assert.strictEqual(active.length, 1, 'active log should have 1 remaining event'); assert.strictEqual((active[0]!.params as { milestoneId?: string }).milestoneId, 'M002'); } finally { @@ -197,7 +197,7 @@ test('workflow-events: compactMilestoneEvents empties active log when all events try { appendEvent(base, makeEvent('complete-task', { milestoneId: 'M001', taskId: 'T01' })); compactMilestoneEvents(base, 'M001'); - const active = readEvents(path.join(base, '.gsd', 'event-log.jsonl')); + const active = readEvents(path.join(base, '.sf', 'event-log.jsonl')); assert.strictEqual(active.length, 0, 'active log should be empty after full compact'); } finally { cleanupDir(base); diff --git a/src/resources/extensions/sf/tests/workflow-logger-audit.test.ts b/src/resources/extensions/sf/tests/workflow-logger-audit.test.ts index 1dd2a27b0..e3583c404 100644 
--- a/src/resources/extensions/sf/tests/workflow-logger-audit.test.ts +++ b/src/resources/extensions/sf/tests/workflow-logger-audit.test.ts @@ -18,12 +18,12 @@ import { function createTempProject(): string { const tmp = mkdtempSync(join(tmpdir(), "sf-wflog-test-")); - mkdirSync(join(tmp, ".gsd"), { recursive: true }); + mkdirSync(join(tmp, ".sf"), { recursive: true }); return tmp; } function readAuditLines(basePath: string): Record<string, unknown>[] { - const auditPath = join(basePath, ".gsd", "audit-log.jsonl"); + const auditPath = join(basePath, ".sf", "audit-log.jsonl"); if (!existsSync(auditPath)) return []; const content = readFileSync(auditPath, "utf-8").trim(); if (!content) return []; diff --git a/src/resources/extensions/sf/tests/workflow-logger.test.ts b/src/resources/extensions/sf/tests/workflow-logger.test.ts index 21f78cd2a..250c40b9a 100644 --- a/src/resources/extensions/sf/tests/workflow-logger.test.ts +++ b/src/resources/extensions/sf/tests/workflow-logger.test.ts @@ -253,11 +253,11 @@ describe("workflow-logger", () => { cleanup(dir); }); - test("writes entry to .gsd/audit-log.jsonl after setLogBasePath", () => { + test("writes entry to .sf/audit-log.jsonl after setLogBasePath", () => { setLogBasePath(dir); logError("engine", "audit test entry"); - const auditPath = join(dir, ".gsd", "audit-log.jsonl"); + const auditPath = join(dir, ".sf", "audit-log.jsonl"); assert.ok(existsSync(auditPath), "audit-log.jsonl should exist"); const content = readFileSync(auditPath, "utf-8"); const entry = JSON.parse(content.trim()); @@ -271,7 +271,7 @@ describe("workflow-logger", () => { _resetLogs(); logError("engine", "post-reset entry"); - const auditPath = join(dir, ".gsd", "audit-log.jsonl"); + const auditPath = join(dir, ".sf", "audit-log.jsonl"); assert.ok(existsSync(auditPath), "audit-log.jsonl should exist after _resetLogs"); const content = readFileSync(auditPath, "utf-8"); const entry = JSON.parse(content.trim()); diff --git 
a/src/resources/extensions/sf/tests/workflow-manifest.test.ts b/src/resources/extensions/sf/tests/workflow-manifest.test.ts index ae9899648..2576b828b 100644 --- a/src/resources/extensions/sf/tests/workflow-manifest.test.ts +++ b/src/resources/extensions/sf/tests/workflow-manifest.test.ts @@ -52,7 +52,7 @@ test('workflow-manifest: writeManifest creates state-manifest.json with version openDatabase(tempDbPath(base)); try { writeManifest(base); - const manifestPath = path.join(base, '.gsd', 'state-manifest.json'); + const manifestPath = path.join(base, '.sf', 'state-manifest.json'); assert.ok(fs.existsSync(manifestPath), 'state-manifest.json should exist'); const raw = JSON.parse(fs.readFileSync(manifestPath, 'utf-8')); assert.strictEqual(raw.version, 1); @@ -262,9 +262,9 @@ test('workflow-manifest: snapshotState coerces string placeholders in sequence c test('workflow-manifest: readManifest throws on unsupported version', () => { const base = tempDir(); try { - fs.mkdirSync(path.join(base, '.gsd'), { recursive: true }); + fs.mkdirSync(path.join(base, '.sf'), { recursive: true }); fs.writeFileSync( - path.join(base, '.gsd', 'state-manifest.json'), + path.join(base, '.sf', 'state-manifest.json'), JSON.stringify({ version: 99, exported_at: '', milestones: [], slices: [], tasks: [], decisions: [], verification_evidence: [] }), ); assert.throws( diff --git a/src/resources/extensions/sf/tests/workflow-mcp.test.ts b/src/resources/extensions/sf/tests/workflow-mcp.test.ts index c7a97c70f..28dcd0419 100644 --- a/src/resources/extensions/sf/tests/workflow-mcp.test.ts +++ b/src/resources/extensions/sf/tests/workflow-mcp.test.ts @@ -179,7 +179,7 @@ test("detectWorkflowMcpLaunchConfig resolves the bundled server relative to the test("workflow MCP launch config reaches mutation tools over stdio", async () => { const projectRoot = mkdtempSync(join(tmpdir(), "sf-workflow-transport-")); - mkdirSync(join(projectRoot, ".gsd"), { recursive: true }); + mkdirSync(join(projectRoot, ".sf"), 
{ recursive: true }); const launch = detectWorkflowMcpLaunchConfig(projectRoot, {}); assert.ok(launch, "expected a workflow MCP launch config"); @@ -333,12 +333,12 @@ test("workflow MCP launch config reaches mutation tools over stdio", async () => /Planned slice S01/, ); assert.ok( - existsSync(join(projectRoot, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md")), + existsSync(join(projectRoot, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md")), "expected slice plan artifact to be written through stdio MCP", ); assert.ok( existsSync( - join(projectRoot, ".gsd", "milestones", "M001", "slices", "S01", "tasks", "T01-PLAN.md"), + join(projectRoot, ".sf", "milestones", "M001", "slices", "S01", "tasks", "T01-PLAN.md"), ), "expected task plan artifact to be written through stdio MCP", ); @@ -350,7 +350,7 @@ test("workflow MCP launch config reaches mutation tools over stdio", async () => test("workflow MCP ask_user_questions uses stdio elicitation round-trip", async () => { const projectRoot = mkdtempSync(join(tmpdir(), "sf-workflow-elicit-")); - mkdirSync(join(projectRoot, ".gsd"), { recursive: true }); + mkdirSync(join(projectRoot, ".sf"), { recursive: true }); const launch = detectWorkflowMcpLaunchConfig(projectRoot, {}); assert.ok(launch, "expected a workflow MCP launch config"); diff --git a/src/resources/extensions/sf/tests/workflow-reconcile.test.ts b/src/resources/extensions/sf/tests/workflow-reconcile.test.ts index 3d3d89c73..760fa253c 100644 --- a/src/resources/extensions/sf/tests/workflow-reconcile.test.ts +++ b/src/resources/extensions/sf/tests/workflow-reconcile.test.ts @@ -69,14 +69,14 @@ test("resolveConflict(pick=main) rewrites the worktree log durably", () => { resolveConflict(main, worktree, "milestone:M001", "main"); assert.equal(listConflicts(main).length, 0, "conflict file should be cleared after resolving main"); - const conflictsPath = join(main, ".gsd", "CONFLICTS.md"); + const conflictsPath = join(main, ".sf", "CONFLICTS.md"); 
assert.equal( existsSync(conflictsPath), false, "CONFLICTS.md should be removed after the last conflict is resolved", ); - const wtEvents = readEvents(join(worktree, ".gsd", "event-log.jsonl")); + const wtEvents = readEvents(join(worktree, ".sf", "event-log.jsonl")); assert.ok( wtEvents.some((e) => e.cmd === "plan_milestone" && e.params.title === "Main Choice"), "worktree log should be rewritten to the main-side resolution", diff --git a/src/resources/extensions/sf/tests/workflow-tool-executors.test.ts b/src/resources/extensions/sf/tests/workflow-tool-executors.test.ts index 463ca6d97..2b905df29 100644 --- a/src/resources/extensions/sf/tests/workflow-tool-executors.test.ts +++ b/src/resources/extensions/sf/tests/workflow-tool-executors.test.ts @@ -27,7 +27,7 @@ import { function makeTmpBase(): string { const base = join(tmpdir(), `sf-workflow-executors-${randomUUID()}`); - mkdirSync(join(base, ".gsd"), { recursive: true }); + mkdirSync(join(base, ".sf"), { recursive: true }); return base; } @@ -36,7 +36,7 @@ function cleanup(base: string): void { } function openTestDb(base: string): void { - openDatabase(join(base, ".gsd", "sf.db")); + openDatabase(join(base, ".sf", "sf.db")); } async function inProjectDir<T>(dir: string, fn: () => Promise<T>): Promise<T> { @@ -66,7 +66,7 @@ function seedSlice(milestoneId: string, sliceId: string, status: string): void { } function writeRoadmap(base: string, milestoneId: string, sliceIds: string[]): void { - const milestoneDir = join(base, ".gsd", "milestones", milestoneId); + const milestoneDir = join(base, ".sf", "milestones", milestoneId); mkdirSync(milestoneDir, { recursive: true }); const lines = [ `# ${milestoneId}: Workflow MCP planning`, @@ -93,7 +93,7 @@ test("executeSummarySave persists artifact and returns computed path", async () assert.equal(result.details.operation, "save_summary"); assert.equal(result.details.path, "milestones/M001/slices/S01/S01-SUMMARY.md"); - const filePath = join(base, ".gsd", "milestones", 
"M001", "slices", "S01", "S01-SUMMARY.md"); + const filePath = join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"); assert.ok(existsSync(filePath), "summary artifact should be written to disk"); assert.match(readFileSync(filePath, "utf-8"), /# Summary/); } finally { @@ -106,7 +106,7 @@ test("executeTaskComplete coerces string verificationEvidence entries", async () const base = makeTmpBase(); try { openTestDb(base); - const planDir = join(base, ".gsd", "milestones", "M001", "slices", "S01"); + const planDir = join(base, ".sf", "milestones", "M001", "slices", "S01"); mkdirSync(planDir, { recursive: true }); writeFileSync(join(planDir, "S01-PLAN.md"), "# S01\n\n- [ ] **T01: Demo** `est:5m`\n"); diff --git a/src/resources/extensions/sf/tests/workspace-index.test.ts b/src/resources/extensions/sf/tests/workspace-index.test.ts index d7d045218..faaaa04ed 100644 --- a/src/resources/extensions/sf/tests/workspace-index.test.ts +++ b/src/resources/extensions/sf/tests/workspace-index.test.ts @@ -7,7 +7,7 @@ import { getSuggestedNextCommands, indexWorkspace, listDoctorScopeSuggestions } test("workspace index: indexes active milestone/slice/task and suggests commands", async () => { const base = mkdtempSync(join(tmpdir(), "sf-workspace-index-test-")); - const sf = join(base, ".gsd"); + const sf = join(base, ".sf"); const mDir = join(sf, "milestones", "M001"); const sDir = join(mDir, "slices", "S01"); mkdirSync(join(sDir, "tasks"), { recursive: true }); diff --git a/src/resources/extensions/sf/tests/worktree-bugfix.test.ts b/src/resources/extensions/sf/tests/worktree-bugfix.test.ts index e75c8a27b..6a2f0fd13 100644 --- a/src/resources/extensions/sf/tests/worktree-bugfix.test.ts +++ b/src/resources/extensions/sf/tests/worktree-bugfix.test.ts @@ -74,7 +74,7 @@ describe("worktree-bugfix", () => { it("detectWorktreeName returns name for worktree path", () => { assert.deepStrictEqual( - detectWorktreeName("/project/.gsd/worktrees/M005"), + 
detectWorktreeName("/project/.sf/worktrees/M005"), "M005", "detects worktree name", ); @@ -94,14 +94,14 @@ describe("worktree-bugfix", () => { initRepo(repo); // Create a fake worktree path structure - const wtPath = join(repo, ".gsd", "worktrees", "M005"); + const wtPath = join(repo, ".sf", "worktrees", "M005"); mkdirSync(wtPath, { recursive: true }); - mkdirSync(join(wtPath, ".gsd", "milestones", "M005"), { recursive: true }); + mkdirSync(join(wtPath, ".sf", "milestones", "M005"), { recursive: true }); // Initialize git in the worktree so getService doesn't fail initRepo(wtPath); // captureIntegrationBranch should be a no-op — no META.json written - const metaPath = join(wtPath, ".gsd", "milestones", "M005", "M005-META.json"); + const metaPath = join(wtPath, ".sf", "milestones", "M005", "M005-META.json"); captureIntegrationBranch(wtPath, "M005"); assert.ok(!existsSync(metaPath), "no META.json written in worktree"); }); @@ -109,7 +109,7 @@ describe("worktree-bugfix", () => { it("detectWorktreeName prevents pull in worktree context", () => { // Verifies the guard pattern: if detectWorktreeName returns non-null, // the caller should skip pull/fetch operations - const inWorktree = detectWorktreeName("/project/.gsd/worktrees/M006"); + const inWorktree = detectWorktreeName("/project/.sf/worktrees/M006"); const inNormal = detectWorktreeName("/project"); assert.ok(inWorktree !== null, "worktree detected → skip pull"); assert.ok(inNormal === null, "normal repo → allow pull"); diff --git a/src/resources/extensions/sf/tests/worktree-db-integration.test.ts b/src/resources/extensions/sf/tests/worktree-db-integration.test.ts index 6966c8f62..f5f4dd9fb 100644 --- a/src/resources/extensions/sf/tests/worktree-db-integration.test.ts +++ b/src/resources/extensions/sf/tests/worktree-db-integration.test.ts @@ -5,7 +5,7 @@ * Uses real temp git repos and real SQLite databases. * * Test cases: - * 1. Copy: createAutoWorktree seeds .gsd/sf.db into the worktree when main has one + * 1. 
Copy: createAutoWorktree seeds .sf/sf.db into the worktree when main has one * 2. Copy-skip: createAutoWorktree silently skips when main has no sf.db * 3. Reconcile: reconcileWorktreeDb merges worktree rows into main DB * 4. Reconcile-skip: reconcileWorktreeDb is non-fatal when both paths are nonexistent @@ -68,7 +68,7 @@ describe('worktree-db-integration', async () => { tempDirs.push(tempDir); // Seed a sf.db in the main repo - const sfDir = join(tempDir, ".gsd"); + const sfDir = join(tempDir, ".sf"); mkdirSync(sfDir, { recursive: true }); const mainDbPath = join(sfDir, "sf.db"); openDatabase(mainDbPath); @@ -81,10 +81,10 @@ describe('worktree-db-integration', async () => { // createAutoWorktree should copy the DB into the worktree const wtPath = createAutoWorktree(tempDir, "M004"); - const worktreeDbPath = join(worktreePath(tempDir, "M004"), ".gsd", "sf.db"); + const worktreeDbPath = join(worktreePath(tempDir, "M004"), ".sf", "sf.db"); assert.ok( existsSync(worktreeDbPath), - "sf.db exists in worktree .gsd after createAutoWorktree", + "sf.db exists in worktree .sf after createAutoWorktree", ); // Restore cwd for next test @@ -109,7 +109,7 @@ describe('worktree-db-integration', async () => { assert.ok(!threw, "createAutoWorktree does not throw when no source DB"); - const worktreeDbPath = join(worktreePath(tempDir, "M004"), ".gsd", "sf.db"); + const worktreeDbPath = join(worktreePath(tempDir, "M004"), ".sf", "sf.db"); assert.ok( !existsSync(worktreeDbPath), "sf.db is absent in worktree when source had none", diff --git a/src/resources/extensions/sf/tests/worktree-db-respawn-truncation.test.ts b/src/resources/extensions/sf/tests/worktree-db-respawn-truncation.test.ts index de41890ed..627868628 100644 --- a/src/resources/extensions/sf/tests/worktree-db-respawn-truncation.test.ts +++ b/src/resources/extensions/sf/tests/worktree-db-respawn-truncation.test.ts @@ -24,7 +24,7 @@ import assert from 'node:assert/strict'; function createBase(name: string): string { const 
base = mkdtempSync(join(tmpdir(), `sf-wt-respawn-${name}-`)); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } @@ -42,26 +42,26 @@ describe('worktree-db-respawn-truncation (#2815)', async () => { try { // Set up milestone artifacts in main project root - const m001Dir = join(mainBase, '.gsd', 'milestones', 'M001'); + const m001Dir = join(mainBase, '.sf', 'milestones', 'M001'); mkdirSync(m001Dir, { recursive: true }); writeFileSync(join(m001Dir, 'M001-ROADMAP.md'), '# Roadmap'); // Simulate a freshly-migrated worktree DB (non-empty, like after sf-migrate) // Real DBs are ~1.7MB; we use a smaller payload to prove the size check works const fakeDbContent = Buffer.alloc(4096, 0x42); // 4KB non-empty DB - writeFileSync(join(wtBase, '.gsd', 'sf.db'), fakeDbContent); + writeFileSync(join(wtBase, '.sf', 'sf.db'), fakeDbContent); - const sizeBefore = statSync(join(wtBase, '.gsd', 'sf.db')).size; + const sizeBefore = statSync(join(wtBase, '.sf', 'sf.db')).size; assert.ok(sizeBefore > 0, 'sf.db is non-empty before sync'); syncProjectRootToWorktree(mainBase, wtBase, 'M001'); // The non-empty DB must survive the sync assert.ok( - existsSync(join(wtBase, '.gsd', 'sf.db')), + existsSync(join(wtBase, '.sf', 'sf.db')), '#2815: non-empty sf.db must not be deleted by sync', ); - const sizeAfter = statSync(join(wtBase, '.gsd', 'sf.db')).size; + const sizeAfter = statSync(join(wtBase, '.sf', 'sf.db')).size; assert.equal( sizeAfter, sizeBefore, @@ -80,18 +80,18 @@ describe('worktree-db-respawn-truncation (#2815)', async () => { const wtBase = createBase('wt'); try { - const m001Dir = join(mainBase, '.gsd', 'milestones', 'M001'); + const m001Dir = join(mainBase, '.sf', 'milestones', 'M001'); mkdirSync(m001Dir, { recursive: true }); writeFileSync(join(m001Dir, 'M001-ROADMAP.md'), '# Roadmap'); // Create an empty (0-byte) sf.db — this is stale/corrupt and should be deleted - 
writeFileSync(join(wtBase, '.gsd', 'sf.db'), ''); - assert.ok(existsSync(join(wtBase, '.gsd', 'sf.db')), 'empty sf.db exists before sync'); + writeFileSync(join(wtBase, '.sf', 'sf.db'), ''); + assert.ok(existsSync(join(wtBase, '.sf', 'sf.db')), 'empty sf.db exists before sync'); syncProjectRootToWorktree(mainBase, wtBase, 'M001'); assert.ok( - !existsSync(join(wtBase, '.gsd', 'sf.db')), + !existsSync(join(wtBase, '.sf', 'sf.db')), '#853: empty sf.db must still be deleted after sync', ); } finally { @@ -107,13 +107,13 @@ describe('worktree-db-respawn-truncation (#2815)', async () => { const wtBase = createBase('wt'); try { - const m001Dir = join(mainBase, '.gsd', 'milestones', 'M001'); + const m001Dir = join(mainBase, '.sf', 'milestones', 'M001'); mkdirSync(m001Dir, { recursive: true }); writeFileSync(join(m001Dir, 'M001-ROADMAP.md'), '# Roadmap'); // Create an empty (0-byte) sf.db plus orphaned WAL and SHM files — // this is the exact state that causes Node 24 node:sqlite CPU spin (#2478). - const wtGsd = join(wtBase, '.gsd'); + const wtGsd = join(wtBase, '.sf'); writeFileSync(join(wtGsd, 'sf.db'), ''); writeFileSync(join(wtGsd, 'sf.db-wal'), Buffer.alloc(605672, 0xAA)); writeFileSync(join(wtGsd, 'sf.db-shm'), Buffer.alloc(32768, 0xBB)); @@ -149,13 +149,13 @@ describe('worktree-db-respawn-truncation (#2815)', async () => { const wtBase = createBase('wt'); try { - const m001Dir = join(mainBase, '.gsd', 'milestones', 'M001'); + const m001Dir = join(mainBase, '.sf', 'milestones', 'M001'); mkdirSync(m001Dir, { recursive: true }); writeFileSync(join(m001Dir, 'M001-ROADMAP.md'), '# Roadmap'); // Orphaned WAL/SHM with NO sf.db at all — can happen from a previous // partial cleanup. These must still be cleaned up. 
- const wtGsd = join(wtBase, '.gsd'); + const wtGsd = join(wtBase, '.sf'); writeFileSync(join(wtGsd, 'sf.db-wal'), Buffer.alloc(1024, 0xAA)); writeFileSync(join(wtGsd, 'sf.db-shm'), Buffer.alloc(1024, 0xBB)); @@ -186,29 +186,29 @@ describe('worktree-db-respawn-truncation (#2815)', async () => { const wtBase = createBase('wt'); try { - const m001Dir = join(mainBase, '.gsd', 'milestones', 'M001'); + const m001Dir = join(mainBase, '.sf', 'milestones', 'M001'); mkdirSync(m001Dir, { recursive: true }); writeFileSync(join(m001Dir, 'M001-ROADMAP.md'), '# Roadmap'); mkdirSync(join(m001Dir, 'slices', 'S01'), { recursive: true }); writeFileSync(join(m001Dir, 'slices', 'S01', 'S01-PLAN.md'), '# Plan'); // Non-empty DB in worktree - writeFileSync(join(wtBase, '.gsd', 'sf.db'), 'populated-db-data'); + writeFileSync(join(wtBase, '.sf', 'sf.db'), 'populated-db-data'); syncProjectRootToWorktree(mainBase, wtBase, 'M001'); // Artifacts must still be synced assert.ok( - existsSync(join(wtBase, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md')), + existsSync(join(wtBase, '.sf', 'milestones', 'M001', 'M001-ROADMAP.md')), 'milestone artifacts synced even with preserved DB', ); assert.ok( - existsSync(join(wtBase, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md')), + existsSync(join(wtBase, '.sf', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md')), 'slice artifacts synced even with preserved DB', ); // DB must still exist assert.ok( - existsSync(join(wtBase, '.gsd', 'sf.db')), + existsSync(join(wtBase, '.sf', 'sf.db')), '#2815: DB preserved alongside artifact sync', ); } finally { diff --git a/src/resources/extensions/sf/tests/worktree-db-same-file.test.ts b/src/resources/extensions/sf/tests/worktree-db-same-file.test.ts index b4148fb85..d1f97f1ac 100644 --- a/src/resources/extensions/sf/tests/worktree-db-same-file.test.ts +++ b/src/resources/extensions/sf/tests/worktree-db-same-file.test.ts @@ -45,7 +45,7 @@ describe("#2823: reconcileWorktreeDb same-file guard", () => { 
}); test("returns zero result when both paths resolve to the same file", () => { - const mainGsd = join(tmpDir, "main", ".gsd"); + const mainGsd = join(tmpDir, "main", ".sf"); mkdirSync(mainGsd, { recursive: true }); const mainDbPath = join(mainGsd, "sf.db"); @@ -64,7 +64,7 @@ describe("#2823: reconcileWorktreeDb same-file guard", () => { }); // Create a worktree path that resolves to the same file via symlink - const wtGsd = join(tmpDir, "worktree", ".gsd"); + const wtGsd = join(tmpDir, "worktree", ".sf"); mkdirSync(join(tmpDir, "worktree"), { recursive: true }); symlinkSync(mainGsd, wtGsd, "junction"); const worktreeDbPath = join(wtGsd, "sf.db"); @@ -83,7 +83,7 @@ describe("#2823: reconcileWorktreeDb same-file guard", () => { }); test("returns zero result when both paths are identical strings", () => { - const mainGsd = join(tmpDir, "project", ".gsd"); + const mainGsd = join(tmpDir, "project", ".sf"); mkdirSync(mainGsd, { recursive: true }); const dbPath = join(mainGsd, "sf.db"); @@ -109,7 +109,7 @@ describe("#2823: reconcileWorktreeDb same-file guard", () => { test("still reconciles when paths are genuinely different files", () => { // Main DB - const mainGsd = join(tmpDir, "main", ".gsd"); + const mainGsd = join(tmpDir, "main", ".sf"); mkdirSync(mainGsd, { recursive: true }); const mainDbPath = join(mainGsd, "sf.db"); @@ -128,7 +128,7 @@ describe("#2823: reconcileWorktreeDb same-file guard", () => { closeDatabase(); // Create a separate worktree DB with different data - const wtGsd = join(tmpDir, "worktree", ".gsd"); + const wtGsd = join(tmpDir, "worktree", ".sf"); mkdirSync(wtGsd, { recursive: true }); const worktreeDbPath = join(wtGsd, "sf.db"); diff --git a/src/resources/extensions/sf/tests/worktree-health.test.ts b/src/resources/extensions/sf/tests/worktree-health.test.ts index bdccc38bd..87afe63ef 100644 --- a/src/resources/extensions/sf/tests/worktree-health.test.ts +++ b/src/resources/extensions/sf/tests/worktree-health.test.ts @@ -48,9 +48,9 @@ 
describe('worktree-health', async () => { const dir = createBaseRepo(); cleanups.push(dir); - mkdirSync(join(dir, ".gsd", "worktrees"), { recursive: true }); - run("git worktree add -b worktree/done-feature .gsd/worktrees/done-feature", dir); - const wtPath = join(dir, ".gsd", "worktrees", "done-feature"); + mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); + run("git worktree add -b worktree/done-feature .sf/worktrees/done-feature", dir); + const wtPath = join(dir, ".sf", "worktrees", "done-feature"); writeFileSync(join(wtPath, "done.txt"), "done\n"); run("git add -A", wtPath); run("git -c user.email=test@test.com -c user.name=Test commit -m \"done\"", wtPath); @@ -76,9 +76,9 @@ describe('worktree-health', async () => { const dir = createBaseRepo(); cleanups.push(dir); - mkdirSync(join(dir, ".gsd", "worktrees"), { recursive: true }); - run("git worktree add -b worktree/dirty-wip .gsd/worktrees/dirty-wip", dir); - const wtPath = join(dir, ".gsd", "worktrees", "dirty-wip"); + mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); + run("git worktree add -b worktree/dirty-wip .sf/worktrees/dirty-wip", dir); + const wtPath = join(dir, ".sf", "worktrees", "dirty-wip"); // Make a commit so the branch diverges from main, then leave dirty state writeFileSync(join(wtPath, "committed.txt"), "committed\n"); run("git add -A", wtPath); @@ -103,9 +103,9 @@ describe('worktree-health', async () => { const dir = createBaseRepo(); cleanups.push(dir); - mkdirSync(join(dir, ".gsd", "worktrees"), { recursive: true }); - run("git worktree add -b worktree/unpushed .gsd/worktrees/unpushed", dir); - const wtPath = join(dir, ".gsd", "worktrees", "unpushed"); + mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); + run("git worktree add -b worktree/unpushed .sf/worktrees/unpushed", dir); + const wtPath = join(dir, ".sf", "worktrees", "unpushed"); writeFileSync(join(wtPath, "feature.txt"), "feature\n"); run("git add -A", wtPath); run("git -c 
user.email=test@test.com -c user.name=Test commit -m \"feature\"", wtPath); @@ -126,10 +126,10 @@ describe('worktree-health', async () => { const dir = createBaseRepo(); cleanups.push(dir); - mkdirSync(join(dir, ".gsd", "worktrees"), { recursive: true }); - run("git worktree add -b worktree/stale-test .gsd/worktrees/stale-test", dir); + mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); + run("git worktree add -b worktree/stale-test .sf/worktrees/stale-test", dir); // Diverge from main so the branch is not "merged" - const wtPath = join(dir, ".gsd", "worktrees", "stale-test"); + const wtPath = join(dir, ".sf", "worktrees", "stale-test"); writeFileSync(join(wtPath, "stale.txt"), "stale\n"); run("git add -A", wtPath); run("git -c user.email=test@test.com -c user.name=Test commit -m \"stale work\"", wtPath); @@ -155,10 +155,10 @@ describe('worktree-health', async () => { const dir = createBaseRepo(); cleanups.push(dir); - mkdirSync(join(dir, ".gsd", "worktrees"), { recursive: true }); - run("git worktree add -b worktree/clean-active .gsd/worktrees/clean-active", dir); + mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); + run("git worktree add -b worktree/clean-active .sf/worktrees/clean-active", dir); // Diverge from main so it's not "merged" - const wtPath = join(dir, ".gsd", "worktrees", "clean-active"); + const wtPath = join(dir, ".sf", "worktrees", "clean-active"); writeFileSync(join(wtPath, "active.txt"), "active\n"); run("git add -A", wtPath); run("git -c user.email=test@test.com -c user.name=Test commit -m \"active work\"", wtPath); diff --git a/src/resources/extensions/sf/tests/worktree-integration.test.ts b/src/resources/extensions/sf/tests/worktree-integration.test.ts index 341172438..81988171b 100644 --- a/src/resources/extensions/sf/tests/worktree-integration.test.ts +++ b/src/resources/extensions/sf/tests/worktree-integration.test.ts @@ -46,11 +46,11 @@ run("git config user.name 'Pi Test'", base); run("git config user.email 
'pi@example.com'", base); // Create a project with one milestone and two slices -mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); -mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S02", "tasks"), { recursive: true }); +mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); +mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S02", "tasks"), { recursive: true }); writeFileSync(join(base, "README.md"), "# Test Project\n", "utf-8"); writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), + join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), [ "# M001: Demo", "", @@ -63,12 +63,12 @@ writeFileSync( "utf-8", ); writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), "# S01: First\n\n**Goal:** Demo\n**Demo:** Demo\n\n## Must-Haves\n- done\n\n## Tasks\n- [ ] **T01: Implement** `est:10m`\n do it\n", "utf-8", ); writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S02", "S02-PLAN.md"), + join(base, ".sf", "milestones", "M001", "slices", "S02", "S02-PLAN.md"), "# S02: Second\n\n**Goal:** Demo\n**Demo:** Demo\n\n## Must-Haves\n- done\n\n## Tasks\n- [ ] **T01: Implement** `est:10m`\n do it\n", "utf-8", ); diff --git a/src/resources/extensions/sf/tests/worktree-journal-events.test.ts b/src/resources/extensions/sf/tests/worktree-journal-events.test.ts index a54b2d00c..2d6223b0d 100644 --- a/src/resources/extensions/sf/tests/worktree-journal-events.test.ts +++ b/src/resources/extensions/sf/tests/worktree-journal-events.test.ts @@ -33,15 +33,15 @@ function makeDeps( syncWorktreeStateBack: () => ({ synced: [] }), teardownAutoWorktree: () => {}, createAutoWorktree: (_basePath: string, milestoneId: string) => - `/project/.gsd/worktrees/${milestoneId}`, + `/project/.sf/worktrees/${milestoneId}`, enterAutoWorktree: 
(_basePath: string, milestoneId: string) => - `/project/.gsd/worktrees/${milestoneId}`, + `/project/.sf/worktrees/${milestoneId}`, getAutoWorktreePath: () => null, autoCommitCurrentBranch: () => {}, getCurrentBranch: () => "main", autoWorktreeBranch: (milestoneId: string) => `milestone/${milestoneId}`, resolveMilestoneFile: (_basePath: string, milestoneId: string) => - `/project/.gsd/milestones/${milestoneId}/${milestoneId}-ROADMAP.md`, + `/project/.sf/milestones/${milestoneId}/${milestoneId}-ROADMAP.md`, readFileSync: () => "# Roadmap\n- [x] S01: Slice one\n", GitServiceImpl: class { constructor() {} @@ -60,9 +60,9 @@ function makeNotifyCtx(): NotifyCtx { }; } -/** Read all journal entries from a temp .gsd/journal directory. */ +/** Read all journal entries from a temp .sf/journal directory. */ function readJournalEntries(basePath: string): JournalEntry[] { - const journalDir = join(basePath, ".gsd", "journal"); + const journalDir = join(basePath, ".sf", "journal"); try { const files = readdirSync(journalDir).filter(f => f.endsWith(".jsonl")).sort(); const entries: JournalEntry[] = []; @@ -113,7 +113,7 @@ describe("worktree journal events", () => { test("enterMilestone emits worktree-enter with created=false for existing worktree", () => { const s = makeSession({ basePath: tmp, originalBasePath: tmp }); const deps = makeDeps({ - getAutoWorktreePath: () => "/project/.gsd/worktrees/M001", + getAutoWorktreePath: () => "/project/.sf/worktrees/M001", }); const resolver = new WorktreeResolver(s, deps); diff --git a/src/resources/extensions/sf/tests/worktree-manager.test.ts b/src/resources/extensions/sf/tests/worktree-manager.test.ts index 143ab0a74..3c973f54d 100644 --- a/src/resources/extensions/sf/tests/worktree-manager.test.ts +++ b/src/resources/extensions/sf/tests/worktree-manager.test.ts @@ -25,10 +25,10 @@ function makeBaseRepo(): string { run("git init -b main", base); run('git config user.name "Test User"', base); run('git config user.email "test@example.com"', 
base); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); writeFileSync(join(base, "README.md"), "# Test Project\n", "utf-8"); writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), + join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "# M001: Demo\n\n## Slices\n- [ ] **S01: First** `risk:low` `depends:[]`\n > After this: it works\n", "utf-8", ); @@ -45,14 +45,14 @@ function makeRepoWithWorktree(worktreeName: string): { base: string; wtPath: str function makeRepoWithChanges(worktreeName: string): { base: string; wtPath: string } { const { base, wtPath } = makeRepoWithWorktree(worktreeName); - mkdirSync(join(wtPath, ".gsd", "milestones", "M002"), { recursive: true }); + mkdirSync(join(wtPath, ".sf", "milestones", "M002"), { recursive: true }); writeFileSync( - join(wtPath, ".gsd", "milestones", "M002", "M002-ROADMAP.md"), + join(wtPath, ".sf", "milestones", "M002", "M002-ROADMAP.md"), "# M002: New Feature\n\n## Slices\n- [ ] **S01: Setup** `risk:low` `depends:[]`\n > After this: new feature ready\n", "utf-8", ); writeFileSync( - join(wtPath, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), + join(wtPath, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "# M001: Demo (updated)\n\n## Slices\n- [x] **S01: First** `risk:low` `depends:[]`\n > Done\n", "utf-8", ); @@ -86,8 +86,8 @@ describe("createWorktree", () => { assert.ok(existsSync(info.path), "worktree path should exist on disk"); assert.ok(existsSync(join(info.path, "README.md")), "README.md should be in worktree"); assert.ok( - existsSync(join(info.path, ".gsd", "milestones", "M001", "M001-ROADMAP.md")), - ".gsd files should be in worktree", + existsSync(join(info.path, ".sf", "milestones", "M001", "M001-ROADMAP.md")), + ".sf files should be in worktree", ); const branches = run("git branch", base); assert.ok(branches.includes("worktree/feature-x"), "branch should be created in base 
repo"); diff --git a/src/resources/extensions/sf/tests/worktree-nested-git-safety.test.ts b/src/resources/extensions/sf/tests/worktree-nested-git-safety.test.ts index 27ec1383a..3c3aeb772 100644 --- a/src/resources/extensions/sf/tests/worktree-nested-git-safety.test.ts +++ b/src/resources/extensions/sf/tests/worktree-nested-git-safety.test.ts @@ -81,7 +81,7 @@ assertTrue( ); // ── Test 5: The findNestedGitDirs helper correctly identifies nested repos ── -// Verify the helper scans subdirectories but skips .gsd/, node_modules/, .git/ +// Verify the helper scans subdirectories but skips .sf/, node_modules/, .git/ const helperBody = src.includes("findNestedGitDirs") ? src.slice(src.indexOf("findNestedGitDirs")) @@ -89,7 +89,7 @@ const helperBody = src.includes("findNestedGitDirs") const skipsExcludedDirs = helperBody.includes("node_modules") || - helperBody.includes(".gsd") || + helperBody.includes(".sf") || helperBody.includes("skip") || helperBody.includes("exclude"); diff --git a/src/resources/extensions/sf/tests/worktree-post-create-hook.test.ts b/src/resources/extensions/sf/tests/worktree-post-create-hook.test.ts index b3ab2e30a..3d246743b 100644 --- a/src/resources/extensions/sf/tests/worktree-post-create-hook.test.ts +++ b/src/resources/extensions/sf/tests/worktree-post-create-hook.test.ts @@ -58,7 +58,7 @@ test("returns error when hook script does not exist", () => { const src = makeTmpDir(); const wt = makeTmpDir(); try { - const result = runWorktreePostCreateHook(src, wt, ".gsd/hooks/nonexistent"); + const result = runWorktreePostCreateHook(src, wt, ".sf/hooks/nonexistent"); assert.ok(result !== null, "should return error string"); assert.ok(result!.includes("not found"), "error should mention 'not found'"); } finally { @@ -71,7 +71,7 @@ test("executes hook script with correct SOURCE_DIR and WORKTREE_DIR env vars", ( const src = makeTmpDir(); const wt = makeTmpDir(); try { - const hooksDir = join(src, ".gsd", "hooks"); + const hooksDir = join(src, ".sf", 
"hooks"); mkdirSync(hooksDir, { recursive: true }); const hookFile = hookPath(join(hooksDir, "post-create")); const code = [ @@ -82,7 +82,7 @@ test("executes hook script with correct SOURCE_DIR and WORKTREE_DIR env vars", ( ].join("\n"); writeNodeHookScript(hookFile, code); - const result = runWorktreePostCreateHook(src, wt, hookPath(".gsd/hooks/post-create")); + const result = runWorktreePostCreateHook(src, wt, hookPath(".sf/hooks/post-create")); assert.equal(result, null, "should succeed"); const outputFile = join(wt, "hook-output.txt"); @@ -101,12 +101,12 @@ test("returns error message when hook script fails", () => { const src = makeTmpDir(); const wt = makeTmpDir(); try { - const hooksDir = join(src, ".gsd", "hooks"); + const hooksDir = join(src, ".sf", "hooks"); mkdirSync(hooksDir, { recursive: true }); const hookFile = hookPath(join(hooksDir, "failing-hook")); writeNodeHookScript(hookFile, `process.exit(1);`); - const result = runWorktreePostCreateHook(src, wt, hookPath(".gsd/hooks/failing-hook")); + const result = runWorktreePostCreateHook(src, wt, hookPath(".sf/hooks/failing-hook")); assert.ok(result !== null, "should return error string"); assert.ok(result!.includes("hook failed"), "error should mention 'hook failed'"); } finally { diff --git a/src/resources/extensions/sf/tests/worktree-preferences-sync.test.ts b/src/resources/extensions/sf/tests/worktree-preferences-sync.test.ts index 13f324389..b26947c14 100644 --- a/src/resources/extensions/sf/tests/worktree-preferences-sync.test.ts +++ b/src/resources/extensions/sf/tests/worktree-preferences-sync.test.ts @@ -64,19 +64,19 @@ test("#2684: syncSfStateToWorktree forward-syncs PREFERENCES.md when missing fro t.after(() => cleanup(mainBase, wtBase)); // Project root has canonical PREFERENCES.md - writeFile(mainBase, ".gsd/PREFERENCES.md", PREFS_CONTENT); + writeFile(mainBase, ".sf/PREFERENCES.md", PREFS_CONTENT); - // Worktree has .gsd/ but no preferences file - mkdirSync(join(wtBase, ".gsd"), { recursive: 
true }); + // Worktree has .sf/ but no preferences file + mkdirSync(join(wtBase, ".sf"), { recursive: true }); const result = syncSfStateToWorktree(mainBase, wtBase); assert.ok( - existsSync(join(wtBase, ".gsd", "PREFERENCES.md")), + existsSync(join(wtBase, ".sf", "PREFERENCES.md")), "PREFERENCES.md should be copied to worktree", ); assert.equal( - readFileSync(join(wtBase, ".gsd", "PREFERENCES.md"), "utf-8"), + readFileSync(join(wtBase, ".sf", "PREFERENCES.md"), "utf-8"), PREFS_CONTENT, "PREFERENCES.md content should match source", ); @@ -91,12 +91,12 @@ test("syncSfStateToWorktree still accepts legacy lowercase preferences.md", (t) const wtBase = makeTempDir("wt"); t.after(() => cleanup(mainBase, wtBase)); - writeFile(mainBase, ".gsd/preferences.md", PREFS_CONTENT); - mkdirSync(join(wtBase, ".gsd"), { recursive: true }); + writeFile(mainBase, ".sf/preferences.md", PREFS_CONTENT); + mkdirSync(join(wtBase, ".sf"), { recursive: true }); const result = syncSfStateToWorktree(mainBase, wtBase); - const copiedEntries = readdirSync(join(wtBase, ".gsd")) + const copiedEntries = readdirSync(join(wtBase, ".sf")) .filter((name) => name === "PREFERENCES.md" || name === "preferences.md"); assert.ok( @@ -117,13 +117,13 @@ test("#2684: syncSfStateToWorktree does NOT overwrite existing worktree preferen const rootPrefs = "# Root preferences\nold: true"; const wtPrefs = "# Worktree preferences\nmodified: true"; - writeFile(mainBase, ".gsd/PREFERENCES.md", rootPrefs); - writeFile(wtBase, ".gsd/PREFERENCES.md", wtPrefs); + writeFile(mainBase, ".sf/PREFERENCES.md", rootPrefs); + writeFile(wtBase, ".sf/PREFERENCES.md", wtPrefs); syncSfStateToWorktree(mainBase, wtBase); assert.equal( - readFileSync(join(wtBase, ".gsd", "PREFERENCES.md"), "utf-8"), + readFileSync(join(wtBase, ".sf", "PREFERENCES.md"), "utf-8"), wtPrefs, "existing worktree PREFERENCES.md must not be overwritten", ); @@ -138,17 +138,17 @@ test("#2684: syncWorktreeStateBack does NOT overwrite project root PREFERENCES.m 
const rootPrefs = "# Root preferences\nauthoritative: true"; const wtPrefs = "# Worktree preferences\nstale-copy: true"; - writeFile(mainBase, ".gsd/PREFERENCES.md", rootPrefs); - writeFile(wtBase, ".gsd/PREFERENCES.md", wtPrefs); + writeFile(mainBase, ".sf/PREFERENCES.md", rootPrefs); + writeFile(wtBase, ".sf/PREFERENCES.md", wtPrefs); // Worktree needs at least a milestone dir for the function to proceed - mkdirSync(join(wtBase, ".gsd", "milestones", mid), { recursive: true }); - mkdirSync(join(mainBase, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(wtBase, ".sf", "milestones", mid), { recursive: true }); + mkdirSync(join(mainBase, ".sf", "milestones"), { recursive: true }); syncWorktreeStateBack(mainBase, wtBase, mid); assert.equal( - readFileSync(join(mainBase, ".gsd", "PREFERENCES.md"), "utf-8"), + readFileSync(join(mainBase, ".sf", "PREFERENCES.md"), "utf-8"), rootPrefs, "project root PREFERENCES.md must NOT be overwritten by worktree copy", ); diff --git a/src/resources/extensions/sf/tests/worktree-resolver.test.ts b/src/resources/extensions/sf/tests/worktree-resolver.test.ts index 7fae21b9f..4efb255f3 100644 --- a/src/resources/extensions/sf/tests/worktree-resolver.test.ts +++ b/src/resources/extensions/sf/tests/worktree-resolver.test.ts @@ -77,11 +77,11 @@ function makeDeps( }, createAutoWorktree: (basePath: string, milestoneId: string) => { calls.push({ fn: "createAutoWorktree", args: [basePath, milestoneId] }); - return `/project/.gsd/worktrees/${milestoneId}`; + return `/project/.sf/worktrees/${milestoneId}`; }, enterAutoWorktree: (basePath: string, milestoneId: string) => { calls.push({ fn: "enterAutoWorktree", args: [basePath, milestoneId] }); - return `/project/.gsd/worktrees/${milestoneId}`; + return `/project/.sf/worktrees/${milestoneId}`; }, getAutoWorktreePath: (basePath: string, milestoneId: string) => { calls.push({ fn: "getAutoWorktreePath", args: [basePath, milestoneId] }); @@ -114,7 +114,7 @@ function makeDeps( fn: 
"resolveMilestoneFile", args: [basePath, milestoneId, fileType], }); - return `/project/.gsd/milestones/${milestoneId}/${milestoneId}-ROADMAP.md`; + return `/project/.sf/milestones/${milestoneId}/${milestoneId}-ROADMAP.md`; }, readFileSync: (path: string, _encoding: string) => { calls.push({ fn: "readFileSync", args: [path] }); @@ -179,14 +179,14 @@ function findCalls(calls: CallLog[], fn: string): CallLog[] { // ─── Getter Tests ──────────────────────────────────────────────────────────── test("workPath returns s.basePath", () => { - const s = makeSession({ basePath: "/project/.gsd/worktrees/M001" }); + const s = makeSession({ basePath: "/project/.sf/worktrees/M001" }); const resolver = new WorktreeResolver(s, makeDeps()); - assert.equal(resolver.workPath, "/project/.gsd/worktrees/M001"); + assert.equal(resolver.workPath, "/project/.sf/worktrees/M001"); }); test("projectRoot returns originalBasePath when set", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); const resolver = new WorktreeResolver(s, makeDeps()); @@ -201,7 +201,7 @@ test("projectRoot falls back to basePath when originalBasePath is empty", () => test("lockPath returns originalBasePath when set (same as lockBase)", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); const resolver = new WorktreeResolver(s, makeDeps()); @@ -226,7 +226,7 @@ test("enterMilestone creates new worktree when none exists", () => { resolver.enterMilestone("M001", ctx); - assert.equal(s.basePath, "/project/.gsd/worktrees/M001"); + assert.equal(s.basePath, "/project/.sf/worktrees/M001"); assert.equal(findCalls(deps.calls, "createAutoWorktree").length, 1); assert.equal(findCalls(deps.calls, "enterAutoWorktree").length, 0); assert.equal(findCalls(deps.calls, "GitServiceImpl").length, 1); @@ -240,14 +240,14 @@ test("enterMilestone 
creates new worktree when none exists", () => { test("enterMilestone enters existing worktree instead of creating", () => { const s = makeSession(); const deps = makeDeps({ - getAutoWorktreePath: () => "/project/.gsd/worktrees/M001", + getAutoWorktreePath: () => "/project/.sf/worktrees/M001", }); const ctx = makeNotifyCtx(); const resolver = new WorktreeResolver(s, deps); resolver.enterMilestone("M001", ctx); - assert.equal(s.basePath, "/project/.gsd/worktrees/M001"); + assert.equal(s.basePath, "/project/.sf/worktrees/M001"); assert.equal(findCalls(deps.calls, "enterAutoWorktree").length, 1); assert.equal(findCalls(deps.calls, "createAutoWorktree").length, 0); }); @@ -290,7 +290,7 @@ test("enterMilestone does NOT update basePath on creation failure", () => { test("enterMilestone uses originalBasePath as base for worktree ops", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); let createdFrom = ""; @@ -298,7 +298,7 @@ test("enterMilestone uses originalBasePath as base for worktree ops", () => { getAutoWorktreePath: () => null, createAutoWorktree: (basePath: string, _mid: string) => { createdFrom = basePath; - return "/project/.gsd/worktrees/M002"; + return "/project/.sf/worktrees/M002"; }, }); const ctx = makeNotifyCtx(); @@ -313,7 +313,7 @@ test("enterMilestone uses originalBasePath as base for worktree ops", () => { test("exitMilestone commits, tears down, and resets basePath", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); const deps = makeDeps({ @@ -348,7 +348,7 @@ test("exitMilestone is no-op when not in worktree", () => { test("exitMilestone passes preserveBranch option", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); let preserveOpts: unknown = null; 
@@ -372,7 +372,7 @@ test("exitMilestone passes preserveBranch option", () => { test("exitMilestone still resets basePath even if auto-commit fails", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); const deps = makeDeps({ @@ -395,7 +395,7 @@ test("exitMilestone still resets basePath even if auto-commit fails", () => { test("mergeAndExit in worktree mode reads roadmap and merges", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); const deps = makeDeps({ @@ -417,7 +417,7 @@ test("mergeAndExit in worktree mode reads roadmap and merges", () => { test("mergeAndExit in worktree mode shows pushed status", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); const deps = makeDeps({ @@ -435,7 +435,7 @@ test("mergeAndExit in worktree mode shows pushed status", () => { test("mergeAndExit falls back to teardown with preserveBranch when roadmap is missing (#1573)", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); const deps = makeDeps({ @@ -459,7 +459,7 @@ test("mergeAndExit falls back to teardown with preserveBranch when roadmap is mi test("mergeAndExit resolves roadmap from worktree when missing at project root (#1573)", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); // resolveMilestoneFile returns null for project root, returns path for worktree @@ -468,8 +468,8 @@ test("mergeAndExit resolves roadmap from worktree when missing at project root ( getIsolationMode: () => "worktree", resolveMilestoneFile: (basePath: string) => { if (basePath === "/project") return null; 
// missing at project root - if (basePath === "/project/.gsd/worktrees/M001") { - return "/project/.gsd/worktrees/M001/.gsd/milestones/M001/M001-ROADMAP.md"; + if (basePath === "/project/.sf/worktrees/M001") { + return "/project/.sf/worktrees/M001/.sf/milestones/M001/M001-ROADMAP.md"; } return null; }, @@ -489,7 +489,7 @@ test("mergeAndExit resolves roadmap from worktree when missing at project root ( test("mergeAndExit in worktree mode restores to project root on merge failure", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); const deps = makeDeps({ @@ -519,7 +519,7 @@ test("mergeAndExit failure message tells user worktree and branch are preserved // left confused about whether their code had been deleted. The new message // explicitly states that the worktree and branch are preserved and what to do. const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); const deps = makeDeps({ @@ -555,7 +555,7 @@ test("mergeAndExit failure message references /sf dispatch complete-milestone, n // "retry /complete-milestone" — a command that does not exist. The correct // recovery command is "/sf dispatch complete-milestone". 
const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); const deps = makeDeps({ @@ -697,7 +697,7 @@ test("mergeAndExit in none mode is a no-op", () => { test("mergeAndExit warns when merge contains no code changes (#1906)", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); const deps = makeDeps({ @@ -712,7 +712,7 @@ test("mergeAndExit warns when merge contains no code changes (#1906)", () => { assert.ok( ctx.messages.some((m) => m.msg.includes("NO code changes") && m.level === "warning"), - "must emit warning when only .gsd/ metadata was merged", + "must emit warning when only .sf/ metadata was merged", ); assert.ok( !ctx.messages.some((m) => m.msg.includes("merged to main") && m.level === "info"), @@ -722,7 +722,7 @@ test("mergeAndExit warns when merge contains no code changes (#1906)", () => { test("mergeAndExit emits info when merge contains code changes (#1906)", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); const deps = makeDeps({ @@ -764,7 +764,7 @@ test("mergeAndExit branch mode warns when merge contains no code changes (#1906) assert.ok( ctx.messages.some((m) => m.msg.includes("NO code changes") && m.level === "warning"), - "branch mode must emit warning when only .gsd/ metadata was merged", + "branch mode must emit warning when only .sf/ metadata was merged", ); }); @@ -772,7 +772,7 @@ test("mergeAndExit branch mode warns when merge contains no code changes (#1906) test("mergeAndEnterNext calls mergeAndExit then enterMilestone", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); const callOrder: string[] = []; @@ -791,7 +791,7 @@ test("mergeAndEnterNext calls 
mergeAndExit then enterMilestone", () => { getAutoWorktreePath: () => null, createAutoWorktree: (basePath: string, milestoneId: string) => { callOrder.push(`create:${milestoneId}`); - return `/project/.gsd/worktrees/${milestoneId}`; + return `/project/.sf/worktrees/${milestoneId}`; }, }); const ctx = makeNotifyCtx(); @@ -800,12 +800,12 @@ test("mergeAndEnterNext calls mergeAndExit then enterMilestone", () => { resolver.mergeAndEnterNext("M001", "M002", ctx); assert.deepEqual(callOrder, ["merge:M001", "create:M002"]); - assert.equal(s.basePath, "/project/.gsd/worktrees/M002"); + assert.equal(s.basePath, "/project/.sf/worktrees/M002"); }); test("mergeAndEnterNext enters next milestone even if merge fails", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); const deps = makeDeps({ @@ -817,7 +817,7 @@ test("mergeAndEnterNext enters next milestone even if merge fails", () => { }, getAutoWorktreePath: () => null, createAutoWorktree: (_basePath: string, milestoneId: string) => { - return `/project/.gsd/worktrees/${milestoneId}`; + return `/project/.sf/worktrees/${milestoneId}`; }, }); const ctx = makeNotifyCtx(); @@ -826,7 +826,7 @@ test("mergeAndEnterNext enters next milestone even if merge fails", () => { resolver.mergeAndEnterNext("M001", "M002", ctx); // Merge failed but enter should still happen - assert.equal(s.basePath, "/project/.gsd/worktrees/M002"); + assert.equal(s.basePath, "/project/.sf/worktrees/M002"); assert.ok( ctx.messages.some( (m) => m.level === "warning" && m.msg.includes("merge failed"), @@ -857,12 +857,12 @@ test("GitService is rebuilt with the NEW basePath after enterMilestone", () => { resolver.enterMilestone("M001", ctx); - assert.equal(gitServiceBasePath, "/project/.gsd/worktrees/M001"); // new path, not old + assert.equal(gitServiceBasePath, "/project/.sf/worktrees/M001"); // new path, not old }); test("GitService is rebuilt with 
originalBasePath after exitMilestone", () => { const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); let gitServiceBasePath = ""; @@ -956,7 +956,7 @@ test("mergeAndExit still merges when mode is 'none' but session is in a worktree // where default is "none". They have an active worktree with committed work. // mergeAndExit must detect the active worktree and merge regardless of config. const s = makeSession({ - basePath: "/project/.gsd/worktrees/M001", + basePath: "/project/.sf/worktrees/M001", originalBasePath: "/project", }); const deps = makeDeps({ diff --git a/src/resources/extensions/sf/tests/worktree-symlink-removal.test.ts b/src/resources/extensions/sf/tests/worktree-symlink-removal.test.ts index eb44c5844..c52ab9f3c 100644 --- a/src/resources/extensions/sf/tests/worktree-symlink-removal.test.ts +++ b/src/resources/extensions/sf/tests/worktree-symlink-removal.test.ts @@ -1,7 +1,7 @@ /** - * Regression test for #1852: removeWorktree targets wrong path when .gsd/ is a symlink. + * Regression test for #1852: removeWorktree targets wrong path when .sf/ is a symlink. * - * When .gsd/ is a symlink to an external state directory, git registers + * When .sf/ is a symlink to an external state directory, git registers * the worktree at the resolved (real) path. But removeWorktree recomputes * the path via worktreePath() which uses the unresolved symlink, causing * a mismatch — the removal silently fails. @@ -28,8 +28,8 @@ function run(command: string, cwd: string): string { return execSync(command, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim(); } -// Set up a test repo with .gsd/ as a symlink to an external directory, -// mimicking the external state directory layout (~/.gsd/projects/<hash>/). +// Set up a test repo with .sf/ as a symlink to an external directory, +// mimicking the external state directory layout (~/.sf/projects/<hash>/). 
// Resolve tmpdir to handle macOS /tmp -> /private/var/... symlink. const realTmp = realpathSync(tmpdir()); const base = mkdtempSync(join(realTmp, "sf-wt-symlink-test-")); @@ -42,14 +42,14 @@ run('git config user.email "test@example.com"', base); // Create external state directory structure mkdirSync(join(externalState, "worktrees"), { recursive: true }); -// Create .gsd as a symlink to the external state directory -symlinkSync(externalState, join(base, ".gsd")); +// Create .sf as a symlink to the external state directory +symlinkSync(externalState, join(base, ".sf")); // Verify the symlink is in place -assert.ok(existsSync(join(base, ".gsd")), ".gsd symlink exists"); +assert.ok(existsSync(join(base, ".sf")), ".sf symlink exists"); assert.ok( - realpathSync(join(base, ".gsd")) === externalState, - ".gsd resolves to external state dir", + realpathSync(join(base, ".sf")) === externalState, + ".sf resolves to external state dir", ); // Create initial commit so we have a valid repo @@ -58,7 +58,7 @@ run("git add .", base); run('git commit -m "init"', base); describe('worktree-symlink-removal', async () => { - console.log("\n=== #1852: removeWorktree with symlinked .gsd/ ==="); + console.log("\n=== #1852: removeWorktree with symlinked .sf/ ==="); // Create a worktree — git will resolve the symlink and register // the worktree at the external path @@ -80,15 +80,15 @@ describe('worktree-symlink-removal', async () => { const computedPath = worktreePath(base, "M002"); assert.ok(existsSync(computedPath), "computed path exists (via symlink)"); - // Simulate what syncStateToProjectRoot does: replace the .gsd symlink with + // Simulate what syncStateToProjectRoot does: replace the .sf symlink with // a real directory containing stale worktree data. This causes worktreePath() // to compute a LOCAL path that differs from git's REGISTERED path (the // resolved external path). 
The stale local dir passes existsSync but is not // a real git worktree, so nativeWorktreeRemove fails silently. - unlinkSync(join(base, ".gsd")); // remove the symlink - mkdirSync(join(base, ".gsd", "worktrees", "M002"), { recursive: true }); + unlinkSync(join(base, ".sf")); // remove the symlink + mkdirSync(join(base, ".sf", "worktrees", "M002"), { recursive: true }); // Write a dummy file so the stale directory is non-empty - writeFileSync(join(base, ".gsd", "worktrees", "M002", "stale.txt"), "stale sync artifact", "utf-8"); + writeFileSync(join(base, ".sf", "worktrees", "M002", "stale.txt"), "stale sync artifact", "utf-8"); // Now worktreePath(base, "M002") points to the LOCAL stale dir, not the // external path where git actually registered the worktree. diff --git a/src/resources/extensions/sf/tests/worktree-sync-milestones.test.ts b/src/resources/extensions/sf/tests/worktree-sync-milestones.test.ts index 28938e41b..745e33c09 100644 --- a/src/resources/extensions/sf/tests/worktree-sync-milestones.test.ts +++ b/src/resources/extensions/sf/tests/worktree-sync-milestones.test.ts @@ -2,7 +2,7 @@ * worktree-sync-milestones.test.ts — Regression tests for #1311 and #1678. * * Verifies that syncProjectRootToWorktree copies milestone artifacts - * from the main repo's .gsd/ into the worktree's .gsd/ for the + * from the main repo's .sf/ into the worktree's .sf/ for the * specified milestone, and deletes sf.db so it rebuilds from fresh state. * * Also verifies that syncWorktreeStateBack recurses into tasks/ subdirectories @@ -16,7 +16,7 @@ * - No-op when milestoneId is null * - Non-existent directories handled gracefully * - syncWorktreeStateBack recurses into tasks/ subdirectory (#1678) - * - syncWorktreeStateBack syncs root-level .gsd/ files (REQUIREMENTS, PROJECT, etc.) + * - syncWorktreeStateBack syncs root-level .sf/ files (REQUIREMENTS, PROJECT, etc.) 
* - syncWorktreeStateBack syncs ALL milestone directories, not just the current one * - syncWorktreeStateBack handles next-milestone artifacts created during completion * - syncSfStateToWorktree syncs non-standard milestone dir names (#1547) @@ -35,7 +35,7 @@ import assert from 'node:assert/strict'; function createBase(name: string): string { const base = mkdtempSync(join(tmpdir(), `sf-wt-sync-${name}-`)); - mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(base, '.sf', 'milestones'), { recursive: true }); return base; } @@ -52,19 +52,19 @@ describe('worktree-sync-milestones', async () => { const wtBase = createBase('wt'); try { - const m001Dir = join(mainBase, '.gsd', 'milestones', 'M001'); + const m001Dir = join(mainBase, '.sf', 'milestones', 'M001'); mkdirSync(m001Dir, { recursive: true }); writeFileSync(join(m001Dir, 'M001-CONTEXT.md'), '# M001\nContext.'); writeFileSync(join(m001Dir, 'M001-ROADMAP.md'), '# Roadmap'); // Worktree has no M001 - assert.ok(!existsSync(join(wtBase, '.gsd', 'milestones', 'M001')), 'M001 missing before sync'); + assert.ok(!existsSync(join(wtBase, '.sf', 'milestones', 'M001')), 'M001 missing before sync'); syncProjectRootToWorktree(mainBase, wtBase, 'M001'); - assert.ok(existsSync(join(wtBase, '.gsd', 'milestones', 'M001')), '#1311: M001 synced to worktree'); - assert.ok(existsSync(join(wtBase, '.gsd', 'milestones', 'M001', 'M001-CONTEXT.md')), 'M001 CONTEXT synced'); - assert.ok(existsSync(join(wtBase, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md')), 'M001 ROADMAP synced'); + assert.ok(existsSync(join(wtBase, '.sf', 'milestones', 'M001')), '#1311: M001 synced to worktree'); + assert.ok(existsSync(join(wtBase, '.sf', 'milestones', 'M001', 'M001-CONTEXT.md')), 'M001 CONTEXT synced'); + assert.ok(existsSync(join(wtBase, '.sf', 'milestones', 'M001', 'M001-ROADMAP.md')), 'M001 ROADMAP synced'); } finally { cleanup(mainBase); cleanup(wtBase); @@ -78,7 +78,7 @@ describe('worktree-sync-milestones', async () 
=> { const wtBase = createBase('wt'); try { - const m001Dir = join(mainBase, '.gsd', 'milestones', 'M001'); + const m001Dir = join(mainBase, '.sf', 'milestones', 'M001'); mkdirSync(join(m001Dir, 'slices', 'S01'), { recursive: true }); mkdirSync(join(m001Dir, 'slices', 'S02'), { recursive: true }); writeFileSync(join(m001Dir, 'M001-ROADMAP.md'), '# Roadmap'); @@ -86,14 +86,14 @@ describe('worktree-sync-milestones', async () => { writeFileSync(join(m001Dir, 'slices', 'S02', 'S02-PLAN.md'), '# S02 Plan'); // Worktree only has S01 - const wtM001Dir = join(wtBase, '.gsd', 'milestones', 'M001'); + const wtM001Dir = join(wtBase, '.sf', 'milestones', 'M001'); mkdirSync(join(wtM001Dir, 'slices', 'S01'), { recursive: true }); writeFileSync(join(wtM001Dir, 'slices', 'S01', 'S01-PLAN.md'), '# S01 Plan'); syncProjectRootToWorktree(mainBase, wtBase, 'M001'); - assert.ok(existsSync(join(wtBase, '.gsd', 'milestones', 'M001', 'slices', 'S02')), '#1311: S02 synced'); - assert.ok(existsSync(join(wtBase, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'S02-PLAN.md')), 'S02 PLAN synced'); + assert.ok(existsSync(join(wtBase, '.sf', 'milestones', 'M001', 'slices', 'S02')), '#1311: S02 synced'); + assert.ok(existsSync(join(wtBase, '.sf', 'milestones', 'M001', 'slices', 'S02', 'S02-PLAN.md')), 'S02 PLAN synced'); } finally { cleanup(mainBase); cleanup(wtBase); @@ -107,17 +107,17 @@ describe('worktree-sync-milestones', async () => { const wtBase = createBase('wt'); try { - const m001Dir = join(mainBase, '.gsd', 'milestones', 'M001'); + const m001Dir = join(mainBase, '.sf', 'milestones', 'M001'); mkdirSync(m001Dir, { recursive: true }); writeFileSync(join(m001Dir, 'M001-ROADMAP.md'), '# Roadmap'); // Worktree has an empty (0-byte) sf.db — stale/corrupt - writeFileSync(join(wtBase, '.gsd', 'sf.db'), ''); - assert.ok(existsSync(join(wtBase, '.gsd', 'sf.db')), 'sf.db exists before sync'); + writeFileSync(join(wtBase, '.sf', 'sf.db'), ''); + assert.ok(existsSync(join(wtBase, '.sf', 'sf.db')), 
'sf.db exists before sync'); syncProjectRootToWorktree(mainBase, wtBase, 'M001'); - assert.ok(!existsSync(join(wtBase, '.gsd', 'sf.db')), '#853: empty sf.db deleted after sync'); + assert.ok(!existsSync(join(wtBase, '.sf', 'sf.db')), '#853: empty sf.db deleted after sync'); } finally { cleanup(mainBase); cleanup(wtBase); @@ -131,17 +131,17 @@ describe('worktree-sync-milestones', async () => { const wtBase = createBase('wt'); try { - const m001Dir = join(mainBase, '.gsd', 'milestones', 'M001'); + const m001Dir = join(mainBase, '.sf', 'milestones', 'M001'); mkdirSync(m001Dir, { recursive: true }); writeFileSync(join(m001Dir, 'M001-ROADMAP.md'), '# Roadmap'); // Worktree has a populated sf.db (e.g. from sf-migrate on respawn) - writeFileSync(join(wtBase, '.gsd', 'sf.db'), 'migrated-db-content'); - assert.ok(existsSync(join(wtBase, '.gsd', 'sf.db')), 'sf.db exists before sync'); + writeFileSync(join(wtBase, '.sf', 'sf.db'), 'migrated-db-content'); + assert.ok(existsSync(join(wtBase, '.sf', 'sf.db')), 'sf.db exists before sync'); syncProjectRootToWorktree(mainBase, wtBase, 'M001'); - assert.ok(existsSync(join(wtBase, '.gsd', 'sf.db')), '#2815: non-empty sf.db preserved after sync'); + assert.ok(existsSync(join(wtBase, '.sf', 'sf.db')), '#2815: non-empty sf.db preserved after sync'); } finally { cleanup(mainBase); cleanup(wtBase); @@ -189,23 +189,23 @@ describe('worktree-sync-milestones', async () => { const wtBase = mkdtempSync(join(tmpdir(), 'sf-wt-sync-wt-')); try { - // Worktree has .gsd/ but NO milestones/ subdirectory - mkdirSync(join(wtBase, '.gsd'), { recursive: true }); + // Worktree has .sf/ but NO milestones/ subdirectory + mkdirSync(join(wtBase, '.sf'), { recursive: true }); // Main repo has M001 - const m001Dir = join(mainBase, '.gsd', 'milestones', 'M001'); + const m001Dir = join(mainBase, '.sf', 'milestones', 'M001'); mkdirSync(m001Dir, { recursive: true }); writeFileSync(join(m001Dir, 'M001-CONTEXT.md'), '# M001 Context'); writeFileSync(join(m001Dir, 
'M001-ROADMAP.md'), '# M001 Roadmap'); - assert.ok(!existsSync(join(wtBase, '.gsd', 'milestones')), 'milestones/ missing before sync'); + assert.ok(!existsSync(join(wtBase, '.sf', 'milestones')), 'milestones/ missing before sync'); const result = syncSfStateToWorktree(mainBase, wtBase); - assert.ok(existsSync(join(wtBase, '.gsd', 'milestones')), 'milestones/ created in worktree'); - assert.ok(existsSync(join(wtBase, '.gsd', 'milestones', 'M001')), 'M001 synced to worktree'); - assert.ok(existsSync(join(wtBase, '.gsd', 'milestones', 'M001', 'M001-CONTEXT.md')), 'M001 CONTEXT synced'); - assert.ok(existsSync(join(wtBase, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md')), 'M001 ROADMAP synced'); + assert.ok(existsSync(join(wtBase, '.sf', 'milestones')), 'milestones/ created in worktree'); + assert.ok(existsSync(join(wtBase, '.sf', 'milestones', 'M001')), 'M001 synced to worktree'); + assert.ok(existsSync(join(wtBase, '.sf', 'milestones', 'M001', 'M001-CONTEXT.md')), 'M001 CONTEXT synced'); + assert.ok(existsSync(join(wtBase, '.sf', 'milestones', 'M001', 'M001-ROADMAP.md')), 'M001 ROADMAP synced'); assert.ok(result.synced.length > 0, 'sync reported files'); } finally { cleanup(mainBase); @@ -222,7 +222,7 @@ describe('worktree-sync-milestones', async () => { try { // Build worktree milestone structure with slice-level and task-level files // Use M002 as the milestone to sync, M001 as the "current" being merged (skipped) - const wtSliceDir = join(wtBase, '.gsd', 'milestones', 'M002', 'slices', 'S01'); + const wtSliceDir = join(wtBase, '.sf', 'milestones', 'M002', 'slices', 'S01'); const wtTasksDir = join(wtSliceDir, 'tasks'); mkdirSync(wtTasksDir, { recursive: true }); writeFileSync(join(wtSliceDir, 'S01-SUMMARY.md'), '# S01 Summary'); @@ -230,12 +230,12 @@ describe('worktree-sync-milestones', async () => { writeFileSync(join(wtTasksDir, 'T02-SUMMARY.md'), '# T02 Summary'); // Main project root starts with only the milestone directory (no slices yet) - 
mkdirSync(join(mainBase, '.gsd', 'milestones', 'M002'), { recursive: true }); + mkdirSync(join(mainBase, '.sf', 'milestones', 'M002'), { recursive: true }); // Pass M001 as milestoneId (the one being merged/skipped), M002 should still sync const { synced } = syncWorktreeStateBack(mainBase, wtBase, 'M001'); - const mainSliceDir = join(mainBase, '.gsd', 'milestones', 'M002', 'slices', 'S01'); + const mainSliceDir = join(mainBase, '.sf', 'milestones', 'M002', 'slices', 'S01'); const mainTasksDir = join(mainSliceDir, 'tasks'); assert.ok( @@ -260,42 +260,42 @@ describe('worktree-sync-milestones', async () => { } } - // ─── 9. syncWorktreeStateBack syncs root-level .gsd/ files ────────── + // ─── 9. syncWorktreeStateBack syncs root-level .sf/ files ────────── console.log('\n=== 9. syncWorktreeStateBack syncs root-level files (REQUIREMENTS, PROJECT) ==='); { const mainBase = mkdtempSync(join(tmpdir(), 'sf-wt-back-root-main-')); const wtBase = mkdtempSync(join(tmpdir(), 'sf-wt-back-root-wt-')); try { - mkdirSync(join(mainBase, '.gsd', 'milestones', 'M001'), { recursive: true }); - mkdirSync(join(wtBase, '.gsd', 'milestones', 'M001'), { recursive: true }); + mkdirSync(join(mainBase, '.sf', 'milestones', 'M001'), { recursive: true }); + mkdirSync(join(wtBase, '.sf', 'milestones', 'M001'), { recursive: true }); // Main has original REQUIREMENTS and PROJECT - writeFileSync(join(mainBase, '.gsd', 'REQUIREMENTS.md'), '# Requirements\n## R001'); - writeFileSync(join(mainBase, '.gsd', 'PROJECT.md'), '# Project\n## Milestone: M001'); + writeFileSync(join(mainBase, '.sf', 'REQUIREMENTS.md'), '# Requirements\n## R001'); + writeFileSync(join(mainBase, '.sf', 'PROJECT.md'), '# Project\n## Milestone: M001'); // Worktree has updated versions (complete-milestone added M002 refs) - writeFileSync(join(wtBase, '.gsd', 'REQUIREMENTS.md'), '# Requirements\n## R001\n## R002 — New req'); - writeFileSync(join(wtBase, '.gsd', 'PROJECT.md'), '# Project\n## Milestone: M001\n## Milestone: M002'); - 
writeFileSync(join(wtBase, '.gsd', 'KNOWLEDGE.md'), '# Knowledge\nLearned something.'); + writeFileSync(join(wtBase, '.sf', 'REQUIREMENTS.md'), '# Requirements\n## R001\n## R002 — New req'); + writeFileSync(join(wtBase, '.sf', 'PROJECT.md'), '# Project\n## Milestone: M001\n## Milestone: M002'); + writeFileSync(join(wtBase, '.sf', 'KNOWLEDGE.md'), '# Knowledge\nLearned something.'); const { synced } = syncWorktreeStateBack(mainBase, wtBase, 'M001'); // Root-level files should be overwritten with worktree versions - const reqContent = readFileSync(join(mainBase, '.gsd', 'REQUIREMENTS.md'), 'utf-8'); + const reqContent = readFileSync(join(mainBase, '.sf', 'REQUIREMENTS.md'), 'utf-8'); assert.ok( reqContent.includes('R002'), 'REQUIREMENTS.md updated with worktree content', ); - const projContent = readFileSync(join(mainBase, '.gsd', 'PROJECT.md'), 'utf-8'); + const projContent = readFileSync(join(mainBase, '.sf', 'PROJECT.md'), 'utf-8'); assert.ok( projContent.includes('M002'), 'PROJECT.md updated with worktree content', ); assert.ok( - existsSync(join(mainBase, '.gsd', 'KNOWLEDGE.md')), + existsSync(join(mainBase, '.sf', 'KNOWLEDGE.md')), 'KNOWLEDGE.md synced from worktree', ); @@ -320,26 +320,26 @@ describe('worktree-sync-milestones', async () => { const wtBase = mkdtempSync(join(tmpdir(), 'sf-wt-back-all-wt-')); try { - mkdirSync(join(mainBase, '.gsd', 'milestones'), { recursive: true }); - mkdirSync(join(wtBase, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(mainBase, '.sf', 'milestones'), { recursive: true }); + mkdirSync(join(wtBase, '.sf', 'milestones'), { recursive: true }); // Worktree has M001 (current) AND M002 (next, created by complete-milestone) - const wtM001Dir = join(wtBase, '.gsd', 'milestones', 'M001'); + const wtM001Dir = join(wtBase, '.sf', 'milestones', 'M001'); mkdirSync(wtM001Dir, { recursive: true }); writeFileSync(join(wtM001Dir, 'M001-SUMMARY.md'), '# M001 Summary'); - const wtM002Dir = join(wtBase, '.gsd', 'milestones', 
'M002-abc123'); + const wtM002Dir = join(wtBase, '.sf', 'milestones', 'M002-abc123'); mkdirSync(wtM002Dir, { recursive: true }); writeFileSync(join(wtM002Dir, 'M002-abc123-CONTEXT.md'), '# M002 Context'); writeFileSync(join(wtM002Dir, 'M002-abc123-ROADMAP.md'), '# M002 Roadmap'); // Main has neither assert.ok( - !existsSync(join(mainBase, '.gsd', 'milestones', 'M001')), + !existsSync(join(mainBase, '.sf', 'milestones', 'M001')), 'M001 missing in main before sync', ); assert.ok( - !existsSync(join(mainBase, '.gsd', 'milestones', 'M002-abc123')), + !existsSync(join(mainBase, '.sf', 'milestones', 'M002-abc123')), 'M002 missing in main before sync', ); @@ -348,17 +348,17 @@ describe('worktree-sync-milestones', async () => { // M001 should be SKIPPED (current milestone being merged — #3641) assert.ok( - !existsSync(join(mainBase, '.gsd', 'milestones', 'M001', 'M001-SUMMARY.md')), + !existsSync(join(mainBase, '.sf', 'milestones', 'M001', 'M001-SUMMARY.md')), 'M001 SUMMARY NOT synced (current milestone skipped to prevent merge conflicts)', ); // M002 should be synced (other milestone — not skipped) assert.ok( - existsSync(join(mainBase, '.gsd', 'milestones', 'M002-abc123', 'M002-abc123-CONTEXT.md')), + existsSync(join(mainBase, '.sf', 'milestones', 'M002-abc123', 'M002-abc123-CONTEXT.md')), 'M002 CONTEXT synced to main (next-milestone fix)', ); assert.ok( - existsSync(join(mainBase, '.gsd', 'milestones', 'M002-abc123', 'M002-abc123-ROADMAP.md')), + existsSync(join(mainBase, '.sf', 'milestones', 'M002-abc123', 'M002-abc123-ROADMAP.md')), 'M002 ROADMAP synced to main (next-milestone fix)', ); @@ -379,35 +379,35 @@ describe('worktree-sync-milestones', async () => { const wtBase = mkdtempSync(join(tmpdir(), 'sf-wt-transition-wt-')); try { - mkdirSync(join(mainBase, '.gsd', 'milestones'), { recursive: true }); - mkdirSync(join(wtBase, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(mainBase, '.sf', 'milestones'), { recursive: true }); + mkdirSync(join(wtBase, 
'.sf', 'milestones'), { recursive: true }); // Main starts with M006 context + existing REQUIREMENTS - const mainM006 = join(mainBase, '.gsd', 'milestones', 'M006-589wvh'); + const mainM006 = join(mainBase, '.sf', 'milestones', 'M006-589wvh'); mkdirSync(mainM006, { recursive: true }); writeFileSync(join(mainM006, 'M006-589wvh-CONTEXT.md'), '# M006 Context'); - writeFileSync(join(mainBase, '.gsd', 'REQUIREMENTS.md'), '# Requirements\n## R001 through R089'); - writeFileSync(join(mainBase, '.gsd', 'PROJECT.md'), '# Project\nMilestones: M001-M006'); + writeFileSync(join(mainBase, '.sf', 'REQUIREMENTS.md'), '# Requirements\n## R001 through R089'); + writeFileSync(join(mainBase, '.sf', 'PROJECT.md'), '# Project\nMilestones: M001-M006'); // Worktree (M006 execution context) has: // - M006 SUMMARY + VALIDATION (created by complete-milestone) // - M007 setup (created by complete-milestone for next milestone) // - Updated REQUIREMENTS with R090-R094 // - Updated PROJECT with M007 - const wtM006 = join(wtBase, '.gsd', 'milestones', 'M006-589wvh'); + const wtM006 = join(wtBase, '.sf', 'milestones', 'M006-589wvh'); mkdirSync(join(wtM006, 'slices', 'S01'), { recursive: true }); writeFileSync(join(wtM006, 'M006-589wvh-CONTEXT.md'), '# M006 Context'); writeFileSync(join(wtM006, 'M006-589wvh-SUMMARY.md'), '# M006 Complete'); writeFileSync(join(wtM006, 'M006-589wvh-VALIDATION.md'), '# Validated'); writeFileSync(join(wtM006, 'slices', 'S01', 'S01-SUMMARY.md'), '# S01 done'); - const wtM007 = join(wtBase, '.gsd', 'milestones', 'M007-wortc8'); + const wtM007 = join(wtBase, '.sf', 'milestones', 'M007-wortc8'); mkdirSync(wtM007, { recursive: true }); writeFileSync(join(wtM007, 'M007-wortc8-CONTEXT.md'), '# M007 Enterprise Security'); writeFileSync(join(wtM007, 'M007-wortc8-ROADMAP.md'), '# M007 Roadmap\n10 phases'); - writeFileSync(join(wtBase, '.gsd', 'REQUIREMENTS.md'), '# Requirements\n## R001-R089\n## R090 — SCIM\n## R091 — WebAuthn'); - writeFileSync(join(wtBase, '.gsd', 
'PROJECT.md'), '# Project\nMilestones: M001-M007'); + writeFileSync(join(wtBase, '.sf', 'REQUIREMENTS.md'), '# Requirements\n## R001-R089\n## R090 — SCIM\n## R091 — WebAuthn'); + writeFileSync(join(wtBase, '.sf', 'PROJECT.md'), '# Project\nMilestones: M001-M007'); // Sync with milestoneId = M006 (the completing milestone — skipped by sync) const { synced } = syncWorktreeStateBack(mainBase, wtBase, 'M006-589wvh'); @@ -415,28 +415,28 @@ describe('worktree-sync-milestones', async () => { // M006 is the current milestone being merged — it should be SKIPPED (#3641) // Its files are already in the milestone branch and would conflict with squash merge. assert.ok( - !existsSync(join(mainBase, '.gsd', 'milestones', 'M006-589wvh', 'M006-589wvh-SUMMARY.md')), + !existsSync(join(mainBase, '.sf', 'milestones', 'M006-589wvh', 'M006-589wvh-SUMMARY.md')), 'M006 SUMMARY NOT synced (current milestone skipped)', ); // Verify M007 artifacts synced (the critical fix — other milestones still sync) assert.ok( - existsSync(join(mainBase, '.gsd', 'milestones', 'M007-wortc8', 'M007-wortc8-CONTEXT.md')), + existsSync(join(mainBase, '.sf', 'milestones', 'M007-wortc8', 'M007-wortc8-CONTEXT.md')), 'M007 CONTEXT synced to main (next-milestone)', ); assert.ok( - existsSync(join(mainBase, '.gsd', 'milestones', 'M007-wortc8', 'M007-wortc8-ROADMAP.md')), + existsSync(join(mainBase, '.sf', 'milestones', 'M007-wortc8', 'M007-wortc8-ROADMAP.md')), 'M007 ROADMAP synced to main (next-milestone)', ); // Verify root-level files updated - const reqContent = readFileSync(join(mainBase, '.gsd', 'REQUIREMENTS.md'), 'utf-8'); + const reqContent = readFileSync(join(mainBase, '.sf', 'REQUIREMENTS.md'), 'utf-8'); assert.ok( reqContent.includes('R090'), 'REQUIREMENTS.md has R090 from worktree', ); - const projContent = readFileSync(join(mainBase, '.gsd', 'PROJECT.md'), 'utf-8'); + const projContent = readFileSync(join(mainBase, '.sf', 'PROJECT.md'), 'utf-8'); assert.ok( projContent.includes('M007'), 'PROJECT.md has 
M007 from worktree', @@ -454,16 +454,16 @@ describe('worktree-sync-milestones', async () => { const wtBase = mkdtempSync(join(tmpdir(), 'sf-wt-back-noroot-wt-')); try { - mkdirSync(join(mainBase, '.gsd', 'milestones', 'M001'), { recursive: true }); - mkdirSync(join(wtBase, '.gsd', 'milestones', 'M001'), { recursive: true }); + mkdirSync(join(mainBase, '.sf', 'milestones', 'M001'), { recursive: true }); + mkdirSync(join(wtBase, '.sf', 'milestones', 'M001'), { recursive: true }); // Main has REQUIREMENTS, worktree does not - writeFileSync(join(mainBase, '.gsd', 'REQUIREMENTS.md'), '# Original'); + writeFileSync(join(mainBase, '.sf', 'REQUIREMENTS.md'), '# Original'); const { synced } = syncWorktreeStateBack(mainBase, wtBase, 'M001'); // Main's REQUIREMENTS should be untouched (worktree had nothing to sync) - const content = readFileSync(join(mainBase, '.gsd', 'REQUIREMENTS.md'), 'utf-8'); + const content = readFileSync(join(mainBase, '.sf', 'REQUIREMENTS.md'), 'utf-8'); assert.ok( content === '# Original', 'REQUIREMENTS.md unchanged when worktree has no copy', @@ -485,23 +485,23 @@ describe('worktree-sync-milestones', async () => { const wtBase = mkdtempSync(join(tmpdir(), 'sf-wt-back-queue-wt-')); try { - mkdirSync(join(mainBase, '.gsd', 'milestones', 'M001'), { recursive: true }); - mkdirSync(join(wtBase, '.gsd', 'milestones', 'M001'), { recursive: true }); + mkdirSync(join(mainBase, '.sf', 'milestones', 'M001'), { recursive: true }); + mkdirSync(join(wtBase, '.sf', 'milestones', 'M001'), { recursive: true }); // Worktree has QUEUE.md and completed-units.json written during milestone closeout - writeFileSync(join(wtBase, '.gsd', 'QUEUE.md'), '# Queue\n- M002 next'); + writeFileSync(join(wtBase, '.sf', 'QUEUE.md'), '# Queue\n- M002 next'); writeFileSync( - join(wtBase, '.gsd', 'completed-units.json'), + join(wtBase, '.sf', 'completed-units.json'), JSON.stringify({ units: [{ id: 'M001-S01-T01', completed: true }] }), ); // Main has neither assert.ok( - 
!existsSync(join(mainBase, '.gsd', 'QUEUE.md')), + !existsSync(join(mainBase, '.sf', 'QUEUE.md')), 'QUEUE.md missing in main before sync', ); assert.ok( - !existsSync(join(mainBase, '.gsd', 'completed-units.json')), + !existsSync(join(mainBase, '.sf', 'completed-units.json')), 'completed-units.json missing in main before sync', ); @@ -509,10 +509,10 @@ describe('worktree-sync-milestones', async () => { // QUEUE.md should be synced assert.ok( - existsSync(join(mainBase, '.gsd', 'QUEUE.md')), + existsSync(join(mainBase, '.sf', 'QUEUE.md')), '#1787: QUEUE.md synced from worktree to main', ); - const queueContent = readFileSync(join(mainBase, '.gsd', 'QUEUE.md'), 'utf-8'); + const queueContent = readFileSync(join(mainBase, '.sf', 'QUEUE.md'), 'utf-8'); assert.ok( queueContent.includes('M002 next'), '#1787: QUEUE.md has correct content', @@ -524,10 +524,10 @@ describe('worktree-sync-milestones', async () => { // completed-units.json should be synced assert.ok( - existsSync(join(mainBase, '.gsd', 'completed-units.json')), + existsSync(join(mainBase, '.sf', 'completed-units.json')), '#1787: completed-units.json synced from worktree to main', ); - const cuContent = readFileSync(join(mainBase, '.gsd', 'completed-units.json'), 'utf-8'); + const cuContent = readFileSync(join(mainBase, '.sf', 'completed-units.json'), 'utf-8'); assert.ok( cuContent.includes('M001-S01-T01'), '#1787: completed-units.json has correct content', @@ -550,25 +550,25 @@ describe('worktree-sync-milestones', async () => { try { // Main has milestone dirs with non-standard names - const customDir = join(mainBase, '.gsd', 'milestones', 'sprint-alpha'); + const customDir = join(mainBase, '.sf', 'milestones', 'sprint-alpha'); mkdirSync(customDir, { recursive: true }); writeFileSync(join(customDir, 'CONTEXT.md'), '# Sprint Alpha Context'); - const suffixDir = join(mainBase, '.gsd', 'milestones', 'M001-abc123'); + const suffixDir = join(mainBase, '.sf', 'milestones', 'M001-abc123'); mkdirSync(suffixDir, { 
recursive: true }); writeFileSync(join(suffixDir, 'M001-abc123-CONTEXT.md'), '# M001 Context'); - assert.ok(!existsSync(join(wtBase, '.gsd', 'milestones', 'sprint-alpha')), 'sprint-alpha missing before sync'); - assert.ok(!existsSync(join(wtBase, '.gsd', 'milestones', 'M001-abc123')), 'M001-abc123 missing before sync'); + assert.ok(!existsSync(join(wtBase, '.sf', 'milestones', 'sprint-alpha')), 'sprint-alpha missing before sync'); + assert.ok(!existsSync(join(wtBase, '.sf', 'milestones', 'M001-abc123')), 'M001-abc123 missing before sync'); const result = syncSfStateToWorktree(mainBase, wtBase); assert.ok( - existsSync(join(wtBase, '.gsd', 'milestones', 'sprint-alpha', 'CONTEXT.md')), + existsSync(join(wtBase, '.sf', 'milestones', 'sprint-alpha', 'CONTEXT.md')), '#1547: non-standard milestone dir "sprint-alpha" synced to worktree', ); assert.ok( - existsSync(join(wtBase, '.gsd', 'milestones', 'M001-abc123', 'M001-abc123-CONTEXT.md')), + existsSync(join(wtBase, '.sf', 'milestones', 'M001-abc123', 'M001-abc123-CONTEXT.md')), '#1547: suffixed milestone dir "M001-abc123" synced to worktree', ); assert.ok(result.synced.length > 0, 'sync reported files'); @@ -585,23 +585,23 @@ describe('worktree-sync-milestones', async () => { const wtBase = mkdtempSync(join(tmpdir(), 'sf-wt-back-custom-wt-')); try { - mkdirSync(join(mainBase, '.gsd', 'milestones'), { recursive: true }); - mkdirSync(join(wtBase, '.gsd', 'milestones'), { recursive: true }); + mkdirSync(join(mainBase, '.sf', 'milestones'), { recursive: true }); + mkdirSync(join(wtBase, '.sf', 'milestones'), { recursive: true }); // Worktree has a non-standard milestone dir - const wtCustomDir = join(wtBase, '.gsd', 'milestones', 'sprint-beta'); + const wtCustomDir = join(wtBase, '.sf', 'milestones', 'sprint-beta'); mkdirSync(wtCustomDir, { recursive: true }); writeFileSync(join(wtCustomDir, 'SUMMARY.md'), '# Sprint Beta Summary'); assert.ok( - !existsSync(join(mainBase, '.gsd', 'milestones', 'sprint-beta')), + 
!existsSync(join(mainBase, '.sf', 'milestones', 'sprint-beta')), 'sprint-beta missing in main before sync', ); const { synced } = syncWorktreeStateBack(mainBase, wtBase, 'M001'); assert.ok( - existsSync(join(mainBase, '.gsd', 'milestones', 'sprint-beta', 'SUMMARY.md')), + existsSync(join(mainBase, '.sf', 'milestones', 'sprint-beta', 'SUMMARY.md')), '#1547: non-standard milestone dir "sprint-beta" synced back to main', ); assert.ok( diff --git a/src/resources/extensions/sf/tests/worktree-sync-overwrite-loop.test.ts b/src/resources/extensions/sf/tests/worktree-sync-overwrite-loop.test.ts index a21a3d717..7d1c8f0aa 100644 --- a/src/resources/extensions/sf/tests/worktree-sync-overwrite-loop.test.ts +++ b/src/resources/extensions/sf/tests/worktree-sync-overwrite-loop.test.ts @@ -36,7 +36,7 @@ const { assertTrue, assertEq, report } = createTestContext(); function createBase(name: string): string { const base = mkdtempSync(join(tmpdir(), `sf-wt-1886-${name}-`)); - mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(base, ".sf", "milestones"), { recursive: true }); return base; } @@ -55,12 +55,12 @@ async function main(): Promise<void> { try { // Project root has an older CONTEXT but no VALIDATION - const prM004 = join(mainBase, ".gsd", "milestones", "M004"); + const prM004 = join(mainBase, ".sf", "milestones", "M004"); mkdirSync(prM004, { recursive: true }); writeFileSync(join(prM004, "M004-CONTEXT.md"), "# old context"); // Worktree has CONTEXT + VALIDATION (written by validate-milestone) - const wtM004 = join(wtBase, ".gsd", "milestones", "M004"); + const wtM004 = join(wtBase, ".sf", "milestones", "M004"); mkdirSync(wtM004, { recursive: true }); writeFileSync(join(wtM004, "M004-CONTEXT.md"), "# worktree context"); writeFileSync( @@ -100,7 +100,7 @@ async function main(): Promise<void> { const wtBase = createBase("wt"); try { - const prM004 = join(mainBase, ".gsd", "milestones", "M004"); + const prM004 = join(mainBase, ".sf", "milestones", 
"M004"); mkdirSync(prM004, { recursive: true }); writeFileSync(join(prM004, "M004-CONTEXT.md"), "# from project root"); writeFileSync(join(prM004, "M004-ROADMAP.md"), "# roadmap"); @@ -109,11 +109,11 @@ async function main(): Promise<void> { syncProjectRootToWorktree(mainBase, wtBase, "M004"); assertTrue( - existsSync(join(wtBase, ".gsd", "milestones", "M004", "M004-CONTEXT.md")), + existsSync(join(wtBase, ".sf", "milestones", "M004", "M004-CONTEXT.md")), "#1886: missing CONTEXT.md copied from project root", ); assertTrue( - existsSync(join(wtBase, ".gsd", "milestones", "M004", "M004-ROADMAP.md")), + existsSync(join(wtBase, ".sf", "milestones", "M004", "M004-ROADMAP.md")), "#1886: missing ROADMAP.md copied from project root", ); } finally { @@ -133,20 +133,20 @@ async function main(): Promise<void> { try { // Project root has completed units (authoritative after crash recovery) writeFileSync( - join(mainBase, ".gsd", "completed-units.json"), + join(mainBase, ".sf", "completed-units.json"), JSON.stringify(["validate-milestone/M004"]), ); // Worktree has empty completed-units writeFileSync( - join(wtBase, ".gsd", "completed-units.json"), + join(wtBase, ".sf", "completed-units.json"), JSON.stringify([]), ); syncProjectRootToWorktree(mainBase, wtBase, "M004"); const wtCompleted = JSON.parse( - readFileSync(join(wtBase, ".gsd", "completed-units.json"), "utf-8"), + readFileSync(join(wtBase, ".sf", "completed-units.json"), "utf-8"), ); assertEq( wtCompleted, @@ -169,20 +169,20 @@ async function main(): Promise<void> { try { // Project root milestone dir must exist for sync to run - const prM004 = join(mainBase, ".gsd", "milestones", "M004"); + const prM004 = join(mainBase, ".sf", "milestones", "M004"); mkdirSync(prM004, { recursive: true }); // No completed-units.json in project root // Worktree has its own writeFileSync( - join(wtBase, ".gsd", "completed-units.json"), + join(wtBase, ".sf", "completed-units.json"), JSON.stringify(["some-unit/M001"]), ); 
syncProjectRootToWorktree(mainBase, wtBase, "M004"); const wtCompleted = JSON.parse( - readFileSync(join(wtBase, ".gsd", "completed-units.json"), "utf-8"), + readFileSync(join(wtBase, ".sf", "completed-units.json"), "utf-8"), ); assertEq( wtCompleted, diff --git a/src/resources/extensions/sf/tests/worktree-sync-tasks.test.ts b/src/resources/extensions/sf/tests/worktree-sync-tasks.test.ts index 4926a466f..c3a9e5e70 100644 --- a/src/resources/extensions/sf/tests/worktree-sync-tasks.test.ts +++ b/src/resources/extensions/sf/tests/worktree-sync-tasks.test.ts @@ -52,57 +52,57 @@ test("syncWorktreeStateBack copies task summaries from tasks/ subdirectory (#167 try { // Set up worktree with milestone, slice, and task files - writeFile(wtBase, `.gsd/milestones/${mid}/${mid}-ROADMAP.md`, "# Roadmap\n"); - writeFile(wtBase, `.gsd/milestones/${mid}/${mid}-SUMMARY.md`, "# Summary\n"); - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S01/S01-PLAN.md`, "# Plan\n"); - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S01/S01-SUMMARY.md`, "# Slice Summary\n"); - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S01/S01-UAT.md`, "# UAT\n"); - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S01/tasks/T01-PLAN.md`, "# Task 1 Plan\n"); - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S01/tasks/T01-SUMMARY.md`, "# Task 1 Summary\n"); - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S01/tasks/T02-PLAN.md`, "# Task 2 Plan\n"); - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S01/tasks/T02-SUMMARY.md`, "# Task 2 Summary\n"); + writeFile(wtBase, `.sf/milestones/${mid}/${mid}-ROADMAP.md`, "# Roadmap\n"); + writeFile(wtBase, `.sf/milestones/${mid}/${mid}-SUMMARY.md`, "# Summary\n"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S01/S01-PLAN.md`, "# Plan\n"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S01/S01-SUMMARY.md`, "# Slice Summary\n"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S01/S01-UAT.md`, "# UAT\n"); + writeFile(wtBase, 
`.sf/milestones/${mid}/slices/S01/tasks/T01-PLAN.md`, "# Task 1 Plan\n"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S01/tasks/T01-SUMMARY.md`, "# Task 1 Summary\n"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S01/tasks/T02-PLAN.md`, "# Task 2 Plan\n"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S01/tasks/T02-SUMMARY.md`, "# Task 2 Summary\n"); - // Set up main with empty .gsd - mkdirSync(join(mainBase, ".gsd"), { recursive: true }); + // Set up main with empty .sf + mkdirSync(join(mainBase, ".sf"), { recursive: true }); // Run sync — currentMid is skipped, mid (M001) should be synced const result = syncWorktreeStateBack(mainBase, wtBase, currentMid); // Verify milestone-level files synced assert.ok( - existsSync(join(mainBase, `.gsd/milestones/${mid}/${mid}-ROADMAP.md`)), + existsSync(join(mainBase, `.sf/milestones/${mid}/${mid}-ROADMAP.md`)), "ROADMAP should be synced", ); assert.ok( - existsSync(join(mainBase, `.gsd/milestones/${mid}/${mid}-SUMMARY.md`)), + existsSync(join(mainBase, `.sf/milestones/${mid}/${mid}-SUMMARY.md`)), "SUMMARY should be synced", ); // Verify slice-level files synced assert.ok( - existsSync(join(mainBase, `.gsd/milestones/${mid}/slices/S01/S01-PLAN.md`)), + existsSync(join(mainBase, `.sf/milestones/${mid}/slices/S01/S01-PLAN.md`)), "S01-PLAN should be synced", ); assert.ok( - existsSync(join(mainBase, `.gsd/milestones/${mid}/slices/S01/S01-SUMMARY.md`)), + existsSync(join(mainBase, `.sf/milestones/${mid}/slices/S01/S01-SUMMARY.md`)), "S01-SUMMARY should be synced", ); // Verify task-level files synced (THE BUG FIX) assert.ok( - existsSync(join(mainBase, `.gsd/milestones/${mid}/slices/S01/tasks/T01-PLAN.md`)), + existsSync(join(mainBase, `.sf/milestones/${mid}/slices/S01/tasks/T01-PLAN.md`)), "T01-PLAN should be synced (was dropped before fix)", ); assert.ok( - existsSync(join(mainBase, `.gsd/milestones/${mid}/slices/S01/tasks/T01-SUMMARY.md`)), + existsSync(join(mainBase, 
`.sf/milestones/${mid}/slices/S01/tasks/T01-SUMMARY.md`)), "T01-SUMMARY should be synced (was dropped before fix)", ); assert.ok( - existsSync(join(mainBase, `.gsd/milestones/${mid}/slices/S01/tasks/T02-PLAN.md`)), + existsSync(join(mainBase, `.sf/milestones/${mid}/slices/S01/tasks/T02-PLAN.md`)), "T02-PLAN should be synced (was dropped before fix)", ); assert.ok( - existsSync(join(mainBase, `.gsd/milestones/${mid}/slices/S01/tasks/T02-SUMMARY.md`)), + existsSync(join(mainBase, `.sf/milestones/${mid}/slices/S01/tasks/T02-SUMMARY.md`)), "T02-SUMMARY should be synced (was dropped before fix)", ); @@ -115,7 +115,7 @@ test("syncWorktreeStateBack copies task summaries from tasks/ subdirectory (#167 // Verify content integrity const t1Summary = readFileSync( - join(mainBase, `.gsd/milestones/${mid}/slices/S01/tasks/T01-SUMMARY.md`), + join(mainBase, `.sf/milestones/${mid}/slices/S01/tasks/T01-SUMMARY.md`), "utf-8", ); assert.equal(t1Summary, "# Task 1 Summary\n"); @@ -132,26 +132,26 @@ test("syncWorktreeStateBack handles multiple slices with tasks (#1678)", () => { try { // Set up two slices with tasks - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S01/S01-SUMMARY.md`, "# S01\n"); - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S01/tasks/T01-SUMMARY.md`, "# S01-T01\n"); - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S02/S02-SUMMARY.md`, "# S02\n"); - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S02/tasks/T01-SUMMARY.md`, "# S02-T01\n"); - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S02/tasks/T02-SUMMARY.md`, "# S02-T02\n"); - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S02/tasks/T03-SUMMARY.md`, "# S02-T03\n"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S01/S01-SUMMARY.md`, "# S01\n"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S01/tasks/T01-SUMMARY.md`, "# S01-T01\n"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S02/S02-SUMMARY.md`, "# S02\n"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S02/tasks/T01-SUMMARY.md`, "# 
S02-T01\n"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S02/tasks/T02-SUMMARY.md`, "# S02-T02\n"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S02/tasks/T03-SUMMARY.md`, "# S02-T03\n"); - mkdirSync(join(mainBase, ".gsd"), { recursive: true }); + mkdirSync(join(mainBase, ".sf"), { recursive: true }); const result = syncWorktreeStateBack(mainBase, wtBase, currentMid); // All task summaries from both slices should be synced - assert.ok(existsSync(join(mainBase, `.gsd/milestones/${mid}/slices/S01/tasks/T01-SUMMARY.md`))); - assert.ok(existsSync(join(mainBase, `.gsd/milestones/${mid}/slices/S02/tasks/T01-SUMMARY.md`))); - assert.ok(existsSync(join(mainBase, `.gsd/milestones/${mid}/slices/S02/tasks/T02-SUMMARY.md`))); - assert.ok(existsSync(join(mainBase, `.gsd/milestones/${mid}/slices/S02/tasks/T03-SUMMARY.md`))); + assert.ok(existsSync(join(mainBase, `.sf/milestones/${mid}/slices/S01/tasks/T01-SUMMARY.md`))); + assert.ok(existsSync(join(mainBase, `.sf/milestones/${mid}/slices/S02/tasks/T01-SUMMARY.md`))); + assert.ok(existsSync(join(mainBase, `.sf/milestones/${mid}/slices/S02/tasks/T02-SUMMARY.md`))); + assert.ok(existsSync(join(mainBase, `.sf/milestones/${mid}/slices/S02/tasks/T03-SUMMARY.md`))); // Verify content integrity across slices assert.equal( - readFileSync(join(mainBase, `.gsd/milestones/${mid}/slices/S02/tasks/T03-SUMMARY.md`), "utf-8"), + readFileSync(join(mainBase, `.sf/milestones/${mid}/slices/S02/tasks/T03-SUMMARY.md`), "utf-8"), "# S02-T03\n", ); } finally { @@ -167,14 +167,14 @@ test("syncWorktreeStateBack handles slices without tasks/ directory", () => { try { // Slice with no tasks/ subdirectory (legitimate case: pre-planning) - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S01/S01-RESEARCH.md`, "# Research\n"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S01/S01-RESEARCH.md`, "# Research\n"); - mkdirSync(join(mainBase, ".gsd"), { recursive: true }); + mkdirSync(join(mainBase, ".sf"), { recursive: true }); const result = 
syncWorktreeStateBack(mainBase, wtBase, currentMid); // Should sync the slice file without errors - assert.ok(existsSync(join(mainBase, `.gsd/milestones/${mid}/slices/S01/S01-RESEARCH.md`))); + assert.ok(existsSync(join(mainBase, `.sf/milestones/${mid}/slices/S01/S01-RESEARCH.md`))); // Should not have any task entries const taskSynced = result.synced.filter(p => p.includes("/tasks/")); assert.equal(taskSynced.length, 0); @@ -190,20 +190,20 @@ test("syncWorktreeStateBack ignores non-md files in tasks/", () => { const mid = "M004"; // other milestone that should be synced try { - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S01/S01-PLAN.md`, "# Plan\n"); - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S01/tasks/T01-SUMMARY.md`, "# T01\n"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S01/S01-PLAN.md`, "# Plan\n"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S01/tasks/T01-SUMMARY.md`, "# T01\n"); // Non-md file should be ignored - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S01/tasks/.DS_Store`, "junk"); - writeFile(wtBase, `.gsd/milestones/${mid}/slices/S01/tasks/notes.txt`, "notes"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S01/tasks/.DS_Store`, "junk"); + writeFile(wtBase, `.sf/milestones/${mid}/slices/S01/tasks/notes.txt`, "notes"); - mkdirSync(join(mainBase, ".gsd"), { recursive: true }); + mkdirSync(join(mainBase, ".sf"), { recursive: true }); const result = syncWorktreeStateBack(mainBase, wtBase, currentMid); // Only .md files should be synced - assert.ok(existsSync(join(mainBase, `.gsd/milestones/${mid}/slices/S01/tasks/T01-SUMMARY.md`))); - assert.ok(!existsSync(join(mainBase, `.gsd/milestones/${mid}/slices/S01/tasks/.DS_Store`))); - assert.ok(!existsSync(join(mainBase, `.gsd/milestones/${mid}/slices/S01/tasks/notes.txt`))); + assert.ok(existsSync(join(mainBase, `.sf/milestones/${mid}/slices/S01/tasks/T01-SUMMARY.md`))); + assert.ok(!existsSync(join(mainBase, `.sf/milestones/${mid}/slices/S01/tasks/.DS_Store`))); + 
assert.ok(!existsSync(join(mainBase, `.sf/milestones/${mid}/slices/S01/tasks/notes.txt`))); } finally { cleanup(mainBase, wtBase); } diff --git a/src/resources/extensions/sf/tests/worktree-teardown-safety.test.ts b/src/resources/extensions/sf/tests/worktree-teardown-safety.test.ts index e6f9ef134..aa17f4c26 100644 --- a/src/resources/extensions/sf/tests/worktree-teardown-safety.test.ts +++ b/src/resources/extensions/sf/tests/worktree-teardown-safety.test.ts @@ -2,14 +2,14 @@ * worktree-teardown-safety.test.ts — Regression test for #2365. * * Ensures that removeWorktree() and teardownAutoWorktree() never delete - * directories outside .gsd/worktrees/. The bug: removeWorktree overrides + * directories outside .sf/worktrees/. The bug: removeWorktree overrides * the computed worktree path with whatever `git worktree list` reports. - * When .gsd/ was (or is) a symlink, git resolves the symlink at creation + * When .sf/ was (or is) a symlink, git resolves the symlink at creation * time, so its registered path can point to an external directory. If that * external path happens to be a project data directory, teardown destroys it. * * The fix adds path validation so rmSync / nativeWorktreeRemove only operate - * on paths that are actually under .gsd/worktrees/. + * on paths that are actually under .sf/worktrees/. 
*/ import { @@ -63,7 +63,7 @@ describe("worktree-teardown-safety", () => { const tempDir = createTempRepo(); dirs.push(tempDir); - // Create a project data directory that lives alongside .gsd/ + // Create a project data directory that lives alongside .sf/ const dataDir = join(tempDir, "project-data"); mkdirSync(dataDir, { recursive: true }); writeFileSync(join(dataDir, "important.db"), "precious data"); @@ -86,7 +86,7 @@ describe("worktree-teardown-safety", () => { ); }); - it("path validation rejects paths outside .gsd/worktrees/", () => { + it("path validation rejects paths outside .sf/worktrees/", () => { const tempDir = createTempRepo(); dirs.push(tempDir); @@ -110,14 +110,14 @@ describe("worktree-teardown-safety", () => { ); }); - it("worktreePath always returns paths under .gsd/worktrees/", () => { + it("worktreePath always returns paths under .sf/worktrees/", () => { const tempDir = createTempRepo(); dirs.push(tempDir); const wtPathResult = worktreePath(tempDir, "anything"); assertTrue( - wtPathResult.startsWith(join(tempDir, ".gsd", "worktrees")), - "worktreePath returns path under .gsd/worktrees/", + wtPathResult.startsWith(join(tempDir, ".sf", "worktrees")), + "worktreePath returns path under .sf/worktrees/", ); }); @@ -126,17 +126,17 @@ describe("worktree-teardown-safety", () => { dirs.push(tempDir); assertTrue( - isInsideWorktreesDir(tempDir, join(tempDir, ".gsd", "worktrees", "my-wt")), - "path inside .gsd/worktrees/ is accepted", + isInsideWorktreesDir(tempDir, join(tempDir, ".sf", "worktrees", "my-wt")), + "path inside .sf/worktrees/ is accepted", ); assertTrue( !isInsideWorktreesDir(tempDir, join(tempDir, "project-data")), - "path outside .gsd/worktrees/ is rejected", + "path outside .sf/worktrees/ is rejected", ); assertTrue( - !isInsideWorktreesDir(tempDir, join(tempDir, ".gsd", "worktrees", "..", "..", "project-data")), + !isInsideWorktreesDir(tempDir, join(tempDir, ".sf", "worktrees", "..", "..", "project-data")), "path traversal via .. 
is rejected", ); diff --git a/src/resources/extensions/sf/tests/worktree.test.ts b/src/resources/extensions/sf/tests/worktree.test.ts index 244b8a85f..b63c1f6de 100644 --- a/src/resources/extensions/sf/tests/worktree.test.ts +++ b/src/resources/extensions/sf/tests/worktree.test.ts @@ -42,10 +42,10 @@ const base = mkdtempSync(join(tmpdir(), "sf-branch-test-")); run("git init -b main", base); run('git config user.name "Pi Test"', base); run('git config user.email "pi@example.com"', base); -mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); +mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }); writeFileSync(join(base, "README.md"), "hello\n", "utf-8"); -writeFileSync(join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), `# M001: Demo\n\n## Slices\n- [ ] **S01: Slice One** \`risk:low\` \`depends:[]\`\n > After this: demo works\n`, "utf-8"); -writeFileSync(join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), `# S01: Slice One\n\n**Goal:** Demo\n**Demo:** Demo\n\n## Must-Haves\n- done\n\n## Tasks\n- [ ] **T01: Implement** \`est:10m\`\n do it\n`, "utf-8"); +writeFileSync(join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), `# M001: Demo\n\n## Slices\n- [ ] **S01: Slice One** \`risk:low\` \`depends:[]\`\n > After this: demo works\n`, "utf-8"); +writeFileSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), `# S01: Slice One\n\n**Goal:** Demo\n**Demo:** Demo\n\n## Must-Haves\n- done\n\n## Tasks\n- [ ] **T01: Implement** \`est:10m\`\n do it\n`, "utf-8"); run("git add .", base); run('git commit -m "chore: init"', base); @@ -97,8 +97,8 @@ describe('worktree', async () => { console.log("\n=== detectWorktreeName ==="); assert.deepStrictEqual(detectWorktreeName("/projects/myapp"), null, "no worktree in plain path"); - assert.deepStrictEqual(detectWorktreeName("/projects/myapp/.gsd/worktrees/feature-auth"), "feature-auth", 
"detects worktree name"); - assert.deepStrictEqual(detectWorktreeName("/projects/myapp/.gsd/worktrees/my-wt/subdir"), "my-wt", "detects worktree with subdir"); + assert.deepStrictEqual(detectWorktreeName("/projects/myapp/.sf/worktrees/feature-auth"), "feature-auth", "detects worktree name"); + assert.deepStrictEqual(detectWorktreeName("/projects/myapp/.sf/worktrees/my-wt/subdir"), "my-wt", "detects worktree with subdir"); // ═══════════════════════════════════════════════════════════════════════ // Integration branch — facade-level tests @@ -198,22 +198,22 @@ describe('worktree', async () => { // ── detectWorktreeName: symlink-resolved paths ─────────────────────────── console.log("\n=== detectWorktreeName (symlink-resolved paths) ==="); assert.deepStrictEqual( - detectWorktreeName("/Users/fran/.gsd/projects/89e1c9ad49bf/worktrees/M001"), + detectWorktreeName("/Users/fran/.sf/projects/89e1c9ad49bf/worktrees/M001"), "M001", "detects milestone in symlink-resolved path", ); assert.deepStrictEqual( - detectWorktreeName("/Users/fran/.gsd/projects/abc123/worktrees/M002/subdir"), + detectWorktreeName("/Users/fran/.sf/projects/abc123/worktrees/M002/subdir"), "M002", "detects milestone with trailing subdir in symlink-resolved path", ); assert.deepStrictEqual( - detectWorktreeName("/Users/fran/.gsd/projects/abc123"), + detectWorktreeName("/Users/fran/.sf/projects/abc123"), null, "returns null for project root without worktrees segment", ); assert.deepStrictEqual( - detectWorktreeName("/foo/.gsd/worktrees/M001"), + detectWorktreeName("/foo/.sf/worktrees/M001"), "M001", "still detects direct layout path", ); @@ -221,7 +221,7 @@ describe('worktree', async () => { // ── resolveProjectRoot: symlink-resolved paths ────────────────────────── console.log("\n=== resolveProjectRoot (symlink-resolved paths) ==="); - // BUG FIX: symlink-resolved paths that land inside ~/.gsd should NOT + // BUG FIX: symlink-resolved paths that land inside ~/.sf should NOT // resolve to the home 
directory. When the .git file fallback can't find // the real project root (no git worktree metadata in these synthetic paths), // resolveProjectRoot returns the input unchanged rather than returning ~. @@ -229,13 +229,13 @@ describe('worktree', async () => { // With SF_PROJECT_ROOT env var set (layer 1 — coordinator passes it) process.env.SF_PROJECT_ROOT = "/real/project"; assert.deepStrictEqual( - resolveProjectRoot("/Users/fran/.gsd/projects/89e1c9ad49bf/worktrees/M001"), + resolveProjectRoot("/Users/fran/.sf/projects/89e1c9ad49bf/worktrees/M001"), "/real/project", "uses SF_PROJECT_ROOT when set", ); delete process.env.SF_PROJECT_ROOT; - // Without SF_PROJECT_ROOT, direct layout still works (no ~/.gsd collision) + // Without SF_PROJECT_ROOT, direct layout still works (no ~/.sf collision) assert.deepStrictEqual( resolveProjectRoot("/some/repo"), "/some/repo", @@ -243,9 +243,9 @@ describe('worktree', async () => { ); delete process.env.SF_PROJECT_ROOT; - // Without SF_PROJECT_ROOT, direct layout still works (no ~/.gsd collision) + // Without SF_PROJECT_ROOT, direct layout still works (no ~/.sf collision) assert.deepStrictEqual( - resolveProjectRoot("/foo/.gsd/worktrees/M001"), + resolveProjectRoot("/foo/.sf/worktrees/M001"), "/foo", "still resolves direct layout path", ); @@ -257,7 +257,7 @@ describe('worktree', async () => { // Without SF_PROJECT_ROOT, direct layout with nested subdirs assert.deepStrictEqual( - resolveProjectRoot("/data/.gsd/worktrees/M003/nested"), + resolveProjectRoot("/data/.sf/worktrees/M003/nested"), "/data", "resolves correctly with nested subdirs after worktree name (direct layout)", ); @@ -266,21 +266,21 @@ describe('worktree', async () => { { const fakeHome = mkdtempSync(join(tmpdir(), "sf-home-")); const project = realpathSync(mkdtempSync(join(tmpdir(), "sf-proj-"))); - const storage = join(fakeHome, ".gsd", "projects", "abc123def456"); + const storage = join(fakeHome, ".sf", "projects", "abc123def456"); mkdirSync(storage, { recursive: 
true }); - symlinkSync(storage, join(project, ".gsd")); + symlinkSync(storage, join(project, ".sf")); run("git init -b main", project); run("git config user.name 'Pi Test'", project); run("git config user.email 'pi@example.com'", project); writeFileSync(join(project, "README.md"), "init\n"); run("git add -A && git commit -m init", project); - run("git worktree add .gsd/worktrees/M001 -b worktree/M001", project); + run("git worktree add .sf/worktrees/M001 -b worktree/M001", project); - const deep = join(project, ".gsd", "worktrees", "M001", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k"); + const deep = join(project, ".sf", "worktrees", "M001", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k"); mkdirSync(deep, { recursive: true }); - process.env.SF_HOME = join(fakeHome, ".gsd"); + process.env.SF_HOME = join(fakeHome, ".sf"); assert.deepStrictEqual( normalizePath(resolveProjectRoot(realpathSync(deep))), normalizePath(project), diff --git a/src/resources/extensions/sf/tests/write-gate.test.ts b/src/resources/extensions/sf/tests/write-gate.test.ts index c162b2b28..8b847a376 100644 --- a/src/resources/extensions/sf/tests/write-gate.test.ts +++ b/src/resources/extensions/sf/tests/write-gate.test.ts @@ -29,7 +29,7 @@ import { test('write-gate: blocks CONTEXT.md write during discussion without depth verification (absolute path)', () => { const result = shouldBlockContextWrite( 'write', - '/Users/dev/project/.gsd/milestones/M001/M001-CONTEXT.md', + '/Users/dev/project/.sf/milestones/M001/M001-CONTEXT.md', 'M001', false, ); @@ -42,7 +42,7 @@ test('write-gate: blocks CONTEXT.md write during discussion without depth verifi test('write-gate: blocks CONTEXT.md write during discussion without depth verification (relative path)', () => { const result = shouldBlockContextWrite( 'write', - '.gsd/milestones/M005/M005-CONTEXT.md', + '.sf/milestones/M005/M005-CONTEXT.md', 'M005', false, ); @@ -57,7 +57,7 @@ test('write-gate: allows CONTEXT.md write after depth 
verification', () => { markDepthVerified('M001'); const result = shouldBlockContextWrite( 'write', - '/Users/dev/project/.gsd/milestones/M001/M001-CONTEXT.md', + '/Users/dev/project/.sf/milestones/M001/M001-CONTEXT.md', 'M001', ); assert.strictEqual(result.block, false, 'should not block after depth verification'); @@ -70,7 +70,7 @@ test('write-gate: allows CONTEXT.md write after depth verification', () => { test('write-gate: blocks CONTEXT.md write when milestoneId is ambiguous', () => { const result = shouldBlockContextWrite( 'write', - '.gsd/milestones/M001/M001-CONTEXT.md', + '.sf/milestones/M001/M001-CONTEXT.md', null, ); assert.strictEqual(result.block, true, 'should block when milestone context is ambiguous'); @@ -82,7 +82,7 @@ test('write-gate: allows non-CONTEXT.md writes during discussion', () => { // DISCUSSION.md const r1 = shouldBlockContextWrite( 'write', - '.gsd/milestones/M001/M001-DISCUSSION.md', + '.sf/milestones/M001/M001-DISCUSSION.md', 'M001', ); assert.strictEqual(r1.block, false, 'DISCUSSION.md should pass'); @@ -90,7 +90,7 @@ test('write-gate: allows non-CONTEXT.md writes during discussion', () => { // Slice file const r2 = shouldBlockContextWrite( 'write', - '.gsd/milestones/M001/slices/S01/S01-PLAN.md', + '.sf/milestones/M001/slices/S01/S01-PLAN.md', 'M001', ); assert.strictEqual(r2.block, false, 'slice plan should pass'); @@ -109,7 +109,7 @@ test('write-gate: allows non-CONTEXT.md writes during discussion', () => { test('write-gate: regex does not match slice context files (S01-CONTEXT.md)', () => { const result = shouldBlockContextWrite( 'write', - '.gsd/milestones/M001/slices/S01/S01-CONTEXT.md', + '.sf/milestones/M001/slices/S01/S01-CONTEXT.md', 'M001', ); assert.strictEqual(result.block, false, 'S01-CONTEXT.md should not be blocked'); @@ -120,7 +120,7 @@ test('write-gate: regex does not match slice context files (S01-CONTEXT.md)', () test('write-gate: blocked reason contains depth_verification keyword and anti-bypass language', () => 
{ const result = shouldBlockContextWrite( 'write', - '.gsd/milestones/M999/M999-CONTEXT.md', + '.sf/milestones/M999/M999-CONTEXT.md', 'M999', ); assert.strictEqual(result.block, true); @@ -135,7 +135,7 @@ test('write-gate: blocked reason contains depth_verification keyword and anti-by test('write-gate: blocks CONTEXT.md write in queue mode without depth verification', () => { const result = shouldBlockContextWrite( 'write', - '.gsd/milestones/M001/M001-CONTEXT.md', + '.sf/milestones/M001/M001-CONTEXT.md', null, // no milestoneId in queue mode true, // queue phase active ); @@ -150,7 +150,7 @@ test('write-gate: allows CONTEXT.md write in queue mode after depth verification markDepthVerified('M001'); const result = shouldBlockContextWrite( 'write', - '.gsd/milestones/M001/M001-CONTEXT.md', + '.sf/milestones/M001/M001-CONTEXT.md', null, // no milestoneId in queue mode true, // queue phase active ); @@ -166,14 +166,14 @@ test('write-gate: markDepthVerified unlocks only the matching milestone', () => const allowed = shouldBlockContextWrite( 'write', - '.gsd/milestones/M001/M001-CONTEXT.md', + '.sf/milestones/M001/M001-CONTEXT.md', null, ); assert.strictEqual(allowed.block, false, 'should allow the verified milestone'); const blockedOther = shouldBlockContextWrite( 'write', - '.gsd/milestones/M002/M002-CONTEXT.md', + '.sf/milestones/M002/M002-CONTEXT.md', null, ); assert.strictEqual(blockedOther.block, true, 'other milestones should remain blocked'); diff --git a/src/resources/extensions/sf/tests/write-intercept.test.ts b/src/resources/extensions/sf/tests/write-intercept.test.ts index 371e60b9e..755d57b00 100644 --- a/src/resources/extensions/sf/tests/write-intercept.test.ts +++ b/src/resources/extensions/sf/tests/write-intercept.test.ts @@ -7,48 +7,48 @@ import { isBlockedStateFile, BLOCKED_WRITE_ERROR } from '../write-intercept.ts'; // ─── isBlockedStateFile: blocked paths ─────────────────────────────────── -test('write-intercept: blocks unix .gsd/STATE.md path', () 
=> { - assert.strictEqual(isBlockedStateFile('/project/.gsd/STATE.md'), true); +test('write-intercept: blocks unix .sf/STATE.md path', () => { + assert.strictEqual(isBlockedStateFile('/project/.sf/STATE.md'), true); }); -test('write-intercept: blocks relative path with dir prefix before .gsd/STATE.md', () => { - assert.strictEqual(isBlockedStateFile('project/.gsd/STATE.md'), true); +test('write-intercept: blocks relative path with dir prefix before .sf/STATE.md', () => { + assert.strictEqual(isBlockedStateFile('project/.sf/STATE.md'), true); }); -test('write-intercept: blocks bare relative .gsd/STATE.md (no leading separator)', () => { - // (^|[/\\]) matches paths that start with .gsd/ — covers the case where write +test('write-intercept: blocks bare relative .sf/STATE.md (no leading separator)', () => { + // (^|[/\\]) matches paths that start with .sf/ — covers the case where write // tools receive a bare relative path before the file exists (realpathSync fails). - assert.strictEqual(isBlockedStateFile('.gsd/STATE.md'), true); + assert.strictEqual(isBlockedStateFile('.sf/STATE.md'), true); }); -test('write-intercept: blocks nested project .gsd/STATE.md path', () => { - assert.strictEqual(isBlockedStateFile('/Users/dev/my-project/.gsd/STATE.md'), true); +test('write-intercept: blocks nested project .sf/STATE.md path', () => { + assert.strictEqual(isBlockedStateFile('/Users/dev/my-project/.sf/STATE.md'), true); }); -test('write-intercept: blocks .gsd/projects/<name>/STATE.md (symlinked projects path)', () => { - assert.strictEqual(isBlockedStateFile('/home/user/.gsd/projects/my-project/STATE.md'), true); +test('write-intercept: blocks .sf/projects/<name>/STATE.md (symlinked projects path)', () => { + assert.strictEqual(isBlockedStateFile('/home/user/.sf/projects/my-project/STATE.md'), true); }); // ─── isBlockedStateFile: allowed paths ─────────────────────────────────── -test('write-intercept: allows .gsd/ROADMAP.md', () => { - 
assert.strictEqual(isBlockedStateFile('/project/.gsd/ROADMAP.md'), false); +test('write-intercept: allows .sf/ROADMAP.md', () => { + assert.strictEqual(isBlockedStateFile('/project/.sf/ROADMAP.md'), false); }); -test('write-intercept: allows .gsd/PLAN.md', () => { - assert.strictEqual(isBlockedStateFile('/project/.gsd/PLAN.md'), false); +test('write-intercept: allows .sf/PLAN.md', () => { + assert.strictEqual(isBlockedStateFile('/project/.sf/PLAN.md'), false); }); -test('write-intercept: allows .gsd/REQUIREMENTS.md', () => { - assert.strictEqual(isBlockedStateFile('/project/.gsd/REQUIREMENTS.md'), false); +test('write-intercept: allows .sf/REQUIREMENTS.md', () => { + assert.strictEqual(isBlockedStateFile('/project/.sf/REQUIREMENTS.md'), false); }); -test('write-intercept: allows .gsd/SUMMARY.md', () => { - assert.strictEqual(isBlockedStateFile('/project/.gsd/SUMMARY.md'), false); +test('write-intercept: allows .sf/SUMMARY.md', () => { + assert.strictEqual(isBlockedStateFile('/project/.sf/SUMMARY.md'), false); }); -test('write-intercept: allows .gsd/PROJECT.md', () => { - assert.strictEqual(isBlockedStateFile('/project/.gsd/PROJECT.md'), false); +test('write-intercept: allows .sf/PROJECT.md', () => { + assert.strictEqual(isBlockedStateFile('/project/.sf/PROJECT.md'), false); }); test('write-intercept: allows regular source files', () => { @@ -56,10 +56,10 @@ test('write-intercept: allows regular source files', () => { }); test('write-intercept: allows slice plan files', () => { - assert.strictEqual(isBlockedStateFile('/project/.gsd/milestones/M001/slices/S01/S01-PLAN.md'), false); + assert.strictEqual(isBlockedStateFile('/project/.sf/milestones/M001/slices/S01/S01-PLAN.md'), false); }); -test('write-intercept: does not block files named STATE.md outside .gsd/', () => { +test('write-intercept: does not block files named STATE.md outside .sf/', () => { assert.strictEqual(isBlockedStateFile('/project/docs/STATE.md'), false); }); diff --git 
a/src/resources/extensions/sf/tests/zombie-sf-state.test.ts b/src/resources/extensions/sf/tests/zombie-sf-state.test.ts index c0b8d9d34..a22d16e09 100644 --- a/src/resources/extensions/sf/tests/zombie-sf-state.test.ts +++ b/src/resources/extensions/sf/tests/zombie-sf-state.test.ts @@ -5,13 +5,13 @@ import { createTestContext } from "./test-helpers.ts"; const { assertTrue, assertMatch, assertNoMatch, report } = createTestContext(); -// ─── #2942: Zombie .gsd state skips init wizard ───────────────────────────── +// ─── #2942: Zombie .sf state skips init wizard ───────────────────────────── // -// A partially initialized .gsd/ (symlink exists but no PREFERENCES.md or +// A partially initialized .sf/ (symlink exists but no PREFERENCES.md or // milestones/) causes the init wizard gate in showWorkflowEntry to be skipped, // resulting in an uninitialized project session. -console.log("\n=== #2942: zombie .gsd state must not skip init wizard ==="); +console.log("\n=== #2942: zombie .sf state must not skip init wizard ==="); // ── guided-flow.ts — init wizard gate must check bootstrap completeness ── @@ -32,15 +32,15 @@ const afterSmartEntry = smartEntryIdx >= 0 ? guidedFlowSrc.slice(smartEntryIdx, // It must also verify that bootstrap artifacts (PREFERENCES.md or milestones/) exist. assertTrue( afterSmartEntry.includes("PREFERENCES.md") || afterSmartEntry.includes("PREFERENCES"), - "init wizard gate checks for PREFERENCES.md, not just .gsd/ existence (#2942)", + "init wizard gate checks for PREFERENCES.md, not just .sf/ existence (#2942)", ); assertTrue( afterSmartEntry.includes("milestones"), - "init wizard gate checks for milestones/ directory, not just .gsd/ existence (#2942)", + "init wizard gate checks for milestones/ directory, not just .sf/ existence (#2942)", ); -// The init wizard should be shown when .gsd/ exists but has no bootstrap artifacts. +// The init wizard should be shown when .sf/ exists but has no bootstrap artifacts. 
// The old code was: if (!existsSync(sfRoot(basePath))) { ... showProjectInit ... } // The fix should use a compound check so zombie states trigger the wizard. // Verify we no longer have the bare existence check as the sole gate. @@ -68,7 +68,7 @@ const autoStartSrc = readFileSync( ); // After ensureGsdSymlink, the code that creates milestones/ must check for -// the milestones directory specifically (not .gsd/ which ensureGsdSymlink already created). +// the milestones directory specifically (not .sf/ which ensureGsdSymlink already created). const symlinkIdx = autoStartSrc.indexOf("ensureGsdSymlink(base)"); assertTrue(symlinkIdx >= 0, "auto-start.ts calls ensureGsdSymlink(base)"); @@ -89,7 +89,7 @@ const mkdirRegion = afterSymlink.slice(0, afterSymlink.indexOf("mkdirSync") + 20 assertMatch( mkdirRegion, /existsSync\([^)]*milestones/, - "milestones bootstrap checks milestones path existence, not .gsd/ (#2942)", + "milestones bootstrap checks milestones path existence, not .sf/ (#2942)", ); report(); diff --git a/src/resources/extensions/sf/tools/complete-milestone.ts b/src/resources/extensions/sf/tools/complete-milestone.ts index a94fd4a46..551452c77 100644 --- a/src/resources/extensions/sf/tools/complete-milestone.ts +++ b/src/resources/extensions/sf/tools/complete-milestone.ts @@ -196,7 +196,7 @@ export async function handleCompleteMilestone( if (milestoneDir) { summaryPath = join(milestoneDir, `${params.milestoneId}-SUMMARY.md`); } else { - const sfDir = join(basePath, ".gsd"); + const sfDir = join(basePath, ".sf"); const manualDir = join(sfDir, "milestones", params.milestoneId); mkdirSync(manualDir, { recursive: true }); summaryPath = join(manualDir, `${params.milestoneId}-SUMMARY.md`); diff --git a/src/resources/extensions/sf/tools/complete-slice.ts b/src/resources/extensions/sf/tools/complete-slice.ts index b84733ce5..472ddd8fc 100644 --- a/src/resources/extensions/sf/tools/complete-slice.ts +++ b/src/resources/extensions/sf/tools/complete-slice.ts @@ -325,7 
+325,7 @@ export async function handleCompleteSlice( summaryPath = join(sliceDir, `${params.sliceId}-SUMMARY.md`); } else { // Slice dir doesn't exist on disk yet — build path manually and ensure dirs - const sfDir = join(basePath, ".gsd"); + const sfDir = join(basePath, ".sf"); const manualSliceDir = join(sfDir, "milestones", params.milestoneId, "slices", params.sliceId); mkdirSync(manualSliceDir, { recursive: true }); summaryPath = join(manualSliceDir, `${params.sliceId}-SUMMARY.md`); diff --git a/src/resources/extensions/sf/tools/complete-task.ts b/src/resources/extensions/sf/tools/complete-task.ts index 45ea34d26..35e1d912f 100644 --- a/src/resources/extensions/sf/tools/complete-task.ts +++ b/src/resources/extensions/sf/tools/complete-task.ts @@ -227,7 +227,7 @@ export async function handleCompleteTask( summaryPath = join(tasksDir, `${params.taskId}-SUMMARY.md`); } else { // Tasks dir doesn't exist on disk yet — build path manually and ensure dirs - const sfDir = join(basePath, ".gsd"); + const sfDir = join(basePath, ".sf"); const manualTasksDir = join(sfDir, "milestones", params.milestoneId, "slices", params.sliceId, "tasks"); mkdirSync(manualTasksDir, { recursive: true }); summaryPath = join(manualTasksDir, `${params.taskId}-SUMMARY.md`); diff --git a/src/resources/extensions/sf/tools/reassess-roadmap.ts b/src/resources/extensions/sf/tools/reassess-roadmap.ts index 53a6d0ec4..7da6b7f0c 100644 --- a/src/resources/extensions/sf/tools/reassess-roadmap.ts +++ b/src/resources/extensions/sf/tools/reassess-roadmap.ts @@ -109,7 +109,7 @@ export async function handleReassessRoadmap( // ── Compute assessment artifact path ────────────────────────────── // Assessment lives in the completed slice's directory const assessmentRelPath = join( - ".gsd", "milestones", params.milestoneId, + ".sf", "milestones", params.milestoneId, "slices", params.completedSliceId, `${params.completedSliceId}-ASSESSMENT.md`, ); @@ -247,7 +247,7 @@ export async function handleReassessRoadmap( 
if (hasStructuralChanges) { const validationFile = join( - basePath, ".gsd", "milestones", params.milestoneId, + basePath, ".sf", "milestones", params.milestoneId, `${params.milestoneId}-VALIDATION.md`, ); try { diff --git a/src/resources/extensions/sf/tools/validate-milestone.ts b/src/resources/extensions/sf/tools/validate-milestone.ts index a4008cb84..7c46ac9de 100644 --- a/src/resources/extensions/sf/tools/validate-milestone.ts +++ b/src/resources/extensions/sf/tools/validate-milestone.ts @@ -107,7 +107,7 @@ export async function handleValidateMilestone( if (milestoneDir) { validationPath = join(milestoneDir, `${params.milestoneId}-VALIDATION.md`); } else { - const sfDir = join(basePath, ".gsd"); + const sfDir = join(basePath, ".sf"); const manualDir = join(sfDir, "milestones", params.milestoneId); validationPath = join(manualDir, `${params.milestoneId}-VALIDATION.md`); } diff --git a/src/resources/extensions/sf/triage-resolution.ts b/src/resources/extensions/sf/triage-resolution.ts index 40d966ffd..761726574 100644 --- a/src/resources/extensions/sf/triage-resolution.ts +++ b/src/resources/extensions/sf/triage-resolution.ts @@ -91,7 +91,7 @@ export function executeReplan( ): boolean { try { const triggerPath = join( - basePath, ".gsd", "milestones", mid, "slices", sid, `${sid}-REPLAN-TRIGGER.md`, + basePath, ".sf", "milestones", mid, "slices", sid, `${sid}-REPLAN-TRIGGER.md`, ); const ts = new Date().toISOString(); const content = [ @@ -131,7 +131,7 @@ export function executeReplan( * Execute a backtrack directive — user wants to abandon current milestone * and return to a previous one (milestone regression). * - * Writes a BACKTRACK-TRIGGER.md marker at `.gsd/BACKTRACK-TRIGGER.md` with + * Writes a BACKTRACK-TRIGGER.md marker at `.sf/BACKTRACK-TRIGGER.md` with * the target milestone, reason, and timestamp. The state machine (deriveState) * detects this and transitions the project to the target milestone, resetting * its slices to allow re-planning. 
@@ -403,7 +403,7 @@ export function buildQuickTaskPrompt(capture: CaptureEntry): string { ` the current codebase. If the issue has already been fixed (e.g., by planned`, ` milestone work), report "Already resolved — no changes needed." and stop.`, `2. Execute this task as a small, self-contained change.`, - `3. Do NOT modify any \`.gsd/\` plan files — this is a one-off, not a planned task.`, + `3. Do NOT modify any \`.sf/\` plan files — this is a one-off, not a planned task.`, `4. Commit your changes with a descriptive message.`, `5. Keep changes minimal and focused on the capture text.`, `6. When done, say: "Quick task complete."`, diff --git a/src/resources/extensions/sf/unit-ownership.ts b/src/resources/extensions/sf/unit-ownership.ts index 89bb08f5e..389b616dc 100644 --- a/src/resources/extensions/sf/unit-ownership.ts +++ b/src/resources/extensions/sf/unit-ownership.ts @@ -3,7 +3,7 @@ // // An agent can claim a unit (task, slice) before working on it. // complete-task and complete-slice enforce ownership when claims exist. -// Claims are stored in SQLite (.gsd/unit-claims.db) for atomic +// Claims are stored in SQLite (.sf/unit-claims.db) for atomic // first-writer-wins semantics via INSERT OR IGNORE. // // Unit key format: @@ -144,7 +144,7 @@ function wrapDb(rawDb: unknown): DbLike { const dbPool = new Map<string, DbLike>(); function claimsDbPath(basePath: string): string { - return join(basePath, ".gsd", "unit-claims.db"); + return join(basePath, ".sf", "unit-claims.db"); } function getDb(basePath: string): DbLike | null { @@ -167,13 +167,13 @@ export function sliceUnitKey(milestoneId: string, sliceId: string): string { /** * Initialize the ownership SQLite database for a given basePath. - * Creates .gsd/ directory and unit-claims.db with the unit_claims table. + * Creates .sf/ directory and unit-claims.db with the unit_claims table. * Safe to call multiple times (idempotent). 
*/ export function initOwnershipTable(basePath: string): void { if (dbPool.has(basePath)) return; - const dir = join(basePath, ".gsd"); + const dir = join(basePath, ".sf"); mkdirSync(dir, { recursive: true }); const raw = openRawDb(claimsDbPath(basePath)); diff --git a/src/resources/extensions/sf/validate-directory.ts b/src/resources/extensions/sf/validate-directory.ts index 524212a7e..b16ca3d9d 100644 --- a/src/resources/extensions/sf/validate-directory.ts +++ b/src/resources/extensions/sf/validate-directory.ts @@ -1,7 +1,7 @@ /** * SF Directory Validation — Safeguards against running in dangerous directories. * - * Prevents SF from creating .gsd/ structures in system paths, home directories, + * Prevents SF from creating .sf/ structures in system paths, home directories, * or other locations where writing project scaffolding would be harmful. */ @@ -22,7 +22,7 @@ export interface DirectoryValidationResult { // ─── Blocked Paths ────────────────────────────────────────────────────────────── -/** Paths where SF must never create .gsd/ — no override possible. */ +/** Paths where SF must never create .sf/ — no override possible. */ const UNIX_BLOCKED_PATHS = new Set([ "/", "/bin", diff --git a/src/resources/extensions/sf/watch/header-renderer.ts b/src/resources/extensions/sf/watch/header-renderer.ts index 25953243e..74a3cb693 100644 --- a/src/resources/extensions/sf/watch/header-renderer.ts +++ b/src/resources/extensions/sf/watch/header-renderer.ts @@ -98,13 +98,13 @@ export function readGitBranch(projectRoot: string): string { } /** - * Read MCP server names from .mcp.json or .gsd/mcp.json. + * Read MCP server names from .mcp.json or .sf/mcp.json. * Returns array of server name strings. 
*/ export function readMcpServerNames(projectRoot: string): string[] { const configPaths = [ join(projectRoot, ".mcp.json"), - join(projectRoot, ".gsd", "mcp.json"), + join(projectRoot, ".sf", "mcp.json"), ]; const names: string[] = []; const seen = new Set<string>(); diff --git a/src/resources/extensions/sf/workflow-events.ts b/src/resources/extensions/sf/workflow-events.ts index 40bdab31f..111bcf7b9 100644 --- a/src/resources/extensions/sf/workflow-events.ts +++ b/src/resources/extensions/sf/workflow-events.ts @@ -34,9 +34,9 @@ export interface WorkflowEvent { // ─── appendEvent ───────────────────────────────────────────────────────── /** - * Append one event to .gsd/event-log.jsonl. + * Append one event to .sf/event-log.jsonl. * Computes a content hash from cmd+params (deterministic, independent of ts/actor/session). - * Creates .gsd directory if needed. + * Creates .sf directory if needed. */ export function appendEvent( basePath: string, @@ -53,7 +53,7 @@ export function appendEvent( hash, session_id: ENGINE_SESSION_ID, }; - const dir = join(basePath, ".gsd"); + const dir = join(basePath, ".sf"); mkdirSync(dir, { recursive: true }); appendFileSync(join(dir, "event-log.jsonl"), JSON.stringify(fullEvent) + "\n", "utf-8"); } @@ -117,7 +117,7 @@ export function findForkPoint( * Active log retains only events from other milestones. * Archived file is kept on disk for forensics. 
* - * @param basePath - Project root (parent of .gsd/) + * @param basePath - Project root (parent of .sf/) * @param milestoneId - The milestone whose events should be archived * @returns { archived: number } — count of events moved to archive */ @@ -125,8 +125,8 @@ export function compactMilestoneEvents( basePath: string, milestoneId: string, ): { archived: number } { - const logPath = join(basePath, ".gsd", "event-log.jsonl"); - const archivePath = join(basePath, ".gsd", `event-log-${milestoneId}.jsonl.archived`); + const logPath = join(basePath, ".sf", "event-log.jsonl"); + const archivePath = join(basePath, ".sf", `event-log-${milestoneId}.jsonl.archived`); return withFileLockSync(logPath, () => { const allEvents = readEvents(logPath); diff --git a/src/resources/extensions/sf/workflow-logger.ts b/src/resources/extensions/sf/workflow-logger.ts index f995e1a8f..f82beeb2e 100644 --- a/src/resources/extensions/sf/workflow-logger.ts +++ b/src/resources/extensions/sf/workflow-logger.ts @@ -2,7 +2,7 @@ // Centralized warning/error accumulator for the workflow engine pipeline. // Captures structured entries that the auto-loop can drain after each unit // to surface root causes for stuck loops, silent degradation, and blocked writes. -// Error-severity entries are persisted to .gsd/audit-log.jsonl (sanitized) for +// Error-severity entries are persisted to .sf/audit-log.jsonl (sanitized) for // post-mortem analysis. Warnings are ephemeral (stderr + buffer only) to avoid // log amplification from expected-control-flow catch paths. // @@ -215,7 +215,7 @@ export function formatForNotification(entries: readonly LogEntry[]): string { export function readAuditLog(basePath?: string): LogEntry[] { const bp = basePath ?? 
_auditBasePath; if (!bp) return []; - const auditPath = join(bp, ".gsd", "audit-log.jsonl"); + const auditPath = join(bp, ".sf", "audit-log.jsonl"); if (!existsSync(auditPath)) return []; try { const content = readFileSync(auditPath, "utf-8"); @@ -300,12 +300,12 @@ function _push( } } - // Persist errors to .gsd/audit-log.jsonl so they survive context resets. + // Persist errors to .sf/audit-log.jsonl so they survive context resets. // Only error-severity entries are persisted — warnings are ephemeral (stderr + buffer) // to avoid log amplification from expected-control-flow catch paths. if (_auditBasePath && severity === "error") { try { - const auditDir = join(_auditBasePath, ".gsd"); + const auditDir = join(_auditBasePath, ".sf"); mkdirSync(auditDir, { recursive: true }); const sanitized = _sanitizeForAudit(entry); appendFileSync(join(auditDir, "audit-log.jsonl"), JSON.stringify(sanitized) + "\n", "utf-8"); diff --git a/src/resources/extensions/sf/workflow-manifest.ts b/src/resources/extensions/sf/workflow-manifest.ts index b5d9d438d..b37912d4a 100644 --- a/src/resources/extensions/sf/workflow-manifest.ts +++ b/src/resources/extensions/sf/workflow-manifest.ts @@ -198,13 +198,13 @@ export function snapshotState(): StateManifest { // ─── writeManifest ─────────────────────────────────────────────────────── /** - * Write current DB state to .gsd/state-manifest.json via atomicWriteSync. + * Write current DB state to .sf/state-manifest.json via atomicWriteSync. * Uses JSON.stringify with 2-space indent for git three-way merge friendliness. 
*/ export function writeManifest(basePath: string): void { const manifest = snapshotState(); const json = JSON.stringify(manifest, null, 2); - const dir = join(basePath, ".gsd"); + const dir = join(basePath, ".sf"); mkdirSync(dir, { recursive: true }); atomicWriteSync(join(dir, "state-manifest.json"), json); } @@ -215,7 +215,7 @@ export function writeManifest(basePath: string): void { * Read state-manifest.json and return parsed manifest, or null if not found. */ export function readManifest(basePath: string): StateManifest | null { - const manifestPath = join(basePath, ".gsd", "state-manifest.json"); + const manifestPath = join(basePath, ".sf", "state-manifest.json"); if (!existsSync(manifestPath)) { return null; diff --git a/src/resources/extensions/sf/workflow-migration.ts b/src/resources/extensions/sf/workflow-migration.ts index ca1639925..c08c0d117 100644 --- a/src/resources/extensions/sf/workflow-migration.ts +++ b/src/resources/extensions/sf/workflow-migration.ts @@ -12,7 +12,7 @@ import { logWarning } from "./workflow-logger.js"; // ─── needsAutoMigration ─────────────────────────────────────────────────── /** - * Returns true when engine tables are empty AND a .gsd/milestones/ directory + * Returns true when engine tables are empty AND a .sf/milestones/ directory * with markdown files exists — signals that this is a legacy project that needs * one-time migration from markdown to engine state. 
*/ @@ -29,8 +29,8 @@ export function needsAutoMigration(basePath: string): boolean { return false; } - // Check if .gsd/milestones/ directory exists - const milestonesDir = join(basePath, ".gsd", "milestones"); + // Check if .sf/milestones/ directory exists + const milestonesDir = join(basePath, ".sf", "milestones"); if (!existsSync(milestonesDir)) return false; return true; @@ -39,8 +39,8 @@ export function needsAutoMigration(basePath: string): boolean { // ─── migrateFromMarkdown ────────────────────────────────────────────────── /** - * Migrate legacy markdown-only .gsd/ projects to engine DB state. - * Reads .gsd/milestones/<ID>/ directories and parses ROADMAP.md, *-PLAN.md + * Migrate legacy markdown-only .sf/ projects to engine DB state. + * Reads .sf/milestones/<ID>/ directories and parses ROADMAP.md, *-PLAN.md * files. All inserts are wrapped in a transaction. * * This function only INSERTs data into the already-existing v10 schema tables @@ -59,9 +59,9 @@ export function migrateFromMarkdown(basePath: string): void { return; } - const milestonesDir = join(basePath, ".gsd", "milestones"); + const milestonesDir = join(basePath, ".sf", "milestones"); if (!existsSync(milestonesDir)) { - process.stderr.write("workflow-migration: no .gsd/milestones/ directory found, nothing to migrate\n"); + process.stderr.write("workflow-migration: no .sf/milestones/ directory found, nothing to migrate\n"); return; } @@ -77,7 +77,7 @@ export function migrateFromMarkdown(basePath: string): void { } if (milestoneDirs.length === 0) { - process.stderr.write("workflow-migration: no milestone directories found in .gsd/milestones/\n"); + process.stderr.write("workflow-migration: no milestone directories found in .sf/milestones/\n"); return; } @@ -268,7 +268,7 @@ export function validateMigration(basePath: string): { discrepancies: string[] } const engineTaskCount = engTasks ? 
(engTasks["cnt"] as number) : 0; // Count from markdown - const milestonesDir = join(basePath, ".gsd", "milestones"); + const milestonesDir = join(basePath, ".sf", "milestones"); if (!existsSync(milestonesDir)) { return { discrepancies }; } diff --git a/src/resources/extensions/sf/workflow-projections.ts b/src/resources/extensions/sf/workflow-projections.ts index d293249fa..01f0208e4 100644 --- a/src/resources/extensions/sf/workflow-projections.ts +++ b/src/resources/extensions/sf/workflow-projections.ts @@ -101,7 +101,7 @@ export function renderPlanProjection(basePath: string, milestoneId: string, slic const taskRows = getSliceTasks(milestoneId, sliceId); const content = renderPlanContent(sliceRow, taskRows); - const dir = join(basePath, ".gsd", "milestones", milestoneId, "slices", sliceId); + const dir = join(basePath, ".sf", "milestones", milestoneId, "slices", sliceId); mkdirSync(dir, { recursive: true }); atomicWriteSync(join(dir, `${sliceId}-PLAN.md`), content); } @@ -158,7 +158,7 @@ export function renderRoadmapProjection(basePath: string, milestoneId: string): const sliceRows = getMilestoneSlices(milestoneId); const content = renderRoadmapContent(milestoneRow, sliceRows); - const dir = join(basePath, ".gsd", "milestones", milestoneId); + const dir = join(basePath, ".sf", "milestones", milestoneId); mkdirSync(dir, { recursive: true }); atomicWriteSync(join(dir, `${milestoneId}-ROADMAP.md`), content); } @@ -275,7 +275,7 @@ export function renderSummaryProjection(basePath: string, milestoneId: string, s const evidenceRows = getVerificationEvidence(milestoneId, sliceId, taskId); const content = renderSummaryContent(taskRow, sliceId, milestoneId, evidenceRows); - const dir = join(basePath, ".gsd", "milestones", milestoneId, "slices", sliceId, "tasks"); + const dir = join(basePath, ".sf", "milestones", milestoneId, "slices", sliceId, "tasks"); mkdirSync(dir, { recursive: true }); atomicWriteSync(join(dir, `${taskId}-SUMMARY.md`), content); } @@ -360,7 +360,7 @@ 
export async function renderStateProjection(basePath: string): Promise<void> { } const state = await deriveState(basePath); const content = renderStateContent(state); - const dir = join(basePath, ".gsd"); + const dir = join(basePath, ".sf"); mkdirSync(dir, { recursive: true }); atomicWriteSync(join(dir, "STATE.md"), content); } catch (err) { @@ -429,17 +429,17 @@ export function regenerateIfMissing( switch (fileType) { case "PLAN": - filePath = join(basePath, ".gsd", "milestones", milestoneId, "slices", sliceId, `${sliceId}-PLAN.md`); + filePath = join(basePath, ".sf", "milestones", milestoneId, "slices", sliceId, `${sliceId}-PLAN.md`); break; case "ROADMAP": - filePath = join(basePath, ".gsd", "milestones", milestoneId, `${milestoneId}-ROADMAP.md`); + filePath = join(basePath, ".sf", "milestones", milestoneId, `${milestoneId}-ROADMAP.md`); break; case "SUMMARY": // For SUMMARY, we regenerate all task summaries in the slice - filePath = join(basePath, ".gsd", "milestones", milestoneId, "slices", sliceId, "tasks"); + filePath = join(basePath, ".sf", "milestones", milestoneId, "slices", sliceId, "tasks"); break; case "STATE": - filePath = join(basePath, ".gsd", "STATE.md"); + filePath = join(basePath, ".sf", "STATE.md"); break; } @@ -449,7 +449,7 @@ export function regenerateIfMissing( const doneTasks = taskRows.filter(t => t.status === "done" || t.status === "complete"); let regenerated = 0; for (const task of doneTasks) { - const summaryPath = join(basePath, ".gsd", "milestones", milestoneId, "slices", sliceId, "tasks", `${task.id}-SUMMARY.md`); + const summaryPath = join(basePath, ".sf", "milestones", milestoneId, "slices", sliceId, "tasks", `${task.id}-SUMMARY.md`); if (!existsSync(summaryPath)) { try { renderSummaryProjection(basePath, milestoneId, sliceId, task.id); diff --git a/src/resources/extensions/sf/workflow-reconcile.ts b/src/resources/extensions/sf/workflow-reconcile.ts index 09f22d4f3..0fc3aca14 100644 --- 
a/src/resources/extensions/sf/workflow-reconcile.ts +++ b/src/resources/extensions/sf/workflow-reconcile.ts @@ -353,7 +353,7 @@ function rewriteDivergedEventsForEntity( } function writeEventLog(basePath: string, events: WorkflowEvent[]): void { - const dir = join(basePath, ".gsd"); + const dir = join(basePath, ".sf"); mkdirSync(dir, { recursive: true }); const content = events.map((e) => JSON.stringify(e)).join("\n") + (events.length > 0 ? "\n" : ""); atomicWriteSync(join(dir, "event-log.jsonl"), content); @@ -362,7 +362,7 @@ function writeEventLog(basePath: string, events: WorkflowEvent[]): void { // ─── writeConflictsFile ─────────────────────────────────────────────────────── /** - * Write a human-readable CONFLICTS.md to basePath/.gsd/CONFLICTS.md. + * Write a human-readable CONFLICTS.md to basePath/.sf/CONFLICTS.md. * Lists each conflict with both sides' event payloads and resolution instructions. */ export function writeConflictsFile( @@ -399,7 +399,7 @@ export function writeConflictsFile( }); const content = lines.join("\n"); - const dir = join(basePath, ".gsd"); + const dir = join(basePath, ".sf"); mkdirSync(dir, { recursive: true }); atomicWriteSync(join(dir, "CONFLICTS.md"), content); } @@ -442,8 +442,8 @@ function _reconcileWorktreeLogsInner( worktreeBasePath: string, ): ReconcileResult { // Step 1: Read both logs - const mainLogPath = join(mainBasePath, ".gsd", "event-log.jsonl"); - const wtLogPath = join(worktreeBasePath, ".gsd", "event-log.jsonl"); + const mainLogPath = join(mainBasePath, ".sf", "event-log.jsonl"); + const wtLogPath = join(worktreeBasePath, ".sf", "event-log.jsonl"); const mainEvents = readEvents(mainLogPath); const wtEvents = readEvents(wtLogPath); @@ -467,7 +467,7 @@ function _reconcileWorktreeLogsInner( writeConflictsFile(mainBasePath, conflicts, worktreeBasePath); const conflictSummary = conflicts.slice(0, 3).map(c => `${c.entityType}:${c.entityId}`).join(", "); const truncated = conflicts.length > 3 ? `... 
and ${conflicts.length - 3} more` : ""; - logError("reconcile", `${conflicts.length} conflict(s) detected on ${conflictSummary}${truncated}. Details: .gsd/CONFLICTS.md`, { count: String(conflicts.length), path: join(mainBasePath, ".gsd", "CONFLICTS.md") }); + logError("reconcile", `${conflicts.length} conflict(s) detected on ${conflictSummary}${truncated}. Details: .sf/CONFLICTS.md`, { count: String(conflicts.length), path: join(mainBasePath, ".sf", "CONFLICTS.md") }); return { autoMerged: 0, conflicts }; } @@ -488,11 +488,11 @@ function _reconcileWorktreeLogsInner( const baseEvents = mainEvents.slice(0, forkPoint + 1); const mergedLog = baseEvents.concat(merged); const logContent = mergedLog.map((e) => JSON.stringify(e)).join("\n") + (mergedLog.length > 0 ? "\n" : ""); - mkdirSync(join(mainBasePath, ".gsd"), { recursive: true }); - atomicWriteSync(join(mainBasePath, ".gsd", "event-log.jsonl"), logContent); + mkdirSync(join(mainBasePath, ".sf"), { recursive: true }); + atomicWriteSync(join(mainBasePath, ".sf", "event-log.jsonl"), logContent); // Step 8: Replay into DB (wrapped in a transaction by replayEvents) - openDatabase(join(mainBasePath, ".gsd", "sf.db")); + openDatabase(join(mainBasePath, ".sf", "sf.db")); replayEvents(merged); // Step 9: Write manifest @@ -527,7 +527,7 @@ function _reconcileWorktreeLogsInner( * params: {JSON} */ export function listConflicts(basePath: string): ConflictEntry[] { - const conflictsPath = join(basePath, ".gsd", "CONFLICTS.md"); + const conflictsPath = join(basePath, ".sf", "CONFLICTS.md"); if (!existsSync(conflictsPath)) return []; const content = readFileSync(conflictsPath, "utf-8"); @@ -628,8 +628,8 @@ export function resolveConflict( const conflict = conflicts[idx]!; const eventsToReplay = pick === "main" ? 
conflict.mainSideEvents : conflict.worktreeSideEvents; - const mainLogPath = join(basePath, ".gsd", "event-log.jsonl"); - const wtLogPath = join(worktreeBasePath, ".gsd", "event-log.jsonl"); + const mainLogPath = join(basePath, ".sf", "event-log.jsonl"); + const wtLogPath = join(worktreeBasePath, ".sf", "event-log.jsonl"); const mainEvents = readEvents(mainLogPath); const wtEvents = readEvents(wtLogPath); const forkPoint = findForkPoint(mainEvents, wtEvents); @@ -647,7 +647,7 @@ export function resolveConflict( writeEventLog(targetBasePath, targetBaseEvents.concat(rewrittenTargetEvents)); // Replay resolved events through the DB (updates DB state) - openDatabase(join(basePath, ".gsd", "sf.db")); + openDatabase(join(basePath, ".sf", "sf.db")); replayEvents(eventsToReplay); invalidateStateCache(); clearPathCache(); @@ -674,7 +674,7 @@ export function resolveConflict( * No-op if CONFLICTS.md does not exist. */ export function removeConflictsFile(basePath: string): void { - const conflictsPath = join(basePath, ".gsd", "CONFLICTS.md"); + const conflictsPath = join(basePath, ".sf", "CONFLICTS.md"); if (existsSync(conflictsPath)) { unlinkSync(conflictsPath); } diff --git a/src/resources/extensions/sf/workflow-templates.ts b/src/resources/extensions/sf/workflow-templates.ts index 108186b81..32df12d0b 100644 --- a/src/resources/extensions/sf/workflow-templates.ts +++ b/src/resources/extensions/sf/workflow-templates.ts @@ -13,11 +13,11 @@ import { homedir } from "node:os"; const __extensionDir = resolveGsdExtensionDir(); const registryPath = join(__extensionDir, "workflow-templates", "registry.json"); -/** Resolve the SF extension dir with fallback to ~/.gsd/agent/extensions/sf/. */ +/** Resolve the SF extension dir with fallback to ~/.sf/agent/extensions/sf/. 
*/ function resolveGsdExtensionDir(): string { const moduleDir = dirname(fileURLToPath(import.meta.url)); if (existsSync(join(moduleDir, "workflow-templates"))) return moduleDir; - const sfHome = process.env.SF_HOME || join(homedir(), ".gsd"); + const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); const agentGsdDir = join(sfHome, "agent", "extensions", "sf"); if (existsSync(join(agentGsdDir, "workflow-templates"))) return agentGsdDir; return moduleDir; @@ -224,7 +224,7 @@ export function getTemplateInfo(name: string): string | null { "", `Description: ${t.description}`, `Complexity: ${t.estimated_complexity}`, - `Requires .gsd/: ${t.requires_project ? "yes" : "no"}`, + `Requires .sf/: ${t.requires_project ? "yes" : "no"}`, "", "Phases:", ...t.phases.map((p, i) => ` ${i + 1}. ${p}`), diff --git a/src/resources/extensions/sf/workflow-templates/registry.json b/src/resources/extensions/sf/workflow-templates/registry.json index 69ed17bc0..0fbe89517 100644 --- a/src/resources/extensions/sf/workflow-templates/registry.json +++ b/src/resources/extensions/sf/workflow-templates/registry.json @@ -7,7 +7,7 @@ "file": "full-project.md", "phases": ["init", "discuss", "plan", "execute", "verify"], "triggers": ["new project", "greenfield", "from scratch", "build an app", "create a new"], - "artifact_dir": ".gsd/", + "artifact_dir": ".sf/", "estimated_complexity": "high", "requires_project": true }, @@ -17,7 +17,7 @@ "file": "bugfix.md", "phases": ["triage", "fix", "verify", "ship"], "triggers": ["bug", "issue", "fix", "broken", "regression", "error", "crash", "failing", "github.com/*/issues/*"], - "artifact_dir": ".gsd/workflows/bugfixes/", + "artifact_dir": ".sf/workflows/bugfixes/", "estimated_complexity": "low", "requires_project": false }, @@ -27,7 +27,7 @@ "file": "small-feature.md", "phases": ["scope", "plan", "implement", "verify"], "triggers": ["add", "feature", "implement", "build", "create", "new command", "new endpoint"], - "artifact_dir": 
".gsd/workflows/features/", + "artifact_dir": ".sf/workflows/features/", "estimated_complexity": "medium", "requires_project": false }, @@ -37,7 +37,7 @@ "file": "refactor.md", "phases": ["inventory", "plan", "migrate", "verify"], "triggers": ["refactor", "migrate", "rename", "restructure", "move", "reorganize", "clean up"], - "artifact_dir": ".gsd/workflows/refactors/", + "artifact_dir": ".sf/workflows/refactors/", "estimated_complexity": "medium", "requires_project": false }, @@ -47,7 +47,7 @@ "file": "spike.md", "phases": ["scope", "research", "synthesize"], "triggers": ["research", "investigate", "explore", "spike", "compare", "evaluate", "should we", "what if", "how does"], - "artifact_dir": ".gsd/workflows/spikes/", + "artifact_dir": ".sf/workflows/spikes/", "estimated_complexity": "low", "requires_project": false }, @@ -67,7 +67,7 @@ "file": "security-audit.md", "phases": ["scan", "triage", "remediate", "re-scan"], "triggers": ["security", "audit", "vulnerability", "owasp", "cve", "penetration", "hardening"], - "artifact_dir": ".gsd/workflows/audits/", + "artifact_dir": ".sf/workflows/audits/", "estimated_complexity": "medium", "requires_project": false }, @@ -77,7 +77,7 @@ "file": "dep-upgrade.md", "phases": ["assess", "upgrade", "fix", "verify"], "triggers": ["upgrade", "update", "dependency", "deps", "bump", "outdated", "npm update", "renovate"], - "artifact_dir": ".gsd/workflows/upgrades/", + "artifact_dir": ".sf/workflows/upgrades/", "estimated_complexity": "medium", "requires_project": false } diff --git a/src/resources/extensions/sf/worktree-command.ts b/src/resources/extensions/sf/worktree-command.ts index 776730919..e87446917 100644 --- a/src/resources/extensions/sf/worktree-command.ts +++ b/src/resources/extensions/sf/worktree-command.ts @@ -1,7 +1,7 @@ /** * SF Worktree Command — /worktree * - * Create, list, merge, and remove git worktrees under .gsd/worktrees/. + * Create, list, merge, and remove git worktrees under .sf/worktrees/. 
* * Usage: * /worktree <name> — create a new worktree @@ -50,7 +50,7 @@ export function getWorktreeOriginalCwd(): string | null { export function getActiveWorktreeName(): string | null { if (!originalCwd) return null; const cwd = process.cwd(); - const wtDir = join(originalCwd, ".gsd", "worktrees"); + const wtDir = join(originalCwd, ".sf", "worktrees"); if (!cwd.startsWith(wtDir)) return null; const rel = cwd.slice(wtDir.length + 1); const name = rel.split("/")[0] ?? rel.split("\\")[0]; @@ -243,7 +243,7 @@ export function registerWorktreeCommand(pi: ExtensionAPI): void { // but process.cwd() is still inside the worktree. Detect this and recover. if (!originalCwd) { const cwd = process.cwd(); - const marker = `${sep}.gsd${sep}worktrees${sep}`; + const marker = `${sep}.sf${sep}worktrees${sep}`; const markerIdx = cwd.indexOf(marker); if (markerIdx !== -1) { originalCwd = cwd.slice(0, markerIdx); @@ -289,7 +289,7 @@ function hasExistingMilestones(wtPath: string): boolean { /** * Clear SF planning artifacts so auto-mode starts fresh with the discuss flow. - * Keeps the .gsd/ directory structure intact but removes milestones and root planning files. + * Keeps the .sf/ directory structure intact but removes milestones and root planning files. 
*/ function clearSFPlans(wtPath: string): void { const mDir = milestonesDir(wtPath); @@ -582,7 +582,7 @@ async function handleMerge( return; } - // Gather merge context — full repo diff, not just .gsd/ + // Gather merge context — full repo diff, not just .sf/ const diffSummary = diffWorktreeAll(basePath, name); const numstat = diffWorktreeNumstat(basePath, name); const sfDiff = getWorktreeSFDiff(basePath, name); @@ -605,7 +605,7 @@ async function handleMerge( for (const s of numstat) { totalAdded += s.added; totalRemoved += s.removed; } // Split files into code vs SF for the preview - const isSF = (f: string) => f.startsWith(".gsd/"); + const isSF = (f: string) => f.startsWith(".sf/"); const codeChanges = diffSummary.added.filter(f => !isSF(f)).length + diffSummary.modified.filter(f => !isSF(f)).length + diffSummary.removed.filter(f => !isSF(f)).length; @@ -664,8 +664,8 @@ async function handleMerge( const commitMessage = `${commitType}: merge worktree ${name}\n\nSF-Worktree: ${name}`; // Reconcile worktree DB into main DB before squash merge - const wtDbPath = join(worktreePath(basePath, name), ".gsd", "sf.db"); - const mainDbPath = join(basePath, ".gsd", "sf.db"); + const wtDbPath = join(worktreePath(basePath, name), ".sf", "sf.db"); + const mainDbPath = join(basePath, ".sf", "sf.db"); if (existsSync(wtDbPath) && existsSync(mainDbPath)) { try { const { reconcileWorktreeDb } = await import("./sf-db.js"); diff --git a/src/resources/extensions/sf/worktree-health.ts b/src/resources/extensions/sf/worktree-health.ts index a3b4fb829..a50271316 100644 --- a/src/resources/extensions/sf/worktree-health.ts +++ b/src/resources/extensions/sf/worktree-health.ts @@ -4,7 +4,7 @@ * Used by doctor-checks.ts for health audits and by worktree-command.ts * for the enhanced `/worktree list` display. * - * Only inspects worktrees under .gsd/worktrees/ — SF owns what SF creates. + * Only inspects worktrees under .sf/worktrees/ — SF owns what SF creates. 
*/ import { existsSync } from "node:fs"; diff --git a/src/resources/extensions/sf/worktree-manager.ts b/src/resources/extensions/sf/worktree-manager.ts index 085b101a1..8eac3c9b2 100644 --- a/src/resources/extensions/sf/worktree-manager.ts +++ b/src/resources/extensions/sf/worktree-manager.ts @@ -1,17 +1,17 @@ /** * SF Worktree Manager * - * Creates and manages git worktrees under .gsd/worktrees/<name>/. + * Creates and manages git worktrees under .sf/worktrees/<name>/. * Each worktree gets its own branch (worktree/<name>) and a full * working copy of the project, enabling parallel work streams. * - * The merge helper compares .gsd/ artifacts between a worktree and + * The merge helper compares .sf/ artifacts between a worktree and * the main branch, then dispatches an LLM-guided merge flow. * * Flow: - * 1. create() — git worktree add .gsd/worktrees/<name> -b worktree/<name> + * 1. create() — git worktree add .sf/worktrees/<name> -b worktree/<name> * 2. user works in the worktree (new plans, milestones, etc.) - * 3. merge() — LLM-guided reconciliation of .gsd/ artifacts back to main + * 3. merge() — LLM-guided reconciliation of .sf/ artifacts back to main * 4. 
remove() — git worktree remove + branch cleanup */ @@ -55,11 +55,11 @@ export interface FileLineStat { } export interface WorktreeDiffSummary { - /** Files only in the worktree .gsd/ (new artifacts) */ + /** Files only in the worktree .sf/ (new artifacts) */ added: string[]; /** Files in both but with different content */ modified: string[]; - /** Files only in main .gsd/ (deleted in worktree) */ + /** Files only in main .sf/ (deleted in worktree) */ removed: string[]; } @@ -104,7 +104,7 @@ export function resolveGitDir(basePath: string): string { } export function worktreesDir(basePath: string): string { - return join(basePath, ".gsd", "worktrees"); + return join(basePath, ".sf", "worktrees"); } export function worktreePath(basePath: string, name: string): string { @@ -116,7 +116,7 @@ export function worktreeBranchName(name: string): string { } /** - * Validate that a path is inside the .gsd/worktrees/ directory. + * Validate that a path is inside the .sf/worktrees/ directory. * Resolves symlinks and normalizes ".." traversals before comparison * so that a symlink-resolved or crafted path cannot escape containment. * @@ -128,14 +128,14 @@ export function isInsideWorktreesDir(basePath: string, targetPath: string): bool const wtDir = existsSync(wtDirPath) ? realpathSync(wtDirPath) : resolve(wtDirPath); const resolved = existsSync(targetPath) ? realpathSync(targetPath) : resolve(targetPath); // The resolved path must start with the worktrees dir followed by a separator, - // not merely be a prefix match (e.g. ".gsd/worktrees-extra" must not match). + // not merely be a prefix match (e.g. ".sf/worktrees-extra" must not match). return resolved === wtDir || resolved.startsWith(wtDir + sep); } // ─── Core Operations ─────────────────────────────────────────────────────── /** - * Create a new git worktree under .gsd/worktrees/<name>/ with branch worktree/<name>. + * Create a new git worktree under .sf/worktrees/<name>/ with branch worktree/<name>. 
* The branch is created from the current HEAD of the main branch. * * @param opts.branch — override the default `worktree/<name>` branch name @@ -163,7 +163,7 @@ export function createWorktree(basePath: string, name: string, opts: { branch?: } } - // Ensure the .gsd/worktrees/ directory exists + // Ensure the .sf/worktrees/ directory exists const wtDir = worktreesDir(basePath); mkdirSync(wtDir, { recursive: true }); @@ -214,7 +214,7 @@ export function createWorktree(basePath: string, name: string, opts: { branch?: /** * List all SF-managed worktrees. - * Uses native worktree list and filters to those under .gsd/worktrees/. + * Uses native worktree list and filters to those under .sf/worktrees/. */ export function listWorktrees(basePath: string): WorktreeInfo[] { const baseVariants = [resolve(basePath)]; @@ -224,7 +224,7 @@ export function listWorktrees(basePath: string): WorktreeInfo[] { const seenRoots = new Set<string>(); const worktreeRoots = baseVariants .map(baseVariant => { - const path = join(baseVariant, ".gsd", "worktrees"); + const path = join(baseVariant, ".sf", "worktrees"); return { normalized: normalizePathForComparison(path), }; @@ -267,7 +267,7 @@ export function listWorktrees(basePath: string): WorktreeInfo[] { ? normalizedEntryVariants.some(entryVariant => entryVariant.split("/").pop() === branchWorktreeName) : false; - // Only include worktrees under .gsd/worktrees/ + // Only include worktrees under .sf/worktrees/ if (!matchedRoot && !matchesBranchLeaf) continue; const matchedEntryPath = normalizedEntryVariants.find(entryVariant => @@ -305,14 +305,14 @@ export function listWorktrees(basePath: string): WorktreeInfo[] { /** Directories to skip when scanning for nested .git dirs. 
*/ const NESTED_GIT_SKIP_DIRS = new Set([ - ".git", ".gsd", "node_modules", ".next", ".nuxt", "dist", "build", + ".git", ".sf", "node_modules", ".next", ".nuxt", "dist", "build", "__pycache__", ".tox", ".venv", "venv", "target", "vendor", ]); /** * Recursively find nested .git directories inside a worktree root. * Returns paths to directories that contain their own .git (directory, not file). - * Skips node_modules, .gsd, and other non-project directories for performance. + * Skips node_modules, .sf, and other non-project directories for performance. * * A nested .git *directory* (not a .git file — which is a legitimate worktree * pointer) indicates a scaffolded repo that will become an orphaned gitlink. @@ -384,10 +384,10 @@ export function removeWorktree( const { deleteBranch = true, force = true } = opts; // Resolve the ACTUAL worktree path from git's worktree list. - // The computed path may differ when .gsd/ is (or was) a symlink to an + // The computed path may differ when .sf/ is (or was) a symlink to an // external state directory — git resolves symlinks at worktree creation // time, so its registered path points to the resolved external location. - // If syncStateToProjectRoot later creates a real .gsd/ directory that + // If syncStateToProjectRoot later creates a real .sf/ directory that // shadows the symlink, the computed path diverges from git's record. let gitReportedPath: string | null = null; try { @@ -399,14 +399,14 @@ export function removeWorktree( } catch (e) { logWarning("worktree", `nativeWorktreeList parse failed: ${(e as Error).message}`); } // Safety gate (#2365): only use the git-reported path if it is actually - // inside .gsd/worktrees/. When .gsd/ was a symlink, git may have resolved + // inside .sf/worktrees/. When .sf/ was a symlink, git may have resolved // it to an external directory (e.g. a project data folder). Using that // path for removal would destroy user data. 
if (gitReportedPath && isInsideWorktreesDir(basePath, gitReportedPath)) { wtPath = gitReportedPath; } else if (gitReportedPath) { console.error( - `[SF] WARNING: git worktree list reported path outside .gsd/worktrees/: ${gitReportedPath}\n` + + `[SF] WARNING: git worktree list reported path outside .sf/worktrees/: ${gitReportedPath}\n` + ` Refusing to use it for removal — falling back to computed path: ${wtPath}`, ); // Still tell git to unregister the worktree entry via its reported path, @@ -417,7 +417,7 @@ export function removeWorktree( const resolvedWtPath = existsSync(wtPath) ? realpathSync(wtPath) : wtPath; // Double-check: the resolved path (after symlink resolution) must also be - // inside .gsd/worktrees/ — a symlink inside the directory could point out. + // inside .sf/worktrees/ — a symlink inside the directory could point out. const resolvedPathSafe = isInsideWorktreesDir(basePath, resolvedWtPath); // If we're inside the worktree, move out first — git can't remove an in-use directory @@ -534,7 +534,7 @@ export function removeWorktree( // Path is outside containment — only do a non-force git worktree remove // (which refuses to delete dirty worktrees) and never fall back to rmSync. console.error( - `[SF] WARNING: Resolved worktree path is outside .gsd/worktrees/: ${resolvedWtPath}\n` + + `[SF] WARNING: Resolved worktree path is outside .sf/worktrees/: ${resolvedWtPath}\n` + ` Skipping forced removal to prevent data loss.`, ); try { nativeWorktreeRemove(basePath, resolvedWtPath, false); } catch (e) { logWarning("worktree", `non-force worktree remove failed for ${resolvedWtPath}: ${e instanceof Error ? e.message : String(e)}`); } @@ -556,25 +556,25 @@ export function removeWorktree( * This module uses a split representation (paths/exact/prefixes) for efficient matching. 
*/ const SKIP_PATHS = [ - ".gsd/worktrees/", - ".gsd/runtime/", - ".gsd/activity/", - ".gsd/forensics/", - ".gsd/parallel/", - ".gsd/journal/", + ".sf/worktrees/", + ".sf/runtime/", + ".sf/activity/", + ".sf/forensics/", + ".sf/parallel/", + ".sf/journal/", ]; const SKIP_EXACT = [ - ".gsd/STATE.md", - ".gsd/auto.lock", - ".gsd/metrics.json", - ".gsd/state-manifest.json", - ".gsd/doctor-history.jsonl", - ".gsd/event-log.jsonl", + ".sf/STATE.md", + ".sf/auto.lock", + ".sf/metrics.json", + ".sf/state-manifest.json", + ".sf/doctor-history.jsonl", + ".sf/event-log.jsonl", ]; /** File prefixes to skip (for wildcard patterns like completed-units*.json, sf.db*). */ const SKIP_PREFIXES = [ - ".gsd/completed-units", - ".gsd/sf.db", + ".sf/completed-units", + ".sf/sf.db", ]; function shouldSkipPath(filePath: string): boolean { @@ -608,14 +608,14 @@ function parseDiffNameStatus(entries: { status: string; path: string }[]): Workt } /** - * Diff the .gsd/ directory between the worktree branch and main branch. + * Diff the .sf/ directory between the worktree branch and main branch. * Returns a summary of added, modified, and removed SF artifacts. */ export function diffWorktreeSF(basePath: string, name: string): WorktreeDiffSummary { const branch = worktreeBranchName(name); const mainBranch = nativeDetectMainBranch(basePath); - const entries = nativeDiffNameStatus(basePath, mainBranch, branch, ".gsd/", true); + const entries = nativeDiffNameStatus(basePath, mainBranch, branch, ".sf/", true); return parseDiffNameStatus(entries); } @@ -654,25 +654,25 @@ export function diffWorktreeNumstat(basePath: string, name: string): FileLineSta } /** - * Get the full diff content for .gsd/ between the worktree branch and main. + * Get the full diff content for .sf/ between the worktree branch and main. * Returns the raw unified diff for LLM consumption. 
*/ export function getWorktreeSFDiff(basePath: string, name: string): string { const branch = worktreeBranchName(name); const mainBranch = nativeDetectMainBranch(basePath); - return nativeDiffContent(basePath, mainBranch, branch, ".gsd/", undefined, true); + return nativeDiffContent(basePath, mainBranch, branch, ".sf/", undefined, true); } /** - * Get the full diff content for non-.gsd/ files between the worktree branch and main. + * Get the full diff content for non-.sf/ files between the worktree branch and main. * Returns the raw unified diff for LLM consumption. */ export function getWorktreeCodeDiff(basePath: string, name: string): string { const branch = worktreeBranchName(name); const mainBranch = nativeDetectMainBranch(basePath); - return nativeDiffContent(basePath, mainBranch, branch, undefined, ".gsd/", true); + return nativeDiffContent(basePath, mainBranch, branch, undefined, ".sf/", true); } /** diff --git a/src/resources/extensions/sf/worktree-resolver.ts b/src/resources/extensions/sf/worktree-resolver.ts index 5e509d3bc..597a36f33 100644 --- a/src/resources/extensions/sf/worktree-resolver.ts +++ b/src/resources/extensions/sf/worktree-resolver.ts @@ -406,7 +406,7 @@ export class WorktreeResolver { // Resolve roadmap — try project root first, then worktree path as fallback. // The worktree may hold the only copy when syncWorktreeStateBack fails - // silently or .gsd/ is not symlinked. Without the fallback, a missing + // silently or .sf/ is not symlinked. Without the fallback, a missing // roadmap triggers bare teardown which deletes the branch and orphans all // milestone commits (#1573). let roadmapPath = this.deps.resolveMilestoneFile( @@ -457,12 +457,12 @@ export class WorktreeResolver { "info", ); } else { - // (#1906) Milestone produced only .gsd/ metadata — no actual code was + // (#1906) Milestone produced only .sf/ metadata — no actual code was // merged. 
This typically means the LLM wrote planning artifacts // (summaries, roadmaps) but never implemented the code. Surface this // clearly so the user knows the milestone is not truly complete. ctx.notify( - `WARNING: Milestone ${milestoneId} merged to main but contained NO code changes — only .gsd/ metadata files. ` + + `WARNING: Milestone ${milestoneId} merged to main but contained NO code changes — only .sf/ metadata files. ` + `The milestone summary may describe planned work that was never implemented. ` + `Review the milestone output and re-run if code is missing.`, "warning", @@ -592,7 +592,7 @@ export class WorktreeResolver { ); } else { ctx.notify( - `WARNING: Milestone ${milestoneId} merged (branch mode) but contained NO code changes — only .gsd/ metadata. ` + + `WARNING: Milestone ${milestoneId} merged (branch mode) but contained NO code changes — only .sf/ metadata. ` + `Review the milestone output and re-run if code is missing.`, "warning", ); diff --git a/src/resources/extensions/sf/worktree.ts b/src/resources/extensions/sf/worktree.ts index 956b9ecb8..19ab52ad0 100644 --- a/src/resources/extensions/sf/worktree.ts +++ b/src/resources/extensions/sf/worktree.ts @@ -80,20 +80,20 @@ export function captureIntegrationBranch(basePath: string, milestoneId: string): /** * Find the worktrees segment in a path, supporting both direct - * (`/.gsd/worktrees/`) and symlink-resolved (`/.gsd/projects/<hash>/worktrees/`) - * layouts. When `.gsd` is a symlink to `~/.gsd/projects/<hash>`, resolved + * (`/.sf/worktrees/`) and symlink-resolved (`/.sf/projects/<hash>/worktrees/`) + * layouts. When `.sf` is a symlink to `~/.sf/projects/<hash>`, resolved * paths contain the intermediate `projects/<hash>/` segment that the old * single-marker check missed. 
*/ function findWorktreeSegment(normalizedPath: string): { sfIdx: number; afterWorktrees: number } | null { - // Direct layout: /.gsd/worktrees/<name> - const directMarker = "/.gsd/worktrees/"; + // Direct layout: /.sf/worktrees/<name> + const directMarker = "/.sf/worktrees/"; const idx = normalizedPath.indexOf(directMarker); if (idx !== -1) { return { sfIdx: idx, afterWorktrees: idx + directMarker.length }; } - // Symlink-resolved layout: /.gsd/projects/<hash>/worktrees/<name> - const symlinkRe = /\/\.gsd\/projects\/[a-f0-9]+\/worktrees\//; + // Symlink-resolved layout: /.sf/projects/<hash>/worktrees/<name> + const symlinkRe = /\/\.sf\/projects\/[a-f0-9]+\/worktrees\//; const match = normalizedPath.match(symlinkRe); if (match && match.index !== undefined) { return { sfIdx: match.index, afterWorktrees: match.index + match[0].length }; @@ -103,7 +103,7 @@ function findWorktreeSegment(normalizedPath: string): { sfIdx: number; afterWork /** * Detect the active worktree name from the current working directory. - * Returns null if not inside a SF worktree (.gsd/worktrees/<name>/). + * Returns null if not inside a SF worktree (.sf/worktrees/<name>/). */ export function detectWorktreeName(basePath: string): string | null { const normalizedPath = basePath.replaceAll("\\", "/"); @@ -117,13 +117,13 @@ export function detectWorktreeName(basePath: string): string | null { /** * Resolve the project root from a path that may be inside a worktree. * If the path contains a worktrees segment, returns the portion before - * `/.gsd/`. Otherwise returns the input unchanged. + * `/.sf/`. Otherwise returns the input unchanged. * * When the worker was spawned with SF_PROJECT_ROOT set, use that directly — * the coordinator already knows the real project root unambiguously. 
* - * When `/.gsd/` in the resolved path is actually the user-level `~/.gsd/` - * (common when `.gsd` is a symlink into `~/.gsd/projects/<hash>`), the + * When `/.sf/` in the resolved path is actually the user-level `~/.sf/` + * (common when `.sf` is a symlink into `~/.sf/projects/<hash>`), the * string-slice heuristic would return `~` — which is catastrophically wrong. * In that case, fall back to reading the worktree's `.git` file, which * contains a `gitdir:` pointer to the real project's `.git/worktrees/<name>`, @@ -144,20 +144,20 @@ export function resolveProjectRoot(basePath: string): string { // Candidate root via the string-slice heuristic const sepChar = basePath.includes("\\") ? "\\" : "/"; - const sfMarker = `${sepChar}.gsd${sepChar}`; + const sfMarker = `${sepChar}.sf${sepChar}`; const sfIdx = basePath.indexOf(sfMarker); const candidate = sfIdx !== -1 ? basePath.slice(0, sfIdx) : basePath.slice(0, seg.sfIdx); // Layer 2: Guard against resolving to the user's home directory. - // When .gsd is a symlink into ~/.gsd/projects/<hash>, the resolved path - // contains /.gsd/ at the user-level boundary. Slicing there yields ~ — wrong. - const sfHome = normalizePathForCompare(process.env.SF_HOME || join(homedir(), ".gsd")); - const candidateSfPath = normalizePathForCompare(join(candidate, ".gsd")); + // When .sf is a symlink into ~/.sf/projects/<hash>, the resolved path + // contains /.sf/ at the user-level boundary. Slicing there yields ~ — wrong. + const sfHome = normalizePathForCompare(process.env.SF_HOME || join(homedir(), ".sf")); + const candidateSfPath = normalizePathForCompare(join(candidate, ".sf")); if (candidateSfPath === sfHome || candidateSfPath.startsWith(sfHome + "/")) { - // The candidate is the home directory (or within it in a way that .gsd + // The candidate is the home directory (or within it in a way that .sf // maps to the user-level SF dir). Try to recover the real project root // from the worktree's .git file. 
const realRoot = resolveProjectRootFromGitFile(basePath); diff --git a/src/resources/extensions/sf/write-intercept.ts b/src/resources/extensions/sf/write-intercept.ts index 2344ae80d..0097892ff 100644 --- a/src/resources/extensions/sf/write-intercept.ts +++ b/src/resources/extensions/sf/write-intercept.ts @@ -6,10 +6,10 @@ import { realpathSync } from "node:fs"; import { resolve } from "node:path"; /** - * Patterns matching authoritative .gsd/ state files that agents must NOT write directly. + * Patterns matching authoritative .sf/ state files that agents must NOT write directly. * * Only STATE.md is blocked — it is purely engine-rendered from DB state. - * All other .gsd/ files are agent-authored content that agents create and + * All other .sf/ files are agent-authored content that agents create and * update during discuss, plan, and execute phases: * - REQUIREMENTS.md — agents create during discuss, read during planning * - PROJECT.md — agents create during discuss, update at milestone close @@ -19,14 +19,14 @@ import { resolve } from "node:path"; const BLOCKED_PATTERNS: RegExp[] = [ // STATE.md is the only purely engine-rendered file. // Case-insensitive to prevent bypass on macOS (case-insensitive APFS). - // (^|[/\\]) matches both absolute paths (/project/.gsd/…) and bare relative - // paths (.gsd/STATE.md) so a path without a leading separator is also blocked. - /(^|[/\\])\.gsd[/\\]STATE\.md$/i, - // Also match resolved symlink paths under ~/.gsd/projects/ (Pitfall #6) - /(^|[/\\])\.gsd[/\\]projects[/\\][^/\\]+[/\\]STATE\.md$/i, + // (^|[/\\]) matches both absolute paths (/project/.sf/…) and bare relative + // paths (.sf/STATE.md) so a path without a leading separator is also blocked. 
+ /(^|[/\\])\.sf[/\\]STATE\.md$/i, + // Also match resolved symlink paths under ~/.sf/projects/ (Pitfall #6) + /(^|[/\\])\.sf[/\\]projects[/\\][^/\\]+[/\\]STATE\.md$/i, // sf.db and WAL/SHM files — single-writer WAL connection managed by engine (#3625) - /(^|[/\\])\.gsd[/\\]sf\.db(-wal|-shm)?$/i, - /(^|[/\\])\.gsd[/\\]projects[/\\][^/\\]+[/\\]sf\.db(-wal|-shm)?$/i, + /(^|[/\\])\.sf[/\\]sf\.db(-wal|-shm)?$/i, + /(^|[/\\])\.sf[/\\]projects[/\\][^/\\]+[/\\]sf\.db(-wal|-shm)?$/i, ]; /** @@ -53,7 +53,7 @@ const BASH_STATE_PATTERNS: RegExp[] = [ ]; /** - * Tests whether the given file path matches a blocked authoritative .gsd/ state file. + * Tests whether the given file path matches a blocked authoritative .sf/ state file. * Resolves `..` segments via path.resolve() and attempts realpathSync for symlinks. */ export function isBlockedStateFile(filePath: string): boolean { @@ -87,10 +87,10 @@ function matchesBlockedPattern(path: string): boolean { } /** - * Error message returned when an agent attempts to directly write an authoritative .gsd/ state file. + * Error message returned when an agent attempts to directly write an authoritative .sf/ state file. * Directs the agent to use engine tool calls instead. */ -export const BLOCKED_WRITE_ERROR = `Direct writes to .gsd/STATE.md and .gsd/sf.db are blocked. Use engine tool calls instead: +export const BLOCKED_WRITE_ERROR = `Direct writes to .sf/STATE.md and .sf/sf.db are blocked. 
Use engine tool calls instead: - To complete a task: call sf_complete_task(milestone_id, slice_id, task_id, summary) - To complete a slice: call sf_complete_slice(milestone_id, slice_id, summary, uat_result) - To save a decision: call sf_save_decision(scope, decision, choice, rationale) diff --git a/src/resources/extensions/shared/interview-ui.ts b/src/resources/extensions/shared/interview-ui.ts index 5eb121232..9d862dde8 100644 --- a/src/resources/extensions/shared/interview-ui.ts +++ b/src/resources/extensions/shared/interview-ui.ts @@ -1,7 +1,7 @@ /** * Shared interview round UI widget. * - * Used by /interview-me and /gsd-new-project. + * Used by /interview-me and /sf-new-project. * * Renders a paged, keyboard-driven question UI with: * - Single-select (radio) questions diff --git a/src/resources/extensions/shared/next-action-ui.ts b/src/resources/extensions/shared/next-action-ui.ts index ea33df9c2..8ec8f5ef7 100644 --- a/src/resources/extensions/shared/next-action-ui.ts +++ b/src/resources/extensions/shared/next-action-ui.ts @@ -11,13 +11,13 @@ * [optional extra content block] * * Files written: - * .gsd/phases/01-foo/01-RESEARCH.md + * .sf/phases/01-foo/01-RESEARCH.md * * › 1. Plan phase 1 ← recommended, pre-selected * Create PLAN.md files for execution * * 2. Not yet - * Run /gsd-plan-phase 1 when ready. + * Run /sf-plan-phase 1 when ready. * ───────────────────────────────────────── * * Usage: @@ -31,7 +31,7 @@ * { id: "plan", label: "Plan phase 1", description: "Create PLAN.md files for execution", recommended: true }, * { id: "later", label: "Discuss first", description: "Capture constraints before planning" }, * ], - * notYetMessage: "Run /gsd-plan-phase 1 when ready.", + * notYetMessage: "Run /sf-plan-phase 1 when ready.", * }); * * // choice is one of the action ids, or "not_yet" @@ -81,7 +81,7 @@ export interface NextActionOptions { actions: NextAction[]; /** * Message shown in the "Not yet" description line. - * e.g. 
"Run /gsd-plan-phase 1 when ready." + * e.g. "Run /sf-plan-phase 1 when ready." */ notYetMessage?: string; /** diff --git a/src/resources/extensions/shared/rtk-session-stats.ts b/src/resources/extensions/shared/rtk-session-stats.ts index 4de3ae0dd..2eaabbcb8 100644 --- a/src/resources/extensions/shared/rtk-session-stats.ts +++ b/src/resources/extensions/shared/rtk-session-stats.ts @@ -2,7 +2,7 @@ import { spawnSync } from "node:child_process"; import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; import { join } from "node:path"; -import { gsdRoot } from "../gsd/paths.js"; +import { sfRoot } from "../sf/paths.js"; import { formatTokenCount } from "./format-utils.js"; import { buildRtkEnv, isRtkEnabled, resolveRtkBinaryPath } from "./rtk.js"; @@ -46,7 +46,7 @@ interface BaselineStore { let cachedSummary: { at: number; binaryPath: string; summary: RtkGainSummary | null } | null = null; function getRuntimeDir(basePath: string): string { - return join(gsdRoot(basePath), "runtime"); + return join(sfRoot(basePath), "runtime"); } function getBaselinesPath(basePath: string): string { diff --git a/src/resources/extensions/shared/rtk.ts b/src/resources/extensions/shared/rtk.ts index 7f5110c15..5de926862 100644 --- a/src/resources/extensions/shared/rtk.ts +++ b/src/resources/extensions/shared/rtk.ts @@ -35,7 +35,7 @@ export function buildRtkEnv(env: NodeJS.ProcessEnv = process.env): NodeJS.Proces } function getManagedRtkDir(env: NodeJS.ProcessEnv = process.env): string { - return join(env.SF_HOME || join(homedir(), ".gsd"), "agent", "bin"); + return join(env.SF_HOME || join(homedir(), ".sf"), "agent", "bin"); } function getRtkBinaryName(platform: NodeJS.Platform = process.platform): string { diff --git a/src/resources/extensions/slash-commands/audit.ts b/src/resources/extensions/slash-commands/audit.ts index 4bc4b311f..c2eb46011 100644 --- a/src/resources/extensions/slash-commands/audit.ts +++ b/src/resources/extensions/slash-commands/audit.ts @@ -3,7 
+3,7 @@ import { mkdirSync } from "node:fs"; export default function auditCommand(pi: ExtensionAPI) { pi.registerCommand("audit", { - description: "Audit the current codebase against a specific goal and write a structured report to .gsd/audits/", + description: "Audit the current codebase against a specific goal and write a structured report to .sf/audits/", async handler(args: string, ctx: ExtensionCommandContext) { // ── Step 1: Get the audit goal ──────────────────────────────────────── @@ -21,7 +21,7 @@ export default function auditCommand(pi: ExtensionAPI) { goal = input.trim(); } - // ── Step 2: Build output path (.gsd/audits/<timestamp>-<slug>.md) ──── + // ── Step 2: Build output path (.sf/audits/<timestamp>-<slug>.md) ──── const now = new Date(); const timestamp = now @@ -36,11 +36,11 @@ export default function auditCommand(pi: ExtensionAPI) { .replace(/^-+|-+$/g, "") .slice(0, 40); - const outputPath = `.gsd/audits/${timestamp}-${slug}.md`; + const outputPath = `.sf/audits/${timestamp}-${slug}.md`; // ── Step 3: Ensure the output directory exists ─────────────────────── - mkdirSync(".gsd/audits", { recursive: true }); + mkdirSync(".sf/audits", { recursive: true }); // ── Step 4: Send the audit prompt to the agent ─────────────────────── @@ -73,7 +73,7 @@ ${goal} <!-- What's missing, incomplete, or problematic relative to this goal? Be specific: file paths, patterns, missing abstractions. --> ## Next Steps -<!-- Concrete, prioritised actions. These should be directly usable as input to /gsd-roadmap. --> +<!-- Concrete, prioritised actions. These should be directly usable as input to /sf-roadmap. 
--> --- diff --git a/src/resources/extensions/slash-commands/create-extension.ts b/src/resources/extensions/slash-commands/create-extension.ts index ec24b78b6..1ea957da8 100644 --- a/src/resources/extensions/slash-commands/create-extension.ts +++ b/src/resources/extensions/slash-commands/create-extension.ts @@ -221,41 +221,41 @@ function sendPrompt(description: string, result: RoundResult, pi: ExtensionAPI): : ""; const docHints: string[] = [ - "- `~/.gsd/agent/docs/extending-pi/01-what-are-extensions.md` — capabilities overview", - "- `~/.gsd/agent/docs/extending-pi/03-getting-started.md` — minimal extension, hot reload", - "- `~/.gsd/agent/docs/extending-pi/08-extensioncontext-what-you-can-access.md` — ExtensionContext API", - "- `~/.gsd/agent/docs/extending-pi/09-extensionapi-what-you-can-do.md` — ExtensionAPI: registration, messaging", - "- `~/.gsd/agent/docs/extending-pi/22-key-rules-gotchas.md` — must-read rules before shipping", + "- `~/.sf/agent/docs/extending-pi/01-what-are-extensions.md` — capabilities overview", + "- `~/.sf/agent/docs/extending-pi/03-getting-started.md` — minimal extension, hot reload", + "- `~/.sf/agent/docs/extending-pi/08-extensioncontext-what-you-can-access.md` — ExtensionContext API", + "- `~/.sf/agent/docs/extending-pi/09-extensionapi-what-you-can-do.md` — ExtensionAPI: registration, messaging", + "- `~/.sf/agent/docs/extending-pi/22-key-rules-gotchas.md` — must-read rules before shipping", ]; if (uiSelected.includes("custom component")) { - docHints.push("- `~/.gsd/agent/docs/extending-pi/12-custom-ui-visual-components.md` — dialogs, widgets, overlays"); - docHints.push("- `~/.gsd/agent/docs/pi-ui-tui/06-ctx-ui-custom-full-custom-components.md` — ctx.ui.custom() API"); - docHints.push("- `~/.gsd/agent/docs/pi-ui-tui/07-built-in-components-the-building-blocks.md` — Text, Box, SelectList"); - docHints.push("- `~/.gsd/agent/docs/pi-ui-tui/09-keyboard-input-how-to-handle-keys.md` — Key, matchesKey"); - docHints.push("- 
`~/.gsd/agent/docs/pi-ui-tui/10-line-width-the-cardinal-rule.md` — truncation, width rules"); - docHints.push("- `~/.gsd/agent/docs/pi-ui-tui/19-building-a-complete-component-step-by-step.md` — step-by-step guide"); - docHints.push("- `~/.gsd/agent/docs/pi-ui-tui/21-common-mistakes-and-how-to-avoid-them.md` — pitfalls"); + docHints.push("- `~/.sf/agent/docs/extending-pi/12-custom-ui-visual-components.md` — dialogs, widgets, overlays"); + docHints.push("- `~/.sf/agent/docs/pi-ui-tui/06-ctx-ui-custom-full-custom-components.md` — ctx.ui.custom() API"); + docHints.push("- `~/.sf/agent/docs/pi-ui-tui/07-built-in-components-the-building-blocks.md` — Text, Box, SelectList"); + docHints.push("- `~/.sf/agent/docs/pi-ui-tui/09-keyboard-input-how-to-handle-keys.md` — Key, matchesKey"); + docHints.push("- `~/.sf/agent/docs/pi-ui-tui/10-line-width-the-cardinal-rule.md` — truncation, width rules"); + docHints.push("- `~/.sf/agent/docs/pi-ui-tui/19-building-a-complete-component-step-by-step.md` — step-by-step guide"); + docHints.push("- `~/.sf/agent/docs/pi-ui-tui/21-common-mistakes-and-how-to-avoid-them.md` — pitfalls"); } else if (uiSelected.includes("Dialogs")) { - docHints.push("- `~/.gsd/agent/docs/pi-ui-tui/04-built-in-dialog-methods.md` — select, confirm, input, editor"); + docHints.push("- `~/.sf/agent/docs/pi-ui-tui/04-built-in-dialog-methods.md` — select, confirm, input, editor"); } else if (uiSelected.includes("Status")) { - docHints.push("- `~/.gsd/agent/docs/pi-ui-tui/05-persistent-ui-elements.md` — status, widgets, footer, header"); + docHints.push("- `~/.sf/agent/docs/pi-ui-tui/05-persistent-ui-elements.md` — status, widgets, footer, header"); } if (uiSelected.includes("tool") || result.answers["purpose"]) { - docHints.push("- `~/.gsd/agent/docs/extending-pi/14-custom-rendering-controlling-what-the-user-sees.md` — renderCall / renderResult"); + docHints.push("- `~/.sf/agent/docs/extending-pi/14-custom-rendering-controlling-what-the-user-sees.md` — renderCall / 
renderResult"); } if (eventsSelected && !eventsSelected.includes("standalone")) { - docHints.push("- `~/.gsd/agent/docs/extending-pi/07-events-the-nervous-system.md` — all events reference"); + docHints.push("- `~/.sf/agent/docs/extending-pi/07-events-the-nervous-system.md` — all events reference"); } if (eventsSelected.includes("context / prompt")) { - docHints.push("- `~/.gsd/agent/docs/extending-pi/15-system-prompt-modification.md` — system prompt hooks"); + docHints.push("- `~/.sf/agent/docs/extending-pi/15-system-prompt-modification.md` — system prompt hooks"); } if (persistenceSelected.includes("session")) { - docHints.push("- `~/.gsd/agent/docs/extending-pi/13-state-management-persistence.md` — pi.appendEntry, session state"); + docHints.push("- `~/.sf/agent/docs/extending-pi/13-state-management-persistence.md` — pi.appendEntry, session state"); } const prompt = `Create a new pi extension based on this description: @@ -272,11 +272,11 @@ ${docHints.join("\n")} Write the complete implementation as a single self-contained extension file: -\`~/.gsd/agent/extensions/<kebab-case-name>.ts\` +\`~/.sf/agent/extensions/<kebab-case-name>.ts\` Then register it in the main extensions index: -\`~/.gsd/agent/extensions/index.ts\` — import and call the new extension's default export alongside existing ones +\`~/.sf/agent/extensions/index.ts\` — import and call the new extension's default export alongside existing ones ## Rules you must follow exactly diff --git a/src/resources/extensions/slash-commands/create-slash-command.ts b/src/resources/extensions/slash-commands/create-slash-command.ts index 4c8e642c6..f254b705f 100644 --- a/src/resources/extensions/slash-commands/create-slash-command.ts +++ b/src/resources/extensions/slash-commands/create-slash-command.ts @@ -211,8 +211,8 @@ function sendPrompt(description: string, result: RoundResult, pi: ExtensionAPI): ${contextSection} Write the complete file contents for two files: -1. 
\`~/.gsd/agent/extensions/slash-commands/<name>.ts\` — the command implementation -2. Update \`~/.gsd/agent/extensions/slash-commands/index.ts\` — import and register the new command alongside existing ones +1. \`~/.sf/agent/extensions/slash-commands/<name>.ts\` — the command implementation +2. Update \`~/.sf/agent/extensions/slash-commands/index.ts\` — import and register the new command alongside existing ones Rules you must follow exactly: - Command registration: \`pi.registerCommand("name", { description, handler })\` diff --git a/src/resources/extensions/subagent/agents.ts b/src/resources/extensions/subagent/agents.ts index e1601872d..305a30f5f 100644 --- a/src/resources/extensions/subagent/agents.ts +++ b/src/resources/extensions/subagent/agents.ts @@ -6,7 +6,7 @@ import * as fs from "node:fs"; import * as path from "node:path"; import { getAgentDir, parseFrontmatter } from "@sf-run/pi-coding-agent"; -const PROJECT_AGENT_DIR_CANDIDATES = [".gsd", ".pi"] as const; +const PROJECT_AGENT_DIR_CANDIDATES = [".sf", ".pi"] as const; export type AgentScope = "user" | "project" | "both"; diff --git a/src/resources/extensions/subagent/index.ts b/src/resources/extensions/subagent/index.ts index 2af176f3b..86d2feba1 100644 --- a/src/resources/extensions/subagent/index.ts +++ b/src/resources/extensions/subagent/index.ts @@ -24,7 +24,7 @@ import { type ExtensionAPI, getMarkdownTheme } from "@sf-run/pi-coding-agent"; import { Container, Markdown, Spacer, Text } from "@sf-run/pi-tui"; import { Type } from "@sinclair/typebox"; import { formatTokenCount } from "../shared/mod.js"; -import { getCurrentPhase } from "../shared/gsd-phase-state.js"; +import { getCurrentPhase } from "../shared/sf-phase-state.js"; import { type AgentConfig, type AgentScope, discoverAgents } from "./agents.js"; import { type IsolationEnvironment, @@ -35,7 +35,7 @@ import { readIsolationMode, } from "./isolation.js"; import { registerWorker, updateWorker } from "./worker-registry.js"; -import { 
loadEffectiveGSDPreferences } from "../gsd/preferences.js"; +import { loadEffectiveSFPreferences } from "../sf/preferences.js"; import { CmuxClient, shellEscape } from "../cmux/index.js"; const MAX_PARALLEL_TASKS = 8; @@ -640,7 +640,7 @@ export default function (pi: ExtensionAPI) { handler: async (_args, ctx) => { const discovery = discoverAgents(ctx.cwd, "both"); if (discovery.agents.length === 0) { - ctx.ui.notify("No agents found. Add .md files to ~/.gsd/agent/agents/ or .gsd/agents/", "warning"); + ctx.ui.notify("No agents found. Add .md files to ~/.sf/agent/agents/ or .sf/agents/", "warning"); return; } const lines = discovery.agents.map( @@ -657,7 +657,7 @@ export default function (pi: ExtensionAPI) { "Delegate tasks to specialized subagents with isolated context windows.", "Each subagent is a separate pi process with its own tools, model, and system prompt.", "Modes: single ({ agent, task }), parallel ({ tasks: [{agent, task},...] }), chain ({ chain: [{agent, task},...] } with {previous} placeholder).", - "Agents are defined as .md files in ~/.gsd/agent/agents/ (user) or .gsd/agents/ (project).", + "Agents are defined as .md files in ~/.sf/agent/agents/ (user) or .sf/agents/ (project).", "Use the /subagent command to list available agents and their descriptions.", "Use chain mode to pipeline: scout finds context, planner designs, worker implements.", ].join(" "), @@ -675,7 +675,7 @@ export default function (pi: ExtensionAPI) { const discovery = discoverAgents(ctx.cwd, agentScope); const agents = discovery.agents; const confirmProjectAgents = params.confirmProjectAgents ?? 
false; - const cmuxClient = CmuxClient.fromPreferences(loadEffectiveGSDPreferences()?.preferences); + const cmuxClient = CmuxClient.fromPreferences(loadEffectiveSFPreferences()?.preferences); const cmuxSplitsEnabled = cmuxClient.getConfig().splits; // Resolve isolation mode diff --git a/src/resources/extensions/subagent/isolation.ts b/src/resources/extensions/subagent/isolation.ts index 037c7abdd..6c716d039 100644 --- a/src/resources/extensions/subagent/isolation.ts +++ b/src/resources/extensions/subagent/isolation.ts @@ -59,7 +59,7 @@ export function encodeCwd(cwd: string): string { return Buffer.from(cwd, "utf8").toString("base64url"); } -const gsdHome = process.env.SF_HOME || path.join(os.homedir(), ".gsd"); +const gsdHome = process.env.SF_HOME || path.join(os.homedir(), ".sf"); function getIsolationBaseDir(cwd: string, taskId: string): string { return path.join(gsdHome, "wt", encodeCwd(cwd), taskId); @@ -162,7 +162,7 @@ async function applyBaseline( ): Promise<void> { // Apply staged diff if (baseline.stagedDiff.trim()) { - const patchPath = path.join(worktreeDir, ".gsd-staged.patch"); + const patchPath = path.join(worktreeDir, ".sf-staged.patch"); fs.writeFileSync(patchPath, baseline.stagedDiff); try { await git(["apply", "--binary", patchPath], worktreeDir); @@ -176,7 +176,7 @@ async function applyBaseline( // Apply unstaged diff on top if (baseline.unstagedDiff.trim()) { - const patchPath = path.join(worktreeDir, ".gsd-unstaged.patch"); + const patchPath = path.join(worktreeDir, ".sf-unstaged.patch"); fs.writeFileSync(patchPath, baseline.unstagedDiff); try { await git(["apply", "--binary", patchPath], worktreeDir); @@ -199,7 +199,7 @@ async function applyBaseline( // without accidentally including the parent's dirty state in the delta. 
await gitSilent(["add", "-A"], worktreeDir); await gitSilent( - ["commit", "--allow-empty", "-m", "gsd: baseline snapshot"], + ["commit", "--allow-empty", "-m", "sf: baseline snapshot"], worktreeDir, ); } @@ -445,7 +445,7 @@ export async function mergeDeltaPatches( const combined = patches.map((p) => p.content).join("\n"); const patchFile = path.join( os.tmpdir(), - `gsd-merge-${Date.now()}.patch`, + `sf-merge-${Date.now()}.patch`, ); const appliedPatches: string[] = []; diff --git a/src/resources/extensions/ttsr/rule-loader.ts b/src/resources/extensions/ttsr/rule-loader.ts index d8aec2bff..269e3d865 100644 --- a/src/resources/extensions/ttsr/rule-loader.ts +++ b/src/resources/extensions/ttsr/rule-loader.ts @@ -1,7 +1,7 @@ /** * TTSR Rule Loader * - * Scans global (~/.gsd/agent/rules/*.md) and project-local (.gsd/rules/*.md) + * Scans global (~/.sf/agent/rules/*.md) and project-local (.sf/rules/*.md) * rule files. Parses YAML frontmatter for condition, scope, globs. * Project rules override global rules with the same name. 
*/ @@ -9,7 +9,7 @@ import { readdirSync, readFileSync, existsSync } from "node:fs"; import { join, basename } from "node:path"; import { homedir } from "node:os"; -const gsdHome = process.env.SF_HOME || join(homedir(), ".gsd"); +const gsdHome = process.env.SF_HOME || join(homedir(), ".sf"); import type { Rule } from "./ttsr-manager.js"; import { splitFrontmatter, parseFrontmatterMap } from "../shared/frontmatter.js"; @@ -62,7 +62,7 @@ function scanDir(dir: string): Rule[] { */ export function loadRules(cwd: string): Rule[] { const globalDir = join(gsdHome, "agent", "rules"); - const projectDir = join(cwd, ".gsd", "rules"); + const projectDir = join(cwd, ".sf", "rules"); const globalRules = scanDir(globalDir); const projectRules = scanDir(projectDir); diff --git a/src/resources/extensions/ttsr/ttsr-manager.ts b/src/resources/extensions/ttsr/ttsr-manager.ts index 8aa530fe6..44b37499e 100644 --- a/src/resources/extensions/ttsr/ttsr-manager.ts +++ b/src/resources/extensions/ttsr/ttsr-manager.ts @@ -10,7 +10,7 @@ * per-rule JS RegExp iteration when the native module is not loaded. 
*/ import { createRequire } from "node:module"; -import { debugTime, debugCount, debugPeak } from "../gsd/debug-logger.js"; +import { debugTime, debugCount, debugPeak } from "../sf/debug-logger.js"; const _require = createRequire(import.meta.url); type PicomatchMatcher = (input: string) => boolean; diff --git a/src/resources/extensions/voice/index.ts b/src/resources/extensions/voice/index.ts index 8d9c3682b..c401d5323 100644 --- a/src/resources/extensions/voice/index.ts +++ b/src/resources/extensions/voice/index.ts @@ -35,7 +35,7 @@ function ensureLinuxReady(ctx: ExtensionContext): boolean { // Check GROQ_API_KEY is available if (!process.env.GROQ_API_KEY) { - ctx.ui.notify("Voice: GROQ_API_KEY not set — run 'gsd config' to configure", "error"); + ctx.ui.notify("Voice: GROQ_API_KEY not set — run 'sf config' to configure", "error"); return false; } diff --git a/src/resources/extensions/voice/linux-ready.ts b/src/resources/extensions/voice/linux-ready.ts index 560046b2d..a216171cd 100644 --- a/src/resources/extensions/voice/linux-ready.ts +++ b/src/resources/extensions/voice/linux-ready.ts @@ -14,7 +14,7 @@ import * as path from "node:path"; export const VOICE_VENV_DIR = path.join( process.env.HOME || process.env.USERPROFILE || os.homedir(), - ".gsd", + ".sf", "voice-venv", ); export const VOICE_VENV_PYTHON = path.join(VOICE_VENV_DIR, "bin", "python3"); @@ -81,7 +81,7 @@ export function ensureVoiceVenv(cb: ReadinessCallbacks): boolean { ); return true; } catch { - cb.notify("Voice: failed to create Python venv — run: python3 -m venv ~/.gsd/voice-venv", "error"); + cb.notify("Voice: failed to create Python venv — run: python3 -m venv ~/.sf/voice-venv", "error"); return false; } } diff --git a/src/resources/extensions/voice/speech-recognizer.py b/src/resources/extensions/voice/speech-recognizer.py index bb5ade0ff..e9793fcdf 100644 --- a/src/resources/extensions/voice/speech-recognizer.py +++ b/src/resources/extensions/voice/speech-recognizer.py @@ -82,8 +82,8 @@ def 
ensure_deps(): emit( "ERROR", "Python environment is externally managed (PEP 668). " - "Create a venv first: python3 -m venv ~/.gsd/voice-venv && " - "~/.gsd/voice-venv/bin/pip install sounddevice requests", + "Create a venv first: python3 -m venv ~/.sf/voice-venv && " + "~/.sf/voice-venv/bin/pip install sounddevice requests", ) elif "pip not found" in detail: emit("ERROR", "pip is not available. Install: sudo apt install python3-pip") @@ -166,7 +166,7 @@ def run_groq(): api_key = os.environ.get("GROQ_API_KEY", "") if not api_key: - emit("ERROR", "GROQ_API_KEY not set. Run 'gsd config' to set up, or get a free key at https://console.groq.com") + emit("ERROR", "GROQ_API_KEY not set. Run 'sf config' to set up, or get a free key at https://console.groq.com") sys.exit(1) groq_model = os.environ.get("SF_GROQ_MODEL", "whisper-large-v3-turbo") @@ -340,7 +340,7 @@ def run_local(): model_size = os.environ.get("SF_WHISPER_MODEL", "small") cache_root = os.path.join( os.environ.get("XDG_CACHE_HOME", os.path.expanduser("~/.cache")), - "gsd", "whisper", + "sf", "whisper", ) try: model = WhisperModel( diff --git a/src/resources/extensions/voice/tests/linux-ready.test.ts b/src/resources/extensions/voice/tests/linux-ready.test.ts index 8e0327a88..6edc604b1 100644 --- a/src/resources/extensions/voice/tests/linux-ready.test.ts +++ b/src/resources/extensions/voice/tests/linux-ready.test.ts @@ -8,7 +8,7 @@ * - linuxPython venv detection */ -import { createTestContext } from "../../gsd/tests/test-helpers.ts"; +import { createTestContext } from "../../sf/tests/test-helpers.ts"; import { diagnoseSounddeviceError, ensureVoiceVenv } from "../linux-ready.ts"; const { assertEq, assertTrue, report } = createTestContext(); diff --git a/src/rtk.ts b/src/rtk.ts index 2cc1a551b..bd1f57af8 100644 --- a/src/rtk.ts +++ b/src/rtk.ts @@ -49,7 +49,7 @@ export function isRtkEnabled(env: NodeJS.ProcessEnv = process.env): boolean { } function resolveAppRoot(env: NodeJS.ProcessEnv = process.env): string { - 
return env.SF_HOME || env.GSD_HOME || join(osHomedir(), ".gsd"); + return env.SF_HOME || env.GSD_HOME || join(osHomedir(), ".sf"); } export function getManagedRtkDir(env: NodeJS.ProcessEnv = process.env): string { diff --git a/src/tests/app-smoke.test.ts b/src/tests/app-smoke.test.ts index 0849b3527..bb8b588af 100644 --- a/src/tests/app-smoke.test.ts +++ b/src/tests/app-smoke.test.ts @@ -1,8 +1,8 @@ /** - * Unit tests for the gsd CLI package. + * Unit tests for the sf CLI package. * * Tests the glue code that IS the product: - * - app-paths resolve to ~/.gsd/ + * - app-paths resolve to ~/.sf/ * - loader sets all required env vars * - resource-loader syncs bundled resources * - wizard loadStoredEnvKeys hydrates env @@ -32,16 +32,16 @@ function assertExtensionIndexExists(agentDir: string, extensionName: string): vo // 1. app-paths // ═══════════════════════════════════════════════════════════════════════════ -test("app-paths resolve to ~/.gsd/", async () => { +test("app-paths resolve to ~/.sf/", async () => { const { appRoot, agentDir, sessionsDir, authFilePath } = await import("../app-paths.ts"); // Use homedir() — process.env.HOME is undefined on Windows (uses USERPROFILE instead) const { homedir } = await import("node:os"); const home = homedir(); - assert.equal(appRoot, join(home, ".gsd"), "appRoot is ~/.gsd/"); - assert.equal(agentDir, join(home, ".gsd", "agent"), "agentDir is ~/.gsd/agent/"); - assert.equal(sessionsDir, join(home, ".gsd", "sessions"), "sessionsDir is ~/.gsd/sessions/"); - assert.equal(authFilePath, join(home, ".gsd", "agent", "auth.json"), "authFilePath is ~/.gsd/agent/auth.json"); + assert.equal(appRoot, join(home, ".sf"), "appRoot is ~/.sf/"); + assert.equal(agentDir, join(home, ".sf", "agent"), "agentDir is ~/.sf/agent/"); + assert.equal(sessionsDir, join(home, ".sf", "sessions"), "sessionsDir is ~/.sf/sessions/"); + assert.equal(authFilePath, join(home, ".sf", "agent", "auth.json"), "authFilePath is ~/.sf/agent/auth.json"); }); // 
═══════════════════════════════════════════════════════════════════════════ @@ -61,7 +61,7 @@ test("loader sets all 4 SF_ env vars and PI_PACKAGE_DIR", async (t) => { process.env.SF_BIN_PATH = process.argv[1]; const resourcesDir = resolve(dirname(fileURLToPath(import.meta.url)), '..', 'src', 'resources'); process.env.SF_WORKFLOW_PATH = join(resourcesDir, 'SF-WORKFLOW.md'); - const exts = ['extensions/gsd/index.ts'].map(r => join(resourcesDir, r)); + const exts = ['extensions/sf/index.ts'].map(r => join(resourcesDir, r)); process.env.SF_BUNDLED_EXTENSION_PATHS = exts.join(delimiter); // Print for verification @@ -73,7 +73,7 @@ test("loader sets all 4 SF_ env vars and PI_PACKAGE_DIR", async (t) => { process.exit(0); `; - const tmp = mkdtempSync(join(tmpdir(), "gsd-loader-test-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-loader-test-")); const scriptPath = join(tmp, "check-env.ts"); writeFileSync(scriptPath, script); @@ -93,7 +93,7 @@ test("loader sets all 4 SF_ env vars and PI_PACKAGE_DIR", async (t) => { // Direct logic verification (no subprocess needed) const { agentDir: ad } = await import("../app-paths.ts"); - assert.ok(ad.endsWith(join(".gsd", "agent")), "agentDir ends with .gsd/agent"); + assert.ok(ad.endsWith(join(".sf", "agent")), "agentDir ends with .sf/agent"); // Verify the env var names are in loader.ts source const loaderSrc = readFileSync(join(projectRoot, "src", "loader.ts"), "utf-8"); @@ -127,7 +127,7 @@ test("loader sets all 4 SF_ env vars and PI_PACKAGE_DIR", async (t) => { const rel = p.slice(bundledExtensionsDir.length + 1); return rel.split(/[\\/]/)[0].replace(/\.(?:ts|js)$/, ""); }); - for (const core of ["gsd", "bg-shell", "browser-tools", "subagent", "search-the-web"]) { + for (const core of ["sf", "bg-shell", "browser-tools", "subagent", "search-the-web"]) { assert.ok(discoveredNames.includes(core), `core extension '${core}' is discoverable`); } @@ -187,7 +187,7 @@ test("loader MIN_NODE_MAJOR matches package.json engines field", () => { 
`loader MIN_NODE_MAJOR (${loaderMin}) must match package.json engines.node (>=${engineMin}.0.0)`); }); -test("cli.ts lets gsd update bypass the managed-resource mismatch gate", () => { +test("cli.ts lets sf update bypass the managed-resource mismatch gate", () => { const cliSrc = readFileSync(join(projectRoot, "src", "cli.ts"), "utf-8"); const updateBranchIndex = cliSrc.indexOf("if (cliFlags.messages[0] === 'update')") const mismatchGateIndex = cliSrc.indexOf("exitIfManagedResourcesAreNewer(agentDir)") @@ -196,7 +196,7 @@ test("cli.ts lets gsd update bypass the managed-resource mismatch gate", () => { assert.ok(mismatchGateIndex !== -1, "cli.ts contains the managed-resource mismatch gate") assert.ok( updateBranchIndex < mismatchGateIndex, - "gsd update must run before the managed-resource mismatch gate", + "sf update must run before the managed-resource mismatch gate", ) }); @@ -206,13 +206,13 @@ test("cli.ts lets gsd update bypass the managed-resource mismatch gate", () => { test("initResources syncs extensions, agents, and skills to target dir", async (t) => { const { initResources, readManagedResourceVersion } = await import("../resource-loader.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-resources-test-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-resources-test-")); const fakeAgentDir = join(tmp, "agent"); initResources(fakeAgentDir); // Extensions synced - assertExtensionIndexExists(fakeAgentDir, "gsd"); + assertExtensionIndexExists(fakeAgentDir, "sf"); assertExtensionIndexExists(fakeAgentDir, "browser-tools"); assertExtensionIndexExists(fakeAgentDir, "search-the-web"); assertExtensionIndexExists(fakeAgentDir, "context7"); @@ -229,12 +229,12 @@ test("initResources syncs extensions, agents, and skills to target dir", async ( // Idempotent: run again, no crash initResources(fakeAgentDir); - assertExtensionIndexExists(fakeAgentDir, "gsd"); + assertExtensionIndexExists(fakeAgentDir, "sf"); }); test("initResources skips copy when managed version matches 
current version", async (t) => { const { initResources, readManagedResourceVersion } = await import("../resource-loader.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-resources-skip-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-resources-skip-")); const fakeAgentDir = join(tmp, "agent"); t.after(() => rmSync(tmp, { recursive: true, force: true })); @@ -244,7 +244,7 @@ test("initResources skips copy when managed version matches current version", as assert.ok(version, "manifest written after first sync"); // Add a marker file to detect whether sync runs again - const markerPath = join(fakeAgentDir, "extensions", "gsd", "_marker.txt"); + const markerPath = join(fakeAgentDir, "extensions", "sf", "_marker.txt"); writeFileSync(markerPath, "test-marker"); // Second run: version matches — should skip, marker survives @@ -272,7 +272,7 @@ test("loadStoredEnvKeys hydrates process.env from auth.json", async (t) => { const { loadStoredEnvKeys } = await import("../wizard.ts"); const { AuthStorage } = await import("@sf-run/pi-coding-agent"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-wizard-test-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-wizard-test-")); const authPath = join(tmp, "auth.json"); writeFileSync(authPath, JSON.stringify({ brave: { type: "api_key", key: "test-brave-key" }, @@ -321,7 +321,7 @@ test("loadStoredEnvKeys does not overwrite existing env vars", async (t) => { const { loadStoredEnvKeys } = await import("../wizard.ts"); const { AuthStorage } = await import("@sf-run/pi-coding-agent"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-wizard-nooverwrite-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-wizard-nooverwrite-")); const authPath = join(tmp, "auth.json"); writeFileSync(authPath, JSON.stringify({ brave: { type: "api_key", key: "stored-key" }, @@ -344,18 +344,18 @@ test("loadStoredEnvKeys does not overwrite existing env vars", async (t) => { // 6. 
State derivation — Gap 2 // ═══════════════════════════════════════════════════════════════════════════ -test("deriveState returns pre-planning phase for empty .gsd/ directory", async (t) => { +test("deriveState returns pre-planning phase for empty .sf/ directory", async (t) => { const { deriveState } = await import("../resources/extensions/sf/state.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-state-smoke-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-state-smoke-")); - // Create minimal .gsd/ structure with no milestones - mkdirSync(join(tmp, ".gsd"), { recursive: true }); + // Create minimal .sf/ structure with no milestones + mkdirSync(join(tmp, ".sf"), { recursive: true }); t.after(() => rmSync(tmp, { recursive: true, force: true })); const state = await deriveState(tmp); assert.equal(state.phase, "pre-planning", - `expected pre-planning phase for empty .gsd/, got: ${state.phase}`); + `expected pre-planning phase for empty .sf/, got: ${state.phase}`); assert.equal(state.activeMilestone, null, "no active milestone"); assert.equal(state.activeSlice, null, "no active slice"); assert.equal(state.activeTask, null, "no active task"); @@ -366,24 +366,24 @@ test("deriveState returns pre-planning phase for empty .gsd/ directory", async ( assert.ok(state.nextAction.length > 0, "nextAction is non-empty"); }); -test("deriveState returns pre-planning phase when no .gsd/ directory exists", async (t) => { +test("deriveState returns pre-planning phase when no .sf/ directory exists", async (t) => { const { deriveState } = await import("../resources/extensions/sf/state.ts"); - // Use a temp dir with no .gsd/ subdirectory at all - const tmp = mkdtempSync(join(tmpdir(), "gsd-state-nogsd-")); + // Use a temp dir with no .sf/ subdirectory at all + const tmp = mkdtempSync(join(tmpdir(), "sf-state-nosf-")); t.after(() => rmSync(tmp, { recursive: true, force: true })); - // Should not throw — missing .gsd/ is a valid "no project" state + // Should not throw — missing .sf/ is 
a valid "no project" state const state = await deriveState(tmp); assert.equal(state.phase, "pre-planning", - `expected pre-planning phase when .gsd/ absent, got: ${state.phase}`); + `expected pre-planning phase when .sf/ absent, got: ${state.phase}`); assert.equal(state.activeMilestone, null, "no active milestone"); }); test("deriveState shape is structurally complete", async (t) => { const { deriveState } = await import("../resources/extensions/sf/state.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-state-shape-")); - mkdirSync(join(tmp, ".gsd"), { recursive: true }); + const tmp = mkdtempSync(join(tmpdir(), "sf-state-shape-")); + mkdirSync(join(tmp, ".sf"), { recursive: true }); t.after(() => rmSync(tmp, { recursive: true, force: true })); const state = await deriveState(tmp); @@ -411,10 +411,10 @@ test("deriveState shape is structurally complete", async (t) => { // 7. Doctor health checks — Gap 3 // ═══════════════════════════════════════════════════════════════════════════ -test("runGSDDoctor completes without throwing on empty .gsd/ directory", async (t) => { +test("runGSDDoctor completes without throwing on empty .sf/ directory", async (t) => { const { runGSDDoctor } = await import("../resources/extensions/sf/doctor.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-doctor-smoke-")); - mkdirSync(join(tmp, ".gsd"), { recursive: true }); + const tmp = mkdtempSync(join(tmpdir(), "sf-doctor-smoke-")); + mkdirSync(join(tmp, ".sf"), { recursive: true }); t.after(() => rmSync(tmp, { recursive: true, force: true })); // audit-only mode (fix: false) — should never throw @@ -434,11 +434,11 @@ test("runGSDDoctor completes without throwing on empty .gsd/ directory", async ( test("runGSDDoctor issue objects have required fields", async (t) => { const { runGSDDoctor } = await import("../resources/extensions/sf/doctor.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-doctor-fields-")); - mkdirSync(join(tmp, ".gsd"), { recursive: true }); + const tmp = 
mkdtempSync(join(tmpdir(), "sf-doctor-fields-")); + mkdirSync(join(tmp, ".sf"), { recursive: true }); // Create a milestone dir with no ROADMAP.md to force a missing_roadmap issue - const mDir = join(tmp, ".gsd", "milestones", "M001"); + const mDir = join(tmp, ".sf", "milestones", "M001"); mkdirSync(mDir, { recursive: true }); writeFileSync(join(mDir, "M001-CONTEXT.md"), "# Context\n"); @@ -462,8 +462,8 @@ test("runGSDDoctor issue objects have required fields", async (t) => { test("runGSDDoctor with fix:false never modifies the filesystem", async (t) => { const { runGSDDoctor } = await import("../resources/extensions/sf/doctor.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-doctor-readonly-")); - const gsdDir = join(tmp, ".gsd"); + const tmp = mkdtempSync(join(tmpdir(), "sf-doctor-readonly-")); + const gsdDir = join(tmp, ".sf"); mkdirSync(gsdDir, { recursive: true }); // Write a sentinel file — doctor must not delete or modify it diff --git a/src/tests/auto-mode-piped.test.ts b/src/tests/auto-mode-piped.test.ts index 005dddadd..88c799fc9 100644 --- a/src/tests/auto-mode-piped.test.ts +++ b/src/tests/auto-mode-piped.test.ts @@ -1,5 +1,5 @@ /** - * Tests for `gsd auto` routing — verifies that `auto` is recognized as a + * Tests for `sf auto` routing — verifies that `auto` is recognized as a * subcommand alias for `headless auto` so it doesn't fall through to the * interactive TUI, which hangs when stdin/stdout are piped. 
* @@ -45,14 +45,14 @@ function cliSourceHandlesAutoBeforeTUI(): boolean { } // ═══════════════════════════════════════════════════════════════════════════ -// Core regression test: `gsd auto` must be handled before TUI (#2732) +// Core regression test: `sf auto` must be handled before TUI (#2732) // ═══════════════════════════════════════════════════════════════════════════ test('cli.ts handles `auto` subcommand before interactive TUI (#2732)', () => { assert.ok( cliSourceHandlesAutoBeforeTUI(), 'cli.ts must route messages[0] === "auto" to a handler BEFORE ' + - 'reaching `new InteractiveMode()`. Without this, `gsd auto` with ' + + 'reaching `new InteractiveMode()`. Without this, `sf auto` with ' + 'piped stdin/stdout falls through to the TUI and hangs.', ) }) @@ -77,18 +77,18 @@ test('cli.ts routes `auto` to headless runner', () => { // Verify piped-mode hint in error message when auto mode is not available // ═══════════════════════════════════════════════════════════════════════════ -test('TTY error message mentions `gsd auto` as a non-interactive alternative', () => { +test('TTY error message mentions `sf auto` as a non-interactive alternative', () => { const cliSource = readFileSync(join(projectRoot, 'src', 'cli.ts'), 'utf-8') // The TTY error message should mention auto as an alternative assert.ok( - cliSource.includes('gsd auto') || cliSource.includes('gsd headless'), + cliSource.includes('sf auto') || cliSource.includes('sf headless'), 'TTY error hints should mention headless/auto mode as alternatives', ) }) // ═══════════════════════════════════════════════════════════════════════════ -// `gsd headless` still works (no regression) +// `sf headless` still works (no regression) // ═══════════════════════════════════════════════════════════════════════════ test('cli.ts handles `headless` subcommand before interactive TUI', () => { diff --git a/src/tests/auto-piped-io.test.ts b/src/tests/auto-piped-io.test.ts index 84bb5fbc1..77158d3b1 100644 --- 
a/src/tests/auto-piped-io.test.ts +++ b/src/tests/auto-piped-io.test.ts @@ -1,7 +1,7 @@ /** * Tests for auto-mode piped I/O detection (#2732). * - * When `gsd auto` is run with piped stdout (e.g. `gsd auto | cat`), + * When `sf auto` is run with piped stdout (e.g. `sf auto | cat`), * the CLI should detect the non-TTY stdout and redirect to headless * mode instead of hanging in interactive mode trying to set up a TUI * on a non-terminal output stream. @@ -118,9 +118,9 @@ test("does NOT identify undefined as explicit subcommand", () => { assert.ok(!isExplicitSubcommand(undefined)); }); -// ─── End-to-end scenario: gsd auto | cat ────────────────────────────────── +// ─── End-to-end scenario: sf auto | cat ────────────────────────────────── -test("scenario: 'gsd auto 2>&1 | cat' — should redirect to headless", () => { +test("scenario: 'sf auto 2>&1 | cat' — should redirect to headless", () => { // Simulates: subcommand = "auto", stdin is TTY, stdout is piped const subcommand = "auto"; const stdinIsTTY = true; @@ -133,7 +133,7 @@ test("scenario: 'gsd auto 2>&1 | cat' — should redirect to headless", () => { assert.ok(shouldRedirectAutoToHeadless(subcommand, stdoutIsTTY)); }); -test("scenario: 'gsd auto > /tmp/output.txt' — should redirect to headless", () => { +test("scenario: 'sf auto > /tmp/output.txt' — should redirect to headless", () => { const subcommand = "auto"; const stdinIsTTY = true; const stdoutIsTTY = false; @@ -142,7 +142,7 @@ test("scenario: 'gsd auto > /tmp/output.txt' — should redirect to headless", ( assert.ok(shouldRedirectAutoToHeadless(subcommand, stdoutIsTTY)); }); -test("scenario: 'gsd auto' in terminal — normal interactive mode", () => { +test("scenario: 'sf auto' in terminal — normal interactive mode", () => { const subcommand = "auto"; const stdinIsTTY = true; const stdoutIsTTY = true; @@ -151,7 +151,7 @@ test("scenario: 'gsd auto' in terminal — normal interactive mode", () => { assert.ok(!shouldRedirectAutoToHeadless(subcommand, stdoutIsTTY)); 
}); -test("scenario: 'echo msg | gsd auto' — stdin piped, should redirect", () => { +test("scenario: 'echo msg | sf auto' — stdin piped, should redirect", () => { const subcommand = "auto"; const stdinIsTTY = false; const stdoutIsTTY = true; // stdout is TTY even though stdin is piped @@ -162,7 +162,7 @@ test("scenario: 'echo msg | gsd auto' — stdin piped, should redirect", () => { assert.ok(!canEnterInteractiveMode(stdinIsTTY, stdoutIsTTY)); }); -test("scenario: 'echo msg | gsd auto | cat' — both piped", () => { +test("scenario: 'echo msg | sf auto | cat' — both piped", () => { const subcommand = "auto"; const stdinIsTTY = false; const stdoutIsTTY = false; diff --git a/src/tests/auto-resume-resource-loader.test.ts b/src/tests/auto-resume-resource-loader.test.ts index baa251748..d46d70de1 100644 --- a/src/tests/auto-resume-resource-loader.test.ts +++ b/src/tests/auto-resume-resource-loader.test.ts @@ -7,7 +7,7 @@ import { join, resolve, dirname } from "node:path"; import { fileURLToPath } from "node:url"; const __dirname = dirname(fileURLToPath(import.meta.url)); -const autoTsPath = join(__dirname, "..", "resources", "extensions", "gsd", "auto.ts"); +const autoTsPath = join(__dirname, "..", "resources", "extensions", "sf", "auto.ts"); const loaderTsPath = join(__dirname, "..", "loader.ts"); test("loader.ts sets SF_PKG_ROOT env var", () => { diff --git a/src/tests/bg-shell-persistence-cwd.test.ts b/src/tests/bg-shell-persistence-cwd.test.ts index 15e63f8e5..2b349e48f 100644 --- a/src/tests/bg-shell-persistence-cwd.test.ts +++ b/src/tests/bg-shell-persistence-cwd.test.ts @@ -13,7 +13,7 @@ test("keeps non-worktree cwd unchanged", () => { }); test("rewrites stale auto-worktree cwd to live cwd after exit", () => { - const cached = "/repo/.gsd/worktrees/M001"; + const cached = "/repo/.sf/worktrees/M001"; const live = "/repo"; assert.equal( resolveBgShellPersistenceCwd(cached, live, (path) => path === live), @@ -22,7 +22,7 @@ test("rewrites stale auto-worktree cwd to live 
cwd after exit", () => { }); test("rewrites mismatched auto-worktree cwd to live cwd even if old path still exists", () => { - const cached = "/repo/.gsd/worktrees/M001"; + const cached = "/repo/.sf/worktrees/M001"; const live = "/repo"; assert.equal( resolveBgShellPersistenceCwd(cached, live, () => true), @@ -31,7 +31,7 @@ test("rewrites mismatched auto-worktree cwd to live cwd even if old path still e }); test("rewrites Windows-style auto-worktree cwd to live cwd", () => { - const cached = "C:\\repo\\.gsd\\worktrees\\M001"; + const cached = "C:\\repo\\.sf\\worktrees\\M001"; const live = "C:\\repo"; assert.equal( resolveBgShellPersistenceCwd(cached, live, () => true), @@ -40,7 +40,7 @@ test("rewrites Windows-style auto-worktree cwd to live cwd", () => { }); test("keeps current auto-worktree cwd when it still matches process cwd", () => { - const cached = "/repo/.gsd/worktrees/M001"; + const cached = "/repo/.sf/worktrees/M001"; assert.equal( resolveBgShellPersistenceCwd(cached, cached, () => true), cached, @@ -48,7 +48,7 @@ test("keeps current auto-worktree cwd when it still matches process cwd", () => }); test("falls back to project root when process.cwd throws inside a stale auto-worktree", () => { - const cached = "/repo/.gsd/worktrees/M001"; + const cached = "/repo/.sf/worktrees/M001"; const live = getBgShellLiveCwd( cached, (path) => path === "/repo", diff --git a/src/tests/bridge-package-root.test.ts b/src/tests/bridge-package-root.test.ts index 27a48c074..5d0f6dc71 100644 --- a/src/tests/bridge-package-root.test.ts +++ b/src/tests/bridge-package-root.test.ts @@ -8,7 +8,7 @@ import { join } from 'node:path' * standalone build. * * The Next.js standalone build bakes import.meta.url into compiled chunks as - * the CI runner's absolute Linux path (file:///home/runner/work/gsd-2/gsd-2/…). + * the CI runner's absolute Linux path (file:///home/runner/work/sf-2/sf-2/…). * On Windows, fileURLToPath() rejects this with "File URL path must be * absolute". 
The fix wraps the derivation in safePackageRootFromImportUrl() * so the module-level constant never throws, and resolveBridgeRuntimeConfig diff --git a/src/tests/bundled-extension-paths.test.ts b/src/tests/bundled-extension-paths.test.ts index 8bb21184a..abc425f8c 100644 --- a/src/tests/bundled-extension-paths.test.ts +++ b/src/tests/bundled-extension-paths.test.ts @@ -7,7 +7,7 @@ import { } from "../bundled-extension-paths.ts"; test("bundled extension paths use the platform delimiter by default", () => { - const paths = ["/tmp/gsd/a.ts", "/tmp/gsd/b.ts"]; + const paths = ["/tmp/sf/a.ts", "/tmp/sf/b.ts"]; const encoded = serializeBundledExtensionPaths(paths); assert.equal(encoded, paths.join(delimiter)); @@ -16,8 +16,8 @@ test("bundled extension paths use the platform delimiter by default", () => { test("bundled extension paths preserve Windows drive letters when semicolon-delimited", () => { const windowsPaths = [ - String.raw`C:\Users\dev\.gsd\agent\extensions\gsd\index.ts`, - String.raw`D:\work\gsd\extensions\browser-tools\index.ts`, + String.raw`C:\Users\dev\.sf\agent\extensions\sf\index.ts`, + String.raw`D:\work\sf\extensions\browser-tools\index.ts`, ]; const encoded = serializeBundledExtensionPaths(windowsPaths, ";"); diff --git a/src/tests/cli-onboarding-custom-provider.test.ts b/src/tests/cli-onboarding-custom-provider.test.ts index 3f7644d53..cac853e02 100644 --- a/src/tests/cli-onboarding-custom-provider.test.ts +++ b/src/tests/cli-onboarding-custom-provider.test.ts @@ -7,9 +7,9 @@ import { tmpdir } from "node:os"; import { SettingsManager } from "../../packages/pi-coding-agent/src/core/settings-manager.ts"; test("SettingsManager reads defaultProvider/defaultModel from the explicit agentDir used by CLI (#3860)", () => { - const root = mkdtempSync(join(tmpdir(), "gsd-cli-settings-")); + const root = mkdtempSync(join(tmpdir(), "sf-cli-settings-")); const cwd = join(root, "project"); - const agentDir = join(root, ".gsd", "agent"); + const agentDir = 
join(root, ".sf", "agent"); try { mkdirSync(cwd, { recursive: true }); diff --git a/src/tests/create-gsd-extension-paths.test.ts b/src/tests/create-gsd-extension-paths.test.ts index 88e5d6c0a..fb6a81b8a 100644 --- a/src/tests/create-gsd-extension-paths.test.ts +++ b/src/tests/create-gsd-extension-paths.test.ts @@ -1,11 +1,11 @@ /** - * Validates that the create-gsd-extension skill documentation uses the correct + * Validates that the create-sf-extension skill documentation uses the correct * community extension install path (~/.pi/agent/extensions/) instead of the - * bundled-only path (~/.gsd/agent/extensions/). + * bundled-only path (~/.sf/agent/extensions/). * * Bug: https://github.com/singularity-forge/sf-run/issues/3131 * - * ~/.gsd/agent/extensions/ is reserved for bundled extensions synced from + * ~/.sf/agent/extensions/ is reserved for bundled extensions synced from * the sf-run package. Community/user extensions must use ~/.pi/agent/extensions/. */ @@ -16,7 +16,7 @@ import { join, dirname } from "node:path"; import { fileURLToPath } from "node:url"; const __dirname = dirname(fileURLToPath(import.meta.url)); -const skillDir = join(__dirname, "..", "resources", "skills", "create-gsd-extension"); +const skillDir = join(__dirname, "..", "resources", "skills", "create-sf-extension"); function readSkillFile(relativePath: string): string { return readFileSync(join(skillDir, relativePath), "utf-8"); @@ -31,7 +31,7 @@ const docsToCheck: { file: string; label: string }[] = [ { file: "workflows/debug-extension.md", label: "debug-extension.md" }, ]; -test("create-gsd-extension docs use ~/.pi/agent/extensions/ for community extensions", async (t) => { +test("create-sf-extension docs use ~/.pi/agent/extensions/ for community extensions", async (t) => { for (const { file, label } of docsToCheck) { await t.test(`${label} references ~/.pi/agent/extensions/ for global extensions`, () => { const content = readSkillFile(file); @@ -45,19 +45,19 @@ test("create-gsd-extension 
docs use ~/.pi/agent/extensions/ for community extens } }); -test("create-gsd-extension docs do NOT direct users to install in ~/.gsd/agent/extensions/", async (t) => { +test("create-sf-extension docs do NOT direct users to install in ~/.sf/agent/extensions/", async (t) => { for (const { file, label } of docsToCheck) { - await t.test(`${label} does not tell users to place extensions in ~/.gsd/agent/extensions/`, () => { + await t.test(`${label} does not tell users to place extensions in ~/.sf/agent/extensions/`, () => { const content = readSkillFile(file); - // ~/.gsd/agent/extensions/ should only appear in context that clearly marks + // ~/.sf/agent/extensions/ should only appear in context that clearly marks // it as reserved/bundled, never as an install target for community extensions. // We check that it does NOT appear as a "Global extensions:" or "Global:" path directive. const lines = content.split("\n"); for (let i = 0; i < lines.length; i++) { const line = lines[i]; - if (line.includes("~/.gsd/agent/extensions/")) { - // If the line references ~/.gsd/agent/extensions/, it must be in a + if (line.includes("~/.sf/agent/extensions/")) { + // If the line references ~/.sf/agent/extensions/, it must be in a // context explaining it is reserved/bundled — not as an install instruction. const context = lines.slice(Math.max(0, i - 2), i + 3).join("\n"); const isBundledContext = @@ -66,7 +66,7 @@ test("create-gsd-extension docs do NOT direct users to install in ~/.gsd/agent/e context.toLowerCase().includes("synced"); assert.ok( isBundledContext, - `${label} line ${i + 1} references ~/.gsd/agent/extensions/ without ` + + `${label} line ${i + 1} references ~/.sf/agent/extensions/ without ` + `marking it as bundled/reserved. 
Context:\n${context}`, ); } diff --git a/src/tests/docker-template.test.ts b/src/tests/docker-template.test.ts index 1a8e96f54..8880a5513 100644 --- a/src/tests/docker-template.test.ts +++ b/src/tests/docker-template.test.ts @@ -40,10 +40,10 @@ test("docker/Dockerfile.sandbox installs git", () => { // ── docker-compose.yaml (minimal) ── -test("docker/docker-compose.yaml exists and defines gsd service", () => { +test("docker/docker-compose.yaml exists and defines sf service", () => { const content = readFile("docker/docker-compose.yaml"); assert.match(content, /services:/); - assert.match(content, /gsd:/); + assert.match(content, /sf:/); }); test("docker/docker-compose.yaml mounts workspace volume", () => { diff --git a/src/tests/ensure-workspace-builds.test.ts b/src/tests/ensure-workspace-builds.test.ts index 965d2348e..79ba6658b 100644 --- a/src/tests/ensure-workspace-builds.test.ts +++ b/src/tests/ensure-workspace-builds.test.ts @@ -11,7 +11,7 @@ const { newestSrcMtime, detectStalePackages } = require("../../scripts/ensure-wo describe("newestSrcMtime", () => { let tmp: string; - beforeEach(() => { tmp = mkdtempSync(join(tmpdir(), "gsd-mtime-test-")); }); + beforeEach(() => { tmp = mkdtempSync(join(tmpdir(), "sf-mtime-test-")); }); afterEach(() => { rmSync(tmp, { recursive: true, force: true }); }); it("returns 0 for a non-existent directory", () => { @@ -66,7 +66,7 @@ describe("newestSrcMtime", () => { describe("detectStalePackages", () => { let tmp: string; - beforeEach(() => { tmp = mkdtempSync(join(tmpdir(), "gsd-stale-test-")); }); + beforeEach(() => { tmp = mkdtempSync(join(tmpdir(), "sf-stale-test-")); }); afterEach(() => { rmSync(tmp, { recursive: true, force: true }); }); /** diff --git a/src/tests/extension-load-perf.test.ts b/src/tests/extension-load-perf.test.ts index 3256210f9..395a9b80c 100644 --- a/src/tests/extension-load-perf.test.ts +++ b/src/tests/extension-load-perf.test.ts @@ -32,7 +32,7 @@ test("loadExtensions shares module cache across 
extensions (perf regression #210 const { loadExtensions } = await import(loaderPath); // Create a temp directory with two extensions that import a shared helper - const tmp = mkdtempSync(join(tmpdir(), "gsd-perf-test-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-perf-test-")); try { // Shared helper module diff --git a/src/tests/gsd-web-launcher-contract.test.ts b/src/tests/gsd-web-launcher-contract.test.ts index 5aae17b2b..0ec103301 100644 --- a/src/tests/gsd-web-launcher-contract.test.ts +++ b/src/tests/gsd-web-launcher-contract.test.ts @@ -8,8 +8,8 @@ const packageJson = JSON.parse(readFileSync(packageJsonPath, "utf-8")) as { scripts?: Record<string, string>; }; -test("gsd:web rebuilds bundled resources before launching the packaged web host", () => { - const script = packageJson.scripts?.["gsd:web"]; - assert.ok(script, "package.json must define a gsd:web script"); - assert.match(script, /npm run copy-resources/, "gsd:web must refresh dist/resources so packaged web hosts do not serve stale SF extensions"); +test("sf:web rebuilds bundled resources before launching the packaged web host", () => { + const script = packageJson.scripts?.["sf:web"]; + assert.ok(script, "package.json must define a sf:web script"); + assert.match(script, /npm run copy-resources/, "sf:web must refresh dist/resources so packaged web hosts do not serve stale SF extensions"); }); diff --git a/src/tests/headless-cli-surface.test.ts b/src/tests/headless-cli-surface.test.ts index 3771fb3bf..f60103e6c 100644 --- a/src/tests/headless-cli-surface.test.ts +++ b/src/tests/headless-cli-surface.test.ts @@ -120,39 +120,39 @@ function parseHeadlessArgs(argv: string[]): HeadlessOptions { // ─── --output-format flag parsing ────────────────────────────────────────── test('--output-format text sets outputFormat to text', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--output-format', 'text', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', 
'--output-format', 'text', 'auto']) assert.equal(opts.outputFormat, 'text') assert.equal(opts.json, false) }) test('--output-format json sets outputFormat to json and json=true', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--output-format', 'json', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--output-format', 'json', 'auto']) assert.equal(opts.outputFormat, 'json') assert.equal(opts.json, true) }) test('--output-format stream-json sets outputFormat to stream-json and json=true', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--output-format', 'stream-json', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--output-format', 'stream-json', 'auto']) assert.equal(opts.outputFormat, 'stream-json') assert.equal(opts.json, true) }) test('default output format is text', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', 'auto']) assert.equal(opts.outputFormat, 'text') assert.equal(opts.json, false) }) test('invalid --output-format value throws', () => { assert.throws( - () => parseHeadlessArgs(['node', 'gsd', 'headless', '--output-format', 'yaml', 'auto']), + () => parseHeadlessArgs(['node', 'sf', 'headless', '--output-format', 'yaml', 'auto']), /Invalid output format: yaml/, ) }) test('invalid --output-format value (empty) throws', () => { assert.throws( - () => parseHeadlessArgs(['node', 'gsd', 'headless', '--output-format', 'xml', 'auto']), + () => parseHeadlessArgs(['node', 'sf', 'headless', '--output-format', 'xml', 'auto']), /Invalid output format/, ) }) @@ -160,13 +160,13 @@ test('invalid --output-format value (empty) throws', () => { // ─── --json backward compatibility ───────────────────────────────────────── test('--json is alias for --output-format stream-json', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--json', 'auto']) + const opts = 
parseHeadlessArgs(['node', 'sf', 'headless', '--json', 'auto']) assert.equal(opts.outputFormat, 'stream-json') assert.equal(opts.json, true) }) test('--json before --output-format json: last writer wins', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--json', '--output-format', 'json', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--json', '--output-format', 'json', 'auto']) assert.equal(opts.outputFormat, 'json') assert.equal(opts.json, true) }) @@ -174,13 +174,13 @@ test('--json before --output-format json: last writer wins', () => { // ─── --resume flag ───────────────────────────────────────────────────────── test('--resume parses session ID', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--resume', 'abc-123', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--resume', 'abc-123', 'auto']) assert.equal(opts.resumeSession, 'abc-123') assert.equal(opts.command, 'auto') }) test('no --resume means undefined', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', 'auto']) assert.equal(opts.resumeSession, undefined) }) @@ -292,7 +292,7 @@ test('VALID_OUTPUT_FORMATS contains exactly text, json, stream-json', () => { // ─── Regression: existing flags still parse correctly ────────────────────── test('--events still works with new outputFormat default', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--events', 'agent_end,tool_execution_start', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--events', 'agent_end,tool_execution_start', 'auto']) assert.ok(opts.eventFilter instanceof Set) assert.equal(opts.eventFilter!.size, 2) assert.equal(opts.json, true) @@ -300,30 +300,30 @@ test('--events still works with new outputFormat default', () => { }) test('--timeout still works', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', 
'--timeout', '60000', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--timeout', '60000', 'auto']) assert.equal(opts.timeout, 60000) }) test('--supervised still works and implies stream-json', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--supervised', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--supervised', 'auto']) assert.equal(opts.supervised, true) assert.equal(opts.json, true) assert.equal(opts.outputFormat, 'stream-json') }) test('--answers still works', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--answers', 'answers.json', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--answers', 'answers.json', 'auto']) assert.equal(opts.answers, 'answers.json') }) test('positional command parsing still works', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', 'next']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', 'next']) assert.equal(opts.command, 'next') }) test('combined flags parse correctly', () => { const opts = parseHeadlessArgs([ - 'node', 'gsd', 'headless', + 'node', 'sf', 'headless', '--output-format', 'json', '--timeout', '120000', '--resume', 'sess-xyz', @@ -341,25 +341,25 @@ test('combined flags parse correctly', () => { // ─── --bare flag ─────────────────────────────────────────────────────────── test('--bare sets bare to true', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--bare', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--bare', 'auto']) assert.equal(opts.bare, true) assert.equal(opts.command, 'auto') }) test('no --bare means bare is undefined', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', 'auto']) assert.equal(opts.bare, undefined) }) test('--bare is a boolean flag (no value needed)', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', 
'--bare', '--json', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--bare', '--json', 'auto']) assert.equal(opts.bare, true) assert.equal(opts.json, true) }) test('--bare combined with --output-format json', () => { const opts = parseHeadlessArgs([ - 'node', 'gsd', 'headless', + 'node', 'sf', 'headless', '--bare', '--output-format', 'json', 'auto', @@ -374,7 +374,7 @@ test('--bare combined with --output-format json', () => { test('command before flags: new-milestone --context-text --auto --verbose', () => { const opts = parseHeadlessArgs([ - 'node', 'gsd', 'headless', + 'node', 'sf', 'headless', 'new-milestone', '--context-text', 'build something cool', '--auto', @@ -388,7 +388,7 @@ test('command before flags: new-milestone --context-text --auto --verbose', () = test('command before flags: next --json --timeout', () => { const opts = parseHeadlessArgs([ - 'node', 'gsd', 'headless', + 'node', 'sf', 'headless', 'next', '--json', '--timeout', '60000', @@ -400,7 +400,7 @@ test('command before flags: next --json --timeout', () => { test('command between flags: --auto new-milestone --verbose', () => { const opts = parseHeadlessArgs([ - 'node', 'gsd', 'headless', + 'node', 'sf', 'headless', '--auto', 'new-milestone', '--verbose', @@ -412,7 +412,7 @@ test('command between flags: --auto new-milestone --verbose', () => { test('--bare does not affect other flags', () => { const opts = parseHeadlessArgs([ - 'node', 'gsd', 'headless', + 'node', 'sf', 'headless', '--bare', '--timeout', '60000', '--resume', 'sess-abc', diff --git a/src/tests/headless-detection.test.ts b/src/tests/headless-detection.test.ts index 71bb82bac..2fa6a8d20 100644 --- a/src/tests/headless-detection.test.ts +++ b/src/tests/headless-detection.test.ts @@ -93,7 +93,7 @@ test("detects blocked notification with 'Blocked:' prefix", () => { }) test("detects inline 'Blocked:' message", () => { - assert.ok(isBlockedNotification(makeNotify("Blocked: no active milestone. 
Fix and run /gsd auto."))) + assert.ok(isBlockedNotification(makeNotify("Blocked: no active milestone. Fix and run /sf auto."))) }) test("does NOT match 'blocked' without colon (avoids false positives)", () => { diff --git a/src/tests/headless-events.test.ts b/src/tests/headless-events.test.ts index 9957c2350..578bc1544 100644 --- a/src/tests/headless-events.test.ts +++ b/src/tests/headless-events.test.ts @@ -87,7 +87,7 @@ function parseHeadlessArgs(argv: string[]): HeadlessOptions { // ─── parseHeadlessArgs: --events flag ────────────────────────────────────── test('--events parses comma-separated event types into a Set', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--events', 'agent_end,extension_ui_request', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--events', 'agent_end,extension_ui_request', 'auto']) assert.ok(opts.eventFilter instanceof Set) assert.equal(opts.eventFilter!.size, 2) assert.ok(opts.eventFilter!.has('agent_end')) @@ -95,29 +95,29 @@ test('--events parses comma-separated event types into a Set', () => { }) test('--events implies --json', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--events', 'agent_end', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--events', 'agent_end', 'auto']) assert.equal(opts.json, true) }) test('--events with single type', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--events', 'agent_end', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--events', 'agent_end', 'auto']) assert.equal(opts.eventFilter!.size, 1) assert.ok(opts.eventFilter!.has('agent_end')) }) test('no --events flag means no filter', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--json', 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--json', 'auto']) assert.equal(opts.eventFilter, undefined) }) test('--events with all common types', () => { const types = 
'agent_start,agent_end,tool_execution_start,tool_execution_end,extension_ui_request' - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--events', types, 'auto']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--events', types, 'auto']) assert.equal(opts.eventFilter!.size, 5) }) test('--events combined with other flags', () => { - const opts = parseHeadlessArgs(['node', 'gsd', 'headless', '--timeout', '60000', '--events', 'agent_end', '--verbose', 'next']) + const opts = parseHeadlessArgs(['node', 'sf', 'headless', '--timeout', '60000', '--events', 'agent_end', '--verbose', 'next']) assert.equal(opts.timeout, 60000) assert.equal(opts.verbose, true) assert.equal(opts.command, 'next') diff --git a/src/tests/headless-progress.test.ts b/src/tests/headless-progress.test.ts index fd6763870..ab3f7ae35 100644 --- a/src/tests/headless-progress.test.ts +++ b/src/tests/headless-progress.test.ts @@ -232,7 +232,7 @@ describe('summarizeToolArgs', () => { assert.equal(summarizeToolArgs('ls', { path: 'src/utils' }), 'src/utils') }) - it('summarizes gsd tool with milestone/slice/task IDs', () => { + it('summarizes sf tool with milestone/slice/task IDs', () => { assert.equal(summarizeToolArgs('gsd_task_complete', { milestoneId: 'M001', sliceId: 'S01', taskId: 'T01', oneLiner: 'Built the thing', }), 'M001/S01/T01 Built the thing') diff --git a/src/tests/headless-query-extension-path.test.ts b/src/tests/headless-query-extension-path.test.ts index 499509187..54cf06c1c 100644 --- a/src/tests/headless-query-extension-path.test.ts +++ b/src/tests/headless-query-extension-path.test.ts @@ -13,14 +13,14 @@ const __dirname = dirname(fileURLToPath(import.meta.url)); test("headless-query resolves from agent extensions dir (#3471)", () => { const src = readFileSync(join(__dirname, "..", "headless-query.ts"), "utf-8"); assert.ok( - src.includes("agentExtensionsDir") || src.includes(".gsd/agent"), + src.includes("agentExtensionsDir") || src.includes(".sf/agent"), 
"headless-query must resolve from synced agent directory", ); }); test("cli.ts calls initResources before headless (#3471)", () => { const src = readFileSync(join(__dirname, "..", "cli.ts"), "utf-8"); - const headlessBlock = src.slice(src.indexOf("gsd headless")); + const headlessBlock = src.slice(src.indexOf("sf headless")); const initIdx = headlessBlock.indexOf("initResources"); const runIdx = headlessBlock.indexOf("runHeadless"); assert.ok(initIdx !== -1, "initResources must be called before headless"); diff --git a/src/tests/headless-v2-migration.test.ts b/src/tests/headless-v2-migration.test.ts index 1f233b710..e970fac23 100644 --- a/src/tests/headless-v2-migration.test.ts +++ b/src/tests/headless-v2-migration.test.ts @@ -391,7 +391,7 @@ test('v2 init success sets v2Enabled', async () => { const client = new MockRpcClient() let v2Enabled = false try { - await client.init({ clientId: 'gsd-headless' }) + await client.init({ clientId: 'sf-headless' }) v2Enabled = true } catch { // fall back to v1 @@ -406,7 +406,7 @@ test('v2 init failure falls back gracefully (v1 mode)', async () => { client.initShouldFail = true let v2Enabled = false try { - await client.init({ clientId: 'gsd-headless' }) + await client.init({ clientId: 'sf-headless' }) v2Enabled = true } catch { // fall back to v1 — this is expected diff --git a/src/tests/initial-gsd-header-filter.test.ts b/src/tests/initial-gsd-header-filter.test.ts index ed14057ea..4f84a09f0 100644 --- a/src/tests/initial-gsd-header-filter.test.ts +++ b/src/tests/initial-gsd-header-filter.test.ts @@ -1,7 +1,7 @@ import test from "node:test"; import assert from "node:assert/strict"; -const { filterInitialGsdHeader } = await import("../../web/lib/initial-gsd-header-filter.ts"); +const { filterInitialGsdHeader } = await import("../../web/lib/initial-sf-header-filter.ts"); const SF_LOGO_LINES = [ " ██████╗ ███████╗██████╗ ", diff --git a/src/tests/integration/e2e-headless.test.ts b/src/tests/integration/e2e-headless.test.ts index 
c78cc61b5..f540f3dff 100644 --- a/src/tests/integration/e2e-headless.test.ts +++ b/src/tests/integration/e2e-headless.test.ts @@ -1,7 +1,7 @@ /** - * E2E integration tests for `gsd headless` runtime behavior. + * E2E integration tests for `sf headless` runtime behavior. * - * Spawns real `gsd headless` child processes and asserts on + * Spawns real `sf headless` child processes and asserts on * stdout/stderr/exit-code for: JSON batch mode, SIGINT exit code, * stream-json NDJSON output, --resume error path, and invalid * --output-format handling. @@ -124,11 +124,11 @@ function stripAnsi(s: string): string { return s.replace(/\x1b\[[0-9;]*[A-Za-z]/g, ""); } -/** Bootstrap a temp directory with .gsd/ structure (milestones + runtime). */ +/** Bootstrap a temp directory with .sf/ structure (milestones + runtime). */ function createTempWithGsd(prefix: string): string { const dir = mkdtempSync(join(tmpdir(), prefix)); - mkdirSync(join(dir, ".gsd", "milestones"), { recursive: true }); - mkdirSync(join(dir, ".gsd", "runtime"), { recursive: true }); + mkdirSync(join(dir, ".sf", "milestones"), { recursive: true }); + mkdirSync(join(dir, ".sf", "runtime"), { recursive: true }); return dir; } @@ -158,7 +158,7 @@ function assertNoCrashMarkers(output: string): void { // =========================================================================== test("headless --output-format json emits a single HeadlessJsonResult on stdout", async (t) => { - const tmpDir = createTempWithGsd("gsd-e2e-json-batch-"); + const tmpDir = createTempWithGsd("sf-e2e-json-batch-"); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); // --max-restarts 0 prevents retry loops which would emit multiple JSON results. 
@@ -210,7 +210,7 @@ test("headless --output-format json emits a single HeadlessJsonResult on stdout" // =========================================================================== test("headless exits with code 11 after SIGINT", async (t) => { - const tmpDir = createTempWithGsd("gsd-e2e-sigint-"); + const tmpDir = createTempWithGsd("sf-e2e-sigint-"); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); // Spawn with long timeout and max-restarts 0 so the process stays alive @@ -259,7 +259,7 @@ test("headless exits with code 11 after SIGINT", async (t) => { ); } else { // Process exited before SIGINT arrived — acceptable in environments - // with running gsd sessions that cause auto-mode conflict. + // with running sf sessions that cause auto-mode conflict. // Verify it at least didn't crash. const combined = stripAnsi(result.stdout + result.stderr); assertNoCrashMarkers(combined); @@ -275,7 +275,7 @@ test("headless exits with code 11 after SIGINT", async (t) => { // =========================================================================== test("headless --output-format stream-json emits NDJSON on stdout", async (t) => { - const tmpDir = createTempWithGsd("gsd-e2e-stream-json-"); + const tmpDir = createTempWithGsd("sf-e2e-stream-json-"); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); // --max-restarts 0 to prevent retry loops that extend runtime. 
@@ -323,7 +323,7 @@ test("headless --output-format stream-json emits NDJSON on stdout", async (t) => // =========================================================================== test("headless --resume with nonexistent ID exits 1 with descriptive error", async (t) => { - const tmpDir = createTempWithGsd("gsd-e2e-resume-bad-"); + const tmpDir = createTempWithGsd("sf-e2e-resume-bad-"); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); const result = await runGsd( @@ -353,7 +353,7 @@ test("headless --resume with nonexistent ID exits 1 with descriptive error", asy // =========================================================================== test("headless --output-format with invalid value exits 1", async (t) => { - const tmpDir = createTempWithGsd("gsd-e2e-bad-format-"); + const tmpDir = createTempWithGsd("sf-e2e-bad-format-"); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); const result = await runGsd( diff --git a/src/tests/integration/e2e-smoke.test.ts b/src/tests/integration/e2e-smoke.test.ts index 4a2c5785d..0d71bae5a 100644 --- a/src/tests/integration/e2e-smoke.test.ts +++ b/src/tests/integration/e2e-smoke.test.ts @@ -98,10 +98,10 @@ function createTempGitRepo(prefix: string): string { } // --------------------------------------------------------------------------- -// 1. gsd --version outputs a semver string and exits 0 +// 1. sf --version outputs a semver string and exits 0 // --------------------------------------------------------------------------- -test("gsd --version outputs a semver version string and exits 0", async () => { +test("sf --version outputs a semver version string and exits 0", async () => { const result = await runGsd(["--version"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); @@ -117,10 +117,10 @@ test("gsd --version outputs a semver version string and exits 0", async () => { }); // --------------------------------------------------------------------------- -// 2. 
gsd --help outputs usage information and exits 0 +// 2. sf --help outputs usage information and exits 0 // --------------------------------------------------------------------------- -test("gsd --help outputs usage information and exits 0", async () => { +test("sf --help outputs usage information and exits 0", async () => { const result = await runGsd(["--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); @@ -151,10 +151,10 @@ test("gsd --help outputs usage information and exits 0", async () => { }); // --------------------------------------------------------------------------- -// 3. gsd config --help outputs config-specific or general help and exits 0 +// 3. sf config --help outputs config-specific or general help and exits 0 // --------------------------------------------------------------------------- -test("gsd config --help outputs help and exits 0", async () => { +test("sf config --help outputs help and exits 0", async () => { const result = await runGsd(["config", "--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); @@ -171,10 +171,10 @@ test("gsd config --help outputs help and exits 0", async () => { }); // --------------------------------------------------------------------------- -// 4. gsd update --help outputs update-specific or general help and exits 0 +// 4. sf update --help outputs update-specific or general help and exits 0 // --------------------------------------------------------------------------- -test("gsd update --help outputs help and exits 0", async () => { +test("sf update --help outputs help and exits 0", async () => { const result = await runGsd(["update", "--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); @@ -188,13 +188,13 @@ test("gsd update --help outputs help and exits 0", async () => { }); // --------------------------------------------------------------------------- -// 5. gsd --list-models runs without crashing +// 5. 
sf --list-models runs without crashing // --------------------------------------------------------------------------- -test("gsd --list-models runs without crashing", async () => { +test("sf --list-models runs without crashing", async () => { const result = await runGsd(["--list-models"]); - assert.ok(!result.timedOut, "gsd --list-models should exit within the timeout"); + assert.ok(!result.timedOut, "sf --list-models should exit within the timeout"); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); // No unhandled crash markers @@ -218,17 +218,17 @@ test("gsd --list-models runs without crashing", async () => { }); // --------------------------------------------------------------------------- -// 6. gsd --print in text mode does not segfault or throw unhandled errors +// 6. sf --print in text mode does not segfault or throw unhandled errors // (may fail with "No model selected" when no API keys are configured) // --------------------------------------------------------------------------- -test("gsd --mode text --print does not segfault or throw unhandled errors", { skip: !process.env.ANTHROPIC_API_KEY && !process.env.OPENAI_API_KEY ? "no API key available — print mode requires a configured provider" : undefined }, async () => { +test("sf --mode text --print does not segfault or throw unhandled errors", { skip: !process.env.ANTHROPIC_API_KEY && !process.env.OPENAI_API_KEY ? 
"no API key available — print mode requires a configured provider" : undefined }, async () => { const result = await runGsd( ["--mode", "text", "--print", "echo hello"], 15_000, ); - assert.ok(!result.timedOut, "gsd --print should not hang indefinitely"); + assert.ok(!result.timedOut, "sf --print should not hang indefinitely"); const combinedOutput = stripAnsi(result.stdout + result.stderr); @@ -280,10 +280,10 @@ test("gsd --mode text --print does not segfault or throw unhandled errors", { sk // =========================================================================== // --------------------------------------------------------------------------- -// 7. gsd headless --help outputs headless-specific help and exits 0 +// 7. sf headless --help outputs headless-specific help and exits 0 // --------------------------------------------------------------------------- -test("gsd headless --help outputs help and exits 0", async () => { +test("sf headless --help outputs help and exits 0", async () => { const result = await runGsd(["headless", "--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); @@ -303,10 +303,10 @@ test("gsd headless --help outputs help and exits 0", async () => { }); // --------------------------------------------------------------------------- -// 8. gsd sessions --help outputs sessions-specific help and exits 0 +// 8. 
sf sessions --help outputs sessions-specific help and exits 0 // --------------------------------------------------------------------------- -test("gsd sessions --help outputs sessions-specific help and exits 0", async () => { +test("sf sessions --help outputs sessions-specific help and exits 0", async () => { const result = await runGsd(["sessions", "--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); @@ -324,10 +324,10 @@ test("gsd sessions --help outputs sessions-specific help and exits 0", async () // =========================================================================== // --------------------------------------------------------------------------- -// 9. gsd (no TTY) exits with clean error about requiring a terminal +// 9. sf (no TTY) exits with clean error about requiring a terminal // --------------------------------------------------------------------------- -test("gsd with no TTY exits 1 with clean terminal-required error", async () => { +test("sf with no TTY exits 1 with clean terminal-required error", async () => { // Running with piped stdin (non-TTY) and no subcommand/flags triggers // interactive mode which requires a TTY const result = await runGsd([], 15_000); @@ -348,10 +348,10 @@ test("gsd with no TTY exits 1 with clean terminal-required error", async () => { }); // --------------------------------------------------------------------------- -// 10. gsd with unknown flags does not crash +// 10. sf with unknown flags does not crash // --------------------------------------------------------------------------- -test("gsd with unknown flags does not crash", async () => { +test("sf with unknown flags does not crash", async () => { // Unknown flags are silently ignored by the arg parser. // With --help appended, we get a clean exit path to test. 
const result = await runGsd(["--some-unknown-flag", "--help"]); @@ -364,10 +364,10 @@ test("gsd with unknown flags does not crash", async () => { }); // --------------------------------------------------------------------------- -// 11. gsd -v is equivalent to --version +// 11. sf -v is equivalent to --version // --------------------------------------------------------------------------- -test("gsd -v is equivalent to --version", async () => { +test("sf -v is equivalent to --version", async () => { const result = await runGsd(["-v"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); @@ -382,10 +382,10 @@ test("gsd -v is equivalent to --version", async () => { }); // --------------------------------------------------------------------------- -// 12. gsd -h is equivalent to --help +// 12. sf -h is equivalent to --help // --------------------------------------------------------------------------- -test("gsd -h is equivalent to --help", async () => { +test("sf -h is equivalent to --help", async () => { const result = await runGsd(["-h"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); @@ -403,11 +403,11 @@ test("gsd -h is equivalent to --help", async () => { // =========================================================================== // --------------------------------------------------------------------------- -// 13. gsd headless without .gsd/ directory exits 1 with clean error +// 13. 
sf headless without .sf/ directory exits 1 with clean error // --------------------------------------------------------------------------- -test("gsd headless without .gsd/ directory exits 1 with clean error", async (t) => { - const tmpDir = mkdtempSync(join(tmpdir(), "gsd-e2e-no-gsd-")); +test("sf headless without .sf/ directory exits 1 with clean error", async (t) => { + const tmpDir = mkdtempSync(join(tmpdir(), "sf-e2e-no-sf-")); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); @@ -418,19 +418,19 @@ test("gsd headless without .gsd/ directory exits 1 with clean error", async (t) const combined = stripAnsi(result.stdout + result.stderr); assert.ok( - combined.includes(".gsd/") || combined.includes("No .gsd"), - `expected .gsd/ missing error, got:\n${combined.slice(0, 500)}`, + combined.includes(".sf/") || combined.includes("No .sf"), + `expected .sf/ missing error, got:\n${combined.slice(0, 500)}`, ); assertNoCrashMarkers(combined); }); // --------------------------------------------------------------------------- -// 14. gsd headless new-milestone without --context exits 1 +// 14. sf headless new-milestone without --context exits 1 // --------------------------------------------------------------------------- -test("gsd headless new-milestone without --context exits 1", async (t) => { - const tmpDir = mkdtempSync(join(tmpdir(), "gsd-e2e-no-ctx-")); +test("sf headless new-milestone without --context exits 1", async (t) => { + const tmpDir = mkdtempSync(join(tmpdir(), "sf-e2e-no-ctx-")); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); @@ -449,11 +449,11 @@ test("gsd headless new-milestone without --context exits 1", async (t) => { }); // --------------------------------------------------------------------------- -// 15. gsd headless --timeout with invalid value exits 1 +// 15. 
sf headless --timeout with invalid value exits 1 // --------------------------------------------------------------------------- -test("gsd headless --timeout with invalid value exits 1", async (t) => { - const tmpDir = mkdtempSync(join(tmpdir(), "gsd-e2e-bad-timeout-")); +test("sf headless --timeout with invalid value exits 1", async (t) => { + const tmpDir = mkdtempSync(join(tmpdir(), "sf-e2e-bad-timeout-")); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); @@ -477,11 +477,11 @@ test("gsd headless --timeout with invalid value exits 1", async (t) => { }); // --------------------------------------------------------------------------- -// 16. gsd headless --timeout with negative value exits 1 +// 16. sf headless --timeout with negative value exits 1 // --------------------------------------------------------------------------- -test("gsd headless --timeout with negative value exits 1", async (t) => { - const tmpDir = mkdtempSync(join(tmpdir(), "gsd-e2e-neg-timeout-")); +test("sf headless --timeout with negative value exits 1", async (t) => { + const tmpDir = mkdtempSync(join(tmpdir(), "sf-e2e-neg-timeout-")); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); @@ -504,12 +504,12 @@ test("gsd headless --timeout with negative value exits 1", async (t) => { assertNoCrashMarkers(combined); }); -test("gsd headless query returns JSON from the built CLI", async (t) => { - const tmpDir = createTempGitRepo("gsd-e2e-query-"); +test("sf headless query returns JSON from the built CLI", async (t) => { + const tmpDir = createTempGitRepo("sf-e2e-query-"); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); - mkdirSync(join(tmpDir, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(tmpDir, ".sf", "milestones"), { recursive: true }); // Cold packaged startup in a fresh temp repo is now regularly >10s because // the built CLI loads bundled TS resources through jiti before answering. 
@@ -526,8 +526,8 @@ test("gsd headless query returns JSON from the built CLI", async (t) => { assert.equal(typeof snapshot.state?.phase, "string", "query output should include state.phase"); }); -test("gsd worktree list loads the built worktree CLI without module errors", async (t) => { - const tmpDir = createTempGitRepo("gsd-e2e-worktree-"); +test("sf worktree list loads the built worktree CLI without module errors", async (t) => { + const tmpDir = createTempGitRepo("sf-e2e-worktree-"); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); @@ -554,7 +554,7 @@ test("gsd worktree list loads the built worktree CLI without module errors", asy // 17. --help output lists all subcommands // --------------------------------------------------------------------------- -test("gsd --help lists all documented subcommands", async () => { +test("sf --help lists all documented subcommands", async () => { const result = await runGsd(["--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); @@ -573,7 +573,7 @@ test("gsd --help lists all documented subcommands", async () => { // 18. --help output lists all key flags // --------------------------------------------------------------------------- -test("gsd --help lists all key flags", async () => { +test("sf --help lists all key flags", async () => { const result = await runGsd(["--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); @@ -604,10 +604,10 @@ test("gsd --help lists all key flags", async () => { // =========================================================================== // --------------------------------------------------------------------------- -// 19. gsd --version followed by other flags still just prints version +// 19. 
sf --version followed by other flags still just prints version // --------------------------------------------------------------------------- -test("gsd --version ignores trailing arguments", async () => { +test("sf --version ignores trailing arguments", async () => { const result = await runGsd(["--version", "--help", "--list-models"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); @@ -623,12 +623,12 @@ test("gsd --version ignores trailing arguments", async () => { }); // --------------------------------------------------------------------------- -// 20. gsd headless help (positional, not flag) exits 0 +// 20. sf headless help (positional, not flag) exits 0 // --------------------------------------------------------------------------- -test("gsd headless help (positional) exits cleanly", async () => { +test("sf headless help (positional) exits cleanly", async () => { // "help" as a positional is treated as a quick command by headless mode. - // Without .gsd/ it should fail, but with --help flag it should succeed. + // Without .sf/ it should fail, but with --help flag it should succeed. 
const result = await runGsd(["headless", "--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); diff --git a/src/tests/integration/pack-install.test.ts b/src/tests/integration/pack-install.test.ts index dc5f0ba32..cea79fd31 100644 --- a/src/tests/integration/pack-install.test.ts +++ b/src/tests/integration/pack-install.test.ts @@ -115,7 +115,7 @@ function listTarEntries(tarballPath: string): Promise<string[]> { // ═══════════════════════════════════════════════════════════════════════════ test("npm pack produces tarball with required files", async (t) => { - const sandbox = createNpmSandbox("gsd-pack-test-"); + const sandbox = createNpmSandbox("sf-pack-test-"); const tarballPath = packTarball(sandbox); assert.ok(existsSync(tarballPath), "tarball created"); @@ -134,22 +134,22 @@ test("npm pack produces tarball with required files", async (t) => { assert.ok(files.some(f => f.includes("dist/wizard.js")), "tarball contains dist/wizard.js"); assert.ok(files.some(f => f.includes("dist/resource-loader.js")), "tarball contains dist/resource-loader.js"); assert.ok(files.some(f => f.includes("pkg/package.json")), "tarball contains pkg/package.json"); - assert.ok(files.some(f => f.includes("src/resources/extensions/sf/index.ts")), "tarball contains bundled gsd extension"); + assert.ok(files.some(f => f.includes("src/resources/extensions/sf/index.ts")), "tarball contains bundled sf extension"); assert.ok(files.some(f => f.includes("scripts/postinstall.js")), "tarball contains postinstall script"); // pkg/package.json must have piConfig const pkgJson = readFileSync(join(projectRoot, "pkg", "package.json"), "utf-8"); const pkg = JSON.parse(pkgJson); - assert.equal(pkg.piConfig?.name, "gsd", "pkg/package.json piConfig.name is gsd"); - assert.equal(pkg.piConfig?.configDir, ".gsd", "pkg/package.json piConfig.configDir is .gsd"); + assert.equal(pkg.piConfig?.name, "sf", "pkg/package.json piConfig.name is sf"); + assert.equal(pkg.piConfig?.configDir, 
".sf", "pkg/package.json piConfig.configDir is .sf"); }); // ═══════════════════════════════════════════════════════════════════════════ -// 2. npm pack → install → gsd binary resolves +// 2. npm pack → install → sf binary resolves // ═══════════════════════════════════════════════════════════════════════════ -test("tarball installs and gsd binary resolves", async (t) => { - const sandbox = createNpmSandbox("gsd-install-test-"); +test("tarball installs and sf binary resolves", async (t) => { + const sandbox = createNpmSandbox("sf-install-test-"); const tarballPath = packTarball(sandbox); t.after(() => { @@ -160,10 +160,10 @@ test("tarball installs and gsd binary resolves", async (t) => { // Install from tarball into a temp prefix runNpmQuiet(["install", "--prefix", sandbox.installPrefix, tarballPath, "--no-save"], sandbox); - // Verify the gsd bin exists in the installed package - const binName = process.platform === "win32" ? "gsd.cmd" : "gsd"; + // Verify the sf bin exists in the installed package + const binName = process.platform === "win32" ? "sf.cmd" : "sf"; const installedBin = join(sandbox.installPrefix, "node_modules", ".bin", binName); - assert.ok(existsSync(installedBin), `gsd binary exists in node_modules/.bin/ (${binName})`); + assert.ok(existsSync(installedBin), `sf binary exists in node_modules/.bin/ (${binName})`); // Verify loader.js is executable (has shebang) const installedLoader = join(sandbox.installPrefix, "node_modules", "sf-run", "dist", "loader.js"); @@ -180,18 +180,18 @@ test("tarball installs and gsd binary resolves", async (t) => { "src", "resources", "extensions", - "gsd", + "sf", "index.ts", ); - assert.ok(existsSync(installedGsdExt), "bundled gsd extension present in installed package"); + assert.ok(existsSync(installedGsdExt), "bundled sf extension present in installed package"); }); // ═══════════════════════════════════════════════════════════════════════════ // 3. 
Launch → extensions load → no errors on stderr // ═══════════════════════════════════════════════════════════════════════════ -test("gsd launches and loads extensions without errors", async () => { - // Launch gsd with all optional keys set (skip wizard) and capture stderr. +test("sf launches and loads extensions without errors", async () => { + // Launch sf with all optional keys set (skip wizard) and capture stderr. // Kill after 5 seconds — we just need to see if extensions load. // Assumes build already done. const output = await new Promise<string>((resolve) => { @@ -229,7 +229,7 @@ test("gsd launches and loads extensions without errors", async () => { // No extension load errors assert.ok( - !output.includes("[gsd] Extension load error"), + !output.includes("[sf] Extension load error"), `no extension load errors on stderr (got: ${output.slice(0, 500)})`, ); @@ -244,9 +244,9 @@ test("gsd launches and loads extensions without errors", async () => { ); }); -test("gsd exits early with a clear message when synced resources are newer than the binary", async (t) => { - const fakeHome = mkdtempSync(join(tmpdir(), "gsd-version-skew-")); - const fakeAgentDir = join(fakeHome, ".gsd", "agent"); +test("sf exits early with a clear message when synced resources are newer than the binary", async (t) => { + const fakeHome = mkdtempSync(join(tmpdir(), "sf-version-skew-")); + const fakeAgentDir = join(fakeHome, ".sf", "agent"); mkdirSync(fakeAgentDir, { recursive: true }); writeFileSync( join(fakeAgentDir, "managed-resources.json"), @@ -283,6 +283,6 @@ test("gsd exits early with a clear message when synced resources are newer than assert.equal(result.code, 1, "startup exits with code 1 on version skew"); assert.match(result.stderr, /Version mismatch detected/, "prints a friendly skew header"); - assert.match(result.stderr, /npm install -g sf-run@latest|gsd update/, "prints upgrade guidance"); - assert.doesNotMatch(result.stderr, /\[gsd\] Extension load error/, "fails before 
extension loading"); + assert.match(result.stderr, /npm install -g sf-run@latest|sf update/, "prints upgrade guidance"); + assert.doesNotMatch(result.stderr, /\[sf\] Extension load error/, "fails before extension loading"); }); diff --git a/src/tests/integration/web-auth-token.test.ts b/src/tests/integration/web-auth-token.test.ts index 20e835620..6091da446 100644 --- a/src/tests/integration/web-auth-token.test.ts +++ b/src/tests/integration/web-auth-token.test.ts @@ -47,7 +47,7 @@ test('auth.ts wraps localStorage calls in try/catch for private browsing', () => // ─── sendBeacon auth token tests ──────────────────────────────────────────── -const appShellSource = readFileSync(join(projectRoot, 'web', 'components', 'gsd', 'app-shell.tsx'), 'utf-8') +const appShellSource = readFileSync(join(projectRoot, 'web', 'components', 'sf', 'app-shell.tsx'), 'utf-8') test('app-shell.tsx sendBeacon includes auth token as query parameter', () => { // sendBeacon cannot set custom headers, so the token must be passed diff --git a/src/tests/integration/web-auto-dashboard-lock-reconciliation.test.ts b/src/tests/integration/web-auto-dashboard-lock-reconciliation.test.ts index 10f9d641f..d421b622b 100644 --- a/src/tests/integration/web-auto-dashboard-lock-reconciliation.test.ts +++ b/src/tests/integration/web-auto-dashboard-lock-reconciliation.test.ts @@ -26,7 +26,7 @@ import { const repoRoot = join(import.meta.dirname, "..", "..", ".."); function makeTempFixture(): { projectCwd: string; cleanup: () => void } { - const root = mkdtempSync(join(tmpdir(), "gsd-auto-lock-test-")); + const root = mkdtempSync(join(tmpdir(), "sf-auto-lock-test-")); const projectCwd = join(root, "project"); mkdirSync(projectCwd, { recursive: true }); return { @@ -47,13 +47,13 @@ function writeAutoModule(dir: string, payload: Record<string, unknown>): string } function writeSessionLock(projectCwd: string, data: Record<string, unknown>): void { - const gsdDir = join(projectCwd, ".gsd"); + const gsdDir = 
join(projectCwd, ".sf"); mkdirSync(gsdDir, { recursive: true }); writeFileSync(join(gsdDir, "auto.lock"), JSON.stringify(data)); } function writePausedSession(projectCwd: string, data: Record<string, unknown>): void { - const runtimeDir = join(projectCwd, ".gsd", "runtime"); + const runtimeDir = join(projectCwd, ".sf", "runtime"); mkdirSync(runtimeDir, { recursive: true }); writeFileSync(join(runtimeDir, "paused-session.json"), JSON.stringify(data)); } diff --git a/src/tests/integration/web-boot-node24.test.ts b/src/tests/integration/web-boot-node24.test.ts index 631b41d02..a828d0a16 100644 --- a/src/tests/integration/web-boot-node24.test.ts +++ b/src/tests/integration/web-boot-node24.test.ts @@ -11,13 +11,13 @@ const [nodeMajor, nodeMinor] = process.versions.node.split(".").map(Number) const isNode22_7OrNewer = nodeMajor > 22 || (nodeMajor === 22 && nodeMinor >= 7) test("resolveTypeStrippingFlag returns --experimental-strip-types for paths outside node_modules", () => { - const flag = resolveTypeStrippingFlag("/home/user/projects/gsd") + const flag = resolveTypeStrippingFlag("/home/user/projects/sf") assert.equal(flag, "--experimental-strip-types") }) test("resolveTypeStrippingFlag returns --experimental-strip-types for path with node_modules substring not as directory", () => { - // e.g. /home/user/my_node_modules_backup/gsd — not actually under node_modules/ - const flag = resolveTypeStrippingFlag("/home/user/my_node_modules_backup/gsd") + // e.g. 
/home/user/my_node_modules_backup/sf — not actually under node_modules/ + const flag = resolveTypeStrippingFlag("/home/user/my_node_modules_backup/sf") assert.equal(flag, "--experimental-strip-types") }) diff --git a/src/tests/integration/web-bridge-contract.test.ts b/src/tests/integration/web-bridge-contract.test.ts index abe3209ea..5dec0a2fd 100644 --- a/src/tests/integration/web-bridge-contract.test.ts +++ b/src/tests/integration/web-bridge-contract.test.ts @@ -53,10 +53,10 @@ function attachJsonLineReader(stream: PassThrough, onLine: (line: string) => voi } function makeWorkspaceFixture(): { projectCwd: string; sessionsDir: string; cleanup: () => void } { - const root = mkdtempSync(join(tmpdir(), "gsd-web-bridge-")); + const root = mkdtempSync(join(tmpdir(), "sf-web-bridge-")); const projectCwd = join(root, "project"); const sessionsDir = join(root, "sessions"); - const milestoneDir = join(projectCwd, ".gsd", "milestones", "M001"); + const milestoneDir = join(projectCwd, ".sf", "milestones", "M001"); const sliceDir = join(milestoneDir, "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); @@ -141,20 +141,20 @@ function fakeWorkspaceIndex() { { id: "M001", title: "Demo Milestone", - roadmapPath: ".gsd/milestones/M001/M001-ROADMAP.md", + roadmapPath: ".sf/milestones/M001/M001-ROADMAP.md", slices: [ { id: "S01", title: "Demo Slice", done: false, - planPath: ".gsd/milestones/M001/slices/S01/S01-PLAN.md", - tasksDir: ".gsd/milestones/M001/slices/S01/tasks", + planPath: ".sf/milestones/M001/slices/S01/S01-PLAN.md", + tasksDir: ".sf/milestones/M001/slices/S01/tasks", tasks: [ { id: "T01", title: "Wire boot", done: false, - planPath: ".gsd/milestones/M001/slices/S01/tasks/T01-PLAN.md", + planPath: ".sf/milestones/M001/slices/S01/tasks/T01-PLAN.md", }, ], }, diff --git a/src/tests/integration/web-bridge-terminal-contract.test.ts b/src/tests/integration/web-bridge-terminal-contract.test.ts index 51890a645..009cd7923 100644 --- 
a/src/tests/integration/web-bridge-terminal-contract.test.ts +++ b/src/tests/integration/web-bridge-terminal-contract.test.ts @@ -102,7 +102,7 @@ async function readSseEvents(response: Response, count: number): Promise<any[]> } function makeWorkspaceFixture(): { projectCwd: string; sessionsDir: string; cleanup: () => void } { - const root = mkdtempSync(join(tmpdir(), "gsd-web-bridge-terminal-")); + const root = mkdtempSync(join(tmpdir(), "sf-web-bridge-terminal-")); const projectCwd = join(root, "project"); const sessionsDir = join(root, "sessions"); mkdirSync(projectCwd, { recursive: true }); diff --git a/src/tests/integration/web-cli-entry.test.ts b/src/tests/integration/web-cli-entry.test.ts index a8f5acc6c..c2c7d2be5 100644 --- a/src/tests/integration/web-cli-entry.test.ts +++ b/src/tests/integration/web-cli-entry.test.ts @@ -8,7 +8,7 @@ import { pathToFileURL } from "node:url"; const { resolveGsdCliEntry } = await import("../../web/cli-entry.ts"); function makeFixture(paths: string[]): string { - const root = mkdtempSync(join(tmpdir(), "gsd-cli-entry-")); + const root = mkdtempSync(join(tmpdir(), "sf-cli-entry-")); for (const relativePath of paths) { const fullPath = join(root, relativePath); mkdirSync(join(fullPath, ".."), { recursive: true }); @@ -62,7 +62,7 @@ test("resolveGsdCliEntry prefers the source loader for source-dev interactive se command: "/custom/node", args: [ "--import", - pathToFileURL(join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs")).href, + pathToFileURL(join(packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs")).href, "--experimental-strip-types", join(packageRoot, "src", "loader.ts"), ], @@ -81,7 +81,7 @@ test("resolveGsdCliEntry appends rpc arguments for bridge sessions", (t) => { execPath: "/custom/node", hostKind: "packaged-standalone", mode: "rpc", - sessionDir: "/tmp/.gsd/sessions/project-c", + sessionDir: "/tmp/.sf/sessions/project-c", }); assert.deepEqual(entry, { @@ 
-92,7 +92,7 @@ test("resolveGsdCliEntry appends rpc arguments for bridge sessions", (t) => { "rpc", "--continue", "--session-dir", - "/tmp/.gsd/sessions/project-c", + "/tmp/.sf/sessions/project-c", ], cwd: "/tmp/project-c", }); diff --git a/src/tests/integration/web-command-parity-contract.test.ts b/src/tests/integration/web-command-parity-contract.test.ts index 9c199edf0..915a4eed1 100644 --- a/src/tests/integration/web-command-parity-contract.test.ts +++ b/src/tests/integration/web-command-parity-contract.test.ts @@ -144,33 +144,33 @@ test("registered SF command roots stay on the prompt/extension path", async () = const registeredRoots = await collectRegisteredGsdCommandRoots() assert.deepEqual( registeredRoots, - ["exit", "gsd", "kill", "worktree", "wt"], + ["exit", "sf", "kill", "worktree", "wt"], "browser parity contract only expects the current SF command roots", ) - // Non-gsd roots are extension commands that pass through to the bridge. + // Non-sf roots are extension commands that pass through to the bridge. // Derived dynamically so adding a new registration fails this assertion loudly. 
- const nonGsdRoots = registeredRoots.filter((r) => r !== "gsd") - assert.equal(nonGsdRoots.length, 4, "expected exactly 4 non-gsd passthrough roots; update this count when adding registrations") + const nonGsdRoots = registeredRoots.filter((r) => r !== "sf") + assert.equal(nonGsdRoots.length, 4, "expected exactly 4 non-sf passthrough roots; update this count when adding registrations") for (const root of nonGsdRoots) { assertPromptPassthrough(`/${root}`) } - // Bare /gsd passes through to bridge (equivalent to /gsd next) - const bareGsd = dispatchBrowserSlashCommand("/gsd") - assert.equal(bareGsd.kind, "prompt", "bare /gsd should pass through to bridge") - assert.equal(bareGsd.command.message, "/gsd", "bare /gsd should preserve exact input") + // Bare /sf passes through to bridge (equivalent to /sf next) + const bareGsd = dispatchBrowserSlashCommand("/sf") + assert.equal(bareGsd.kind, "prompt", "bare /sf should pass through to bridge") + assert.equal(bareGsd.command.message, "/sf", "bare /sf should preserve exact input") }) test("current SF command family samples dispatch to correct outcomes after S02", async (t) => { - await t.test("/gsd (bare) still passes through to bridge", () => { - assertPromptPassthrough("/gsd") + await t.test("/sf (bare) still passes through to bridge", () => { + assertPromptPassthrough("/sf") }) - await t.test("/gsd status now dispatches to surface", () => { - const outcome = dispatchBrowserSlashCommand("/gsd status") - assert.equal(outcome.kind, "surface", "/gsd status should dispatch to surface after T01") - assert.equal(outcome.surface, "gsd-status") + await t.test("/sf status now dispatches to surface", () => { + const outcome = dispatchBrowserSlashCommand("/sf status") + assert.equal(outcome.kind, "surface", "/sf status should dispatch to surface after T01") + assert.equal(outcome.surface, "sf-status") }) await t.test("/worktree list, /wt list, /kill, /exit still pass through", () => { @@ -180,14 +180,14 @@ test("current SF command 
family samples dispatch to correct outcomes after S02", assertPromptPassthrough("/exit") }) - await t.test("/gsd status dispatches to surface regardless of streaming state", () => { - const streaming = dispatchBrowserSlashCommand("/gsd status", { isStreaming: true }) - assert.equal(streaming.kind, "surface", "/gsd status should be surface even when streaming") - assert.equal(streaming.surface, "gsd-status") + await t.test("/sf status dispatches to surface regardless of streaming state", () => { + const streaming = dispatchBrowserSlashCommand("/sf status", { isStreaming: true }) + assert.equal(streaming.kind, "surface", "/sf status should be surface even when streaming") + assert.equal(streaming.surface, "sf-status") - const idle = dispatchBrowserSlashCommand("/gsd status", { isStreaming: false }) + const idle = dispatchBrowserSlashCommand("/sf status", { isStreaming: false }) assert.equal(idle.kind, "surface") - assert.equal(idle.surface, "gsd-status") + assert.equal(idle.surface, "sf-status") }) }) @@ -227,7 +227,7 @@ const EXPECTED_GSD_OUTCOMES = new Map<string, "surface" | "prompt" | "local" | " ["help", "local"], ]) -test("every registered /gsd subcommand has an explicit browser dispatch outcome", async (t) => { +test("every registered /sf subcommand has an explicit browser dispatch outcome", async (t) => { assert.equal( EXPECTED_GSD_OUTCOMES.size, 30, @@ -235,93 +235,93 @@ test("every registered /gsd subcommand has an explicit browser dispatch outcome" ) for (const [subcommand, expectedKind] of EXPECTED_GSD_OUTCOMES) { - await t.test(`/gsd ${subcommand} -> ${expectedKind}`, () => { - const outcome = dispatchBrowserSlashCommand(`/gsd ${subcommand}`) + await t.test(`/sf ${subcommand} -> ${expectedKind}`, () => { + const outcome = dispatchBrowserSlashCommand(`/sf ${subcommand}`) assert.equal( outcome.kind, expectedKind, - `/gsd ${subcommand} should dispatch to ${expectedKind}, got ${outcome.kind}`, + `/sf ${subcommand} should dispatch to ${expectedKind}, got 
${outcome.kind}`, ) }) if (expectedKind === "surface") { - await t.test(`/gsd ${subcommand} opens gsd-${subcommand} surface`, () => { - const outcome = dispatchBrowserSlashCommand(`/gsd ${subcommand}`) as any - assert.equal(outcome.surface, `gsd-${subcommand}`, `/gsd ${subcommand} should open the gsd-${subcommand} surface`) + await t.test(`/sf ${subcommand} opens sf-${subcommand} surface`, () => { + const outcome = dispatchBrowserSlashCommand(`/sf ${subcommand}`) as any + assert.equal(outcome.surface, `sf-${subcommand}`, `/sf ${subcommand} should open the sf-${subcommand} surface`) }) } if (expectedKind === "prompt") { - await t.test(`/gsd ${subcommand} preserves exact input text`, () => { - const outcome = dispatchBrowserSlashCommand(`/gsd ${subcommand}`) as any - assert.equal(outcome.command.message, `/gsd ${subcommand}`, `/gsd ${subcommand} should preserve exact input text for bridge delivery`) + await t.test(`/sf ${subcommand} preserves exact input text`, () => { + const outcome = dispatchBrowserSlashCommand(`/sf ${subcommand}`) as any + assert.equal(outcome.command.message, `/sf ${subcommand}`, `/sf ${subcommand} should preserve exact input text for bridge delivery`) }) } if (expectedKind === "local") { - await t.test(`/gsd ${subcommand} dispatches to gsd_help action`, () => { - const outcome = dispatchBrowserSlashCommand(`/gsd ${subcommand}`) as any - assert.equal(outcome.action, "gsd_help", `/gsd ${subcommand} should dispatch to gsd_help action`) + await t.test(`/sf ${subcommand} dispatches to gsd_help action`, () => { + const outcome = dispatchBrowserSlashCommand(`/sf ${subcommand}`) as any + assert.equal(outcome.action, "gsd_help", `/sf ${subcommand} should dispatch to gsd_help action`) }) } if (expectedKind === "view-navigate") { - await t.test(`/gsd ${subcommand} navigates to the ${subcommand} view`, () => { - const outcome = dispatchBrowserSlashCommand(`/gsd ${subcommand}`) as any - assert.equal(outcome.view, subcommand, `/gsd ${subcommand} should 
navigate to the ${subcommand} view`) + await t.test(`/sf ${subcommand} navigates to the ${subcommand} view`, () => { + const outcome = dispatchBrowserSlashCommand(`/sf ${subcommand}`) as any + assert.equal(outcome.view, subcommand, `/sf ${subcommand} should navigate to the ${subcommand} view`) }) } } }) test("SF dispatch edge cases", async (t) => { - await t.test("/gsd (bare, no subcommand) passes through to bridge", () => { - const outcome = dispatchBrowserSlashCommand("/gsd") + await t.test("/sf (bare, no subcommand) passes through to bridge", () => { + const outcome = dispatchBrowserSlashCommand("/sf") assert.equal(outcome.kind, "prompt") - assert.equal(outcome.command.message, "/gsd") + assert.equal(outcome.command.message, "/sf") }) - await t.test("/gsd help dispatches to local gsd_help action", () => { - const outcome = dispatchBrowserSlashCommand("/gsd help") + await t.test("/sf help dispatches to local gsd_help action", () => { + const outcome = dispatchBrowserSlashCommand("/sf help") assert.equal(outcome.kind, "local") assert.equal(outcome.action, "gsd_help") }) - await t.test("/gsd unknown-xyz passes through to bridge", () => { - const outcome = dispatchBrowserSlashCommand("/gsd unknown-xyz") + await t.test("/sf unknown-xyz passes through to bridge", () => { + const outcome = dispatchBrowserSlashCommand("/sf unknown-xyz") assert.equal(outcome.kind, "prompt", "unknown subcommand should pass through to bridge") - assert.equal(outcome.command.message, "/gsd unknown-xyz", "unknown subcommand should preserve exact input") - assert.equal(outcome.slashCommandName, "gsd", "unknown subcommand should identify as gsd command") + assert.equal(outcome.command.message, "/sf unknown-xyz", "unknown subcommand should preserve exact input") + assert.equal(outcome.slashCommandName, "sf", "unknown subcommand should identify as sf command") }) - await t.test("/export is built-in session export, not gsd-export", () => { + await t.test("/export is built-in session export, not 
sf-export", () => { const outcome = dispatchBrowserSlashCommand("/export") assert.equal(outcome.kind, "surface") assert.equal(outcome.surface, "export", "/export should be the built-in session export surface") }) - await t.test("/gsd export is SF milestone export, distinct from built-in /export", () => { - const outcome = dispatchBrowserSlashCommand("/gsd export") + await t.test("/sf export is SF milestone export, distinct from built-in /export", () => { + const outcome = dispatchBrowserSlashCommand("/sf export") assert.equal(outcome.kind, "surface") - assert.equal(outcome.surface, "gsd-export", "/gsd export should be the SF milestone export surface") + assert.equal(outcome.surface, "sf-export", "/sf export should be the SF milestone export surface") }) - await t.test("/gsd forensics detailed preserves sub-args", () => { - const outcome = dispatchBrowserSlashCommand("/gsd forensics detailed") + await t.test("/sf forensics detailed preserves sub-args", () => { + const outcome = dispatchBrowserSlashCommand("/sf forensics detailed") assert.equal(outcome.kind, "surface") - assert.equal(outcome.surface, "gsd-forensics") + assert.equal(outcome.surface, "sf-forensics") assert.equal(outcome.args, "detailed", "sub-args after subcommand should be preserved") }) await t.test("SF surface commands produce system terminal notice", () => { - const outcome = dispatchBrowserSlashCommand("/gsd status") + const outcome = dispatchBrowserSlashCommand("/sf status") const notice = getBrowserSlashCommandTerminalNotice(outcome) assert.ok(notice, "surface outcome should produce a terminal notice") assert.equal(notice.type, "system") }) await t.test("SF passthrough commands produce no terminal notice", () => { - const outcome = dispatchBrowserSlashCommand("/gsd auto") + const outcome = dispatchBrowserSlashCommand("/sf auto") const notice = getBrowserSlashCommandTerminalNotice(outcome) assert.equal(notice, null, "passthrough outcome should produce no terminal notice") }) @@ -333,25 +333,25 @@ 
test("every SF surface dispatches through the contract wiring end-to-end", async assert.equal(gsdSurfaces.length, 19, "should have exactly 19 SF surface subcommands") for (const [subcommand] of gsdSurfaces) { - await t.test(`/gsd ${subcommand} -> dispatch -> open request -> surface state`, () => { - const outcome = dispatchBrowserSlashCommand(`/gsd ${subcommand}`) + await t.test(`/sf ${subcommand} -> dispatch -> open request -> surface state`, () => { + const outcome = dispatchBrowserSlashCommand(`/sf ${subcommand}`) assert.equal(outcome.kind, "surface") const openRequest = surfaceOutcomeToOpenRequest(outcome, {}) const state = openCommandSurfaceState(createInitialCommandSurfaceState(), openRequest) - assert.equal(state.open, true, `surface state should be open for gsd-${subcommand}`) - assert.ok(state.section, `surface state should have a non-null section for gsd-${subcommand}`) - assert.equal(state.section, `gsd-${subcommand}`, `section should match gsd-${subcommand}`) - assert.ok(state.selectedTarget, `surface state should have a non-null selectedTarget for gsd-${subcommand}`) - assert.equal(state.selectedTarget.kind, "gsd", `target kind should be "gsd" for gsd-${subcommand}`) + assert.equal(state.open, true, `surface state should be open for sf-${subcommand}`) + assert.ok(state.section, `surface state should have a non-null section for sf-${subcommand}`) + assert.equal(state.section, `sf-${subcommand}`, `section should match sf-${subcommand}`) + assert.ok(state.selectedTarget, `surface state should have a non-null selectedTarget for sf-${subcommand}`) + assert.equal(state.selectedTarget.kind, "sf", `target kind should be "sf" for sf-${subcommand}`) assert.equal(state.selectedTarget.subcommand, subcommand, `target subcommand should be "${subcommand}"`) }) } }) -test("/gsd visualize dispatches as view-navigate to the visualizer view", () => { - const outcome = dispatchBrowserSlashCommand("/gsd visualize") +test("/sf visualize dispatches as view-navigate to the 
visualizer view", () => { + const outcome = dispatchBrowserSlashCommand("/sf visualize") assert.equal(outcome.kind, "view-navigate") assert.equal(outcome.view, "visualize") }) @@ -680,7 +680,7 @@ test("surface action state keeps compaction summaries inspectable", () => { }) test("command-surface session affordances use the shared store action path", () => { - const commandSurfacePath = resolve(import.meta.dirname, "../../../web/components/gsd/command-surface.tsx") + const commandSurfacePath = resolve(import.meta.dirname, "../../../web/components/sf/command-surface.tsx") const commandSurfaceSource = readFileSync(commandSurfacePath, "utf-8") assert.match( diff --git a/src/tests/integration/web-continuity-contract.test.ts b/src/tests/integration/web-continuity-contract.test.ts index 5bc1b9b0d..10af2b4c3 100644 --- a/src/tests/integration/web-continuity-contract.test.ts +++ b/src/tests/integration/web-continuity-contract.test.ts @@ -1,7 +1,7 @@ import test from "node:test"; import assert from "node:assert/strict"; -// ─── Constants mirrored from gsd-workspace-store.tsx ───────────────── +// ─── Constants mirrored from sf-workspace-store.tsx ───────────────── // These MUST match the exported values in the store. The final test // case verifies the store's actual exported values if the runtime // supports .tsx imports; otherwise we trust these mirrors. 
diff --git a/src/tests/integration/web-dashboard-rtk-contract.test.ts b/src/tests/integration/web-dashboard-rtk-contract.test.ts index 08c1e18fd..923fc18a9 100644 --- a/src/tests/integration/web-dashboard-rtk-contract.test.ts +++ b/src/tests/integration/web-dashboard-rtk-contract.test.ts @@ -3,7 +3,7 @@ import assert from "node:assert/strict"; import { readFileSync } from "node:fs"; import { join } from "node:path"; -const dashboardPath = join(process.cwd(), "web", "components", "gsd", "dashboard.tsx"); +const dashboardPath = join(process.cwd(), "web", "components", "sf", "dashboard.tsx"); const source = readFileSync(dashboardPath, "utf-8"); test("dashboard gates RTK Saved metric card on rtkEnabled", () => { diff --git a/src/tests/integration/web-diagnostics-contract.test.ts b/src/tests/integration/web-diagnostics-contract.test.ts index eb698f3ca..2d4e79e07 100644 --- a/src/tests/integration/web-diagnostics-contract.test.ts +++ b/src/tests/integration/web-diagnostics-contract.test.ts @@ -36,7 +36,7 @@ const { dispatchBrowserSlashCommand, } = await import("../../../web/lib/browser-slash-command-dispatch.ts") -const { GSDWorkspaceStore } = await import("../../../web/lib/gsd-workspace-store.tsx") +const { GSDWorkspaceStore } = await import("../../../web/lib/sf-workspace-store.tsx") // ─── Block 1: Type exports (R103, R104, R105) ─────────────────────────────── @@ -253,35 +253,35 @@ describe("diagnostics contract state", () => { // ─── Block 3: Dispatch→surface pipeline (R103, R104, R105) ────────────────── describe("diagnostics dispatch→surface pipeline", () => { - it("/gsd forensics dispatches to gsd-forensics surface", () => { - const outcome = dispatchBrowserSlashCommand("/gsd forensics", {}) + it("/sf forensics dispatches to sf-forensics surface", () => { + const outcome = dispatchBrowserSlashCommand("/sf forensics", {}) assert.equal(outcome.kind, "surface") if (outcome.kind === "surface") { - assert.equal(outcome.surface, "gsd-forensics") + 
assert.equal(outcome.surface, "sf-forensics") } }) - it("/gsd doctor dispatches to gsd-doctor surface", () => { - const outcome = dispatchBrowserSlashCommand("/gsd doctor", {}) + it("/sf doctor dispatches to sf-doctor surface", () => { + const outcome = dispatchBrowserSlashCommand("/sf doctor", {}) assert.equal(outcome.kind, "surface") if (outcome.kind === "surface") { - assert.equal(outcome.surface, "gsd-doctor") + assert.equal(outcome.surface, "sf-doctor") } }) - it("/gsd skill-health dispatches to gsd-skill-health surface", () => { - const outcome = dispatchBrowserSlashCommand("/gsd skill-health", {}) + it("/sf skill-health dispatches to sf-skill-health surface", () => { + const outcome = dispatchBrowserSlashCommand("/sf skill-health", {}) assert.equal(outcome.kind, "surface") if (outcome.kind === "surface") { - assert.equal(outcome.surface, "gsd-skill-health") + assert.equal(outcome.surface, "sf-skill-health") } }) - it("/gsd doctor fix dispatches to gsd-doctor surface with args", () => { - const outcome = dispatchBrowserSlashCommand("/gsd doctor fix", {}) + it("/sf doctor fix dispatches to sf-doctor surface with args", () => { + const outcome = dispatchBrowserSlashCommand("/sf doctor fix", {}) assert.equal(outcome.kind, "surface") if (outcome.kind === "surface") { - assert.equal(outcome.surface, "gsd-doctor") + assert.equal(outcome.surface, "sf-doctor") } }) }) @@ -289,19 +289,19 @@ describe("diagnostics dispatch→surface pipeline", () => { // ─── Block 4: Surface→section mapping (R103, R104, R105) ──────────────────── describe("diagnostics surface→section mapping", () => { - it("gsd-forensics surface maps to gsd-forensics section", () => { - const section = commandSurfaceSectionForRequest({ surface: "gsd-forensics" as any } as any) - assert.equal(section, "gsd-forensics") + it("sf-forensics surface maps to sf-forensics section", () => { + const section = commandSurfaceSectionForRequest({ surface: "sf-forensics" as any } as any) + assert.equal(section, 
"sf-forensics") }) - it("gsd-doctor surface maps to gsd-doctor section", () => { - const section = commandSurfaceSectionForRequest({ surface: "gsd-doctor" as any } as any) - assert.equal(section, "gsd-doctor") + it("sf-doctor surface maps to sf-doctor section", () => { + const section = commandSurfaceSectionForRequest({ surface: "sf-doctor" as any } as any) + assert.equal(section, "sf-doctor") }) - it("gsd-skill-health surface maps to gsd-skill-health section", () => { - const section = commandSurfaceSectionForRequest({ surface: "gsd-skill-health" as any } as any) - assert.equal(section, "gsd-skill-health") + it("sf-skill-health surface maps to sf-skill-health section", () => { + const section = commandSurfaceSectionForRequest({ surface: "sf-skill-health" as any } as any) + assert.equal(section, "sf-skill-health") }) }) diff --git a/src/tests/integration/web-live-interaction-contract.test.ts b/src/tests/integration/web-live-interaction-contract.test.ts index ce2d33141..fd80ee841 100644 --- a/src/tests/integration/web-live-interaction-contract.test.ts +++ b/src/tests/integration/web-live-interaction-contract.test.ts @@ -56,10 +56,10 @@ function attachJsonLineReader(stream: PassThrough, onLine: (line: string) => voi } function makeWorkspaceFixture(): { projectCwd: string; sessionsDir: string; cleanup: () => void } { - const root = mkdtempSync(join(tmpdir(), "gsd-web-live-")); + const root = mkdtempSync(join(tmpdir(), "sf-web-live-")); const projectCwd = join(root, "project"); const sessionsDir = join(root, "sessions"); - const milestoneDir = join(projectCwd, ".gsd", "milestones", "M001"); + const milestoneDir = join(projectCwd, ".sf", "milestones", "M001"); const sliceDir = join(milestoneDir, "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); @@ -132,15 +132,15 @@ function fakeWorkspaceIndex() { { id: "M001", title: "Demo", - roadmapPath: ".gsd/milestones/M001/M001-ROADMAP.md", + roadmapPath: ".sf/milestones/M001/M001-ROADMAP.md", slices: [ { id: "S01", 
title: "Demo", done: false, - planPath: ".gsd/milestones/M001/slices/S01/S01-PLAN.md", - tasksDir: ".gsd/milestones/M001/slices/S01/tasks", - tasks: [{ id: "T01", title: "Work", done: false, planPath: ".gsd/milestones/M001/slices/S01/tasks/T01-PLAN.md" }], + planPath: ".sf/milestones/M001/slices/S01/S01-PLAN.md", + tasksDir: ".sf/milestones/M001/slices/S01/tasks", + tasks: [{ id: "T01", title: "Work", done: false, planPath: ".sf/milestones/M001/slices/S01/tasks/T01-PLAN.md" }], }, ], }, diff --git a/src/tests/integration/web-live-state-contract.test.ts b/src/tests/integration/web-live-state-contract.test.ts index 94c82fce1..74b489877 100644 --- a/src/tests/integration/web-live-state-contract.test.ts +++ b/src/tests/integration/web-live-state-contract.test.ts @@ -54,10 +54,10 @@ function attachJsonLineReader(stream: PassThrough, onLine: (line: string) => voi } function makeWorkspaceFixture(): { projectCwd: string; sessionsDir: string; cleanup: () => void } { - const root = mkdtempSync(join(tmpdir(), "gsd-web-live-state-")); + const root = mkdtempSync(join(tmpdir(), "sf-web-live-state-")); const projectCwd = join(root, "project"); const sessionsDir = join(root, "sessions"); - const milestoneDir = join(projectCwd, ".gsd", "milestones", "M001"); + const milestoneDir = join(projectCwd, ".sf", "milestones", "M001"); const sliceDir = join(milestoneDir, "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); @@ -140,20 +140,20 @@ function fakeWorkspaceIndex() { { id: "M001", title: "Demo Milestone", - roadmapPath: ".gsd/milestones/M001/M001-ROADMAP.md", + roadmapPath: ".sf/milestones/M001/M001-ROADMAP.md", slices: [ { id: "S01", title: "Demo Slice", done: false, - planPath: ".gsd/milestones/M001/slices/S01/S01-PLAN.md", - tasksDir: ".gsd/milestones/M001/slices/S01/tasks", + planPath: ".sf/milestones/M001/slices/S01/S01-PLAN.md", + tasksDir: ".sf/milestones/M001/slices/S01/tasks", tasks: [ { id: "T01", title: "Wire boot", done: false, - planPath: 
".gsd/milestones/M001/slices/S01/tasks/T01-PLAN.md", + planPath: ".sf/milestones/M001/slices/S01/tasks/T01-PLAN.md", }, ], }, @@ -198,7 +198,7 @@ function fakeBootPayload(sessionPath: string) { return { project: { cwd: "/tmp/demo-project", - sessionsDir: "/tmp/demo-project/.gsd/sessions", + sessionsDir: "/tmp/demo-project/.sf/sessions", packageRoot: repoRoot, }, workspace: fakeWorkspaceIndex(), @@ -245,7 +245,7 @@ function fakeBootPayload(sessionPath: string) { bridge: { phase: "ready", projectCwd: "/tmp/demo-project", - projectSessionsDir: "/tmp/demo-project/.gsd/sessions", + projectSessionsDir: "/tmp/demo-project/.sf/sessions", packageRoot: repoRoot, startedAt: "2026-03-15T03:30:00.000Z", updatedAt: "2026-03-15T03:30:01.000Z", diff --git a/src/tests/integration/web-mode-assembled.test.ts b/src/tests/integration/web-mode-assembled.test.ts index 37ec62635..edf68d21b 100644 --- a/src/tests/integration/web-mode-assembled.test.ts +++ b/src/tests/integration/web-mode-assembled.test.ts @@ -64,10 +64,10 @@ function attachJsonLineReader(stream: PassThrough, onLine: (line: string) => voi } function makeWorkspaceFixture(): { projectCwd: string; sessionsDir: string; cleanup: () => void } { - const root = mkdtempSync(join(tmpdir(), "gsd-web-assembled-")); + const root = mkdtempSync(join(tmpdir(), "sf-web-assembled-")); const projectCwd = join(root, "project"); const sessionsDir = join(root, "sessions"); - const milestoneDir = join(projectCwd, ".gsd", "milestones", "M001"); + const milestoneDir = join(projectCwd, ".sf", "milestones", "M001"); const sliceDir = join(milestoneDir, "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); @@ -136,15 +136,15 @@ function fakeWorkspaceIndex() { { id: "M001", title: "Demo", - roadmapPath: ".gsd/milestones/M001/M001-ROADMAP.md", + roadmapPath: ".sf/milestones/M001/M001-ROADMAP.md", slices: [ { id: "S01", title: "Demo", done: false, - planPath: ".gsd/milestones/M001/slices/S01/S01-PLAN.md", - tasksDir: 
".gsd/milestones/M001/slices/S01/tasks", - tasks: [{ id: "T01", title: "Work", done: false, planPath: ".gsd/milestones/M001/slices/S01/tasks/T01-PLAN.md" }], + planPath: ".sf/milestones/M001/slices/S01/S01-PLAN.md", + tasksDir: ".sf/milestones/M001/slices/S01/tasks", + tasks: [{ id: "T01", title: "Work", done: false, planPath: ".sf/milestones/M001/slices/S01/tasks/T01-PLAN.md" }], }, ], }, @@ -1022,14 +1022,14 @@ test("assembled slash-command behavior keeps built-ins safe while preserving SF assert.match(builtInReject.notice ?? "", /blocked instead of falling through to the model/i); assert.equal(builtInReject.status, null); - // /gsd status is now a browser surface (S02), verify that - const gsdSurface = await submitBrowserInput("/gsd status"); + // /sf status is now a browser surface (S02), verify that + const gsdSurface = await submitBrowserInput("/sf status"); assert.equal(gsdSurface.outcome.kind, "surface"); - assert.equal(gsdSurface.outcome.surface, "gsd-status"); + assert.equal(gsdSurface.outcome.surface, "sf-status"); assert.equal(gsdSurface.status, null); - // /gsd auto is a passthrough subcommand — reaches the bridge as a prompt - const gsdPrompt = await submitBrowserInput("/gsd auto"); + // /sf auto is a passthrough subcommand — reaches the bridge as a prompt + const gsdPrompt = await submitBrowserInput("/sf auto"); assert.equal(gsdPrompt.outcome.kind, "prompt"); assert.equal(gsdPrompt.status, 200); assert.equal(gsdPrompt.body.command, "prompt"); @@ -1041,5 +1041,5 @@ test("assembled slash-command behavior keeps built-ins safe while preserving SF "only browser-executable slash commands should reach the live bridge; built-in surfaces/rejects must stay out of prompt text", ); const promptCommand = bridgeCommands.find((command) => command.type === "prompt"); - assert.equal(promptCommand?.message, "/gsd auto", "SF passthrough commands must stay on the extension prompt path"); + assert.equal(promptCommand?.message, "/sf auto", "SF passthrough commands must 
stay on the extension prompt path"); }); diff --git a/src/tests/integration/web-mode-cli.test.ts b/src/tests/integration/web-mode-cli.test.ts index fcaff99d9..1e8dc9a6e 100644 --- a/src/tests/integration/web-mode-cli.test.ts +++ b/src/tests/integration/web-mode-cli.test.ts @@ -20,9 +20,9 @@ test('package hooks declare a concrete staged web host', () => { const rootPackage = JSON.parse(readFileSync(join(projectRoot, 'package.json'), 'utf-8')) assert.equal(rootPackage.scripts['stage:web-host'], 'node scripts/stage-web-standalone.cjs') assert.equal(rootPackage.scripts['build:web-host'], 'npm --prefix web run build && npm run stage:web-host') - assert.equal(rootPackage.scripts['gsd'], 'node scripts/dev-cli.js') - assert.equal(rootPackage.scripts['gsd:web'], 'npm run build:pi && npm run copy-resources && node scripts/build-web-if-stale.cjs && node scripts/dev-cli.js --web') - assert.equal(rootPackage.scripts['gsd:web:stop'], 'node scripts/dev-cli.js web stop') + assert.equal(rootPackage.scripts['sf'], 'node scripts/dev-cli.js') + assert.equal(rootPackage.scripts['sf:web'], 'npm run build:pi && npm run copy-resources && node scripts/build-web-if-stale.cjs && node scripts/dev-cli.js --web') + assert.equal(rootPackage.scripts['sf:web:stop'], 'node scripts/dev-cli.js web stop') assert.ok(rootPackage.files.includes('dist/web')) const webPackage = JSON.parse(readFileSync(join(projectRoot, 'web', 'package.json'), 'utf-8')) @@ -36,7 +36,7 @@ test('web mode launcher defines or imports a browser opener', () => { }) test('cli.ts branches to web mode before interactive startup and preserves cwd-scoped launch inputs', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-cli-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-cli-')) const cwd = join(tmp, 'project space') mkdirSync(cwd, { recursive: true }) @@ -76,7 +76,7 @@ test('cli.ts branches to web mode before interactive startup and preserves cwd-s assert.deepEqual(launchInputs, { cwd, projectSessionsDir: 
cliWeb.getProjectSessionsDir(cwd), - agentDir: join(process.env.HOME || '', '.gsd', 'agent'), + agentDir: join(process.env.HOME || '', '.sf', 'agent'), host: undefined, port: undefined, allowedOrigins: undefined, @@ -84,7 +84,7 @@ test('cli.ts branches to web mode before interactive startup and preserves cwd-s }) test('launchWebMode prefers the packaged standalone host and opens the resolved URL', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-host-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-host-')) const standaloneRoot = join(tmp, 'dist', 'web', 'standalone') const serverPath = join(standaloneRoot, 'server.js') mkdirSync(standaloneRoot, { recursive: true }) @@ -106,8 +106,8 @@ test('launchWebMode prefers the packaged standalone host and opens the resolved const status = await webMode.launchWebMode( { cwd: '/tmp/current-project', - projectSessionsDir: '/tmp/.gsd/sessions/--tmp-current-project--', - agentDir: '/tmp/.gsd/agent', + projectSessionsDir: '/tmp/.sf/sessions/--tmp-current-project--', + agentDir: '/tmp/.sf/agent', packageRoot: tmp, }, { @@ -174,7 +174,7 @@ test('launchWebMode prefers the packaged standalone host and opens the resolved SF_WEB_PORT: '45123', SF_WEB_AUTH_TOKEN: authToken, SF_WEB_PROJECT_CWD: '/tmp/current-project', - SF_WEB_PROJECT_SESSIONS_DIR: '/tmp/.gsd/sessions/--tmp-current-project--', + SF_WEB_PROJECT_SESSIONS_DIR: '/tmp/.sf/sessions/--tmp-current-project--', SF_WEB_PACKAGE_ROOT: tmp, SF_WEB_HOST_KIND: 'packaged-standalone', }, @@ -188,7 +188,7 @@ test('launchWebMode prefers the packaged standalone host and opens the resolved }) test('stopWebMode kills process by PID and removes PID file', (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-stop-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-stop-')) const pidFilePath = join(tmp, 'web-server.pid') let stderrOutput = '' let killedPid: number | undefined @@ -211,7 +211,7 @@ test('stopWebMode kills process by PID and removes PID file', (t) => { }) 
test('stopWebMode reports error when no PID file exists', (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-stop-nopid-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-stop-nopid-')) const pidFilePath = join(tmp, 'web-server.pid') let stderrOutput = '' @@ -230,7 +230,7 @@ test('stopWebMode reports error when no PID file exists', (t) => { }) test('runWebCliBranch handles "web stop" subcommand without --web flag', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-branch-stop-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-branch-stop-')) const pidFilePath = join(tmp, 'web-server.pid') let stderrOutput = '' @@ -276,8 +276,8 @@ test('parseCliArgs does not capture --web followed by a flag as path', () => { assert.equal(flags.model, 'test') }) -test('gsd web <path> is handled as web start with path', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-path-')) +test('sf web <path> is handled as web start with path', async (t) => { + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-path-')) const projectDir = join(tmp, 'my-project') mkdirSync(projectDir, { recursive: true }) let launchedCwd = '' @@ -311,8 +311,8 @@ test('gsd web <path> is handled as web start with path', async (t) => { assert.equal(launchedCwd, projectDir) }) -test('gsd web start <path> resolves path and launches', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-start-path-')) +test('sf web start <path> resolves path and launches', async (t) => { + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-start-path-')) const projectDir = join(tmp, 'another-project') mkdirSync(projectDir, { recursive: true }) let launchedCwd = '' @@ -346,8 +346,8 @@ test('gsd web start <path> resolves path and launches', async (t) => { assert.equal(launchedCwd, projectDir) }) -test('gsd --web <path> resolves path and launches', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-flag-path-')) +test('sf --web <path> resolves path and launches', async (t) => { + 
const tmp = mkdtempSync(join(tmpdir(), 'sf-web-flag-path-')) const projectDir = join(tmp, 'flagged-project') mkdirSync(projectDir, { recursive: true }) let launchedCwd = '' @@ -382,10 +382,10 @@ test('gsd --web <path> resolves path and launches', async (t) => { assert.equal(launchedCwd, projectDir) }) -test('gsd --web <nonexistent-path> fails with clear error', async () => { +test('sf --web <nonexistent-path> fails with clear error', async () => { let stderrOutput = '' - const flags = cliWeb.parseCliArgs(['node', 'dist/loader.js', '--web', '/tmp/nonexistent-gsd-test-path-xyz']) + const flags = cliWeb.parseCliArgs(['node', 'dist/loader.js', '--web', '/tmp/nonexistent-sf-test-path-xyz']) const result = await cliWeb.runWebCliBranch(flags, { stderr: { write: (chunk: string) => { stderrOutput += chunk; return true } }, }) @@ -401,7 +401,7 @@ test('gsd --web <nonexistent-path> fails with clear error', async () => { }) test('launch failure surfaces status and reason before browser open', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-missing-host-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-missing-host-')) let openedUrl = '' let stderrOutput = '' @@ -410,8 +410,8 @@ test('launch failure surfaces status and reason before browser open', async (t) const status = await webMode.launchWebMode( { cwd: '/tmp/current-project', - projectSessionsDir: '/tmp/.gsd/sessions/--tmp-current-project--', - agentDir: '/tmp/.gsd/agent', + projectSessionsDir: '/tmp/.sf/sessions/--tmp-current-project--', + agentDir: '/tmp/.sf/agent', packageRoot: tmp, }, { @@ -440,7 +440,7 @@ test('launch failure surfaces status and reason before browser open', async (t) // ─── Instance registry tests ───────────────────────────────────────── test('registerInstance and readInstanceRegistry round-trip', (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-registry-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-registry-')) const registryPath = join(tmp, 'web-instances.json') 
t.after(() => { rmSync(tmp, { recursive: true, force: true }) }); @@ -456,7 +456,7 @@ test('registerInstance and readInstanceRegistry round-trip', (t) => { }) test('unregisterInstance removes a single entry', (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-unreg-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-unreg-')) const registryPath = join(tmp, 'web-instances.json') t.after(() => { rmSync(tmp, { recursive: true, force: true }) }); @@ -484,7 +484,7 @@ test('stopWebMode with projectCwd reports not-found when not in registry', () => assert.match(stderrOutput, /No web server running/) }) -test('gsd web stop all is parsed and dispatched', async () => { +test('sf web stop all is parsed and dispatched', async () => { let stopOptions: { projectCwd?: string; all?: boolean } | undefined const flags = cliWeb.parseCliArgs(['node', 'dist/loader.js', 'web', 'stop', 'all']) @@ -505,8 +505,8 @@ test('gsd web stop all is parsed and dispatched', async () => { assert.equal(stopOptions?.projectCwd, undefined) }) -test('gsd web stop <path> is parsed and dispatched with resolved path', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-stop-path-')) +test('sf web stop <path> is parsed and dispatched with resolved path', async (t) => { + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-stop-path-')) let stopOptions: { projectCwd?: string; all?: boolean } | undefined t.after(() => { rmSync(tmp, { recursive: true, force: true }) }); @@ -531,7 +531,7 @@ test('gsd web stop <path> is parsed and dispatched with resolved path', async (t // ─── Context-aware launch detection tests ────────────────────────────── test('resolveContextAwareCwd returns project cwd when inside a project under dev root', (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-ctx-aware-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-ctx-aware-')) const devRoot = join(tmp, 'devroot') const projectA = join(devRoot, 'projectA') const prefsPath = join(tmp, 'web-preferences.json') @@ -546,7 
+546,7 @@ test('resolveContextAwareCwd returns project cwd when inside a project under dev }) test('resolveContextAwareCwd returns cwd unchanged when AT dev root', (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-ctx-aware-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-ctx-aware-')) const devRoot = join(tmp, 'devroot') const prefsPath = join(tmp, 'web-preferences.json') @@ -560,7 +560,7 @@ test('resolveContextAwareCwd returns cwd unchanged when AT dev root', (t) => { }) test('resolveContextAwareCwd returns cwd unchanged when no dev root configured', (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-ctx-aware-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-ctx-aware-')) const prefsPath = join(tmp, 'web-preferences.json') const cwd = join(tmp, 'somedir') @@ -574,7 +574,7 @@ test('resolveContextAwareCwd returns cwd unchanged when no dev root configured', }) test('resolveContextAwareCwd returns cwd unchanged when prefs file missing', (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-ctx-aware-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-ctx-aware-')) const prefsPath = join(tmp, 'nonexistent-prefs.json') const cwd = join(tmp, 'somedir') @@ -587,7 +587,7 @@ test('resolveContextAwareCwd returns cwd unchanged when prefs file missing', (t) }) test('resolveContextAwareCwd returns cwd unchanged when dev root path is stale', (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-ctx-aware-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-ctx-aware-')) const prefsPath = join(tmp, 'web-preferences.json') const cwd = join(tmp, 'somedir') const staleDevRoot = join(tmp, 'nonexistent-devroot') @@ -602,7 +602,7 @@ test('resolveContextAwareCwd returns cwd unchanged when dev root path is stale', }) test('resolveContextAwareCwd resolves nested cwd to one-level-deep project', (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-ctx-aware-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-ctx-aware-')) const devRoot = join(tmp, 'devroot') const projectA = 
join(devRoot, 'projectA') const nested = join(projectA, 'src', 'components', 'deep') @@ -618,7 +618,7 @@ test('resolveContextAwareCwd resolves nested cwd to one-level-deep project', (t) }) test('resolveContextAwareCwd returns cwd unchanged when outside dev root', (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-ctx-aware-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-ctx-aware-')) const devRoot = join(tmp, 'devroot') const outsideDir = join(tmp, 'elsewhere') const prefsPath = join(tmp, 'web-preferences.json') @@ -636,7 +636,7 @@ test('resolveContextAwareCwd returns cwd unchanged when outside dev root', (t) = // ─── Stale instance cleanup tests ───────────────────────────────────── test('launchWebMode kills stale instance for same cwd before spawning', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-stale-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-stale-')) const standaloneRoot = join(tmp, 'dist', 'web', 'standalone') const serverPath = join(standaloneRoot, 'server.js') mkdirSync(standaloneRoot, { recursive: true }) @@ -657,8 +657,8 @@ test('launchWebMode kills stale instance for same cwd before spawning', async (t const status = await webMode.launchWebMode( { cwd, - projectSessionsDir: '/tmp/.gsd/sessions/stale', - agentDir: '/tmp/.gsd/agent', + projectSessionsDir: '/tmp/.sf/sessions/stale', + agentDir: '/tmp/.sf/agent', packageRoot: tmp, }, { @@ -698,7 +698,7 @@ test('launchWebMode kills stale instance for same cwd before spawning', async (t }) test('launchWebMode does not log cleanup when no stale instance exists', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-no-stale-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-no-stale-')) const standaloneRoot = join(tmp, 'dist', 'web', 'standalone') const serverPath = join(standaloneRoot, 'server.js') mkdirSync(standaloneRoot, { recursive: true }) @@ -714,8 +714,8 @@ test('launchWebMode does not log cleanup when no stale instance exists', async ( const status = 
await webMode.launchWebMode( { cwd: '/tmp/clean-project', - projectSessionsDir: '/tmp/.gsd/sessions/clean', - agentDir: '/tmp/.gsd/agent', + projectSessionsDir: '/tmp/.sf/sessions/clean', + agentDir: '/tmp/.sf/agent', packageRoot: tmp, }, { diff --git a/src/tests/integration/web-mode-network-flags.test.ts b/src/tests/integration/web-mode-network-flags.test.ts index e91913bcb..547432425 100644 --- a/src/tests/integration/web-mode-network-flags.test.ts +++ b/src/tests/integration/web-mode-network-flags.test.ts @@ -66,7 +66,7 @@ test('parseCliArgs does not set network flags when not provided', () => { // ─── launchWebMode env forwarding ──────────────────────────────────── test('launchWebMode forwards custom host, port, and allowed origins to subprocess env', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-net-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-net-')) const standaloneRoot = join(tmp, 'dist', 'web', 'standalone') const serverPath = join(standaloneRoot, 'server.js') mkdirSync(standaloneRoot, { recursive: true }) @@ -79,8 +79,8 @@ test('launchWebMode forwards custom host, port, and allowed origins to subproces const status = await webMode.launchWebMode( { cwd: '/tmp/project', - projectSessionsDir: '/tmp/.gsd/sessions', - agentDir: '/tmp/.gsd/agent', + projectSessionsDir: '/tmp/.sf/sessions', + agentDir: '/tmp/.sf/agent', packageRoot: tmp, host: '0.0.0.0', port: 8080, @@ -113,7 +113,7 @@ test('launchWebMode forwards custom host, port, and allowed origins to subproces }) test('launchWebMode omits SF_WEB_ALLOWED_ORIGINS when none provided', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-no-origins-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-no-origins-')) const standaloneRoot = join(tmp, 'dist', 'web', 'standalone') const serverPath = join(standaloneRoot, 'server.js') mkdirSync(standaloneRoot, { recursive: true }) @@ -126,8 +126,8 @@ test('launchWebMode omits SF_WEB_ALLOWED_ORIGINS when none provided', async (t) 
await webMode.launchWebMode( { cwd: '/tmp/project', - projectSessionsDir: '/tmp/.gsd/sessions', - agentDir: '/tmp/.gsd/agent', + projectSessionsDir: '/tmp/.sf/sessions', + agentDir: '/tmp/.sf/agent', packageRoot: tmp, }, { @@ -151,7 +151,7 @@ test('launchWebMode omits SF_WEB_ALLOWED_ORIGINS when none provided', async (t) // ─── runWebCliBranch end-to-end forwarding ─────────────────────────── test('runWebCliBranch forwards --host, --port, --allowed-origins to launchWebMode', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-branch-flags-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-web-branch-flags-')) const projectDir = join(tmp, 'project') mkdirSync(projectDir, { recursive: true }) diff --git a/src/tests/integration/web-mode-onboarding.test.ts b/src/tests/integration/web-mode-onboarding.test.ts index 096797a20..9f8b3ce79 100644 --- a/src/tests/integration/web-mode-onboarding.test.ts +++ b/src/tests/integration/web-mode-onboarding.test.ts @@ -63,10 +63,10 @@ function attachJsonLineReader(stream: PassThrough, onLine: (line: string) => voi } function makeWorkspaceFixture(): { projectCwd: string; sessionsDir: string; cleanup: () => void } { - const root = mkdtempSync(join(tmpdir(), "gsd-web-onboarding-integration-")); + const root = mkdtempSync(join(tmpdir(), "sf-web-onboarding-integration-")); const projectCwd = join(root, "project"); const sessionsDir = join(root, "sessions"); - const milestoneDir = join(projectCwd, ".gsd", "milestones", "M001"); + const milestoneDir = join(projectCwd, ".sf", "milestones", "M001"); const sliceDir = join(milestoneDir, "slices", "S02"); const tasksDir = join(sliceDir, "tasks"); @@ -138,20 +138,20 @@ function fakeWorkspaceIndex() { { id: "M001", title: "Demo Milestone", - roadmapPath: ".gsd/milestones/M001/M001-ROADMAP.md", + roadmapPath: ".sf/milestones/M001/M001-ROADMAP.md", slices: [ { id: "S02", title: "First-run setup wizard", done: false, - planPath: ".gsd/milestones/M001/slices/S02/S02-PLAN.md", - tasksDir: 
".gsd/milestones/M001/slices/S02/tasks", + planPath: ".sf/milestones/M001/slices/S02/S02-PLAN.md", + tasksDir: ".sf/milestones/M001/slices/S02/tasks", tasks: [ { id: "T02", title: "Enforce the gate and refresh bridge auth after successful setup", done: false, - planPath: ".gsd/milestones/M001/slices/S02/tasks/T02-PLAN.md", + planPath: ".sf/milestones/M001/slices/S02/tasks/T02-PLAN.md", }, ], }, @@ -425,13 +425,13 @@ test("refresh failures keep the workspace locked and expose the failed bridge-re assert.match(failedBootPayload.onboarding.bridgeAuthRefresh.error, /could not attach/i); }); -test("fresh gsd --web browser onboarding stays locked on failed validation and unlocks after a successful retry", async (t) => { +test("fresh sf --web browser onboarding stays locked on failed validation and unlocks after a successful retry", async (t) => { if (process.platform === "win32") { t.skip("runtime launch test uses POSIX browser-open stubs") return } - const tempRoot = mkdtempSync(join(tmpdir(), "gsd-web-onboarding-runtime-")) + const tempRoot = mkdtempSync(join(tmpdir(), "sf-web-onboarding-runtime-")) const tempHome = join(tempRoot, "home") const browserLogPath = join(tempRoot, "browser-open.log") let port: number | null = null diff --git a/src/tests/integration/web-mode-runtime-fixtures.ts b/src/tests/integration/web-mode-runtime-fixtures.ts index 7778a3482..a4590d913 100644 --- a/src/tests/integration/web-mode-runtime-fixtures.ts +++ b/src/tests/integration/web-mode-runtime-fixtures.ts @@ -36,10 +36,10 @@ function canonicalizePath(path: string): string { function sessionBaseVariants(baseSessionsDir: string): string[] { const variants = new Set<string>([baseSessionsDir]) const normalized = baseSessionsDir.replace(/\\/g, "/") - if (normalized.endsWith("/.gsd/sessions")) { + if (normalized.endsWith("/.sf/sessions")) { variants.add(join(dirname(baseSessionsDir), "agent", "sessions")) } - if (normalized.endsWith("/.gsd/agent/sessions")) { + if 
(normalized.endsWith("/.sf/agent/sessions")) { variants.add(join(dirname(dirname(baseSessionsDir)), "sessions")) } return [...variants] @@ -126,9 +126,9 @@ function writeSeededSessionFile(options: { } export function makeRuntimeWorkspaceFixture(): RuntimeWorkspaceFixture { - const root = mkdtempSync(join(tmpdir(), "gsd-web-runtime-fixture-")) + const root = mkdtempSync(join(tmpdir(), "sf-web-runtime-fixture-")) const projectCwd = join(root, "project") - const milestoneDir = join(projectCwd, ".gsd", "milestones", "M001") + const milestoneDir = join(projectCwd, ".sf", "milestones", "M001") const sliceDir = join(milestoneDir, "slices", "S02") const tasksDir = join(sliceDir, "tasks") @@ -155,9 +155,9 @@ export function makeRuntimeWorkspaceFixture(): RuntimeWorkspaceFixture { } export function makeInterruptedRunRuntimeFixture(): RuntimeWorkspaceFixture { - const root = mkdtempSync(join(tmpdir(), "gsd-web-runtime-recovery-")) + const root = mkdtempSync(join(tmpdir(), "sf-web-runtime-recovery-")) const projectCwd = join(root, "project") - const milestoneDir = join(projectCwd, ".gsd", "milestones", "M002") + const milestoneDir = join(projectCwd, ".sf", "milestones", "M002") const sliceDir = join(milestoneDir, "slices", "S04") const tasksDir = join(sliceDir, "tasks") @@ -297,7 +297,7 @@ export function seedInterruptedRunRecoverySessions(options: { type: "toolCall", id: "tool-read-1", name: "read", - arguments: { path: ".gsd/milestones/M002/slices/S04/S04-PLAN.md" }, + arguments: { path: ".sf/milestones/M002/slices/S04/S04-PLAN.md" }, }, { type: "toolCall", diff --git a/src/tests/integration/web-mode-runtime-harness.ts b/src/tests/integration/web-mode-runtime-harness.ts index 3083d6bc9..07980e1b2 100644 --- a/src/tests/integration/web-mode-runtime-harness.ts +++ b/src/tests/integration/web-mode-runtime-harness.ts @@ -6,7 +6,7 @@ import { join } from "node:path" import type { Page, Request, Response } from "playwright" const projectRoot = process.cwd() -const resolveTsPath = 
join(projectRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") +const resolveTsPath = join(projectRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs") const loaderPath = join(projectRoot, "src", "loader.ts") const builtAgentEntryPath = join(projectRoot, "packages", "pi-coding-agent", "dist", "index.js") const packagedWebHostPath = join(projectRoot, "dist", "web", "standalone", "server.js") @@ -108,7 +108,7 @@ export type RuntimeReadyProof<TBoot = unknown> = { } export function writePreseededAuthFile(tempHome: string): void { - const agentDir = join(tempHome, ".gsd", "agent") + const agentDir = join(tempHome, ".sf", "agent") mkdirSync(agentDir, { recursive: true, mode: 0o700 }) const authPath = join(agentDir, "auth.json") const fakeCredential = { type: "api_key", key: "sk-ant-test-fake-key-for-runtime-test" } @@ -155,7 +155,7 @@ export function ensureRuntimeArtifacts(): void { } export function parseStartedUrl(stderr: string): string { - const match = stderr.match(/\[gsd\] Web mode startup: status=started[^\n]*url=(http:\/\/[^\s]+)/) + const match = stderr.match(/\[sf\] Web mode startup: status=started[^\n]*url=(http:\/\/[^\s]+)/) if (!match) { throw new Error(`Did not find successful web startup line in stderr:\n${stderr}`) } @@ -163,7 +163,7 @@ export function parseStartedUrl(stderr: string): string { } function parseReadyAuthToken(stderr: string): string | null { - const match = stderr.match(/\[gsd\] Ready → http:\/\/[^\s]+\/#token=([a-f0-9]{64})/) + const match = stderr.match(/\[sf\] Ready → http:\/\/[^\s]+\/#token=([a-f0-9]{64})/) return match?.[1] ?? null } @@ -176,7 +176,7 @@ export async function launchPackagedWebHost(options: { }): Promise<RuntimeLaunchResult> { ensureRuntimeArtifacts() - mkdirSync(join(options.tempHome, ".gsd"), { recursive: true }) + mkdirSync(join(options.tempHome, ".sf"), { recursive: true }) const browserLogPath = options.browserLogPath ?? 
join(options.tempHome, "browser-open.log") const fakeBin = join(options.tempHome, "fake-bin") mkdirSync(fakeBin, { recursive: true }) @@ -216,7 +216,7 @@ export async function launchPackagedWebHost(options: { const timeout = setTimeout(() => { child.kill("SIGTERM") - finish(new Error(`Timed out waiting for gsd --web to exit. stderr so far:\n${stderr}`)) + finish(new Error(`Timed out waiting for sf --web to exit. stderr so far:\n${stderr}`)) }, options.timeoutMs ?? 180_000) child.stdout.on("data", (chunk: Buffer) => { diff --git a/src/tests/integration/web-mode-windows-hide.test.ts b/src/tests/integration/web-mode-windows-hide.test.ts index c1b2902f5..cd2ca31fc 100644 --- a/src/tests/integration/web-mode-windows-hide.test.ts +++ b/src/tests/integration/web-mode-windows-hide.test.ts @@ -12,7 +12,7 @@ const webMode = await import("../../web-mode.ts"); // --------------------------------------------------------------------------- test("launchWebMode passes windowsHide: true in spawn options", async (t) => { - const tmp = mkdtempSync(join(tmpdir(), "gsd-web-winhide-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-web-winhide-")); const standaloneRoot = join(tmp, "dist", "web", "standalone"); const serverPath = join(standaloneRoot, "server.js"); mkdirSync(standaloneRoot, { recursive: true }); @@ -30,8 +30,8 @@ test("launchWebMode passes windowsHide: true in spawn options", async (t) => { const status = await webMode.launchWebMode( { cwd: "/tmp/winhide-project", - projectSessionsDir: "/tmp/.gsd/sessions/winhide", - agentDir: "/tmp/.gsd/agent", + projectSessionsDir: "/tmp/.sf/sessions/winhide", + agentDir: "/tmp/.sf/agent", packageRoot: tmp, }, { @@ -66,7 +66,7 @@ test("launchWebMode passes windowsHide: true in spawn options", async (t) => { }); test("launchWebMode source-dev host also passes windowsHide: true", async (t) => { - const tmp = mkdtempSync(join(tmpdir(), "gsd-web-winhide-src-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-web-winhide-src-")); const webRoot 
= join(tmp, "web"); mkdirSync(webRoot, { recursive: true }); writeFileSync(join(webRoot, "package.json"), '{"name":"web"}\n'); @@ -83,8 +83,8 @@ test("launchWebMode source-dev host also passes windowsHide: true", async (t) => const status = await webMode.launchWebMode( { cwd: "/tmp/winhide-src-project", - projectSessionsDir: "/tmp/.gsd/sessions/winhide-src", - agentDir: "/tmp/.gsd/agent", + projectSessionsDir: "/tmp/.sf/sessions/winhide-src", + agentDir: "/tmp/.sf/agent", packageRoot: tmp, }, { diff --git a/src/tests/integration/web-multi-project-contract.test.ts b/src/tests/integration/web-multi-project-contract.test.ts index f228b3510..b7f013b42 100644 --- a/src/tests/integration/web-multi-project-contract.test.ts +++ b/src/tests/integration/web-multi-project-contract.test.ts @@ -52,10 +52,10 @@ function attachJsonLineReader(stream: PassThrough, onLine: (line: string) => voi } function makeWorkspaceFixture(label: string): { projectCwd: string; sessionsDir: string; cleanup: () => void } { - const root = mkdtempSync(join(tmpdir(), `gsd-multi-project-${label}-`)); + const root = mkdtempSync(join(tmpdir(), `sf-multi-project-${label}-`)); const projectCwd = join(root, "project"); const sessionsDir = join(root, "sessions"); - const milestoneDir = join(projectCwd, ".gsd", "milestones", "M001"); + const milestoneDir = join(projectCwd, ".sf", "milestones", "M001"); const sliceDir = join(milestoneDir, "slices", "S01"); const tasksDir = join(sliceDir, "tasks"); @@ -112,20 +112,20 @@ function fakeWorkspaceIndex() { { id: "M001", title: "Demo Milestone", - roadmapPath: ".gsd/milestones/M001/M001-ROADMAP.md", + roadmapPath: ".sf/milestones/M001/M001-ROADMAP.md", slices: [ { id: "S01", title: "Demo Slice", done: false, - planPath: ".gsd/milestones/M001/slices/S01/S01-PLAN.md", - tasksDir: ".gsd/milestones/M001/slices/S01/tasks", + planPath: ".sf/milestones/M001/slices/S01/S01-PLAN.md", + tasksDir: ".sf/milestones/M001/slices/S01/tasks", tasks: [ { id: "T01", title: "Wire boot", 
done: false, - planPath: ".gsd/milestones/M001/slices/S01/tasks/T01-PLAN.md", + planPath: ".sf/milestones/M001/slices/S01/tasks/T01-PLAN.md", }, ], }, diff --git a/src/tests/integration/web-onboarding-contract.test.ts b/src/tests/integration/web-onboarding-contract.test.ts index fd0ced58e..539f690c5 100644 --- a/src/tests/integration/web-onboarding-contract.test.ts +++ b/src/tests/integration/web-onboarding-contract.test.ts @@ -116,10 +116,10 @@ function projectRequest(projectCwd: string, url: string, init?: RequestInit): Re } function makeWorkspaceFixture(): { projectCwd: string; sessionsDir: string; cleanup: () => void } { - const root = mkdtempSync(join(tmpdir(), "gsd-web-onboarding-")); + const root = mkdtempSync(join(tmpdir(), "sf-web-onboarding-")); const projectCwd = join(root, "project"); const sessionsDir = join(root, "sessions"); - const milestoneDir = join(projectCwd, ".gsd", "milestones", "M001"); + const milestoneDir = join(projectCwd, ".sf", "milestones", "M001"); const sliceDir = join(milestoneDir, "slices", "S02"); const tasksDir = join(sliceDir, "tasks"); @@ -191,20 +191,20 @@ function fakeWorkspaceIndex() { { id: "M001", title: "Demo Milestone", - roadmapPath: ".gsd/milestones/M001/M001-ROADMAP.md", + roadmapPath: ".sf/milestones/M001/M001-ROADMAP.md", slices: [ { id: "S02", title: "First-run setup wizard", done: false, - planPath: ".gsd/milestones/M001/slices/S02/S02-PLAN.md", - tasksDir: ".gsd/milestones/M001/slices/S02/tasks", + planPath: ".sf/milestones/M001/slices/S02/S02-PLAN.md", + tasksDir: ".sf/milestones/M001/slices/S02/tasks", tasks: [ { id: "T01", title: "Establish shared onboarding auth truth and browser setup API", done: false, - planPath: ".gsd/milestones/M001/slices/S02/tasks/T01-PLAN.md", + planPath: ".sf/milestones/M001/slices/S02/tasks/T01-PLAN.md", }, ], }, diff --git a/src/tests/integration/web-onboarding-presentation.test.ts b/src/tests/integration/web-onboarding-presentation.test.ts index 8cb297c2b..fb337c4a0 100644 --- 
a/src/tests/integration/web-onboarding-presentation.test.ts +++ b/src/tests/integration/web-onboarding-presentation.test.ts @@ -1,7 +1,7 @@ import test from "node:test" import assert from "node:assert/strict" -const { getOnboardingPresentation } = await import("../../../web/lib/gsd-workspace-store.tsx") +const { getOnboardingPresentation } = await import("../../../web/lib/sf-workspace-store.tsx") function makeOnboardingState(overrides: Record<string, unknown> = {}) { return { diff --git a/src/tests/integration/web-project-discovery-contract.test.ts b/src/tests/integration/web-project-discovery-contract.test.ts index 51ca44f93..a401acbfc 100644 --- a/src/tests/integration/web-project-discovery-contract.test.ts +++ b/src/tests/integration/web-project-discovery-contract.test.ts @@ -12,7 +12,7 @@ import { detectMonorepo } from "../../web/bridge-service.ts"; // Fixture setup — standard multi-project root // --------------------------------------------------------------------------- -const tempRoot = mkdtempSync(join(tmpdir(), "gsd-project-discovery-")); +const tempRoot = mkdtempSync(join(tmpdir(), "sf-project-discovery-")); // project-a: brownfield (package.json + .git) const projectA = join(tempRoot, "project-a"); @@ -20,10 +20,10 @@ mkdirSync(projectA); mkdirSync(join(projectA, ".git")); writeFileSync(join(projectA, "package.json"), "{}"); -// project-b: empty-gsd (.gsd folder, no milestones) +// project-b: empty-sf (.sf folder, no milestones) const projectB = join(tempRoot, "project-b"); mkdirSync(projectB); -mkdirSync(join(projectB, ".gsd")); +mkdirSync(join(projectB, ".sf")); // project-c: brownfield (Cargo.toml) const projectC = join(tempRoot, "project-c"); @@ -45,7 +45,7 @@ mkdirSync(join(tempRoot, "node_modules")); // --------------------------------------------------------------------------- // monorepo-pnpm: detected via pnpm-workspace.yaml -const monorepoPnpm = mkdtempSync(join(tmpdir(), "gsd-mono-pnpm-")); +const monorepoPnpm = mkdtempSync(join(tmpdir(), 
"sf-mono-pnpm-")); mkdirSync(join(monorepoPnpm, ".git")); writeFileSync(join(monorepoPnpm, "package.json"), '{"name":"my-monorepo"}'); writeFileSync(join(monorepoPnpm, "pnpm-workspace.yaml"), 'packages:\n - "packages/*"'); @@ -54,7 +54,7 @@ mkdirSync(join(monorepoPnpm, "packages", "pkg-a")); mkdirSync(join(monorepoPnpm, "packages", "pkg-b")); // monorepo-lerna: detected via lerna.json -const monorepoLerna = mkdtempSync(join(tmpdir(), "gsd-mono-lerna-")); +const monorepoLerna = mkdtempSync(join(tmpdir(), "sf-mono-lerna-")); mkdirSync(join(monorepoLerna, ".git")); writeFileSync(join(monorepoLerna, "package.json"), '{"name":"lerna-mono"}'); writeFileSync(join(monorepoLerna, "lerna.json"), '{"version":"1.0.0"}'); @@ -62,7 +62,7 @@ mkdirSync(join(monorepoLerna, "backend")); mkdirSync(join(monorepoLerna, "frontend")); // monorepo-workspaces: detected via package.json workspaces field -const monorepoWorkspaces = mkdtempSync(join(tmpdir(), "gsd-mono-ws-")); +const monorepoWorkspaces = mkdtempSync(join(tmpdir(), "sf-mono-ws-")); mkdirSync(join(monorepoWorkspaces, ".git")); writeFileSync(join(monorepoWorkspaces, "package.json"), '{"name":"ws-mono","workspaces":["packages/*"]}'); mkdirSync(join(monorepoWorkspaces, "packages")); @@ -70,7 +70,7 @@ mkdirSync(join(monorepoWorkspaces, "packages", "core")); mkdirSync(join(monorepoWorkspaces, "packages", "ui")); // monorepo-turbo: detected via turbo.json -const monorepoTurbo = mkdtempSync(join(tmpdir(), "gsd-mono-turbo-")); +const monorepoTurbo = mkdtempSync(join(tmpdir(), "sf-mono-turbo-")); mkdirSync(join(monorepoTurbo, ".git")); writeFileSync(join(monorepoTurbo, "package.json"), '{"name":"turbo-mono"}'); writeFileSync(join(monorepoTurbo, "turbo.json"), '{"pipeline":{}}'); @@ -78,7 +78,7 @@ mkdirSync(join(monorepoTurbo, "apps")); mkdirSync(join(monorepoTurbo, "packages")); // monorepo-nx: detected via nx.json -const monorepoNx = mkdtempSync(join(tmpdir(), "gsd-mono-nx-")); +const monorepoNx = mkdtempSync(join(tmpdir(), 
"sf-mono-nx-")); mkdirSync(join(monorepoNx, ".git")); writeFileSync(join(monorepoNx, "package.json"), '{"name":"nx-mono"}'); writeFileSync(join(monorepoNx, "nx.json"), '{}'); @@ -86,7 +86,7 @@ mkdirSync(join(monorepoNx, "libs")); mkdirSync(join(monorepoNx, "apps")); // non-monorepo: plain project with package.json (no workspaces, no marker files) -const plainProject = mkdtempSync(join(tmpdir(), "gsd-plain-project-")); +const plainProject = mkdtempSync(join(tmpdir(), "sf-plain-project-")); mkdirSync(join(plainProject, ".git")); writeFileSync(join(plainProject, "package.json"), '{"name":"plain","dependencies":{}}'); mkdirSync(join(plainProject, "src")); @@ -130,11 +130,11 @@ describe("project-discovery", () => { assert.equal(a.signals.hasGitRepo, true); }); - test("project-b is detected as empty-gsd", () => { + test("project-b is detected as empty-sf", () => { const results = discoverProjects(tempRoot); const b = results.find(r => r.name === "project-b"); assert.ok(b, "project-b not found"); - assert.equal(b.kind, "empty-gsd"); + assert.equal(b.kind, "empty-sf"); assert.equal(b.signals.hasGsdFolder, true); }); @@ -263,7 +263,7 @@ describe("project-discovery with monorepo root as devRoot", () => { assert.ok(results.some(r => r.name === "src"), "should find src directory"); }); - test("monorepo entry has correct kind (brownfield when no .gsd)", () => { + test("monorepo entry has correct kind (brownfield when no .sf)", () => { const results = discoverProjects(monorepoPnpm); assert.equal(results[0].kind, "brownfield"); }); diff --git a/src/tests/integration/web-project-tab-preservation.test.ts b/src/tests/integration/web-project-tab-preservation.test.ts index 4b7b5d2d1..439654e4f 100644 --- a/src/tests/integration/web-project-tab-preservation.test.ts +++ b/src/tests/integration/web-project-tab-preservation.test.ts @@ -8,7 +8,7 @@ import assert from "node:assert/strict"; // Bug #2711: Switching projects always returns to dashboard. 
// // Root cause: handleSelectProject in ProjectsPanel dispatched -// gsd:navigate-view with { view: "dashboard" } on every switch. +// sf:navigate-view with { view: "dashboard" } on every switch. // Additionally, the viewRestored flag in WorkspaceChrome was never // reset when the project changed, so the per-project sessionStorage // restore could not fire for the new project. @@ -52,7 +52,7 @@ const KNOWN_VIEWS = new Set([ ]); function viewStorageKey(projectCwd: string): string { - return `gsd-active-view:${projectCwd}`; + return `sf-active-view:${projectCwd}`; } // ── Simulated WorkspaceChrome view-restore logic ───────────────────────── diff --git a/src/tests/integration/web-project-url.test.ts b/src/tests/integration/web-project-url.test.ts index bbe9f918c..7d50c7a89 100644 --- a/src/tests/integration/web-project-url.test.ts +++ b/src/tests/integration/web-project-url.test.ts @@ -19,14 +19,14 @@ test("buildProjectPath appends project while preserving existing query params", test("buildProjectAbsoluteUrl produces a same-origin URL with the active project scope", () => { const url = buildProjectAbsoluteUrl( - "/api/terminal/stream?id=gsd-interactive&command=gsd", + "/api/terminal/stream?id=sf-interactive&command=sf", "http://localhost:3000", "/Users/sn0w/Documents/dev/Other Project", ) assert.equal(url.origin, "http://localhost:3000") assert.equal(url.pathname, "/api/terminal/stream") - assert.equal(url.searchParams.get("id"), "gsd-interactive") - assert.equal(url.searchParams.get("command"), "gsd") + assert.equal(url.searchParams.get("id"), "sf-interactive") + assert.equal(url.searchParams.get("command"), "sf") assert.equal(url.searchParams.get("project"), "/Users/sn0w/Documents/dev/Other Project") }) diff --git a/src/tests/integration/web-recovery-diagnostics-contract.test.ts b/src/tests/integration/web-recovery-diagnostics-contract.test.ts index 84f4b1805..5c2a2caf9 100644 --- a/src/tests/integration/web-recovery-diagnostics-contract.test.ts +++ 
b/src/tests/integration/web-recovery-diagnostics-contract.test.ts @@ -102,10 +102,10 @@ function readyOnboardingState(overrides: Record<string, unknown> = {}) { } function makeRecoveryFixture(): { projectCwd: string; sessionsDir: string; cleanup: () => void } { - const root = mkdtempSync(join(tmpdir(), "gsd-recovery-contract-")) + const root = mkdtempSync(join(tmpdir(), "sf-recovery-contract-")) const projectCwd = join(root, "project") const sessionsDir = join(root, "sessions") - const milestoneDir = join(projectCwd, ".gsd", "milestones", "M001") + const milestoneDir = join(projectCwd, ".sf", "milestones", "M001") const sliceDir = join(milestoneDir, "slices", "S01") const tasksDir = join(sliceDir, "tasks") @@ -150,7 +150,7 @@ function makeRecoveryFixture(): { projectCwd: string; sessionsDir: string; clean } function makeEmptyProjectFixture(): { projectCwd: string; sessionsDir: string; cleanup: () => void } { - const root = mkdtempSync(join(tmpdir(), "gsd-recovery-empty-")) + const root = mkdtempSync(join(tmpdir(), "sf-recovery-empty-")) const projectCwd = join(root, "project") const sessionsDir = join(root, "sessions") mkdirSync(projectCwd, { recursive: true }) @@ -272,7 +272,7 @@ test("/api/recovery returns structured recovery diagnostics and redacts secrets" payload.actions.browser.map((action: { id: string }) => action.id), ["refresh_diagnostics", "refresh_workspace", "open_retry_controls", "open_resume_controls", "open_auth_controls"], ) - assert.ok(payload.actions.commands.some((entry: { command: string }) => entry.command.includes("/gsd doctor"))) + assert.ok(payload.actions.commands.some((entry: { command: string }) => entry.command.includes("/sf doctor"))) const serialized = JSON.stringify(payload) assert.doesNotMatch(serialized, /sk-test-recovery-secret-9999|sk-onboarding-secret-1234/) diff --git a/src/tests/integration/web-responsive.test.ts b/src/tests/integration/web-responsive.test.ts index f159103e7..d3e16c6b1 100644 --- 
a/src/tests/integration/web-responsive.test.ts +++ b/src/tests/integration/web-responsive.test.ts @@ -28,46 +28,46 @@ test('layout.tsx exports a Viewport with device-width', () => { // ── app-shell.tsx ─────────────────────────────────────────────────────────── test('app-shell.tsx has a mobile hamburger menu toggle', () => { - const src = readComponent('components/gsd/app-shell.tsx') + const src = readComponent('components/sf/app-shell.tsx') assert.ok(src.includes('mobile-nav-toggle'), 'should have mobile-nav-toggle test id') assert.ok(src.includes('Menu'), 'should import Menu icon for hamburger') }) test('app-shell.tsx hides desktop sidebar on mobile with md:flex', () => { - const src = readComponent('components/gsd/app-shell.tsx') + const src = readComponent('components/sf/app-shell.tsx') // The desktop sidebar wrapper should use hidden + md:flex assert.ok(src.includes('hidden md:flex'), 'desktop sidebar should be hidden on mobile') }) test('app-shell.tsx has a mobile nav drawer', () => { - const src = readComponent('components/gsd/app-shell.tsx') + const src = readComponent('components/sf/app-shell.tsx') assert.ok(src.includes('mobile-nav-drawer'), 'should have mobile-nav-drawer test id') assert.ok(src.includes('mobile-nav-overlay'), 'should have mobile-nav-overlay test id') }) test('app-shell.tsx has a mobile milestone drawer', () => { - const src = readComponent('components/gsd/app-shell.tsx') + const src = readComponent('components/sf/app-shell.tsx') assert.ok(src.includes('mobile-milestone-drawer'), 'should have mobile-milestone-drawer test id') assert.ok(src.includes('mobile-milestone-toggle'), 'should have mobile-milestone-toggle test id') }) test('app-shell.tsx has a mobile bottom bar', () => { - const src = readComponent('components/gsd/app-shell.tsx') + const src = readComponent('components/sf/app-shell.tsx') assert.ok(src.includes('mobile-bottom-bar'), 'should have mobile-bottom-bar test id') }) test('app-shell.tsx header uses responsive padding', () 
=> { - const src = readComponent('components/gsd/app-shell.tsx') + const src = readComponent('components/sf/app-shell.tsx') assert.ok(src.includes('md:px-4'), 'header should have responsive horizontal padding') }) test('app-shell.tsx hides project label on small screens', () => { - const src = readComponent('components/gsd/app-shell.tsx') + const src = readComponent('components/sf/app-shell.tsx') assert.ok(src.includes('hidden sm:inline'), 'project label should be hidden on mobile') }) test('app-shell.tsx hides desktop milestone sidebar on mobile', () => { - const src = readComponent('components/gsd/app-shell.tsx') + const src = readComponent('components/sf/app-shell.tsx') // The milestone sidebar resize handle should be hidden on mobile assert.ok( src.includes('hidden md:flex') || src.includes('hidden md:block'), @@ -78,13 +78,13 @@ test('app-shell.tsx hides desktop milestone sidebar on mobile', () => { // ── sidebar.tsx ────────────────────────────────────────────────────────────── test('sidebar.tsx supports a mobile prop', () => { - const src = readComponent('components/gsd/sidebar.tsx') + const src = readComponent('components/sf/sidebar.tsx') assert.ok(src.includes('mobile?:'), 'Sidebar should accept a mobile prop') assert.ok(src.includes('mobile?: boolean'), 'mobile prop should be boolean') }) test('sidebar.tsx has a MobileNavPanel with touch-friendly targets', () => { - const src = readComponent('components/gsd/sidebar.tsx') + const src = readComponent('components/sf/sidebar.tsx') assert.ok(src.includes('mobile-nav-panel'), 'should have mobile-nav-panel test id') assert.ok(src.includes('min-h-[44px]'), 'nav items should have 44px minimum touch target height') }) @@ -92,25 +92,25 @@ test('sidebar.tsx has a MobileNavPanel with touch-friendly targets', () => { // ── dashboard.tsx ─────────────────────────────────────────────────────────── test('dashboard.tsx has responsive grid for metric cards', () => { - const src = 
readComponent('components/gsd/dashboard.tsx') + const src = readComponent('components/sf/dashboard.tsx') assert.ok(src.includes('sm:grid-cols-2'), 'metric grid should stack to 2 cols on sm') assert.ok(src.includes('xl:grid-cols-4'), 'metric grid should expand to 4 cols on xl') }) test('dashboard.tsx has responsive padding on content area', () => { - const src = readComponent('components/gsd/dashboard.tsx') + const src = readComponent('components/sf/dashboard.tsx') assert.ok(src.includes('md:p-6'), 'content area should have responsive padding') }) test('dashboard.tsx has responsive header padding', () => { - const src = readComponent('components/gsd/dashboard.tsx') + const src = readComponent('components/sf/dashboard.tsx') assert.ok(src.includes('md:px-6'), 'dashboard header should have responsive horizontal padding') }) // ── status-bar.tsx ────────────────────────────────────────────────────────── test('status-bar.tsx hides branch info on small screens', () => { - const src = readComponent('components/gsd/status-bar.tsx') + const src = readComponent('components/sf/status-bar.tsx') // Branch info should be hidden on mobile assert.ok( src.includes('hidden sm:flex'), @@ -119,12 +119,12 @@ test('status-bar.tsx hides branch info on small screens', () => { }) test('status-bar.tsx has responsive text sizing', () => { - const src = readComponent('components/gsd/status-bar.tsx') + const src = readComponent('components/sf/status-bar.tsx') assert.ok(src.includes('md:text-xs'), 'status bar should have responsive text size') }) test('status-bar.tsx has responsive gap spacing', () => { - const src = readComponent('components/gsd/status-bar.tsx') + const src = readComponent('components/sf/status-bar.tsx') assert.ok(src.includes('md:gap-4'), 'status bar should have responsive gap') }) diff --git a/src/tests/integration/web-session-parity-contract.test.ts b/src/tests/integration/web-session-parity-contract.test.ts index 0c868eae6..68d5b59be 100644 --- 
a/src/tests/integration/web-session-parity-contract.test.ts +++ b/src/tests/integration/web-session-parity-contract.test.ts @@ -65,7 +65,7 @@ function makeWorkspaceFixture(): { otherSessionsDir: string cleanup: () => void } { - const root = mkdtempSync(join(tmpdir(), "gsd-web-session-parity-")) + const root = mkdtempSync(join(tmpdir(), "sf-web-session-parity-")) const projectCwd = join(root, "project") const sessionsDir = join(root, "sessions") const otherProjectCwd = join(root, "other-project") @@ -564,7 +564,7 @@ test("/api/session/manage renames inactive sessions via authoritative session-fi }) test("/api/git returns a current-project-scoped repo summary and ignores changes outside the current project subtree", async (t) => { - const root = mkdtempSync(join(tmpdir(), "gsd-web-git-summary-")) + const root = mkdtempSync(join(tmpdir(), "sf-web-git-summary-")) const repoRoot = join(root, "repo") const projectCwd = join(repoRoot, "apps", "current-project") const docsDir = join(repoRoot, "docs") @@ -580,7 +580,7 @@ test("/api/git returns a current-project-scoped repo summary and ignores changes git(repoRoot, ["init"]) git(repoRoot, ["config", "user.name", "SF Test"]) - git(repoRoot, ["config", "user.email", "gsd-test@example.com"]) + git(repoRoot, ["config", "user.email", "sf-test@example.com"]) git(repoRoot, ["add", "."]) git(repoRoot, ["commit", "-m", "initial"]) @@ -617,7 +617,7 @@ test("/api/git returns a current-project-scoped repo summary and ignores changes }) test("/api/git exposes an explicit not-a-repo state instead of failing silently", async (t) => { - const projectCwd = mkdtempSync(join(tmpdir(), "gsd-web-not-repo-")) + const projectCwd = mkdtempSync(join(tmpdir(), "sf-web-not-repo-")) t.after(() => { rmSync(projectCwd, { recursive: true, force: true }) }); @@ -637,9 +637,9 @@ test("/api/git exposes an explicit not-a-repo state instead of failing silently" test("browser session, settings, and git surfaces keep inspectable browse/manage/state markers on 
the shared surface", () => { const rpcTypesSource = readFileSync(resolve(import.meta.dirname, "../../../packages/pi-coding-agent/src/modes/rpc/rpc-types.ts"), "utf8") const contractSource = readFileSync(resolve(import.meta.dirname, "../../../web/lib/command-surface-contract.ts"), "utf8") - const storeSource = readFileSync(resolve(import.meta.dirname, "../../../web/lib/gsd-workspace-store.tsx"), "utf8") - const surfaceSource = readFileSync(resolve(import.meta.dirname, "../../../web/components/gsd/command-surface.tsx"), "utf8") - const sidebarSource = readFileSync(resolve(import.meta.dirname, "../../../web/components/gsd/sidebar.tsx"), "utf8") + const storeSource = readFileSync(resolve(import.meta.dirname, "../../../web/lib/sf-workspace-store.tsx"), "utf8") + const surfaceSource = readFileSync(resolve(import.meta.dirname, "../../../web/components/sf/command-surface.tsx"), "utf8") + const sidebarSource = readFileSync(resolve(import.meta.dirname, "../../../web/components/sf/sidebar.tsx"), "utf8") const gitRouteSource = readFileSync(resolve(import.meta.dirname, "../../../web/app/api/git/route.ts"), "utf8") assert.match(rpcTypesSource, /autoRetryEnabled: boolean/, "rpc-types.ts must expose retry-enabled state in get_state") @@ -658,15 +658,15 @@ test("browser session, settings, and git surfaces keep inspectable browse/manage assert.match(contractSource, /set_auto_retry/, "command-surface-contract.ts must model auto-retry mutations") assert.match(contractSource, /abort_retry/, "command-surface-contract.ts must model retry-cancellation mutations") - assert.match(storeSource, /\/api\/git/, "gsd-workspace-store.tsx must load the current-project git summary route") - assert.match(storeSource, /loadGitSummary/, "gsd-workspace-store.tsx must expose a shared git-summary browser action") - assert.match(storeSource, /\/api\/session\/browser/, "gsd-workspace-store.tsx must load the dedicated current-project session browser route") - assert.match(storeSource, 
/\/api\/session\/manage/, "gsd-workspace-store.tsx must call the session manage route for browser renames") - assert.match(storeSource, /setSteeringModeFromSurface/, "gsd-workspace-store.tsx must expose a shared steering-mode browser action") - assert.match(storeSource, /setFollowUpModeFromSurface/, "gsd-workspace-store.tsx must expose a shared follow-up-mode browser action") - assert.match(storeSource, /setAutoCompactionFromSurface/, "gsd-workspace-store.tsx must expose a shared auto-compaction browser action") - assert.match(storeSource, /setAutoRetryFromSurface/, "gsd-workspace-store.tsx must expose a shared auto-retry browser action") - assert.match(storeSource, /abortRetryFromSurface/, "gsd-workspace-store.tsx must expose a shared retry-cancellation browser action") + assert.match(storeSource, /\/api\/git/, "sf-workspace-store.tsx must load the current-project git summary route") + assert.match(storeSource, /loadGitSummary/, "sf-workspace-store.tsx must expose a shared git-summary browser action") + assert.match(storeSource, /\/api\/session\/browser/, "sf-workspace-store.tsx must load the dedicated current-project session browser route") + assert.match(storeSource, /\/api\/session\/manage/, "sf-workspace-store.tsx must call the session manage route for browser renames") + assert.match(storeSource, /setSteeringModeFromSurface/, "sf-workspace-store.tsx must expose a shared steering-mode browser action") + assert.match(storeSource, /setFollowUpModeFromSurface/, "sf-workspace-store.tsx must expose a shared follow-up-mode browser action") + assert.match(storeSource, /setAutoCompactionFromSurface/, "sf-workspace-store.tsx must expose a shared auto-compaction browser action") + assert.match(storeSource, /setAutoRetryFromSurface/, "sf-workspace-store.tsx must expose a shared auto-retry browser action") + assert.match(storeSource, /abortRetryFromSurface/, "sf-workspace-store.tsx must expose a shared retry-cancellation browser action") assert.match(surfaceSource, 
/data-testid="command-surface-git-summary"/, "command-surface.tsx must expose the git summary panel") assert.match(surfaceSource, /data-testid="command-surface-git-state"/, "command-surface.tsx must expose inspectable git-summary state text") diff --git a/src/tests/integration/web-state-surfaces-contract.test.ts b/src/tests/integration/web-state-surfaces-contract.test.ts index a102d66e5..be7225ca7 100644 --- a/src/tests/integration/web-state-surfaces-contract.test.ts +++ b/src/tests/integration/web-state-surfaces-contract.test.ts @@ -15,8 +15,8 @@ const workspaceStatus = await import("../../../web/lib/workspace-status.ts"); // ─── Helpers ────────────────────────────────────────────────────────── function makeGsdFixture(): { root: string; gsdDir: string; cleanup: () => void } { - const root = mkdtempSync(join(tmpdir(), "gsd-state-surfaces-")); - const gsdDir = join(root, ".gsd"); + const root = mkdtempSync(join(tmpdir(), "sf-state-surfaces-")); + const gsdDir = join(root, ".sf"); mkdirSync(gsdDir, { recursive: true }); return { root, @@ -191,7 +191,7 @@ test("getTaskStatus returns correct statuses", () => { }); // ─── Group 3: Files API — tree listing ─────────────────────────────── -test("files API returns tree listing of .gsd/ directory", async (t) => { +test("files API returns tree listing of .sf/ directory", async (t) => { const { root, gsdDir, cleanup } = makeGsdFixture(); const origEnv = process.env.SF_WEB_PROJECT_CWD; @@ -342,8 +342,8 @@ test("files API returns 404 for missing files", async (t) => { assert.ok(data.error); }); -test("files API returns empty tree when .gsd/ does not exist", async (t) => { - const root = mkdtempSync(join(tmpdir(), "gsd-state-surfaces-empty-")); +test("files API returns empty tree when .sf/ does not exist", async (t) => { + const root = mkdtempSync(join(tmpdir(), "sf-state-surfaces-empty-")); const origEnv = process.env.SF_WEB_PROJECT_CWD; t.after(() => { @@ -364,11 +364,11 @@ test("files API returns empty tree when .gsd/ does 
not exist", async (t) => { // ─── Group 6: Mock-free invariant — no static mock data ────────────── const VIEW_FILES = [ - "web/components/gsd/dashboard.tsx", - "web/components/gsd/roadmap.tsx", - "web/components/gsd/activity-view.tsx", - "web/components/gsd/files-view.tsx", - "web/components/gsd/dual-terminal.tsx", + "web/components/sf/dashboard.tsx", + "web/components/sf/roadmap.tsx", + "web/components/sf/activity-view.tsx", + "web/components/sf/files-view.tsx", + "web/components/sf/dual-terminal.tsx", ]; // Patterns that indicate hardcoded mock data arrays @@ -404,23 +404,23 @@ test("view components contain no static mock data arrays", () => { test("view components read from real data sources (store or API)", () => { // Views that derive state from the workspace store const STORE_VIEWS = [ - "web/components/gsd/dashboard.tsx", - "web/components/gsd/roadmap.tsx", - "web/components/gsd/activity-view.tsx", - "web/components/gsd/terminal.tsx", + "web/components/sf/dashboard.tsx", + "web/components/sf/roadmap.tsx", + "web/components/sf/activity-view.tsx", + "web/components/sf/terminal.tsx", ]; // FilesView fetches from /api/files (real endpoint), not the workspace store — that's correct const API_VIEWS = [ - { path: "web/components/gsd/files-view.tsx", apiPattern: "/api/files" }, + { path: "web/components/sf/files-view.tsx", apiPattern: "/api/files" }, ]; for (const filePath of STORE_VIEWS) { const fullPath = resolve(import.meta.dirname, "../../..", filePath); const source = readFileSync(fullPath, "utf-8"); assert.ok( - source.includes("gsd-workspace-store"), - `${filePath} does not import from gsd-workspace-store — store-backed views must read real store state`, + source.includes("sf-workspace-store"), + `${filePath} does not import from sf-workspace-store — store-backed views must read real store state`, ); } @@ -438,7 +438,7 @@ test("view components read from real data sources (store or API)", () => { // from the dashboard. 
Live signals are visible in the terminal/power mode instead. test("status bar consumes statusTexts from store", () => { - const statusBarPath = resolve(import.meta.dirname, "../../../web/components/gsd/status-bar.tsx"); + const statusBarPath = resolve(import.meta.dirname, "../../../web/components/sf/status-bar.tsx"); const source = readFileSync(statusBarPath, "utf-8"); assert.ok( @@ -452,10 +452,10 @@ test("status bar consumes statusTexts from store", () => { }); test("browser shell renders title overrides, widgets, and editor prefills from store-backed state", () => { - const storePath = resolve(import.meta.dirname, "../../../web/lib/gsd-workspace-store.tsx"); - const appShellPath = resolve(import.meta.dirname, "../../../web/components/gsd/app-shell.tsx"); - const statusBarPath = resolve(import.meta.dirname, "../../../web/components/gsd/status-bar.tsx"); - const terminalPath = resolve(import.meta.dirname, "../../../web/components/gsd/terminal.tsx"); + const storePath = resolve(import.meta.dirname, "../../../web/lib/sf-workspace-store.tsx"); + const appShellPath = resolve(import.meta.dirname, "../../../web/components/sf/app-shell.tsx"); + const statusBarPath = resolve(import.meta.dirname, "../../../web/components/sf/status-bar.tsx"); + const terminalPath = resolve(import.meta.dirname, "../../../web/components/sf/terminal.tsx"); const storeSource = readFileSync(storePath, "utf-8"); const appShellSource = readFileSync(appShellPath, "utf-8"); @@ -472,13 +472,13 @@ test("browser shell renders title overrides, widgets, and editor prefills from s assert.match(terminalSource, /MAX_VISIBLE_WIDGET_LINES = 6/, "terminal.tsx must bound widget rendering so extension widgets cannot grow without limit"); assert.match(terminalSource, /widget\.placement \?\? 
"aboveEditor"/, "terminal.tsx must preserve the existing default above-editor placement semantics"); - assert.match(storeSource, /consumeEditorTextBuffer = \(\): string \| null =>/, "gsd-workspace-store.tsx must expose a consume-once editor prefill action"); + assert.match(storeSource, /consumeEditorTextBuffer = \(\): string \| null =>/, "sf-workspace-store.tsx must expose a consume-once editor prefill action"); assert.match(terminalSource, /consumeEditorTextBuffer/, "terminal.tsx must consume editor prefill state instead of replaying it forever"); assert.match(terminalSource, /setInput\(buffer\)/, "terminal.tsx must visibly prefill the command input from editorTextBuffer"); }); test("terminal consumes activeToolExecution from store", () => { - const terminalPath = resolve(import.meta.dirname, "../../../web/components/gsd/terminal.tsx"); + const terminalPath = resolve(import.meta.dirname, "../../../web/components/sf/terminal.tsx"); const source = readFileSync(terminalPath, "utf-8"); assert.ok( @@ -488,7 +488,7 @@ test("terminal consumes activeToolExecution from store", () => { }); test("chat tool blocks normalize Claude Code tool names before choosing built-in render treatment", () => { - const chatPath = resolve(import.meta.dirname, "../../../web/components/gsd/chat-mode.tsx"); + const chatPath = resolve(import.meta.dirname, "../../../web/components/sf/chat-mode.tsx"); const source = readFileSync(chatPath, "utf-8"); assert.match( @@ -515,11 +515,11 @@ test("chat tool blocks normalize Claude Code tool names before choosing built-in test("live browser panels consume live selectors and expose inspectable freshness markers", () => { const contractPath = resolve(import.meta.dirname, "../../../web/lib/command-surface-contract.ts") - const storePath = resolve(import.meta.dirname, "../../../web/lib/gsd-workspace-store.tsx") - const dashboardPath = resolve(import.meta.dirname, "../../../web/components/gsd/dashboard.tsx") - const sidebarPath = resolve(import.meta.dirname, 
"../../../web/components/gsd/sidebar.tsx") - const roadmapPath = resolve(import.meta.dirname, "../../../web/components/gsd/roadmap.tsx") - const statusBarPath = resolve(import.meta.dirname, "../../../web/components/gsd/status-bar.tsx") + const storePath = resolve(import.meta.dirname, "../../../web/lib/sf-workspace-store.tsx") + const dashboardPath = resolve(import.meta.dirname, "../../../web/components/sf/dashboard.tsx") + const sidebarPath = resolve(import.meta.dirname, "../../../web/components/sf/sidebar.tsx") + const roadmapPath = resolve(import.meta.dirname, "../../../web/components/sf/roadmap.tsx") + const statusBarPath = resolve(import.meta.dirname, "../../../web/components/sf/status-bar.tsx") const contractSource = readFileSync(contractPath, "utf-8") const storeSource = readFileSync(storePath, "utf-8") @@ -529,13 +529,13 @@ test("live browser panels consume live selectors and expose inspectable freshnes const statusBarSource = readFileSync(statusBarPath, "utf-8") assert.match(contractSource, /export interface WorkspaceRecoverySummary/, "command-surface-contract.ts must expose a shared recovery summary shape for live panels") - assert.match(storeSource, /live_state_invalidation/, "gsd-workspace-store.tsx must handle typed live_state_invalidation events") - assert.match(storeSource, /\/api\/live-state/, "gsd-workspace-store.tsx must use the narrow live-state route for targeted refreshes") - assert.match(storeSource, /softBootRefreshCount/, "gsd-workspace-store.tsx must expose a soft boot refresh counter for observability") - assert.match(storeSource, /targetedRefreshCount/, "gsd-workspace-store.tsx must expose a targeted refresh counter for observability") - assert.match(storeSource, /getLiveWorkspaceIndex/, "gsd-workspace-store.tsx must expose a live workspace selector") - assert.match(storeSource, /getLiveAutoDashboard/, "gsd-workspace-store.tsx must expose a live auto selector") - assert.match(storeSource, /getLiveResumableSessions/, 
"gsd-workspace-store.tsx must expose a live resumable-sessions selector") + assert.match(storeSource, /live_state_invalidation/, "sf-workspace-store.tsx must handle typed live_state_invalidation events") + assert.match(storeSource, /\/api\/live-state/, "sf-workspace-store.tsx must use the narrow live-state route for targeted refreshes") + assert.match(storeSource, /softBootRefreshCount/, "sf-workspace-store.tsx must expose a soft boot refresh counter for observability") + assert.match(storeSource, /targetedRefreshCount/, "sf-workspace-store.tsx must expose a targeted refresh counter for observability") + assert.match(storeSource, /getLiveWorkspaceIndex/, "sf-workspace-store.tsx must expose a live workspace selector") + assert.match(storeSource, /getLiveAutoDashboard/, "sf-workspace-store.tsx must expose a live auto selector") + assert.match(storeSource, /getLiveResumableSessions/, "sf-workspace-store.tsx must expose a live resumable-sessions selector") assert.match(dashboardSource, /getLiveWorkspaceIndex/, "dashboard.tsx must derive roadmap state from the live workspace selector") assert.match(dashboardSource, /getLiveAutoDashboard/, "dashboard.tsx must derive auto metrics from the live auto selector") @@ -554,9 +554,9 @@ test("live browser panels consume live selectors and expose inspectable freshnes }) test("workflow action surfaces route new-milestone CTAs through the shared command path", () => { - const dashboardPath = resolve(import.meta.dirname, "../../../web/components/gsd/dashboard.tsx") - const sidebarPath = resolve(import.meta.dirname, "../../../web/components/gsd/sidebar.tsx") - const chatPath = resolve(import.meta.dirname, "../../../web/components/gsd/chat-mode.tsx") + const dashboardPath = resolve(import.meta.dirname, "../../../web/components/sf/dashboard.tsx") + const sidebarPath = resolve(import.meta.dirname, "../../../web/components/sf/sidebar.tsx") + const chatPath = resolve(import.meta.dirname, "../../../web/components/sf/chat-mode.tsx") const 
dashboardSource = readFileSync(dashboardPath, "utf-8") const sidebarSource = readFileSync(sidebarPath, "utf-8") @@ -571,14 +571,14 @@ test("workflow action surfaces route new-milestone CTAs through the shared comma assert.doesNotMatch(dashboardSource, /NewMilestoneDialog/, "dashboard.tsx must not import or render the deprecated new-milestone dialog") assert.doesNotMatch(sidebarSource, /NewMilestoneDialog/, "sidebar.tsx must not import or render the deprecated new-milestone dialog") assert.doesNotMatch(chatSource, /NewMilestoneDialog/, "chat-mode.tsx must not import or render the deprecated new-milestone dialog") - assert.doesNotMatch(chatSource, /buildPromptCommand\("\/gsd auto", bridge\)/, "chat-mode.tsx must not hardcode a special /gsd auto path for new-milestone CTA dispatch") + assert.doesNotMatch(chatSource, /buildPromptCommand\("\/sf auto", bridge\)/, "chat-mode.tsx must not hardcode a special /sf auto path for new-milestone CTA dispatch") }) test("sidebar Git affordance opens a real git-summary surface with visible repo/not-repo/error states", () => { const contractPath = resolve(import.meta.dirname, "../../../web/lib/command-surface-contract.ts"); - const storePath = resolve(import.meta.dirname, "../../../web/lib/gsd-workspace-store.tsx"); - const surfacePath = resolve(import.meta.dirname, "../../../web/components/gsd/command-surface.tsx"); - const sidebarPath = resolve(import.meta.dirname, "../../../web/components/gsd/sidebar.tsx"); + const storePath = resolve(import.meta.dirname, "../../../web/lib/sf-workspace-store.tsx"); + const surfacePath = resolve(import.meta.dirname, "../../../web/components/sf/command-surface.tsx"); + const sidebarPath = resolve(import.meta.dirname, "../../../web/components/sf/sidebar.tsx"); const contractSource = readFileSync(contractPath, "utf-8"); const storeSource = readFileSync(storePath, "utf-8"); @@ -588,8 +588,8 @@ test("sidebar Git affordance opens a real git-summary surface with visible repo/ assert.match(contractSource, 
/gitSummary:/, "command-surface-contract.ts must retain git-summary state on the shared surface"); assert.match(contractSource, /load_git_summary/, "command-surface-contract.ts must model git-summary loading as an explicit action"); - assert.match(storeSource, /loadGitSummary/, "gsd-workspace-store.tsx must expose loadGitSummary so the Git surface is not inert"); - assert.match(storeSource, /\/api\/git/, "gsd-workspace-store.tsx must fetch the current-project git route for the Git surface"); + assert.match(storeSource, /loadGitSummary/, "sf-workspace-store.tsx must expose loadGitSummary so the Git surface is not inert"); + assert.match(storeSource, /\/api\/git/, "sf-workspace-store.tsx must fetch the current-project git route for the Git surface"); assert.match(surfaceSource, /data-testid="command-surface-git-summary"/, "command-surface.tsx must render a git-summary panel"); assert.match(surfaceSource, /data-testid="command-surface-git-not-repo"/, "command-surface.tsx must keep not-a-repo state browser-visible"); @@ -600,10 +600,10 @@ test("sidebar Git affordance opens a real git-summary surface with visible repo/ test("recovery diagnostics surface stays on a dedicated route with explicit stale and action state", () => { const contractPath = resolve(import.meta.dirname, "../../../web/lib/command-surface-contract.ts"); - const storePath = resolve(import.meta.dirname, "../../../web/lib/gsd-workspace-store.tsx"); - const surfacePath = resolve(import.meta.dirname, "../../../web/components/gsd/command-surface.tsx"); - const dashboardPath = resolve(import.meta.dirname, "../../../web/components/gsd/dashboard.tsx"); - const sidebarPath = resolve(import.meta.dirname, "../../../web/components/gsd/sidebar.tsx"); + const storePath = resolve(import.meta.dirname, "../../../web/lib/sf-workspace-store.tsx"); + const surfacePath = resolve(import.meta.dirname, "../../../web/components/sf/command-surface.tsx"); + const dashboardPath = resolve(import.meta.dirname, 
"../../../web/components/sf/dashboard.tsx"); + const sidebarPath = resolve(import.meta.dirname, "../../../web/components/sf/sidebar.tsx"); const contractSource = readFileSync(contractPath, "utf-8"); const storeSource = readFileSync(storePath, "utf-8"); @@ -615,9 +615,9 @@ test("recovery diagnostics surface stays on a dedicated route with explicit stal assert.match(contractSource, /export interface CommandSurfaceRecoveryState/, "command-surface-contract.ts must expose explicit recovery load state"); assert.match(contractSource, /load_recovery_diagnostics/, "command-surface-contract.ts must model recovery loading as an explicit action"); - assert.match(storeSource, /loadRecoveryDiagnostics = async/, "gsd-workspace-store.tsx must expose a recovery diagnostics loader"); - assert.match(storeSource, /\/api\/recovery/, "gsd-workspace-store.tsx must call the dedicated recovery route"); - assert.match(storeSource, /markRecoveryStateInvalidated/, "gsd-workspace-store.tsx must keep recovery diagnostics stale state inspectable after invalidation"); + assert.match(storeSource, /loadRecoveryDiagnostics = async/, "sf-workspace-store.tsx must expose a recovery diagnostics loader"); + assert.match(storeSource, /\/api\/recovery/, "sf-workspace-store.tsx must call the dedicated recovery route"); + assert.match(storeSource, /markRecoveryStateInvalidated/, "sf-workspace-store.tsx must keep recovery diagnostics stale state inspectable after invalidation"); assert.match(surfaceSource, /data-testid="command-surface-recovery"/, "command-surface.tsx must render a recovery diagnostics panel"); assert.match(surfaceSource, /data-testid="command-surface-recovery-state"/, "command-surface.tsx must expose a recovery load-state marker"); diff --git a/src/tests/integration/web-subprocess-module-resolution.test.ts b/src/tests/integration/web-subprocess-module-resolution.test.ts index eb0b0809e..175639eb7 100644 --- a/src/tests/integration/web-subprocess-module-resolution.test.ts +++ 
b/src/tests/integration/web-subprocess-module-resolution.test.ts @@ -12,7 +12,7 @@ import { // --------------------------------------------------------------------------- test("isUnderNodeModules returns false for paths outside node_modules", () => { - assert.equal(isUnderNodeModules("/home/user/projects/gsd"), false) + assert.equal(isUnderNodeModules("/home/user/projects/sf"), false) }) test("isUnderNodeModules returns true for Unix paths under node_modules/", () => { @@ -31,7 +31,7 @@ test("isUnderNodeModules returns true for Windows paths under node_modules/", () test("isUnderNodeModules returns false for substring match without trailing slash", () => { assert.equal( - isUnderNodeModules("/home/user/my_node_modules_backup/gsd"), + isUnderNodeModules("/home/user/my_node_modules_backup/sf"), false, ) }) @@ -41,7 +41,7 @@ test("isUnderNodeModules returns false for substring match without trailing slas // --------------------------------------------------------------------------- test("resolveSubprocessModule returns source .ts path when NOT under node_modules", () => { - const packageRoot = "/home/user/projects/gsd" + const packageRoot = "/home/user/projects/sf" const result = resolveSubprocessModule( packageRoot, "resources/extensions/sf/workspace-index.ts", diff --git a/src/tests/integration/web-switch-project.test.ts b/src/tests/integration/web-switch-project.test.ts index df9bc6b8b..143d7d268 100644 --- a/src/tests/integration/web-switch-project.test.ts +++ b/src/tests/integration/web-switch-project.test.ts @@ -91,7 +91,7 @@ function persistSwitchRoot( // Fixtures // --------------------------------------------------------------------------- -const tempRoot = mkdtempSync(join(tmpdir(), "gsd-switch-root-")); +const tempRoot = mkdtempSync(join(tmpdir(), "sf-switch-root-")); const rootA = join(tempRoot, "root-a"); mkdirSync(rootA); diff --git a/src/tests/integration/web-terminal-preservation.test.ts b/src/tests/integration/web-terminal-preservation.test.ts index 
fb0cd2d1a..d53dbe504 100644 --- a/src/tests/integration/web-terminal-preservation.test.ts +++ b/src/tests/integration/web-terminal-preservation.test.ts @@ -227,7 +227,7 @@ function deriveSessionId( sessionPrefix?: string, command?: string, ): string { - const base = sessionPrefix ?? (command ? "gsd-default" : "default"); + const base = sessionPrefix ?? (command ? "sf-default" : "default"); if (!projectCwd) return base; // Stable hash-like key from the project path — keeps IDs short but unique return `${base}:${projectCwd}`; @@ -252,10 +252,10 @@ test("session ID derivation: explicit sessionPrefix is preserved with project sc }); test("session ID derivation: command sessions are also project-scoped", () => { - const idA = deriveSessionId("/projects/alpha", undefined, "gsd"); - const idB = deriveSessionId("/projects/beta", undefined, "gsd"); + const idA = deriveSessionId("/projects/alpha", undefined, "sf"); + const idB = deriveSessionId("/projects/beta", undefined, "sf"); assert.notEqual(idA, idB); - assert.ok(idA.includes("gsd-default"), "Uses gsd-default base for command sessions"); + assert.ok(idA.includes("sf-default"), "Uses sf-default base for command sessions"); }); test("session ID derivation: no projectCwd falls back to plain base ID", () => { diff --git a/src/tests/integration/web-workflow-action-execution.test.ts b/src/tests/integration/web-workflow-action-execution.test.ts index 024677baa..02a7372d8 100644 --- a/src/tests/integration/web-workflow-action-execution.test.ts +++ b/src/tests/integration/web-workflow-action-execution.test.ts @@ -12,12 +12,12 @@ test("derivePendingWorkflowCommandLabel prefers the latest input line while a co commandInFlight: "prompt", terminalLines: [ { id: "1", timestamp: "12:00", type: "system", content: "Bridge ready" }, - { id: "2", timestamp: "12:01", type: "input", content: "/gsd" }, + { id: "2", timestamp: "12:01", type: "input", content: "/sf" }, { id: "3", timestamp: "12:02", type: "system", content: "Working…" }, ], }) 
- assert.equal(label, "/gsd") + assert.equal(label, "/sf") }) test("derivePendingWorkflowCommandLabel falls back to the command type when no input line exists", () => { @@ -34,7 +34,7 @@ test("navigateToGSDView dispatches the shared browser navigation event", (t) => const fakeWindow = new EventTarget() const seen: string[] = [] - fakeWindow.addEventListener("gsd:navigate-view", (event: Event) => { + fakeWindow.addEventListener("sf:navigate-view", (event: Event) => { seen.push((event as CustomEvent<{ view: string }>).detail.view) }) @@ -54,7 +54,7 @@ test("executeWorkflowActionInPowerMode calls dispatch and navigates to the appro const seenViews: string[] = [] let dispatchCalled = false - fakeWindow.addEventListener("gsd:navigate-view", (event: Event) => { + fakeWindow.addEventListener("sf:navigate-view", (event: Event) => { seenViews.push((event as CustomEvent<{ view: string }>).detail.view) }) diff --git a/src/tests/integration/web-workflow-controls-contract.test.ts b/src/tests/integration/web-workflow-controls-contract.test.ts index 897245290..adfb796ff 100644 --- a/src/tests/integration/web-workflow-controls-contract.test.ts +++ b/src/tests/integration/web-workflow-controls-contract.test.ts @@ -19,77 +19,77 @@ function baseInput(overrides: Partial<Parameters<typeof deriveWorkflowAction>[0] } // ─── Group 1: Phase → action mapping ────────────────────────────────── -test("planning + no auto → primary is /gsd with label Plan", () => { +test("planning + no auto → primary is /sf with label Plan", () => { const result = deriveWorkflowAction(baseInput({ phase: "planning" })); assert.ok(result.primary); - assert.equal(result.primary.command, "/gsd"); + assert.equal(result.primary.command, "/sf"); assert.equal(result.primary.label, "Plan"); assert.equal(result.primary.variant, "default"); assert.equal(result.disabled, false); }); -test("executing + no auto → primary is /gsd auto with label Start Auto", () => { +test("executing + no auto → primary is /sf auto with label 
Start Auto", () => { const result = deriveWorkflowAction(baseInput({ phase: "executing" })); assert.ok(result.primary); - assert.equal(result.primary.command, "/gsd auto"); + assert.equal(result.primary.command, "/sf auto"); assert.equal(result.primary.label, "Start Auto"); }); -test("summarizing + no auto → primary is /gsd auto with label Start Auto", () => { +test("summarizing + no auto → primary is /sf auto with label Start Auto", () => { const result = deriveWorkflowAction(baseInput({ phase: "summarizing" })); assert.ok(result.primary); - assert.equal(result.primary.command, "/gsd auto"); + assert.equal(result.primary.command, "/sf auto"); assert.equal(result.primary.label, "Start Auto"); }); -test("auto active (not paused) → primary is /gsd stop with destructive variant", () => { +test("auto active (not paused) → primary is /sf stop with destructive variant", () => { const result = deriveWorkflowAction(baseInput({ autoActive: true, autoPaused: false })); assert.ok(result.primary); - assert.equal(result.primary.command, "/gsd stop"); + assert.equal(result.primary.command, "/sf stop"); assert.equal(result.primary.label, "Stop Auto"); assert.equal(result.primary.variant, "destructive"); }); -test("auto paused → primary is /gsd auto with label Resume Auto", () => { +test("auto paused → primary is /sf auto with label Resume Auto", () => { const result = deriveWorkflowAction(baseInput({ autoPaused: true })); assert.ok(result.primary); - assert.equal(result.primary.command, "/gsd auto"); + assert.equal(result.primary.command, "/sf auto"); assert.equal(result.primary.label, "Resume Auto"); assert.equal(result.primary.variant, "default"); }); -test("pre-planning + no milestones → primary is /gsd with label Initialize Project", () => { +test("pre-planning + no milestones → primary is /sf with label Initialize Project", () => { const result = deriveWorkflowAction(baseInput({ phase: "pre-planning", hasMilestones: false })); assert.ok(result.primary); - 
assert.equal(result.primary.command, "/gsd"); + assert.equal(result.primary.command, "/sf"); assert.equal(result.primary.label, "Initialize Project"); }); -test("pre-planning + has milestones → primary is /gsd with label Continue", () => { +test("pre-planning + has milestones → primary is /sf with label Continue", () => { const result = deriveWorkflowAction(baseInput({ phase: "pre-planning", hasMilestones: true })); assert.ok(result.primary); - assert.equal(result.primary.command, "/gsd"); + assert.equal(result.primary.command, "/sf"); assert.equal(result.primary.label, "Continue"); }); -test("other phases (e.g. researching) without auto → primary is Continue /gsd", () => { +test("other phases (e.g. researching) without auto → primary is Continue /sf", () => { const result = deriveWorkflowAction(baseInput({ phase: "researching" })); assert.ok(result.primary); - assert.equal(result.primary.command, "/gsd"); + assert.equal(result.primary.command, "/sf"); assert.equal(result.primary.label, "Continue"); }); -test("verifying phase without auto → primary is Continue /gsd", () => { +test("verifying phase without auto → primary is Continue /sf", () => { const result = deriveWorkflowAction(baseInput({ phase: "verifying" })); assert.ok(result.primary); - assert.equal(result.primary.command, "/gsd"); + assert.equal(result.primary.command, "/sf"); assert.equal(result.primary.label, "Continue"); }); -test("complete phase without auto → primary is New Milestone /gsd with no step secondary", () => { +test("complete phase without auto → primary is New Milestone /sf with no step secondary", () => { const result = deriveWorkflowAction(baseInput({ phase: "complete" })); assert.ok(result.primary); - assert.equal(result.primary.command, "/gsd"); + assert.equal(result.primary.command, "/sf"); assert.equal(result.primary.label, "New Milestone"); assert.equal(result.isNewMilestone, true); assert.deepEqual(result.secondaries, []); @@ -99,7 +99,7 @@ test("complete phase without auto → 
primary is New Milestone /gsd with no step test("secondaries include Step when auto is not active", () => { const result = deriveWorkflowAction(baseInput({ phase: "executing" })); assert.ok(result.secondaries.length > 0); - const step = result.secondaries.find((s) => s.command === "/gsd next"); + const step = result.secondaries.find((s) => s.command === "/sf next"); assert.ok(step, "Expected a Step secondary action"); assert.equal(step.label, "Step"); }); diff --git a/src/tests/mcp-client-oauth.test.ts b/src/tests/mcp-client-oauth.test.ts index 568e28eab..f222a7dcb 100644 --- a/src/tests/mcp-client-oauth.test.ts +++ b/src/tests/mcp-client-oauth.test.ts @@ -38,7 +38,7 @@ test("HTTP transport with authProvider creates transport that can authenticate", get clientMetadata() { return { redirect_uris: ["http://localhost:3000/callback"], - client_name: "gsd-test", + client_name: "sf-test", }; }, clientInformation: () => undefined, diff --git a/src/tests/node-modules-symlink.test.ts b/src/tests/node-modules-symlink.test.ts index f5c8ead58..c09ae7123 100644 --- a/src/tests/node-modules-symlink.test.ts +++ b/src/tests/node-modules-symlink.test.ts @@ -13,7 +13,7 @@ import { fileURLToPath } from "node:url"; test("initResources creates node_modules symlink in agent dir", async (t) => { const { initResources } = await import("../resource-loader.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-symlink-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-symlink-")); const fakeAgentDir = join(tmp, "agent"); t.after(() => rmSync(tmp, { recursive: true, force: true })); @@ -33,7 +33,7 @@ test("initResources creates node_modules symlink in agent dir", async (t) => { test("initResources replaces a real directory blocking node_modules with a symlink", async (t) => { const { initResources } = await import("../resource-loader.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-symlink-realdir-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-symlink-realdir-")); const fakeAgentDir = 
join(tmp, "agent"); t.after(() => rmSync(tmp, { recursive: true, force: true })); @@ -58,7 +58,7 @@ test("initResources replaces a real directory blocking node_modules with a symli test("initResources replaces a stale symlink with a correct one", async (t) => { const { initResources } = await import("../resource-loader.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-symlink-stale-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-symlink-stale-")); const fakeAgentDir = join(tmp, "agent"); t.after(() => rmSync(tmp, { recursive: true, force: true })); @@ -69,7 +69,7 @@ test("initResources replaces a stale symlink with a correct one", async (t) => { // Remove and replace with a stale symlink pointing to a non-existent path unlinkSync(nodeModulesPath); - symlinkSync("/tmp/nonexistent-gsd-node-modules-" + Date.now(), nodeModulesPath); + symlinkSync("/tmp/nonexistent-sf-node-modules-" + Date.now(), nodeModulesPath); const staleTarget = readlinkSync(nodeModulesPath); assert.notEqual(staleTarget, correctTarget, "stale symlink should point elsewhere"); @@ -83,7 +83,7 @@ test("initResources replaces a stale symlink with a correct one", async (t) => { test("initResources replaces symlink whose target was deleted", async (t) => { const { initResources } = await import("../resource-loader.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-symlink-missing-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-symlink-missing-")); const fakeAgentDir = join(tmp, "agent"); t.after(() => rmSync(tmp, { recursive: true, force: true })); @@ -120,7 +120,7 @@ test("pnpm layout: merged node_modules contains entries from both hoisted and in // node_modules/ // @sf-run/ ← workspace scope (NOT hoisted) // @singularity-forge/ ← workspace scope (NOT hoisted) - const tmp = mkdtempSync(join(tmpdir(), "gsd-pnpm-merge-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-pnpm-merge-")); t.after(() => rmSync(tmp, { recursive: true, force: true })); const hoisted = join(tmp, "node_modules"); @@ -135,9 
+135,9 @@ test("pnpm layout: merged node_modules contains entries from both hoisted and in mkdirSync(pkgRoot, { recursive: true }); // Create internal entries (workspace packages) - mkdirSync(join(internal, "@gsd", "pi-ai"), { recursive: true }); - mkdirSync(join(internal, "@gsd", "pi-coding-agent"), { recursive: true }); - mkdirSync(join(internal, "@gsd-build", "core"), { recursive: true }); + mkdirSync(join(internal, "@sf", "pi-ai"), { recursive: true }); + mkdirSync(join(internal, "@sf", "pi-coding-agent"), { recursive: true }); + mkdirSync(join(internal, "@sf-build", "core"), { recursive: true }); // Create merged directory manually (simulating what reconcileMergedNodeModules does) mkdirSync(agentNodeModules, { recursive: true }); @@ -162,23 +162,23 @@ test("pnpm layout: merged node_modules contains entries from both hoisted and in assert.ok(existsSync(join(agentNodeModules, "@anthropic-ai")), "@anthropic-ai should resolve"); // Verify: workspace packages resolve through internal symlinks - assert.ok(existsSync(join(agentNodeModules, "@gsd")), "@gsd should resolve"); - assert.ok(existsSync(join(agentNodeModules, "@gsd", "pi-ai")), "@sf-run/pi-ai should resolve"); - assert.ok(existsSync(join(agentNodeModules, "@gsd-build")), "@gsd-build should resolve"); + assert.ok(existsSync(join(agentNodeModules, "@sf")), "@sf should resolve"); + assert.ok(existsSync(join(agentNodeModules, "@sf", "pi-ai")), "@sf-run/pi-ai should resolve"); + assert.ok(existsSync(join(agentNodeModules, "@sf-build")), "@sf-build should resolve"); // Verify: sf-run itself is NOT symlinked (it's the package root, not a dep) assert.ok(!existsSync(join(agentNodeModules, "sf-run")), "sf-run should not be in merged dir"); - // Verify: @gsd points to internal, not hoisted (internal takes precedence) - const gsdTarget = readlinkSync(join(agentNodeModules, "@gsd")); - assert.equal(gsdTarget, join(internal, "@gsd"), "@gsd should point to internal node_modules"); + // Verify: @sf points to internal, not 
hoisted (internal takes precedence) + const sfTarget = readlinkSync(join(agentNodeModules, "@sf")); + assert.equal(sfTarget, join(internal, "@sf"), "@sf should point to internal node_modules"); }); -test("pnpm layout: non-@gsd internal deps (e.g. @anthropic-ai) are included in merged dir", (t) => { - // Regression: PR #3564 narrowed the internal overlay to @gsd* only, +test("pnpm layout: non-@sf internal deps (e.g. @anthropic-ai) are included in merged dir", (t) => { + // Regression: PR #3564 narrowed the internal overlay to @sf* only, // dropping optionalDependencies like @anthropic-ai/claude-agent-sdk // that npm installs internally rather than hoisting. - const tmp = mkdtempSync(join(tmpdir(), "gsd-pnpm-internal-optional-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-pnpm-internal-optional-")); t.after(() => rmSync(tmp, { recursive: true, force: true })); const hoisted = join(tmp, "node_modules"); @@ -191,7 +191,7 @@ test("pnpm layout: non-@gsd internal deps (e.g. @anthropic-ai) are included in m mkdirSync(pkgRoot, { recursive: true }); // Internal: workspace packages + optional dep that wasn't hoisted - mkdirSync(join(internal, "@gsd", "pi-ai"), { recursive: true }); + mkdirSync(join(internal, "@sf", "pi-ai"), { recursive: true }); mkdirSync(join(internal, "@anthropic-ai", "claude-agent-sdk"), { recursive: true }); mkdirSync(agentNodeModules, { recursive: true }); @@ -214,29 +214,29 @@ test("pnpm layout: non-@gsd internal deps (e.g.
@anthropic-ai) are included in m assert.ok(existsSync(join(agentNodeModules, "@anthropic-ai")), "@anthropic-ai should resolve from internal"); assert.ok(existsSync(join(agentNodeModules, "@anthropic-ai", "claude-agent-sdk")), "@anthropic-ai/claude-agent-sdk should resolve"); - // @gsd still resolves - assert.ok(existsSync(join(agentNodeModules, "@gsd")), "@gsd should resolve"); + // @sf still resolves + assert.ok(existsSync(join(agentNodeModules, "@sf")), "@sf should resolve"); // Hoisted deps still resolve assert.ok(existsSync(join(agentNodeModules, "yaml")), "yaml should resolve"); }); test("hasMissingWorkspaceScopes detects pnpm layout", (t) => { - const tmp = mkdtempSync(join(tmpdir(), "gsd-pnpm-detect-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-pnpm-detect-")); t.after(() => rmSync(tmp, { recursive: true, force: true })); const hoisted = join(tmp, "hoisted"); const internal = join(tmp, "internal"); - // npm-style: @gsd exists in both hoisted and internal - mkdirSync(join(hoisted, "@gsd"), { recursive: true }); - mkdirSync(join(internal, "@gsd"), { recursive: true }); + // npm-style: @sf exists in both hoisted and internal + mkdirSync(join(hoisted, "@sf"), { recursive: true }); + mkdirSync(join(internal, "@sf"), { recursive: true }); // Inline the detection logic for testing const hasMissing = (h: string, i: string): boolean => { if (!existsSync(i)) return false; for (const entry of readdirSync(i, { withFileTypes: true })) { - if (entry.isDirectory() && entry.name.startsWith("@gsd") && + if (entry.isDirectory() && entry.name.startsWith("@sf") && !existsSync(join(h, entry.name))) { return true; } @@ -246,13 +246,13 @@ test("hasMissingWorkspaceScopes detects pnpm layout", (t) => { assert.equal(hasMissing(hoisted, internal), false, "npm-style: no missing scopes"); - // pnpm-style: @gsd-build only in internal - mkdirSync(join(internal, "@gsd-build"), { recursive: true }); - assert.equal(hasMissing(hoisted, internal), true, "pnpm-style: @gsd-build missing from 
hoisted"); + // pnpm-style: @sf-build only in internal + mkdirSync(join(internal, "@sf-build"), { recursive: true }); + assert.equal(hasMissing(hoisted, internal), true, "pnpm-style: @sf-build missing from hoisted"); }); test("merged node_modules marker uses fingerprint including directory entries", (t) => { - const tmp = mkdtempSync(join(tmpdir(), "gsd-pnpm-marker-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-pnpm-marker-")); t.after(() => rmSync(tmp, { recursive: true, force: true })); // Simulate two directories with known entries @@ -260,7 +260,7 @@ test("merged node_modules marker uses fingerprint including directory entries", const internal = join(tmp, "internal"); mkdirSync(join(hoisted, "yaml"), { recursive: true }); mkdirSync(join(hoisted, "@sinclair"), { recursive: true }); - mkdirSync(join(internal, "@gsd"), { recursive: true }); + mkdirSync(join(internal, "@sf"), { recursive: true }); // Build fingerprint the same way the production code does const h = readdirSync(hoisted).sort().join(","); @@ -270,14 +270,14 @@ test("merged node_modules marker uses fingerprint including directory entries", const agentNodeModules = join(tmp, "agent", "node_modules"); mkdirSync(agentNodeModules, { recursive: true }); - const marker = join(agentNodeModules, ".gsd-merged"); + const marker = join(agentNodeModules, ".sf-merged"); writeFileSync(marker, fingerprint); // Verify fingerprint contains all three components const stored = readFileSync(marker, "utf-8").trim(); assert.ok(stored.includes(fakePackageRoot), "fingerprint includes packageRoot"); assert.ok(stored.includes("@sinclair"), "fingerprint includes hoisted entries"); - assert.ok(stored.includes("@gsd"), "fingerprint includes internal entries"); + assert.ok(stored.includes("@sf"), "fingerprint includes internal entries"); // Verify fingerprint changes when a new package is added mkdirSync(join(hoisted, "new-package"), { recursive: true }); diff --git a/src/tests/parse-cli-args.test.ts 
b/src/tests/parse-cli-args.test.ts index 6b043f8b8..a7e56b2fb 100644 --- a/src/tests/parse-cli-args.test.ts +++ b/src/tests/parse-cli-args.test.ts @@ -6,7 +6,7 @@ import assert from 'node:assert/strict' import { parseCliArgs } from '../cli-web-branch.ts' function parse(...args: string[]) { - return parseCliArgs(['node', 'gsd', ...args]) + return parseCliArgs(['node', 'sf', ...args]) } describe('parseCliArgs — modes', () => { diff --git a/src/tests/provider.test.ts b/src/tests/provider.test.ts index 81a4848f9..147cbe497 100644 --- a/src/tests/provider.test.ts +++ b/src/tests/provider.test.ts @@ -42,7 +42,7 @@ function withEnv( } function makeTmpAuth(data: Record<string, unknown> = {}): { authPath: string; cleanup: () => void } { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-provider-test-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-provider-test-')) const authPath = join(tmp, 'auth.json') writeFileSync(authPath, JSON.stringify(data)) return { authPath, cleanup: () => rmSync(tmp, { recursive: true, force: true }) } diff --git a/src/tests/resource-loader-conflicts.test.ts b/src/tests/resource-loader-conflicts.test.ts index 19d13973e..131ce853f 100644 --- a/src/tests/resource-loader-conflicts.test.ts +++ b/src/tests/resource-loader-conflicts.test.ts @@ -97,18 +97,18 @@ function makeExtension( // ─── extractExtensionKey ───────────────────────────────────────────────────── describe("extractExtensionKey", () => { - const extensionsDir = "/home/user/.gsd/agent/extensions"; + const extensionsDir = "/home/user/.sf/agent/extensions"; it("extracts directory name from a nested extension path", () => { assert.equal( - extractExtensionKey("/home/user/.gsd/agent/extensions/mcp-client/index.js", extensionsDir), + extractExtensionKey("/home/user/.sf/agent/extensions/mcp-client/index.js", extensionsDir), "mcp-client", ); }); it("strips .ts/.js suffix from flat extension files", () => { assert.equal( - extractExtensionKey("/home/user/.gsd/agent/extensions/my-ext.ts", 
extensionsDir), + extractExtensionKey("/home/user/.sf/agent/extensions/my-ext.ts", extensionsDir), "my-ext", ); }); @@ -124,7 +124,7 @@ describe("extractExtensionKey", () => { // ─── detectExtensionConflicts ───────────────────────────────────────────────── describe("detectExtensionConflicts", () => { - const extensionsDir = "/home/user/.gsd/agent/extensions"; + const extensionsDir = "/home/user/.sf/agent/extensions"; it("returns no conflicts when extensions have unique tool names", () => { const extensions = [ @@ -208,12 +208,12 @@ describe("detectExtensionConflicts", () => { ); }); - it("reproduces issue #2075: bundled extension under /.gsd/agent/extensions/ was never identified as built-in", () => { + it("reproduces issue #2075: bundled extension under /.sf/agent/extensions/ was never identified as built-in", () => { // Before the fix, the isBuiltIn check used path heuristics that excluded - // paths containing /.gsd/agent/extensions/, so bundled extensions placed + // paths containing /.sf/agent/extensions/, so bundled extensions placed // there by initResources() could never be recognized as built-in. 
- const bundledPath = "/home/user/.gsd/agent/extensions/mcp-client/index.js"; - const userPath = "/home/user/.gsd/agent/extensions/mcporter/index.ts"; + const bundledPath = "/home/user/.sf/agent/extensions/mcp-client/index.js"; + const userPath = "/home/user/.sf/agent/extensions/mcporter/index.ts"; const extensions = [ makeExtension(bundledPath, { tools: ["mcp_servers", "mcp_discover", "mcp_call"] }), @@ -221,7 +221,7 @@ describe("detectExtensionConflicts", () => { ]; const bundledKeys = new Set(["mcp-client"]); - const conflicts = detectExtensionConflicts(extensions, bundledKeys, "/home/user/.gsd/agent/extensions"); + const conflicts = detectExtensionConflicts(extensions, bundledKeys, "/home/user/.sf/agent/extensions"); // All three conflicting tools should include the supersedes hint assert.equal(conflicts.length, 3); diff --git a/src/tests/resource-loader.test.ts b/src/tests/resource-loader.test.ts index 9e353b80d..336adba6c 100644 --- a/src/tests/resource-loader.test.ts +++ b/src/tests/resource-loader.test.ts @@ -44,22 +44,22 @@ test("getExtensionKey normalizes top-level .ts and .js entry names to the same k "ask-user-questions", ); assert.equal( - getExtensionKey("/tmp/extensions/gsd/index.js", extensionsDir), - "gsd", + getExtensionKey("/tmp/extensions/sf/index.js", extensionsDir), + "sf", ); }); test("hasStaleCompiledExtensionSiblings only flags top-level .ts/.js sibling pairs", async (t) => { const { hasStaleCompiledExtensionSiblings } = await import("../resource-loader.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-resource-loader-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-resource-loader-")); const extensionsDir = join(tmp, "extensions"); const bundledDir = join(tmp, "bundled"); t.after(() => { rmSync(tmp, { recursive: true, force: true }); }); mkdirSync(bundledDir, { recursive: true }); - mkdirSync(join(extensionsDir, "gsd"), { recursive: true }); - writeFileSync(join(extensionsDir, "gsd", "index.ts"), "export {};\n"); + 
mkdirSync(join(extensionsDir, "sf"), { recursive: true }); + writeFileSync(join(extensionsDir, "sf", "index.ts"), "export {};\n"); assert.equal(hasStaleCompiledExtensionSiblings(extensionsDir, bundledDir), false); writeFileSync(join(bundledDir, "ask-user-questions.js"), "export {};\n"); @@ -74,9 +74,9 @@ test("hasStaleCompiledExtensionSiblings only flags top-level .ts/.js sibling pai }); test("buildResourceLoader excludes duplicate top-level pi extensions when bundled resources use .js", async (t) => { - const tmp = mkdtempSync(join(tmpdir(), "gsd-resource-loader-home-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-resource-loader-home-")); const piExtensionsDir = join(tmp, ".pi", "agent", "extensions"); - const fakeAgentDir = join(tmp, ".gsd", "agent"); + const fakeAgentDir = join(tmp, ".sf", "agent"); const restoreHomeEnv = overrideHomeEnv(tmp); t.after(() => { @@ -106,7 +106,7 @@ test("buildResourceLoader excludes duplicate top-level pi extensions when bundle test("initResources manifest tracks all bundled extension subdirectories including remote-questions (#2367)", async () => { const { initResources } = await import("../resource-loader.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-resource-loader-manifest-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-resource-loader-manifest-")); const fakeAgentDir = join(tmp, "agent"); try { @@ -139,7 +139,7 @@ test("initResources manifest tracks all bundled extension subdirectories includi test("initResources prunes stale top-level extension siblings next to bundled compiled extensions", async (t) => { const { initResources } = await import("../resource-loader.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-resource-loader-sync-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-resource-loader-sync-")); const fakeAgentDir = join(tmp, "agent"); const bundledTsPath = join(fakeAgentDir, "extensions", "ask-user-questions.ts"); const bundledJsPath = join(fakeAgentDir, "extensions", "ask-user-questions.js"); @@ 
-183,7 +183,7 @@ test("initResources prunes stale top-level extension siblings next to bundled co test("pruneRemovedBundledExtensions removes stale subdirectory extensions not in current bundle", async () => { const { initResources } = await import("../resource-loader.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-resource-loader-prune-dirs-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-resource-loader-prune-dirs-")); const fakeAgentDir = join(tmp, "agent"); try { @@ -192,7 +192,7 @@ test("pruneRemovedBundledExtensions removes stale subdirectory extensions not in // Simulate a stale subdirectory extension left from a previous SF version. // This mirrors the mcporter scenario: it was bundled before, synced to - // ~/.gsd/agent/extensions/, then removed from the bundle in a newer version. + // ~/.sf/agent/extensions/, then removed from the bundle in a newer version. const staleExtDir = join(fakeAgentDir, "extensions", "mcporter"); mkdirSync(staleExtDir, { recursive: true }); writeFileSync(join(staleExtDir, "index.ts"), 'export default { name: "mcporter" };\n'); diff --git a/src/tests/resource-sync-staleness.test.ts b/src/tests/resource-sync-staleness.test.ts index 56681018d..8671978af 100644 --- a/src/tests/resource-sync-staleness.test.ts +++ b/src/tests/resource-sync-staleness.test.ts @@ -9,7 +9,7 @@ import { tmpdir } from "node:os"; * * Validates that initResources() re-syncs when bundled resources change * within the same version (the bug that caused stale subagent extensions - * with a broken import to persist at ~/.gsd/agent/extensions/). + * with a broken import to persist at ~/.sf/agent/extensions/). 
*/ test("resource manifest includes contentHash", async (t) => { @@ -22,7 +22,7 @@ test("resource manifest includes contentHash", async (t) => { contentHash: "abc123def456", }; - const tmpDir = mkdtempSync(join(tmpdir(), "gsd-resource-test-")); + const tmpDir = mkdtempSync(join(tmpdir(), "sf-resource-test-")); const manifestPath = join(tmpDir, "managed-resources.json"); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); diff --git a/src/tests/rtk-execution-seams.test.ts b/src/tests/rtk-execution-seams.test.ts index 7fa4470bc..6822c42da 100644 --- a/src/tests/rtk-execution-seams.test.ts +++ b/src/tests/rtk-execution-seams.test.ts @@ -81,7 +81,7 @@ function withFakeRtk<T>(mapping: Record<string, string | { status?: number; stdo function withManagedFakeRtk<T>(mapping: Record<string, string | { status?: number; stdout?: string }>, run: (env: NodeJS.ProcessEnv, managedPath: string) => Promise<T> | T): Promise<T> | T { const fake = createFakeRtk(mapping); - const managedHome = mkdtempSync(join(tmpdir(), "gsd-rtk-managed-home-")); + const managedHome = mkdtempSync(join(tmpdir(), "sf-rtk-managed-home-")); const managedDir = join(managedHome, "agent", "bin"); const managedPath = join(managedDir, process.platform === "win32" ? 
"rtk.cmd" : "rtk"); mkdirSync(managedDir, { recursive: true }); diff --git a/src/tests/rtk-session-stats.test.ts b/src/tests/rtk-session-stats.test.ts index f1a8b4604..75fc18a89 100644 --- a/src/tests/rtk-session-stats.test.ts +++ b/src/tests/rtk-session-stats.test.ts @@ -47,8 +47,8 @@ function summary(totalCommands: number, totalInput: number, totalOutput: number, } test("RTK session savings diff from a persisted baseline", () => { - const basePath = mkdtempSync(join(tmpdir(), "gsd-rtk-session-stats-")); - mkdirSync(join(basePath, ".gsd", "runtime"), { recursive: true }); + const basePath = mkdtempSync(join(tmpdir(), "sf-rtk-session-stats-")); + mkdirSync(join(basePath, ".sf", "runtime"), { recursive: true }); const first = createFakeRtk({ "gain --all --format json": { stdout: summary(10, 1000, 600, 400) }, @@ -80,8 +80,8 @@ test("RTK session savings diff from a persisted baseline", () => { }); test("RTK session savings baseline resets cleanly when tracking totals go backwards", () => { - const basePath = mkdtempSync(join(tmpdir(), "gsd-rtk-session-reset-")); - mkdirSync(join(basePath, ".gsd", "runtime"), { recursive: true }); + const basePath = mkdtempSync(join(tmpdir(), "sf-rtk-session-reset-")); + mkdirSync(join(basePath, ".sf", "runtime"), { recursive: true }); const first = createFakeRtk({ "gain --all --format json": { stdout: summary(8, 800, 500, 300) }, @@ -110,13 +110,13 @@ test("RTK session savings baseline resets cleanly when tracking totals go backwa }); test("RTK session stats fall back to the managed RTK path when SF_RTK_PATH is unset", () => { - const basePath = mkdtempSync(join(tmpdir(), "gsd-rtk-session-managed-")); - mkdirSync(join(basePath, ".gsd", "runtime"), { recursive: true }); + const basePath = mkdtempSync(join(tmpdir(), "sf-rtk-session-managed-")); + mkdirSync(join(basePath, ".sf", "runtime"), { recursive: true }); const fake = createFakeRtk({ "gain --all --format json": { stdout: summary(6, 900, 500, 400) }, }); - const managedHome = 
mkdtempSync(join(tmpdir(), "gsd-rtk-home-")); + const managedHome = mkdtempSync(join(tmpdir(), "sf-rtk-home-")); const managedDir = join(managedHome, "agent", "bin"); const managedPath = join(managedDir, process.platform === "win32" ? "rtk.cmd" : "rtk"); mkdirSync(managedDir, { recursive: true }); @@ -186,8 +186,8 @@ test("formatRtkSavingsLabel produces a compact footer string", () => { }); test("clearRtkSessionBaseline removes a stored session entry", () => { - const basePath = mkdtempSync(join(tmpdir(), "gsd-rtk-session-clear-")); - mkdirSync(join(basePath, ".gsd", "runtime"), { recursive: true }); + const basePath = mkdtempSync(join(tmpdir(), "sf-rtk-session-clear-")); + mkdirSync(join(basePath, ".sf", "runtime"), { recursive: true }); const fake = createFakeRtk({ "gain --all --format json": { stdout: summary(3, 300, 200, 100) }, }); diff --git a/src/tests/rtk-test-utils.ts b/src/tests/rtk-test-utils.ts index bf3526081..2effd909f 100644 --- a/src/tests/rtk-test-utils.ts +++ b/src/tests/rtk-test-utils.ts @@ -9,7 +9,7 @@ function shellQuote(value: string): string { } export function createFakeRtk(mapping: Record<string, FakeRtkResponse>): { path: string; cleanup: () => void } { - const dir = mkdtempSync(join(tmpdir(), "gsd-fake-rtk-")); + const dir = mkdtempSync(join(tmpdir(), "sf-fake-rtk-")); const payload = JSON.stringify(mapping); const jsSource = `#!/usr/bin/env node diff --git a/src/tests/rtk.test.ts b/src/tests/rtk.test.ts index d67709d68..9dedc4459 100644 --- a/src/tests/rtk.test.ts +++ b/src/tests/rtk.test.ts @@ -48,10 +48,10 @@ test("resolveRtkAssetName maps supported release assets correctly", () => { test("prependPathEntry preserves the original PATH key casing and avoids duplicates", () => { const env: NodeJS.ProcessEnv = { Path: "/usr/bin" }; - prependPathEntry(env, "/tmp/gsd-bin"); - assert.equal(env.Path, `/tmp/gsd-bin${delimiter}${"/usr/bin"}`); - prependPathEntry(env, "/tmp/gsd-bin"); - assert.equal(env.Path, 
`/tmp/gsd-bin${delimiter}${"/usr/bin"}`); + prependPathEntry(env, "/tmp/sf-bin"); + assert.equal(env.Path, `/tmp/sf-bin${delimiter}${"/usr/bin"}`); + prependPathEntry(env, "/tmp/sf-bin"); + assert.equal(env.Path, `/tmp/sf-bin${delimiter}${"/usr/bin"}`); }); test("buildRtkEnv prepends the managed bin dir and disables telemetry", () => { @@ -93,7 +93,7 @@ test("rewriteCommandWithRtk respects the disable flag", () => { test("rewriteCommandWithRtk falls back to the managed RTK path when SF_RTK_PATH is unset", () => { const fake = createFakeRtk({ "git status": "rtk git status" }); - const managedHome = mkdtempSync(join(tmpdir(), "gsd-rtk-managed-home-")); + const managedHome = mkdtempSync(join(tmpdir(), "sf-rtk-managed-home-")); const managedDir = join(managedHome, "agent", "bin"); const managedPath = join(managedDir, process.platform === "win32" ? "rtk.cmd" : "rtk"); diff --git a/src/tests/search-provider-command.test.ts b/src/tests/search-provider-command.test.ts index 29cf65972..b246a1928 100644 --- a/src/tests/search-provider-command.test.ts +++ b/src/tests/search-provider-command.test.ts @@ -45,7 +45,7 @@ function withEnv( } function makeTmpAuth(data: Record<string, unknown> = {}): { authPath: string; cleanup: () => void } { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-cmd-test-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-cmd-test-')) const authPath = join(tmp, 'auth.json') writeFileSync(authPath, JSON.stringify(data)) return { authPath, cleanup: () => rmSync(tmp, { recursive: true, force: true }) } diff --git a/src/tests/tool-bootstrap.test.ts b/src/tests/tool-bootstrap.test.ts index 8a98fd068..39bc853c2 100644 --- a/src/tests/tool-bootstrap.test.ts +++ b/src/tests/tool-bootstrap.test.ts @@ -17,7 +17,7 @@ function makeExecutable(dir: string, name: string, content = "#!/bin/sh\nexit 0\ } test("resolveToolFromPath finds fd via fdfind fallback", (t) => { - const tmp = mkdtempSync(join(tmpdir(), "gsd-tool-bootstrap-resolve-")); + const tmp = 
mkdtempSync(join(tmpdir(), "sf-tool-bootstrap-resolve-")); t.after(() => { rmSync(tmp, { recursive: true, force: true }); }); makeExecutable(tmp, "fdfind"); @@ -26,7 +26,7 @@ test("resolveToolFromPath finds fd via fdfind fallback", (t) => { }); test("ensureManagedTools provisions fd and rg into managed bin dir", (t) => { - const tmp = mkdtempSync(join(tmpdir(), "gsd-tool-bootstrap-provision-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-tool-bootstrap-provision-")); const sourceBin = join(tmp, "source-bin"); const targetBin = join(tmp, "target-bin"); @@ -48,7 +48,7 @@ test("ensureManagedTools provisions fd and rg into managed bin dir", (t) => { }); test("ensureManagedTools copies executable when symlink target already exists as a broken link", (t) => { - const tmp = mkdtempSync(join(tmpdir(), "gsd-tool-bootstrap-copy-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-tool-bootstrap-copy-")); const sourceBin = join(tmp, "source-bin"); const targetBin = join(tmp, "target-bin"); const targetFd = join(targetBin, FD_TARGET); diff --git a/src/tests/ttsr-rule-loader.test.ts b/src/tests/ttsr-rule-loader.test.ts index 272397522..eee8257d2 100644 --- a/src/tests/ttsr-rule-loader.test.ts +++ b/src/tests/ttsr-rule-loader.test.ts @@ -15,8 +15,8 @@ import { loadRules } from '../../src/resources/extensions/ttsr/index.js' function makeTmpProject(): { cwd: string; globalDir: string; projectDir: string; cleanup: () => void } { const cwd = mkdtempSync(join(tmpdir(), 'ttsr-loader-test-')) - const globalDir = join(cwd, '.gsd-global', 'agent', 'rules') - const projectDir = join(cwd, '.gsd', 'rules') + const globalDir = join(cwd, '.sf-global', 'agent', 'rules') + const projectDir = join(cwd, '.sf', 'rules') return { cwd, globalDir, projectDir, cleanup: () => rmSync(cwd, { recursive: true, force: true }) } } @@ -27,13 +27,13 @@ function writeRule(dir: string, name: string, frontmatter: string, body: string) // loadRules uses homedir() for global dir — we can't easily override that, // 
so we test the project-local path and the merge logic by testing with -// a cwd that has .gsd/rules/. +// a cwd that has .sf/rules/. // ═══════════════════════════════════════════════════════════════════════════ // Project-local rule loading // ═══════════════════════════════════════════════════════════════════════════ -test('loads rule from project .gsd/rules/', (t) => { +test('loads rule from project .sf/rules/', (t) => { const { cwd, projectDir, cleanup } = makeTmpProject() t.after(() => { cleanup() }); @@ -81,11 +81,11 @@ test('skips rules with no condition', (t) => { assert.equal(rules.filter(r => r.name === 'no-condition').length, 0) }) -test('returns empty array when .gsd/rules/ does not exist', (t) => { +test('returns empty array when .sf/rules/ does not exist', (t) => { const { cwd, cleanup } = makeTmpProject() t.after(() => { cleanup() }); - // cwd exists but no .gsd/rules/ dir + // cwd exists but no .sf/rules/ dir const rules = loadRules(cwd) // May include global rules from homedir — just verify no crash assert.ok(Array.isArray(rules)) diff --git a/src/tests/tui-content-cursor-desync.test.ts b/src/tests/tui-content-cursor-desync.test.ts index 66565f337..9acd118b7 100644 --- a/src/tests/tui-content-cursor-desync.test.ts +++ b/src/tests/tui-content-cursor-desync.test.ts @@ -102,7 +102,7 @@ describe("TUI cursor tracking regression (#3764)", () => { }); it("handles editor-to-selector swap without cursor corruption", () => { - // Simulates /gsd prefs: editor with CURSOR_MARKER is replaced by + // Simulates /sf prefs: editor with CURSOR_MARKER is replaced by // a selector component (no CURSOR_MARKER) that has different line count. 
const terminal = new MockTTYTerminal(); const tui = new TUI(terminal, false); @@ -227,7 +227,7 @@ describe("TUI cursor tracking regression (#3764)", () => { }); it("handles input component swap (prefs wizard text input)", () => { - // Simulates /gsd prefs input step: selector replaced by text input with cursor + // Simulates /sf prefs input step: selector replaced by text input with cursor const terminal = new MockTTYTerminal(); const tui = new TUI(terminal, false); diff --git a/src/tests/tui-non-tty-render-loop.test.ts b/src/tests/tui-non-tty-render-loop.test.ts index 4295637dc..e627348a2 100644 --- a/src/tests/tui-non-tty-render-loop.test.ts +++ b/src/tests/tui-non-tty-render-loop.test.ts @@ -1,7 +1,7 @@ /** * Test: RPC bridge TUI render loop must not burn CPU on non-TTY stdout. * - * When gsd is spawned as an RPC bridge child process, stdout is a pipe + * When sf is spawned as an RPC bridge child process, stdout is a pipe * (process.stdout.isTTY === undefined). The TUI render loop must not * start in that scenario — otherwise it runs at ~4,600 renders/second * consuming 500%+ CPU doing nothing useful. 
diff --git a/src/tests/update-check.test.ts b/src/tests/update-check.test.ts index cd57599be..719aa299e 100644 --- a/src/tests/update-check.test.ts +++ b/src/tests/update-check.test.ts @@ -42,7 +42,7 @@ test('compareSemver handles versions with different segment counts', () => { // --------------------------------------------------------------------------- test('readUpdateCache returns null for nonexistent file', (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-cache-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-update-cache-')) t.after(() => { rmSync(tmp, { recursive: true, force: true }) }); const result = readUpdateCache(join(tmp, 'nonexistent')) @@ -50,7 +50,7 @@ test('readUpdateCache returns null for nonexistent file', (t) => { }) test('readUpdateCache returns null for malformed JSON', (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-cache-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-update-cache-')) t.after(() => { rmSync(tmp, { recursive: true, force: true }) }); const cachePath = join(tmp, '.update-check') @@ -60,7 +60,7 @@ test('readUpdateCache returns null for malformed JSON', (t) => { }) test('writeUpdateCache + readUpdateCache round-trips correctly', (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-cache-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-update-cache-')) t.after(() => { rmSync(tmp, { recursive: true, force: true }) }); const cachePath = join(tmp, '.update-check') @@ -71,7 +71,7 @@ test('writeUpdateCache + readUpdateCache round-trips correctly', (t) => { }) test('writeUpdateCache creates parent directories', (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-cache-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-update-cache-')) t.after(() => { rmSync(tmp, { recursive: true, force: true }) }); const cachePath = join(tmp, 'nested', 'dir', '.update-check') @@ -101,7 +101,7 @@ function startMockRegistry(responseBody: object, statusCode = 200): Promise<{ ur } test('checkForUpdates calls 
onUpdate when newer version is available', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-update-')) const registry = await startMockRegistry({ version: '99.0.0' }) t.after(async () => { await registry.close() @@ -131,7 +131,7 @@ test('checkForUpdates calls onUpdate when newer version is available', async (t) }) test('checkForUpdates does not call onUpdate when already on latest', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-update-')) const registry = await startMockRegistry({ version: '1.0.0' }) t.after(async () => { await registry.close() @@ -153,7 +153,7 @@ test('checkForUpdates does not call onUpdate when already on latest', async (t) }) test('checkForUpdates does not call onUpdate when current is ahead', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-update-')) const registry = await startMockRegistry({ version: '1.0.0' }) t.after(async () => { await registry.close() @@ -175,7 +175,7 @@ test('checkForUpdates does not call onUpdate when current is ahead', async (t) = }) test('checkForUpdates writes cache after successful fetch', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-update-')) const cachePath = join(tmp, '.update-check') const registry = await startMockRegistry({ version: '5.0.0' }) t.after(async () => { @@ -199,7 +199,7 @@ test('checkForUpdates writes cache after successful fetch', async (t) => { }) test('checkForUpdates uses cache and skips fetch when checked recently', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-update-')) const cachePath = join(tmp, '.update-check') // Write a fresh cache entry writeUpdateCache({ lastCheck: Date.now(), latestVersion: '10.0.0' }, cachePath) @@ -227,7 +227,7 @@ 
test('checkForUpdates uses cache and skips fetch when checked recently', async ( }) test('checkForUpdates skips notification when cache is fresh and versions match', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-update-')) const cachePath = join(tmp, '.update-check') writeUpdateCache({ lastCheck: Date.now(), latestVersion: '1.0.0' }, cachePath) @@ -247,7 +247,7 @@ test('checkForUpdates skips notification when cache is fresh and versions match' }) test('checkForUpdates handles server error gracefully', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-update-')) const registry = await startMockRegistry({}, 500) t.after(async () => { await registry.close() @@ -273,7 +273,7 @@ test('checkForUpdates handles network timeout gracefully', async (t) => { const server = createServer(() => { /* intentionally never respond */ }) await new Promise<void>((resolve) => server.listen(0, '127.0.0.1', resolve)) const addr = server.address() as { port: number } - const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-update-')) t.after(async () => { await new Promise<void>((r) => server.close(() => r())) @@ -295,7 +295,7 @@ test('checkForUpdates handles network timeout gracefully', async (t) => { }) test('checkForUpdates handles missing version field in response', async (t) => { - const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-')) + const tmp = mkdtempSync(join(tmpdir(), 'sf-update-')) const registry = await startMockRegistry({ name: 'sf-run' }) // no version field t.after(async () => { await registry.close() diff --git a/src/tests/update-cmd-diagnostics.test.ts b/src/tests/update-cmd-diagnostics.test.ts index 04786aac5..9ca0c579b 100644 --- a/src/tests/update-cmd-diagnostics.test.ts +++ b/src/tests/update-cmd-diagnostics.test.ts @@ -1,7 +1,7 @@ /** - * Regression test for #3445: gsd update must 
print both current and latest + * Regression test for #3445: sf update must print both current and latest * versions for diagnostics, and bypass npm cache. - * Regression test for #4145: gsd update must use bun when installed via Bun. + * Regression test for #4145: sf update must use bun when installed via Bun. */ import { test } from "node:test"; import assert from "node:assert/strict"; @@ -21,7 +21,7 @@ test("update-cmd prints latest version before comparison (#3445)", () => { test("update commands use the registry fetch helper instead of npm view (#3806)", () => { const src = readFileSync(join(__dirname, "..", "update-cmd.ts"), "utf-8"); - const handlerSrc = readFileSync(join(__dirname, "..", "resources", "extensions", "gsd", "commands-handlers.ts"), "utf-8"); + const handlerSrc = readFileSync(join(__dirname, "..", "resources", "extensions", "sf", "commands-handlers.ts"), "utf-8"); assert.ok( src.includes("fetchLatestVersionFromRegistry"), "update-cmd should use the shared registry fetch helper", @@ -29,9 +29,9 @@ test("update commands use the registry fetch helper instead of npm view (#3806)" assert.ok(!src.includes("npm view "), "update-cmd should no longer shell out to npm view"); assert.ok( handlerSrc.includes("fetchLatestVersionForCommand"), - "/gsd update should fetch the latest version through a registry helper too", + "/sf update should fetch the latest version through a registry helper too", ); - assert.ok(!handlerSrc.includes("npm view "), "/gsd update should no longer shell out to npm view"); + assert.ok(!handlerSrc.includes("npm view "), "/sf update should no longer shell out to npm view"); }); test("update-check exports resolveInstallCommand (#4145)", async () => { @@ -76,9 +76,9 @@ test("update-cmd uses resolveInstallCommand instead of hardcoded npm (#4145)", ( }); test("commands-handlers uses resolveInstallCommand instead of hardcoded npm (#4145)", () => { - const handlerSrc = readFileSync(join(__dirname, "..", "resources", "extensions", "gsd", 
"commands-handlers.ts"), "utf-8"); + const handlerSrc = readFileSync(join(__dirname, "..", "resources", "extensions", "sf", "commands-handlers.ts"), "utf-8"); assert.ok( handlerSrc.includes("resolveInstallCommand"), - "/gsd update handler should use resolveInstallCommand for package manager detection", + "/sf update handler should use resolveInstallCommand for package manager detection", ); }); diff --git a/src/tests/welcome-screen.test.ts b/src/tests/welcome-screen.test.ts index 35d457b46..4acbe8f48 100644 --- a/src/tests/welcome-screen.test.ts +++ b/src/tests/welcome-screen.test.ts @@ -48,7 +48,7 @@ test('renders model and provider', () => { test('renders cwd hint', () => { const out = strip(capture({ version: '1.0.0' })) - assert.ok(out.includes('/gsd to begin'), 'hint line missing') + assert.ok(out.includes('/sf to begin'), 'hint line missing') }) test('skips when not a TTY', (t) => { diff --git a/src/tests/windows-portability.test.ts b/src/tests/windows-portability.test.ts index 30dbde0e5..fc150d753 100644 --- a/src/tests/windows-portability.test.ts +++ b/src/tests/windows-portability.test.ts @@ -9,7 +9,7 @@ import { encodeCwd } from "../resources/extensions/subagent/isolation.ts"; function makeTempDir(prefix: string): string { const dir = path.join( os.tmpdir(), - `gsd-windows-portability-${prefix}-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`, + `sf-windows-portability-${prefix}-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`, ); mkdirSync(dir, { recursive: true }); return dir; @@ -55,7 +55,7 @@ test("encodeCwd produces a filesystem-safe token for Windows paths", () => { test("Windows launch points use shell-safe shims", () => { const gsdClient = readFileSync( - path.join(process.cwd(), "vscode-extension", "src", "gsd-client.ts"), + path.join(process.cwd(), "vscode-extension", "src", "sf-client.ts"), "utf8", ); const updateService = readFileSync( @@ -63,7 +63,7 @@ test("Windows launch points use shell-safe shims", () => { "utf8", ); const 
preExecution = readFileSync( - path.join(process.cwd(), "src", "resources", "extensions", "gsd", "pre-execution-checks.ts"), + path.join(process.cwd(), "src", "resources", "extensions", "sf", "pre-execution-checks.ts"), "utf8", ); const validatePack = readFileSync( diff --git a/src/tests/xterm-theme.test.ts b/src/tests/xterm-theme.test.ts index b3f419be3..f3b269075 100644 --- a/src/tests/xterm-theme.test.ts +++ b/src/tests/xterm-theme.test.ts @@ -42,11 +42,11 @@ test("light xterm palette keeps warning and ANSI white entries readable", () => test("terminal components share the central xterm theme helper", () => { const shellSource = readFileSync( - resolve(import.meta.dirname, "../../web/components/gsd/shell-terminal.tsx"), + resolve(import.meta.dirname, "../../web/components/sf/shell-terminal.tsx"), "utf8", ); const mainSource = readFileSync( - resolve(import.meta.dirname, "../../web/components/gsd/main-session-terminal.tsx"), + resolve(import.meta.dirname, "../../web/components/sf/main-session-terminal.tsx"), "utf8", ); diff --git a/src/update-check.ts b/src/update-check.ts index 568c019ed..7d437f454 100644 --- a/src/update-check.ts +++ b/src/update-check.ts @@ -85,7 +85,7 @@ function printUpdateBanner(current: string, latest: string): void { const installCmd = resolveInstallCommand('sf-run') process.stderr.write( ` ${chalk.yellow('Update available:')} ${chalk.dim(`v${current}`)} → ${chalk.bold(`v${latest}`)}\n` + - ` ${chalk.dim('Run')} ${installCmd} ${chalk.dim('or')} /gsd update ${chalk.dim('to upgrade')}\n\n`, + ` ${chalk.dim('Run')} ${installCmd} ${chalk.dim('or')} /sf update ${chalk.dim('to upgrade')}\n\n`, ) } @@ -228,7 +228,7 @@ export async function checkAndPromptForUpdates(options: UpdateCheckOptions = {}) process.stderr.write(`\n ${chalk.yellow(`Update failed. You can run: ${installCmd}`)}\n\n`) } } else { - process.stderr.write(` ${chalk.dim('Skipped. 
Run')} gsd update ${chalk.dim('anytime to upgrade.')}\n\n`) + process.stderr.write(` ${chalk.dim('Skipped. Run')} sf update ${chalk.dim('anytime to upgrade.')}\n\n`) } return false diff --git a/src/web-mode.ts b/src/web-mode.ts index db26e22a6..51cf83327 100644 --- a/src/web-mode.ts +++ b/src/web-mode.ts @@ -523,7 +523,7 @@ async function waitForBootReady(url: string, timeoutMs = 180_000, stderr?: Writa /** * If a previous web server instance is registered for the same `cwd`, attempt * to kill it and remove its registry entry so the new launch can bind the port - * cleanly. This handles the "orphan process" scenario where a prior `gsd --web` + * cleanly. This handles the "orphan process" scenario where a prior `sf --web` * was terminated without clean shutdown (e.g. terminal closed). */ function cleanupStaleInstance(cwd: string, stderr: WritableLike, registryPath?: string): void { @@ -577,7 +577,7 @@ export async function launchWebMode( stderr.write(`[forge] Starting web mode…\n`) // Kill any stale server instance for this project before reserving a port. - // This prevents EADDRINUSE when the previous `gsd --web` was terminated + // This prevents EADDRINUSE when the previous `sf --web` was terminated // without a clean shutdown (e.g. terminal closed, crash). 
cleanupStaleInstance(options.cwd, stderr, deps.registryPath) diff --git a/src/web/auto-dashboard-service.ts b/src/web/auto-dashboard-service.ts index 040887448..b7570d253 100644 --- a/src/web/auto-dashboard-service.ts +++ b/src/web/auto-dashboard-service.ts @@ -35,7 +35,7 @@ function fallbackAutoDashboardData(): AutoDashboardData { } function resolveTsLoaderPath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs"); + return join(packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs"); } export function collectTestOnlyFallbackAutoDashboardData(): AutoDashboardData { @@ -59,8 +59,8 @@ function isPidAlive(pid: number): boolean { * * The subprocess always starts with fresh module state (s.active === false), * so it can never report active/paused correctly. We check: - * 1. .gsd/auto.lock — if present and its PID is alive, auto IS running. - * 2. .gsd/runtime/paused-session.json — if present, auto IS paused. + * 1. .sf/auto.lock — if present and its PID is alive, auto IS running. + * 2. .sf/runtime/paused-session.json — if present, auto IS paused. * * See #2705. */ @@ -73,7 +73,7 @@ function reconcileWithDiskState( if (data.active || data.paused) return data; // Check for paused-session.json first (paused takes precedence). - const pausedPath = join(projectCwd, ".gsd", "runtime", "paused-session.json"); + const pausedPath = join(projectCwd, ".sf", "runtime", "paused-session.json"); if (checkExists(pausedPath)) { try { // Validate the file is readable JSON (not corrupt). @@ -85,7 +85,7 @@ function reconcileWithDiskState( } // Check for session lock with a live PID. 
- const lockPath = join(projectCwd, ".gsd", "auto.lock"); + const lockPath = join(projectCwd, ".sf", "auto.lock"); if (checkExists(lockPath)) { try { const lockData = JSON.parse(readFileSync(lockPath, "utf-8")) as { pid?: number }; diff --git a/src/web/bridge-service.ts b/src/web/bridge-service.ts index edfbdf93f..8b8ed9734 100644 --- a/src/web/bridge-service.ts +++ b/src/web/bridge-service.ts @@ -505,10 +505,10 @@ export interface GSDWorkspaceIndex { // ─── Project Detection ────────────────────────────────────────────────────── export type ProjectDetectionKind = - | "active-gsd" // .gsd with milestones — normal operation - | "empty-gsd" // .gsd exists but no milestones (freshly bootstrapped) - | "v1-legacy" // .planning/ exists, no .gsd - | "brownfield" // existing code (git, package.json, files) but no .gsd + | "active-sf" // .sf with milestones — normal operation + | "empty-sf" // .sf exists but no milestones (freshly bootstrapped) + | "v1-legacy" // .planning/ exists, no .sf + | "brownfield" // existing code (git, package.json, files) but no .sf | "blank"; // empty/near-empty folder export interface ProjectDetectionSignals { @@ -572,7 +572,7 @@ export function detectMonorepo(dirPath: string, checkExists?: (path: string) => export function detectProjectKind(projectCwd: string): ProjectDetection { const checkExists = getBridgeDeps().existsSync ?? 
existsSync; - const hasGsdFolder = checkExists(join(projectCwd, ".gsd")); + const hasGsdFolder = checkExists(join(projectCwd, ".sf")); const hasPlanningFolder = checkExists(join(projectCwd, ".planning")); const hasGitRepo = checkExists(join(projectCwd, ".git")); const hasPackageJson = checkExists(join(projectCwd, "package.json")); @@ -606,7 +606,7 @@ export function detectProjectKind(projectCwd: string): ProjectDetection { if (hasGsdFolder) { // Check if milestones exist - const milestonesDir = join(projectCwd, ".gsd", "milestones"); + const milestonesDir = join(projectCwd, ".sf", "milestones"); let hasMilestones = false; try { const dirs = readdirSync(milestonesDir, { withFileTypes: true }); @@ -614,7 +614,7 @@ export function detectProjectKind(projectCwd: string): ProjectDetection { } catch { // No milestones dir or can't read it } - kind = hasMilestones ? "active-gsd" : "empty-gsd"; + kind = hasMilestones ? "active-sf" : "empty-sf"; } else if (hasPlanningFolder) { kind = "v1-legacy"; } else if (hasPackageJson || hasCargo || hasGoMod || hasPyproject || fileCount > 2 || (hasGitRepo && fileCount > 0)) { diff --git a/src/web/captures-service.ts b/src/web/captures-service.ts index 3090591a6..f1c8d3dd8 100644 --- a/src/web/captures-service.ts +++ b/src/web/captures-service.ts @@ -11,7 +11,7 @@ const CAPTURES_MAX_BUFFER = 2 * 1024 * 1024 const CAPTURES_MODULE_ENV = "SF_CAPTURES_MODULE" function resolveTsLoaderPath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") + return join(packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs") } /** diff --git a/src/web/cleanup-service.ts b/src/web/cleanup-service.ts index f70762fc8..dd0d2c492 100644 --- a/src/web/cleanup-service.ts +++ b/src/web/cleanup-service.ts @@ -11,7 +11,7 @@ const CLEANUP_MAX_BUFFER = 2 * 1024 * 1024 const CLEANUP_MODULE_ENV = "SF_CLEANUP_MODULE" function resolveTsLoaderPath(packageRoot: string): string { - 
return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") + return join(packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs") } /** @@ -42,17 +42,17 @@ export async function collectCleanupData(projectCwdOverride?: string): Promise<C 'const basePath = process.env.SF_CLEANUP_BASE;', // Get all SF branches 'let branches = [];', - 'try { branches = mod.nativeBranchList(basePath, "gsd/*"); } catch {}', + 'try { branches = mod.nativeBranchList(basePath, "sf/*"); } catch {}', // Detect main branch and find which SF branches are merged 'let mainBranch = "main";', 'try { mainBranch = mod.nativeDetectMainBranch(basePath); } catch {}', 'let merged = [];', - 'try { merged = mod.nativeBranchListMerged(basePath, mainBranch, "gsd/*"); } catch {}', + 'try { merged = mod.nativeBranchListMerged(basePath, mainBranch, "sf/*"); } catch {}', 'const mergedSet = new Set(merged);', 'const branchList = branches.map(b => ({ name: b, merged: mergedSet.has(b) }));', // Get snapshot refs 'let refs = [];', - 'try { refs = mod.nativeForEachRef(basePath, "refs/gsd/snapshots/"); } catch {}', + 'try { refs = mod.nativeForEachRef(basePath, "refs/sf/snapshots/"); } catch {}', 'const snapshotList = refs.map(r => {', ' const parts = r.split(" ");', ' return { ref: parts[0] || r, date: parts.length > 1 ? parts.slice(1).join(" ") : "" };', diff --git a/src/web/cli-entry.ts b/src/web/cli-entry.ts index eef20e4ec..a8c561a4d 100644 --- a/src/web/cli-entry.ts +++ b/src/web/cli-entry.ts @@ -90,7 +90,7 @@ export function resolveGsdCliEntry(options: ResolveGsdCliEntryOptions): GsdCliEn const messageArgs = options.mode === "interactive" ? options.messages ?? 
[] : []; const sourceEntry = join(options.packageRoot, "src", "loader.ts"); - const resolveTsLoader = join(options.packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs"); + const resolveTsLoader = join(options.packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs"); const builtEntry = join(options.packageRoot, "dist", "loader.js"); const sourceCliEntry = diff --git a/src/web/doctor-service.ts b/src/web/doctor-service.ts index 71dbad0f6..818d324dc 100644 --- a/src/web/doctor-service.ts +++ b/src/web/doctor-service.ts @@ -11,7 +11,7 @@ const DOCTOR_MAX_BUFFER = 2 * 1024 * 1024 const DOCTOR_MODULE_ENV = "SF_DOCTOR_MODULE" function resolveTsLoaderPath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") + return join(packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs") } function runDoctorChild( diff --git a/src/web/export-service.ts b/src/web/export-service.ts index b3594dd9d..255c329d8 100644 --- a/src/web/export-service.ts +++ b/src/web/export-service.ts @@ -11,12 +11,12 @@ const EXPORT_MAX_BUFFER = 4 * 1024 * 1024 const EXPORT_MODULE_ENV = "SF_EXPORT_MODULE" function resolveTsLoaderPath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") + return join(packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs") } /** * Generates an export file via a child process and returns its content. - * The child calls writeExportFile() which creates a timestamped file in .gsd/, + * The child calls writeExportFile() which creates a timestamped file in .sf/, * then reads its content back for browser display. 
*/ export async function collectExportData( diff --git a/src/web/forensics-service.ts b/src/web/forensics-service.ts index 5fd686c21..6ef3bff2a 100644 --- a/src/web/forensics-service.ts +++ b/src/web/forensics-service.ts @@ -11,7 +11,7 @@ const FORENSICS_MAX_BUFFER = 2 * 1024 * 1024 const FORENSICS_MODULE_ENV = "SF_FORENSICS_MODULE" function resolveTsLoaderPath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") + return join(packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs") } /** diff --git a/src/web/history-service.ts b/src/web/history-service.ts index 9f38ebc68..843254ee8 100644 --- a/src/web/history-service.ts +++ b/src/web/history-service.ts @@ -11,7 +11,7 @@ const HISTORY_MAX_BUFFER = 2 * 1024 * 1024 const HISTORY_MODULE_ENV = "SF_HISTORY_MODULE" function resolveTsLoaderPath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") + return join(packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs") } /** diff --git a/src/web/hooks-service.ts b/src/web/hooks-service.ts index bfedd2a90..9aeaa3cbb 100644 --- a/src/web/hooks-service.ts +++ b/src/web/hooks-service.ts @@ -11,7 +11,7 @@ const HOOKS_MAX_BUFFER = 512 * 1024 const HOOKS_MODULE_ENV = "SF_HOOKS_MODULE" function resolveTsLoaderPath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") + return join(packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs") } /** @@ -38,7 +38,7 @@ export async function collectHooksData(projectCwdOverride?: string): Promise<Hoo } // getHookStatus() internally calls resolvePostUnitHooks() and resolvePreDispatchHooks() - // from preferences.ts, which read from process.cwd()/.gsd/PREFERENCES.md. + // from preferences.ts, which read from process.cwd()/.sf/PREFERENCES.md. 
// We set cwd to projectCwd so preferences resolution finds the right files. // In a cold child process, cycleCounts is empty, so activeCycles will be {}. const script = [ diff --git a/src/web/inspect-service.ts b/src/web/inspect-service.ts index fc21cd460..d4ccd9ec3 100644 --- a/src/web/inspect-service.ts +++ b/src/web/inspect-service.ts @@ -5,15 +5,15 @@ import { resolveBridgeRuntimeConfig } from "./bridge-service.ts" import type { InspectData } from "../../web/lib/remaining-command-types.ts" /** - * Collects project inspection data by reading gsd-db.json directly. - * No child process needed — gsd-db.json is plain JSON with no .js imports. + * Collects project inspection data by reading sf-db.json directly. + * No child process needed — sf-db.json is plain JSON with no .js imports. */ export async function collectInspectData(projectCwdOverride?: string): Promise<InspectData> { const config = resolveBridgeRuntimeConfig(undefined, projectCwdOverride) const { projectCwd } = config - const gsdDir = join(projectCwd, ".gsd") - const dbPath = join(gsdDir, "gsd-db.json") + const gsdDir = join(projectCwd, ".sf") + const dbPath = join(gsdDir, "sf-db.json") let schemaVersion: number | null = null let decisions: Array<{ id: string; decision: string; choice: string; [k: string]: unknown }> = [] diff --git a/src/web/knowledge-service.ts b/src/web/knowledge-service.ts index acb13f99e..bca5b56b7 100644 --- a/src/web/knowledge-service.ts +++ b/src/web/knowledge-service.ts @@ -13,7 +13,7 @@ export async function collectKnowledgeData(projectCwdOverride?: string): Promise const config = resolveBridgeRuntimeConfig(undefined, projectCwdOverride) const { projectCwd } = config - const filePath = join(projectCwd, ".gsd", "KNOWLEDGE.md") + const filePath = join(projectCwd, ".sf", "KNOWLEDGE.md") if (!existsSync(filePath)) { return { entries: [], filePath, lastModified: null } diff --git a/src/web/notifications-service.ts b/src/web/notifications-service.ts index 9f7a113b6..a4182e55d 
100644 --- a/src/web/notifications-service.ts +++ b/src/web/notifications-service.ts @@ -26,7 +26,7 @@ const NOTIFICATIONS_MAX_BUFFER = 2 * 1024 * 1024 const NOTIFICATIONS_MODULE_ENV = "SF_NOTIFICATIONS_MODULE" function resolveTsLoaderPath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") + return join(packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs") } export async function collectNotificationsData(projectCwdOverride?: string): Promise<NotificationsData> { diff --git a/src/web/project-discovery-service.ts b/src/web/project-discovery-service.ts index 86c468de4..da70f22f2 100644 --- a/src/web/project-discovery-service.ts +++ b/src/web/project-discovery-service.ts @@ -26,7 +26,7 @@ export interface ProjectMetadata { const EXCLUDED_DIRS = new Set(["node_modules", ".git"]); /** - * Parse a project's `.gsd/STATE.md` for active milestone, slice, phase, + * Parse a project's `.sf/STATE.md` for active milestone, slice, phase, * and milestone completion tally. * * Returns `null` when the file is missing or unreadable. @@ -34,7 +34,7 @@ const EXCLUDED_DIRS = new Set(["node_modules", ".git"]); */ export function readProjectProgress(projectPath: string): ProjectProgressInfo | null { try { - const content = readFileSync(join(projectPath, ".gsd", "STATE.md"), "utf-8"); + const content = readFileSync(join(projectPath, ".sf", "STATE.md"), "utf-8"); const lines = content.split("\n"); let activeMilestone: string | null = null; @@ -86,7 +86,7 @@ export function discoverProjects(devRootPath: string, includeProgress?: boolean) // ── Check if the root itself is a project/monorepo ────────────── // If the devRoot has a .git repo AND looks like a monorepo (pnpm-workspace, // lerna, workspaces, etc.) or looks like a standalone project root (has - // .gsd, or is a recognizable project), return it as a single entry. + // .sf, or is a recognizable project), return it as a single entry. 
const rootDetection = detectProjectKind(devRootPath); if (rootDetection.signals.isMonorepo) { const stat = statSync(devRootPath); diff --git a/src/web/recovery-diagnostics-service.ts b/src/web/recovery-diagnostics-service.ts index 6a73e1047..877c677f2 100644 --- a/src/web/recovery-diagnostics-service.ts +++ b/src/web/recovery-diagnostics-service.ts @@ -206,12 +206,12 @@ function buildCommandSuggestions( } } - if (phase === "planning") add("/gsd", "Open SF planning") - if (phase === "executing" || phase === "summarizing") add("/gsd auto", "Resume SF auto mode") - if (activeScope) add(`/gsd doctor ${activeScope}`, "Inspect scoped doctor report") - if (activeScope) add(`/gsd doctor fix ${activeScope}`, "Apply scoped doctor fixes") - if (validationCount > 0 && activeScope) add(`/gsd doctor audit ${activeScope}`, "Audit validation diagnostics") - add("/gsd status", "Check current-project status") + if (phase === "planning") add("/sf", "Open SF planning") + if (phase === "executing" || phase === "summarizing") add("/sf auto", "Resume SF auto mode") + if (activeScope) add(`/sf doctor ${activeScope}`, "Inspect scoped doctor report") + if (activeScope) add(`/sf doctor fix ${activeScope}`, "Apply scoped doctor fixes") + if (validationCount > 0 && activeScope) add(`/sf doctor audit ${activeScope}`, "Audit validation diagnostics") + add("/sf status", "Check current-project status") return [...suggestions.values()] } @@ -357,7 +357,7 @@ function resolveSummary(options: { } function resolveTsLoaderPath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") + return join(packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs") } async function collectRecoveryDiagnosticsChildPayload( @@ -486,7 +486,7 @@ async function collectRecoveryDiagnosticsChildPayload( SF_RECOVERY_UNIT_TYPE: unit?.type ?? "execute-project", SF_RECOVERY_UNIT_ID: unit?.id ?? 
"project", SF_RECOVERY_SESSION_FILE: sessionFile ?? "", - SF_RECOVERY_ACTIVITY_DIR: join(basePath, ".gsd", "activity"), + SF_RECOVERY_ACTIVITY_DIR: join(basePath, ".sf", "activity"), SF_RECOVERY_DOCTOR_MODULE: doctorModulePath, SF_RECOVERY_FORENSICS_MODULE: sessionForensicsModulePath, }, diff --git a/src/web/safe-import-meta-resolve.ts b/src/web/safe-import-meta-resolve.ts index 56ba2e653..8ade58c78 100644 --- a/src/web/safe-import-meta-resolve.ts +++ b/src/web/safe-import-meta-resolve.ts @@ -5,7 +5,7 @@ import { fileURLToPath } from "node:url"; * Derive a package root from an import.meta.url, returning null on failure. * * The Next.js standalone build bakes import.meta.url as the CI runner's - * absolute path (e.g. file:///home/runner/work/gsd-2/gsd-2/src/web/bridge-service.ts). + * absolute path (e.g. file:///home/runner/work/sf-2/sf-2/src/web/bridge-service.ts). * On Windows, fileURLToPath() rejects this Linux path with * "File URL path must be absolute". * diff --git a/src/web/settings-service.ts b/src/web/settings-service.ts index f16df23e0..9f34bcc84 100644 --- a/src/web/settings-service.ts +++ b/src/web/settings-service.ts @@ -10,7 +10,7 @@ import type { SettingsData } from "../../web/lib/settings-types.ts" const SETTINGS_MAX_BUFFER = 2 * 1024 * 1024 function resolveTsLoaderPath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") + return join(packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs") } /** diff --git a/src/web/skill-health-service.ts b/src/web/skill-health-service.ts index 099023812..439d66718 100644 --- a/src/web/skill-health-service.ts +++ b/src/web/skill-health-service.ts @@ -11,7 +11,7 @@ const SKILL_HEALTH_MAX_BUFFER = 2 * 1024 * 1024 const SKILL_HEALTH_MODULE_ENV = "SF_SKILL_HEALTH_MODULE" function resolveTsLoaderPath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") 
+ return join(packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs") } /** diff --git a/src/web/undo-service.ts b/src/web/undo-service.ts index b1b94309b..8524ffa04 100644 --- a/src/web/undo-service.ts +++ b/src/web/undo-service.ts @@ -12,7 +12,7 @@ const UNDO_MODULE_ENV = "SF_UNDO_MODULE" const PATHS_MODULE_ENV = "SF_PATHS_MODULE" function resolveTsLoaderPath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") + return join(packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs") } /** @@ -24,7 +24,7 @@ export async function collectUndoInfo(projectCwdOverride?: string): Promise<Undo const config = resolveBridgeRuntimeConfig(undefined, projectCwdOverride) const { projectCwd } = config - const gsdDir = join(projectCwd, ".gsd") + const gsdDir = join(projectCwd, ".sf") const completedPath = join(gsdDir, "completed-units.json") const empty: UndoInfo = { diff --git a/src/web/visualizer-service.ts b/src/web/visualizer-service.ts index bfea7b506..c027decd3 100644 --- a/src/web/visualizer-service.ts +++ b/src/web/visualizer-service.ts @@ -36,7 +36,7 @@ export interface SerializedVisualizerData { } function resolveTsLoaderPath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") + return join(packageRoot, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs") } /** diff --git a/src/welcome-screen.ts b/src/welcome-screen.ts index 0172a8055..c21349c27 100644 --- a/src/welcome-screen.ts +++ b/src/welcome-screen.ts @@ -90,7 +90,7 @@ export function printWelcomeScreen(opts: WelcomeScreenOptions): void { // Tools left, hint right-aligned on the same row const toolsLeft = toolParts.length > 0 ? 
chalk.dim(' ' + toolParts.join(' · ')) : '' - const hintRight = chalk.dim('/gsd to begin · /gsd help') + const hintRight = chalk.dim('/sf to begin · /sf help') const footerFill = RIGHT_INNER - visLen(toolsLeft) - visLen(hintRight) const footerRow = toolsLeft + ' '.repeat(Math.max(1, footerFill)) + hintRight diff --git a/src/worktree-cli.ts b/src/worktree-cli.ts index 213a9cfe4..a2d711fe7 100644 --- a/src/worktree-cli.ts +++ b/src/worktree-cli.ts @@ -2,12 +2,12 @@ * SF Worktree CLI — standalone subcommand and -w flag handling. * * Manages the full worktree lifecycle from the command line: - * gsd -w Create auto-named worktree, start interactive session - * gsd -w my-feature Create/resume named worktree - * gsd worktree list List worktrees with status - * gsd worktree merge [name] Squash-merge a worktree into main - * gsd worktree clean Remove all merged/empty worktrees - * gsd worktree remove <n> Remove a specific worktree + * sf -w Create auto-named worktree, start interactive session + * sf -w my-feature Create/resume named worktree + * sf worktree list List worktrees with status + * sf worktree merge [name] Squash-merge a worktree into main + * sf worktree clean Remove all merged/empty worktrees + * sf worktree remove <n> Remove a specific worktree * * On session exit (via session_shutdown event), auto-commits dirty work * so nothing is lost. The SF extension reads SF_CLI_WORKTREE to know @@ -151,7 +151,7 @@ async function handleList(basePath: string): Promise<void> { const worktrees = ext.listWorktrees(basePath) if (worktrees.length === 0) { - process.stderr.write(chalk.dim('No worktrees. Create one with: gsd -w <name>\n')) + process.stderr.write(chalk.dim('No worktrees. 
Create one with: sf -w <name>\n')) return } @@ -174,8 +174,8 @@ async function handleMerge(basePath: string, args: string[]): Promise<void> { await doMerge(ext, basePath, worktrees[0].name) return } - process.stderr.write(chalk.red('Usage: gsd worktree merge <name>\n')) - process.stderr.write(chalk.dim('Run gsd worktree list to see worktrees.\n')) + process.stderr.write(chalk.red('Usage: sf worktree merge <name>\n')) + process.stderr.write(chalk.dim('Run sf worktree list to see worktrees.\n')) process.exit(1) } await doMerge(ext, basePath, name) @@ -220,7 +220,7 @@ async function doMerge(ext: ExtensionModules, basePath: string, name: string): P } catch (err) { const msg = err instanceof Error ? err.message : String(err) process.stderr.write(chalk.red(`✗ Merge failed: ${msg}\n`)) - process.stderr.write(chalk.dim(' Resolve conflicts manually, then run gsd worktree merge again.\n')) + process.stderr.write(chalk.dim(' Resolve conflicts manually, then run sf worktree merge again.\n')) process.exit(1) } } @@ -260,7 +260,7 @@ async function handleRemove(basePath: string, args: string[]): Promise<void> { const ext = await loadExtensionModules() const name = args[0] if (!name) { - process.stderr.write(chalk.red('Usage: gsd worktree remove <name>\n')) + process.stderr.write(chalk.red('Usage: sf worktree remove <name>\n')) process.exit(1) } @@ -274,7 +274,7 @@ async function handleRemove(basePath: string, args: string[]): Promise<void> { const status = getWorktreeStatus(ext, basePath, name, wt.path) if (status.filesChanged > 0 || status.uncommitted) { process.stderr.write(chalk.yellow(`⚠ Worktree "${name}" has unmerged changes (${status.filesChanged} files).\n`)) - process.stderr.write(chalk.yellow(' Use --force to remove anyway, or merge first: gsd worktree merge ' + name + '\n')) + process.stderr.write(chalk.yellow(' Use --force to remove anyway, or merge first: sf worktree merge ' + name + '\n')) if (!process.argv.includes('--force')) { process.exit(1) } @@ -306,7 +306,7 
@@ async function handleStatusBanner(basePath: string): Promise<void> { chalk.yellow(`${withChanges.length} worktree${withChanges.length === 1 ? '' : 's'} with unmerged changes: `) + names + '\n' + chalk.dim('[forge] ') + - chalk.dim('Resume: gsd -w <name> | Merge: gsd worktree merge <name> | List: gsd worktree list\n\n'), + chalk.dim('Resume: sf -w <name> | Merge: sf worktree merge <name> | List: sf worktree list\n\n'), ) } @@ -316,7 +316,7 @@ async function handleWorktreeFlag(worktreeFlag: boolean | string): Promise<void> const ext = await loadExtensionModules() const basePath = process.cwd() - // gsd -w (no name) — resume most recent worktree with changes, or create new + // sf -w (no name) — resume most recent worktree with changes, or create new if (worktreeFlag === true) { const existing = ext.listWorktrees(basePath) const withChanges = existing.filter(wt => { @@ -345,7 +345,7 @@ async function handleWorktreeFlag(worktreeFlag: boolean | string): Promise<void> const status = getWorktreeStatus(ext, basePath, wt.name, wt.path) process.stderr.write(formatStatus(status) + '\n\n') } - process.stderr.write(chalk.dim('Specify which one: gsd -w <name>\n')) + process.stderr.write(chalk.dim('Specify which one: sf -w <name>\n')) process.exit(0) } @@ -355,7 +355,7 @@ async function handleWorktreeFlag(worktreeFlag: boolean | string): Promise<void> return } - // gsd -w <name> — create or resume named worktree + // sf -w <name> — create or resume named worktree const name = worktreeFlag as string const existing = ext.listWorktrees(basePath) const found = existing.find(wt => wt.name === name) diff --git a/web/app/api/browse-directories/route.ts b/web/app/api/browse-directories/route.ts index adc875bd0..f480a603d 100644 --- a/web/app/api/browse-directories/route.ts +++ b/web/app/api/browse-directories/route.ts @@ -11,7 +11,7 @@ export const dynamic = "force-dynamic"; */ function getDevRoot(): string { try { - const prefsPath = join(homedir(), ".gsd", "web-preferences.json"); + 
const prefsPath = join(homedir(), ".sf", "web-preferences.json"); if (existsSync(prefsPath)) { const prefs = JSON.parse(readFileSync(prefsPath, "utf-8")) as Record<string, unknown>; if (typeof prefs.devRoot === "string" && prefs.devRoot) { diff --git a/web/app/api/dev-mode/route.ts b/web/app/api/dev-mode/route.ts index f145be9e5..66b363782 100644 --- a/web/app/api/dev-mode/route.ts +++ b/web/app/api/dev-mode/route.ts @@ -9,7 +9,7 @@ export function GET(): Response { const packageRoot = process.env.SF_WEB_PACKAGE_ROOT ?? ""; const isSourceDev = hostKind === "source-dev"; - // When running via `npm run gsd:web` from the monorepo, the host resolves + // When running via `npm run sf:web` from the monorepo, the host resolves // as packaged-standalone (because the build exists), but the source web/ // directory is still present at the package root. A truly published package // won't have web/app/ next to dist/. diff --git a/web/app/api/experimental/route.ts b/web/app/api/experimental/route.ts index ea87edcae..98269240c 100644 --- a/web/app/api/experimental/route.ts +++ b/web/app/api/experimental/route.ts @@ -11,7 +11,7 @@ const NO_STORE = { "Cache-Control": "no-store" } as const // ─── Helpers (same pattern as remote-questions/route.ts) ───────────────────── function getPreferencesPath(): string { - return join(homedir(), ".gsd", "PREFERENCES.md") + return join(homedir(), ".sf", "PREFERENCES.md") } function parseFrontmatter(content: string): { data: Record<string, unknown>; body: string } { diff --git a/web/app/api/files/route.ts b/web/app/api/files/route.ts index e744d942c..91e1b7826 100644 --- a/web/app/api/files/route.ts +++ b/web/app/api/files/route.ts @@ -27,7 +27,7 @@ const PROJECT_SKIP_DIRS = new Set([ ".parcel-cache", ]); -type RootMode = "gsd" | "project"; +type RootMode = "sf" | "project"; interface FileNode { name: string; @@ -36,7 +36,7 @@ interface FileNode { } function getGsdRoot(projectCwd: string): string { - return join(projectCwd, ".gsd"); + return 
join(projectCwd, ".sf"); } function getRootForMode(mode: RootMode, projectCwd: string): string { @@ -101,11 +101,11 @@ function buildTree(dirPath: string, skipDirs?: Set<string>, depth = 0, maxDepth export async function GET(request: Request): Promise<Response> { const { searchParams } = new URL(request.url); const pathParam = searchParams.get("path"); - const rootParam = (searchParams.get("root") ?? "gsd") as RootMode; + const rootParam = (searchParams.get("root") ?? "sf") as RootMode; - if (rootParam !== "gsd" && rootParam !== "project") { + if (rootParam !== "sf" && rootParam !== "project") { return Response.json( - { error: `Invalid root: must be "gsd" or "project"` }, + { error: `Invalid root: must be "sf" or "project"` }, { status: 400 }, ); } @@ -127,7 +127,7 @@ export async function GET(request: Request): Promise<Response> { // Mode B: return file content const resolvedPath = resolveSecurePath(pathParam, root); if (!resolvedPath) { - const label = rootParam === "project" ? "project root" : ".gsd/"; + const label = rootParam === "project" ? "project root" : ".sf/"; return Response.json( { error: `Invalid path: path must be relative within ${label} and cannot contain '..' 
or start with '/'` }, { status: 400, headers }, @@ -172,15 +172,15 @@ export async function POST(request: Request): Promise<Response> { ); } - const { path: pathParam, content, root: rootParam = "gsd" } = body as { + const { path: pathParam, content, root: rootParam = "sf" } = body as { path?: string; content?: unknown; root?: string; }; - if (rootParam !== "gsd" && rootParam !== "project") { + if (rootParam !== "sf" && rootParam !== "project") { return Response.json( - { error: `Invalid root: must be "gsd" or "project"` }, + { error: `Invalid root: must be "sf" or "project"` }, { status: 400 }, ); } @@ -211,7 +211,7 @@ export async function POST(request: Request): Promise<Response> { const resolvedPath = resolveSecurePath(pathParam, root); if (!resolvedPath) { - const label = rootParam === "project" ? "project root" : ".gsd/"; + const label = rootParam === "project" ? "project root" : ".sf/"; return Response.json( { error: `Invalid path: path must be relative within ${label} and cannot contain '..' or start with '/'` }, { status: 400 }, @@ -238,15 +238,15 @@ export async function PATCH(request: Request): Promise<Response> { return Response.json({ error: "Invalid JSON body" }, { status: 400 }); } - const { from, to, root: rootParam = "gsd" } = body as { + const { from, to, root: rootParam = "sf" } = body as { from?: string; to?: string; root?: string; }; - if (rootParam !== "gsd" && rootParam !== "project") { + if (rootParam !== "sf" && rootParam !== "project") { return Response.json( - { error: `Invalid root: must be "gsd" or "project"` }, + { error: `Invalid root: must be "sf" or "project"` }, { status: 400 }, ); } @@ -267,7 +267,7 @@ export async function PATCH(request: Request): Promise<Response> { const projectCwd = requireProjectCwd(request); const root = getRootForMode(rootParam as RootMode, projectCwd); - const label = rootParam === "project" ? "project root" : ".gsd/"; + const label = rootParam === "project" ? 
"project root" : ".sf/"; const resolvedFrom = resolveSecurePath(from, root); if (!resolvedFrom) { @@ -322,11 +322,11 @@ export async function PATCH(request: Request): Promise<Response> { export async function DELETE(request: Request): Promise<Response> { const { searchParams } = new URL(request.url); const pathParam = searchParams.get("path"); - const rootParam = (searchParams.get("root") ?? "gsd") as RootMode; + const rootParam = (searchParams.get("root") ?? "sf") as RootMode; - if (rootParam !== "gsd" && rootParam !== "project") { + if (rootParam !== "sf" && rootParam !== "project") { return Response.json( - { error: `Invalid root: must be "gsd" or "project"` }, + { error: `Invalid root: must be "sf" or "project"` }, { status: 400 }, ); } @@ -340,7 +340,7 @@ export async function DELETE(request: Request): Promise<Response> { const projectCwd = requireProjectCwd(request); const root = getRootForMode(rootParam, projectCwd); - const label = rootParam === "project" ? "project root" : ".gsd/"; + const label = rootParam === "project" ? 
"project root" : ".sf/"; const resolvedPath = resolveSecurePath(pathParam, root); if (!resolvedPath) { @@ -378,15 +378,15 @@ export async function PUT(request: Request): Promise<Response> { return Response.json({ error: "Invalid JSON body" }, { status: 400 }); } - const { path: pathParam, type = "file", root: rootParam = "gsd" } = body as { + const { path: pathParam, type = "file", root: rootParam = "sf" } = body as { path?: string; type?: "file" | "directory"; root?: string; }; - if (rootParam !== "gsd" && rootParam !== "project") { + if (rootParam !== "sf" && rootParam !== "project") { return Response.json( - { error: `Invalid root: must be "gsd" or "project"` }, + { error: `Invalid root: must be "sf" or "project"` }, { status: 400 }, ); } @@ -407,7 +407,7 @@ export async function PUT(request: Request): Promise<Response> { const projectCwd = requireProjectCwd(request); const root = getRootForMode(rootParam as RootMode, projectCwd); - const label = rootParam === "project" ? "project root" : ".gsd/"; + const label = rootParam === "project" ? 
"project root" : ".sf/"; const resolvedPath = resolveSecurePath(pathParam, root); if (!resolvedPath) { diff --git a/web/app/api/remote-questions/route.ts b/web/app/api/remote-questions/route.ts index 0215e08b3..c8db3178d 100644 --- a/web/app/api/remote-questions/route.ts +++ b/web/app/api/remote-questions/route.ts @@ -41,7 +41,7 @@ const AUTH_PROVIDER_IDS: Record<RemoteChannel, string> = { // ─── Auth.json Helpers ──────────────────────────────────────────────────────── function getAuthPath(): string { - return join(homedir(), ".gsd", "agent", "auth.json") + return join(homedir(), ".sf", "agent", "auth.json") } function readAuthData(): Record<string, unknown> { @@ -84,7 +84,7 @@ function maskToken(token: string): string { // ─── Helpers ────────────────────────────────────────────────────────────────── function getPreferencesPath(): string { - return join(homedir(), ".gsd", "PREFERENCES.md") + return join(homedir(), ".sf", "PREFERENCES.md") } function clamp(value: number | undefined, defaultVal: number, min: number, max: number): number { diff --git a/web/app/api/steer/route.ts b/web/app/api/steer/route.ts index d159f57f4..8cdb8cbd3 100644 --- a/web/app/api/steer/route.ts +++ b/web/app/api/steer/route.ts @@ -11,7 +11,7 @@ export async function GET(request: Request): Promise<Response> { try { const projectCwd = requireProjectCwd(request); const config = resolveBridgeRuntimeConfig(undefined, projectCwd) - const overridesPath = join(config.projectCwd, ".gsd", "OVERRIDES.md") + const overridesPath = join(config.projectCwd, ".sf", "OVERRIDES.md") let overridesContent: string | null = null if (existsSync(overridesPath)) { diff --git a/web/app/api/terminal/upload/route.ts b/web/app/api/terminal/upload/route.ts index b658561ab..e8e20bb2d 100644 --- a/web/app/api/terminal/upload/route.ts +++ b/web/app/api/terminal/upload/route.ts @@ -5,7 +5,7 @@ * Body: multipart/form-data with a single `file` field * * Returns: - * 200 { ok: true, path: "/tmp/gsd-upload-..." 
} + * 200 { ok: true, path: "/tmp/sf-upload-..." } * 400 { error: "No file provided" } * 413 { error: "File too large (...)" } * 415 { error: "Unsupported image type: ..." } @@ -79,7 +79,7 @@ export async function POST(request: Request): Promise<Response> { // Generate unique filename and write to temp dir const ext = MIME_TO_EXT[file.type] ?? "bin"; const hex = randomBytes(4).toString("hex"); - const filename = `gsd-upload-${Date.now()}-${hex}.${ext}`; + const filename = `sf-upload-${Date.now()}-${hex}.${ext}`; const filePath = join(tmpdir(), filename); try { diff --git a/web/components/sf/app-shell.tsx b/web/components/sf/app-shell.tsx index 79fbe609f..22893be34 100644 --- a/web/components/sf/app-shell.tsx +++ b/web/components/sf/app-shell.tsx @@ -39,7 +39,7 @@ import { getAuthToken } from "@/lib/auth" const KNOWN_VIEWS = new Set(["dashboard", "power", "chat", "roadmap", "files", "activity", "visualize"]) function viewStorageKey(projectCwd: string): string { - return `gsd-active-view:${projectCwd}` + return `sf-active-view:${projectCwd}` } function WorkspaceChrome() { @@ -111,7 +111,7 @@ function WorkspaceChrome() { useEffect(() => { const restoreTimer = window.setTimeout(() => { try { - const stored = localStorage.getItem("gsd-sidebar-collapsed") + const stored = localStorage.getItem("sf-sidebar-collapsed") if (stored === "true") setSidebarCollapsed(true) } catch { // localStorage may be unavailable @@ -123,7 +123,7 @@ function WorkspaceChrome() { // Persist sidebar collapsed state useEffect(() => { try { - localStorage.setItem("gsd-sidebar-collapsed", String(sidebarCollapsed)) + localStorage.setItem("sf-sidebar-collapsed", String(sidebarCollapsed)) } catch { // localStorage may be unavailable } @@ -150,7 +150,7 @@ function WorkspaceChrome() { return () => window.removeEventListener("sf:open-file", handler) }, []) - // Listen for cross-component view navigation events (e.g. /gsd visualize dispatch) + // Listen for cross-component view navigation events (e.g. 
/sf visualize dispatch) useEffect(() => { const handler = (e: CustomEvent<{ view: string }>) => { if (KNOWN_VIEWS.has(e.detail.view)) { @@ -164,8 +164,8 @@ function WorkspaceChrome() { // Listen for projects panel toggle (sidebar icon, or programmatic) useEffect(() => { const handler = () => setProjectsPanelOpen(true) - window.addEventListener("gsd:open-projects", handler) - return () => window.removeEventListener("gsd:open-projects", handler) + window.addEventListener("sf:open-projects", handler) + return () => window.removeEventListener("sf:open-projects", handler) }, []) // Terminal + sidebar panel drag-to-resize @@ -273,7 +273,7 @@ function WorkspaceChrome() { (including the{" "} <code className="rounded bg-muted px-1 py-0.5 font-mono text-xs">#token=…</code>{" "} part) or restart with{" "} - <code className="rounded bg-muted px-1 py-0.5 font-mono text-xs">gsd --web</code>. + <code className="rounded bg-muted px-1 py-0.5 font-mono text-xs">sf --web</code>. </p> </div> </div> diff --git a/web/components/sf/chat-mode.tsx b/web/components/sf/chat-mode.tsx index 46219753c..c5cf9c4b4 100644 --- a/web/components/sf/chat-mode.tsx +++ b/web/components/sf/chat-mode.tsx @@ -28,7 +28,7 @@ import { useTerminalFontSize } from "@/lib/use-terminal-font-size" /* ─── SF Action Definitions ─── */ /** - * Defines every /gsd subcommand available in the chat input bar. + * Defines every /sf subcommand available in the chat input bar. * Top 3 are shown as standalone buttons; the rest live in the overflow menu. * All commands dispatch through the main bridge session. 
*/ @@ -44,33 +44,33 @@ interface SFActionDef { const SF_ACTIONS: SFActionDef[] = [ // ── Top 3 (standalone buttons) ── - { label: "Discuss", command: "/gsd discuss", icon: MessageCircle, description: "Start guided milestone/slice discussion", category: "workflow", disabledDuringAuto: true }, - { label: "Next", command: "/gsd next", icon: Play, description: "Execute next task, then pause", category: "workflow" }, - { label: "Auto", command: "/gsd auto", icon: Zap, description: "Run all queued units continuously", category: "workflow" }, + { label: "Discuss", command: "/sf discuss", icon: MessageCircle, description: "Start guided milestone/slice discussion", category: "workflow", disabledDuringAuto: true }, + { label: "Next", command: "/sf next", icon: Play, description: "Execute next task, then pause", category: "workflow" }, + { label: "Auto", command: "/sf auto", icon: Zap, description: "Run all queued units continuously", category: "workflow" }, // ── Overflow: Workflow ── - { label: "Stop", command: "/gsd stop", icon: Square, description: "Stop auto-mode gracefully", category: "workflow" }, - { label: "Pause", command: "/gsd pause", icon: Pause, description: "Pause auto-mode (preserves state)", category: "workflow" }, + { label: "Stop", command: "/sf stop", icon: Square, description: "Stop auto-mode gracefully", category: "workflow" }, + { label: "Pause", command: "/sf pause", icon: Pause, description: "Pause auto-mode (preserves state)", category: "workflow" }, // ── Overflow: Visibility ── - { label: "Status", command: "/gsd status", icon: BarChart3, description: "Show progress dashboard", category: "visibility" }, - { label: "Visualize", command: "/gsd visualize", icon: LayoutGrid, description: "Interactive TUI (progress, deps, metrics, timeline)", category: "visibility" }, - { label: "Queue", command: "/gsd queue", icon: ListOrdered, description: "Show queued/dispatched units and execution order", category: "visibility" }, - { label: "History", command: 
"/gsd history", icon: History, description: "View execution history with cost/phase/model details", category: "visibility" }, + { label: "Status", command: "/sf status", icon: BarChart3, description: "Show progress dashboard", category: "visibility" }, + { label: "Visualize", command: "/sf visualize", icon: LayoutGrid, description: "Interactive TUI (progress, deps, metrics, timeline)", category: "visibility" }, + { label: "Queue", command: "/sf queue", icon: ListOrdered, description: "Show queued/dispatched units and execution order", category: "visibility" }, + { label: "History", command: "/sf history", icon: History, description: "View execution history with cost/phase/model details", category: "visibility" }, // ── Overflow: Course correction ── - { label: "Steer", command: "/gsd steer", icon: Compass, description: "Apply user override to active work", category: "correction" }, - { label: "Capture", command: "/gsd capture", icon: PenLine, description: "Quick-capture a thought to CAPTURES.md", category: "correction" }, - { label: "Triage", command: "/gsd triage", icon: Inbox, description: "Classify and route pending captures", category: "correction", disabledDuringAuto: true }, - { label: "Skip", command: "/gsd skip", icon: SkipForward, description: "Prevent a unit from auto-mode dispatch", category: "correction" }, - { label: "Undo", command: "/gsd undo", icon: Undo2, description: "Revert last completed unit", category: "correction" }, + { label: "Steer", command: "/sf steer", icon: Compass, description: "Apply user override to active work", category: "correction" }, + { label: "Capture", command: "/sf capture", icon: PenLine, description: "Quick-capture a thought to CAPTURES.md", category: "correction" }, + { label: "Triage", command: "/sf triage", icon: Inbox, description: "Classify and route pending captures", category: "correction", disabledDuringAuto: true }, + { label: "Skip", command: "/sf skip", icon: SkipForward, description: "Prevent a unit from 
auto-mode dispatch", category: "correction" }, + { label: "Undo", command: "/sf undo", icon: Undo2, description: "Revert last completed unit", category: "correction" }, // ── Overflow: Knowledge ── - { label: "Knowledge", command: "/gsd knowledge", icon: BookOpen, description: "Add rule, pattern, or lesson to KNOWLEDGE.md", category: "knowledge" }, + { label: "Knowledge", command: "/sf knowledge", icon: BookOpen, description: "Add rule, pattern, or lesson to KNOWLEDGE.md", category: "knowledge" }, // ── Overflow: Configuration ── - { label: "Mode", command: "/gsd mode", icon: SlidersHorizontal, description: "Set workflow mode (solo/team)", category: "config" }, - { label: "Prefs", command: "/gsd prefs", icon: Settings, description: "Manage preferences (global/project)", category: "config" }, + { label: "Mode", command: "/sf mode", icon: SlidersHorizontal, description: "Set workflow mode (solo/team)", category: "config" }, + { label: "Prefs", command: "/sf prefs", icon: Settings, description: "Manage preferences (global/project)", category: "config" }, // ── Overflow: Maintenance ── - { label: "Doctor", command: "/gsd doctor", icon: Stethoscope, description: "Diagnose and repair .gsd/ state", category: "maintenance" }, - { label: "Export", command: "/gsd export", icon: FileOutput, description: "Export milestone/slice results (JSON or Markdown)", category: "maintenance" }, - { label: "Cleanup", command: "/gsd cleanup", icon: Trash2, description: "Remove merged branches or snapshots", category: "maintenance" }, - { label: "Remote", command: "/gsd remote", icon: Globe, description: "Control remote auto-mode (Slack/Discord)", category: "maintenance" }, + { label: "Doctor", command: "/sf doctor", icon: Stethoscope, description: "Diagnose and repair .sf/ state", category: "maintenance" }, + { label: "Export", command: "/sf export", icon: FileOutput, description: "Export milestone/slice results (JSON or Markdown)", category: "maintenance" }, + { label: "Cleanup", command: 
"/sf cleanup", icon: Trash2, description: "Remove merged branches or snapshots", category: "maintenance" }, + { label: "Remote", command: "/sf remote", icon: Globe, description: "Control remote auto-mode (Slack/Discord)", category: "maintenance" }, ] /** Top 3 shown as standalone buttons next to chat input */ @@ -103,13 +103,13 @@ function groupByCategory(actions: SFActionDef[]): Array<{ category: SFActionDef[ /** * ChatMode — main view for the Chat tab. * - * All /gsd commands dispatch through the main bridge session. + * All /sf commands dispatch through the main bridge session. * Commands that inject competing LLM prompts (discuss, triage) * are disabled while auto-mode is active. * * Observability: * - This component mounts only when activeView === "chat" (no hidden pre-init). - * - sessionStorage key "gsd-active-view:<cwd>" equals "chat" when this view is active. + * - sessionStorage key "sf-active-view:<cwd>" equals "chat" when this view is active. * - Header toolbar: data-testid="chat-mode-action-bar" confirms toolbar rendered. * - Primary button: data-testid="chat-primary-action" reflects current workflowAction label. * - Secondary buttons: data-testid="chat-secondary-action-{command}". 
@@ -137,8 +137,8 @@ export function ChatMode({ className }: { className?: string }) { {/* ── Main chat pane ── */} <ChatPane - sessionId="gsd-main" - command="gsd" + sessionId="sf-main" + command="sf" className="flex-1" onOpenAction={(action) => handleAction(action.command)} /> @@ -1156,7 +1156,7 @@ function ChatMessageList({ * - Disabled when disconnected; shows "Disconnected" badge * - Send button visible when input has content and connected * - Top 3 action buttons (Discuss, Next, Auto) shown standalone - * - Overflow menu (⋯) contains all remaining /gsd subcommands grouped by category + * - Overflow menu (⋯) contains all remaining /sf subcommands grouped by category * - Every action has a tooltip with description on hover */ function ChatInputBar({ diff --git a/web/components/sf/command-surface.tsx b/web/components/sf/command-surface.tsx index dca8167b7..25b92113e 100644 --- a/web/components/sf/command-surface.tsx +++ b/web/components/sf/command-surface.tsx @@ -417,43 +417,43 @@ export function CommandSurface() { const remainingCommands = commandSurface.remainingCommands useEffect(() => { if (!commandSurface.open) return - if (commandSurface.section === "gsd-forensics" && diagnostics.forensics.phase === "idle") { + if (commandSurface.section === "sf-forensics" && diagnostics.forensics.phase === "idle") { void loadForensicsDiagnostics() - } else if (commandSurface.section === "gsd-doctor" && diagnostics.doctor.phase === "idle") { + } else if (commandSurface.section === "sf-doctor" && diagnostics.doctor.phase === "idle") { void loadDoctorDiagnostics() - } else if (commandSurface.section === "gsd-skill-health" && diagnostics.skillHealth.phase === "idle") { + } else if (commandSurface.section === "sf-skill-health" && diagnostics.skillHealth.phase === "idle") { void loadSkillHealthDiagnostics() } else if ( - commandSurface.section === "gsd-knowledge" && + commandSurface.section === "sf-knowledge" && knowledgeCaptures.knowledge.phase === "idle" ) { void 
loadKnowledgeData() void loadCapturesData() } else if ( - (commandSurface.section === "gsd-capture" || commandSurface.section === "gsd-triage") && + (commandSurface.section === "sf-capture" || commandSurface.section === "sf-triage") && knowledgeCaptures.captures.phase === "idle" ) { void loadCapturesData() void loadKnowledgeData() } else if ( - (commandSurface.section === "gsd-prefs" || - commandSurface.section === "gsd-mode" || - commandSurface.section === "gsd-config" || + (commandSurface.section === "sf-prefs" || + commandSurface.section === "sf-mode" || + commandSurface.section === "sf-config" || commandSurface.section === "experimental") && settingsData.phase === "idle" ) { void loadSettingsData() - } else if (commandSurface.section === "gsd-history" && remainingCommands.history.phase === "idle") { + } else if (commandSurface.section === "sf-history" && remainingCommands.history.phase === "idle") { void loadHistoryData() - } else if (commandSurface.section === "gsd-inspect" && remainingCommands.inspect.phase === "idle") { + } else if (commandSurface.section === "sf-inspect" && remainingCommands.inspect.phase === "idle") { void loadInspectData() - } else if (commandSurface.section === "gsd-hooks" && remainingCommands.hooks.phase === "idle") { + } else if (commandSurface.section === "sf-hooks" && remainingCommands.hooks.phase === "idle") { void loadHooksData() - } else if (commandSurface.section === "gsd-undo" && remainingCommands.undo.phase === "idle") { + } else if (commandSurface.section === "sf-undo" && remainingCommands.undo.phase === "idle") { void loadUndoInfo() - } else if (commandSurface.section === "gsd-cleanup" && remainingCommands.cleanup.phase === "idle") { + } else if (commandSurface.section === "sf-cleanup" && remainingCommands.cleanup.phase === "idle") { void loadCleanupData() - } else if (commandSurface.section === "gsd-steer" && remainingCommands.steer.phase === "idle") { + } else if (commandSurface.section === "sf-steer" && 
remainingCommands.steer.phase === "idle") { void loadSteerData() } }, [ @@ -2048,7 +2048,7 @@ export function CommandSurface() { <div className="rounded-lg border border-border/50 bg-card/50 px-3 py-2.5 text-xs text-muted-foreground"> This tab is only visible when running via{" "} - <code className="rounded bg-muted px-1 py-0.5 font-mono text-[11px]">npm run gsd:web</code>. + <code className="rounded bg-muted px-1 py-0.5 font-mono text-[11px]">npm run sf:web</code>. Overrides reset on page refresh. </div> </div> @@ -2130,13 +2130,13 @@ export function CommandSurface() { case "compact": return renderCompactSection() case "workspace": return <DevRootSettingsSection /> case "integrations": return <RemoteQuestionsPanel /> - case "gsd-forensics": return <ForensicsPanel /> - case "gsd-doctor": return <DoctorPanel /> - case "gsd-skill-health": return <SkillHealthPanel /> - case "gsd-knowledge": return <KnowledgeCapturesPanel initialTab="knowledge" /> - case "gsd-capture": return <KnowledgeCapturesPanel initialTab="captures" /> - case "gsd-triage": return <KnowledgeCapturesPanel initialTab="captures" /> - case "gsd-prefs": return ( + case "sf-forensics": return <ForensicsPanel /> + case "sf-doctor": return <DoctorPanel /> + case "sf-skill-health": return <SkillHealthPanel /> + case "sf-knowledge": return <KnowledgeCapturesPanel initialTab="knowledge" /> + case "sf-capture": return <KnowledgeCapturesPanel initialTab="captures" /> + case "sf-triage": return <KnowledgeCapturesPanel initialTab="captures" /> + case "sf-prefs": return ( <div className="space-y-6"> <DevRootSettingsSection /> <PrefsPanel /> @@ -2147,24 +2147,24 @@ export function CommandSurface() { <ExperimentalPanel /> </div> ) - case "gsd-mode": return <ModelRoutingPanel /> - case "gsd-config": return <BudgetPanel /> - case "gsd-quick": return <QuickPanel /> - case "gsd-history": return <HistoryPanel /> - case "gsd-undo": return <UndoPanel /> - case "gsd-steer": return <SteerPanel /> - case "gsd-hooks": return 
<HooksPanel /> - case "gsd-inspect": return <InspectPanel /> - case "gsd-export": return <ExportPanel /> - case "gsd-cleanup": return <CleanupPanel /> - case "gsd-queue": return <QueuePanel /> - case "gsd-status": return <StatusPanel /> + case "sf-mode": return <ModelRoutingPanel /> + case "sf-config": return <BudgetPanel /> + case "sf-quick": return <QuickPanel /> + case "sf-history": return <HistoryPanel /> + case "sf-undo": return <UndoPanel /> + case "sf-steer": return <SteerPanel /> + case "sf-hooks": return <HooksPanel /> + case "sf-inspect": return <InspectPanel /> + case "sf-export": return <ExportPanel /> + case "sf-cleanup": return <CleanupPanel /> + case "sf-queue": return <QueuePanel /> + case "sf-status": return <StatusPanel /> default: // Safety net for any unknown SF surface - if (commandSurface.section?.startsWith("gsd-")) { + if (commandSurface.section?.startsWith("sf-")) { return ( - <div className="p-4 text-sm text-muted-foreground" data-testid={`gsd-surface-${commandSurface.section}`}> - <p className="font-medium text-foreground">/gsd {commandSurface.section.slice(4)}</p> + <div className="p-4 text-sm text-muted-foreground" data-testid={`sf-surface-${commandSurface.section}`}> + <p className="font-medium text-foreground">/sf {commandSurface.section.slice(4)}</p> <p className="mt-1">Unknown SF surface.</p> </div> ) diff --git a/web/components/sf/dual-terminal.tsx b/web/components/sf/dual-terminal.tsx index 30341a566..9f593a6f3 100644 --- a/web/components/sf/dual-terminal.tsx +++ b/web/components/sf/dual-terminal.tsx @@ -106,7 +106,7 @@ export function DualTerminal() { <div style={{ width: `${100 - splitPosition}%` }} className="h-full min-w-0 overflow-hidden bg-terminal"> <ShellTerminal className="h-full" - command="gsd" + command="sf" sessionPrefix="sf-interactive" fontSize={terminalFontSize} hideInitialSfHeader diff --git a/web/components/sf/file-content-viewer.tsx b/web/components/sf/file-content-viewer.tsx index 50a706e7d..b3a281ae8 100644 
--- a/web/components/sf/file-content-viewer.tsx +++ b/web/components/sf/file-content-viewer.tsx @@ -530,7 +530,7 @@ interface FileContentViewerProps { filepath: string className?: string /** Required for editing — the root context for the file */ - root?: "gsd" | "project" + root?: "sf" | "project" /** Required for editing — the relative path within the root */ path?: string /** Required for editing — called with new content when the user saves */ diff --git a/web/components/sf/files-view.tsx b/web/components/sf/files-view.tsx index dfa39fcb1..00b588aeb 100644 --- a/web/components/sf/files-view.tsx +++ b/web/components/sf/files-view.tsx @@ -26,7 +26,7 @@ import { authFetch } from "@/lib/auth" import { FileContentViewer } from "@/components/sf/file-content-viewer" import { ChatPane } from "@/components/sf/chat-mode" -type RootMode = "gsd" | "project" +type RootMode = "sf" | "project" // Global pending file request — survives across component mount/unmount cycles. // Set by the custom event, consumed by FilesView on mount or when already mounted. @@ -51,7 +51,7 @@ interface FileNode { /* ── Persistence helpers ── */ function storageKey(projectCwd: string, root: RootMode): string { - return `gsd-files-expanded:${root}:${projectCwd}` + return `sf-files-expanded:${root}:${projectCwd}` } function loadExpanded(projectCwd: string | undefined, root: RootMode): Set<string> { @@ -460,7 +460,7 @@ function tabKey(root: RootMode, path: string): string { } function tabDisplayPath(tab: OpenTab): string { - return tab.root === "gsd" ? `.gsd/${tab.path}` : tab.path + return tab.root === "sf" ? 
`.sf/${tab.path}` : tab.path } function tabLabel(tab: OpenTab): string { @@ -475,7 +475,7 @@ export function FilesView() { const workspace = useSFWorkspaceState() const projectCwd = workspace.boot?.project.cwd - const [activeRoot, setActiveRoot] = useState<RootMode>("gsd") + const [activeRoot, setActiveRoot] = useState<RootMode>("sf") const [leftPanel, setLeftPanel] = useState<LeftPanel>("tree") const [gsdTree, setGsdTree] = useState<FileNode[] | null>(null) const [projectTree, setProjectTree] = useState<FileNode[] | null>(null) @@ -522,7 +522,7 @@ export function FilesView() { ) // Expanded paths per root, restored from sessionStorage - const [gsdExpanded, setGsdExpanded] = useState<Set<string>>(() => loadExpanded(projectCwd, "gsd")) + const [gsdExpanded, setGsdExpanded] = useState<Set<string>>(() => loadExpanded(projectCwd, "sf")) const [projectExpanded, setProjectExpanded] = useState<Set<string>>(() => loadExpanded(projectCwd, "project")) // Re-hydrate from storage once projectCwd is available (boot may arrive after first render) @@ -530,12 +530,12 @@ export function FilesView() { useEffect(() => { if (!projectCwd || hydratedRef.current) return hydratedRef.current = true - setGsdExpanded(loadExpanded(projectCwd, "gsd")) + setGsdExpanded(loadExpanded(projectCwd, "sf")) setProjectExpanded(loadExpanded(projectCwd, "project")) }, [projectCwd]) - const expandedPaths = activeRoot === "gsd" ? gsdExpanded : projectExpanded - const setExpandedPaths = activeRoot === "gsd" ? setGsdExpanded : setProjectExpanded + const expandedPaths = activeRoot === "sf" ? gsdExpanded : projectExpanded + const setExpandedPaths = activeRoot === "sf" ? setGsdExpanded : setProjectExpanded // ── Multi-tab state ── const [openTabs, setOpenTabs] = useState<OpenTab[]>([]) @@ -553,8 +553,8 @@ export function FilesView() { // The selected path in the tree corresponds to the active tab const selectedPath = activeTab?.path ?? null - const tree = activeRoot === "gsd" ? 
gsdTree : projectTree - const treeLoaded = activeRoot === "gsd" ? gsdTree !== null : projectTree !== null + const tree = activeRoot === "sf" ? gsdTree : projectTree + const treeLoaded = activeRoot === "sf" ? gsdTree !== null : projectTree !== null const fetchTree = useCallback(async (root: RootMode) => { try { @@ -567,7 +567,7 @@ export function FilesView() { } const data = await res.json() const nodes = data.tree ?? [] - if (root === "gsd") { + if (root === "sf") { setGsdTree(nodes) } else { setProjectTree(nodes) @@ -588,7 +588,7 @@ export function FilesView() { // Initial load useEffect(() => { - fetchTree("gsd") + fetchTree("sf") }, [fetchTree]) // ── Open or focus a file tab and fetch its content ── @@ -609,7 +609,7 @@ export function FilesView() { // Auto-expand parent dirs const parts = path.split("/") - const setExpanded = root === "gsd" ? setGsdExpanded : setProjectExpanded + const setExpanded = root === "sf" ? setGsdExpanded : setProjectExpanded setExpanded((prev) => { const next = new Set(prev) for (let i = 1; i < parts.length; i++) { @@ -676,8 +676,8 @@ export function FilesView() { // Process a file open request (used both on mount and on event) const processFileOpen = useCallback(async (root: RootMode, path: string) => { // Ensure tree is loaded for this root - if (root === "gsd" && !gsdTree) { - fetchTree("gsd") + if (root === "sf" && !gsdTree) { + fetchTree("sf") } else if (root === "project" && !projectTree) { fetchTree("project") } @@ -792,7 +792,7 @@ export function FilesView() { const handleNewFile = useCallback((parentDir: string) => { // Ensure parent directory is expanded if (parentDir) { - const setExpanded = activeRoot === "gsd" ? setGsdExpanded : setProjectExpanded + const setExpanded = activeRoot === "sf" ? 
setGsdExpanded : setProjectExpanded setExpanded((prev) => { const next = new Set(prev) const parts = parentDir.split("/") @@ -808,7 +808,7 @@ export function FilesView() { const handleNewFolder = useCallback((parentDir: string) => { if (parentDir) { - const setExpanded = activeRoot === "gsd" ? setGsdExpanded : setProjectExpanded + const setExpanded = activeRoot === "sf" ? setGsdExpanded : setProjectExpanded setExpanded((prev) => { const next = new Set(prev) const parts = parentDir.split("/") @@ -958,7 +958,7 @@ export function FilesView() { }, []) const handleCopyPath = useCallback((path: string) => { - const displayPath = activeRoot === "gsd" ? `.gsd/${path}` : path + const displayPath = activeRoot === "sf" ? `.sf/${path}` : path void navigator.clipboard.writeText(displayPath) }, [activeRoot]) @@ -1033,7 +1033,7 @@ export function FilesView() { const hasStateMd = gsdTree.some((n) => n.name === "STATE.md" && n.type === "file") if (hasStateMd) { autoSelectedRef.current = true - void openFileTab("gsd", "STATE.md") + void openFileTab("sf", "STATE.md") } }, [gsdTree, openTabs.length, openFileTab]) @@ -1054,7 +1054,7 @@ export function FilesView() { if (!filePath) continue // Determine root and relative path - const gsdPrefix = ".gsd/" + const gsdPrefix = ".sf/" let root: RootMode = "project" let relativePath = filePath @@ -1065,7 +1065,7 @@ export function FilesView() { } if (relativePath.startsWith(gsdPrefix)) { - root = "gsd" + root = "sf" relativePath = relativePath.slice(gsdPrefix.length) } @@ -1129,10 +1129,10 @@ export function FilesView() { {/* Tab bar */} <div className="flex border-b border-border flex-shrink-0"> <button - onClick={() => { setLeftPanel("tree"); handleTreeRootChange("gsd") }} + onClick={() => { setLeftPanel("tree"); handleTreeRootChange("sf") }} className={cn( "flex-1 px-3 py-2 text-xs font-medium transition-colors", - leftPanel === "tree" && activeRoot === "gsd" + leftPanel === "tree" && activeRoot === "sf" ? 
"border-b-2 border-foreground text-foreground" : "text-muted-foreground hover:text-foreground", )} @@ -1216,7 +1216,7 @@ export function FilesView() { </div> ) : tree && tree.length === 0 ? ( <div className="flex items-center justify-center py-8 text-muted-foreground text-xs"> - {activeRoot === "gsd" ? "No .gsd/ files found" : "No files found"} + {activeRoot === "sf" ? "No .sf/ files found" : "No files found"} </div> ) : tree ? ( <> diff --git a/web/components/sf/onboarding/step-mode.tsx b/web/components/sf/onboarding/step-mode.tsx index 41444fde4..2e7156f42 100644 --- a/web/components/sf/onboarding/step-mode.tsx +++ b/web/components/sf/onboarding/step-mode.tsx @@ -27,7 +27,7 @@ const MODE_OPTIONS: { icon: Code2, tagline: "Full control", description: - "Dashboard metrics, dual-pane power mode, and direct /gsd command access. Built for people who want visibility into every milestone and task.", + "Dashboard metrics, dual-pane power mode, and direct /sf command access. Built for people who want visibility into every milestone and task.", }, { id: "vibe-coder", diff --git a/web/components/sf/project-welcome.tsx b/web/components/sf/project-welcome.tsx index 1b8b8080e..49e4f1c5f 100644 --- a/web/components/sf/project-welcome.tsx +++ b/web/components/sf/project-welcome.tsx @@ -37,7 +37,7 @@ function getVariant(detection: ProjectDetection): WelcomeVariant { headline: "Existing project detected", body: "SF will map your codebase and ask a few questions about what you want to build. 
From there it generates structured milestones and deliverable slices.", primaryLabel: "Map & Initialize", - primaryCommand: "/gsd", + primaryCommand: "/sf", secondary: { label: "Browse files first", action: "files-view", @@ -48,14 +48,14 @@ function getVariant(detection: ProjectDetection): WelcomeVariant { return { icon: <ArrowUpCircle className="h-8 w-8 text-foreground" strokeWidth={1.5} />, headline: "SF v1 project found", - body: "This project has a .planning/ folder from an earlier SF version. Migration converts your existing planning data into the new .gsd/ format.", + body: "This project has a .planning/ folder from an earlier SF version. Migration converts your existing planning data into the new .sf/ format.", detail: "Your original files will be preserved — migration creates the new structure alongside them.", primaryLabel: "Migrate to v2", - primaryCommand: "/gsd migrate", + primaryCommand: "/sf migrate", secondary: { label: "Start fresh instead", action: "command", - command: "/gsd", + command: "/sf", }, } @@ -65,7 +65,7 @@ function getVariant(detection: ProjectDetection): WelcomeVariant { headline: "Start a new project", body: "This folder is empty. 
SF will ask what you want to build, then generate a structured plan — milestones broken into deliverable slices with risk-ordered execution.", primaryLabel: "Start Project Setup", - primaryCommand: "/gsd", + primaryCommand: "/sf", } // active-sf and empty-sf shouldn't reach here, but handle gracefully @@ -75,7 +75,7 @@ function getVariant(detection: ProjectDetection): WelcomeVariant { headline: "Set up your project", body: "Run the SF wizard to get started.", primaryLabel: "Get Started", - primaryCommand: "/gsd", + primaryCommand: "/sf", } } } diff --git a/web/components/sf/remaining-command-panels.tsx b/web/components/sf/remaining-command-panels.tsx index 83da1a7ce..46e8607dd 100644 --- a/web/components/sf/remaining-command-panels.tsx +++ b/web/components/sf/remaining-command-panels.tsx @@ -149,7 +149,7 @@ function formatDuration(ms: number): string { export function QuickPanel() { return ( - <div className="space-y-4" data-testid="gsd-surface-gsd-quick"> + <div className="space-y-4" data-testid="sf-surface-sf-quick"> <PanelHeader title="Quick Task" icon={<Zap className="h-3.5 w-3.5" />} @@ -164,7 +164,7 @@ export function QuickPanel() { <div className="space-y-2"> <h4 className="text-[11px] font-medium text-muted-foreground uppercase tracking-wide">Usage</h4> <div className="rounded-md border border-border/50 bg-background/50 px-3 py-2 font-mono text-[11px] text-foreground/80"> - /gsd quick <description> + /sf quick <description> </div> </div> @@ -179,7 +179,7 @@ export function QuickPanel() { ].map((example) => ( <div key={example} className="flex items-center gap-2 text-[11px]"> <span className="text-muted-foreground">$</span> - <code className="font-mono text-muted-foreground">/gsd quick {example}</code> + <code className="font-mono text-muted-foreground">/sf quick {example}</code> </div> ))} </div> @@ -209,7 +209,7 @@ export function HistoryPanel() { const [activeTab, setActiveTab] = useState<HistoryTab>("phase") return ( - <div className="space-y-4" 
data-testid="gsd-surface-gsd-history"> + <div className="space-y-4" data-testid="sf-surface-sf-history"> <PanelHeader title="History & Metrics" icon={<Clock className="h-3.5 w-3.5" />} @@ -395,7 +395,7 @@ export function UndoPanel() { } return ( - <div className="space-y-4" data-testid="gsd-surface-gsd-undo"> + <div className="space-y-4" data-testid="sf-surface-sf-undo"> <PanelHeader title="Undo Last Unit" icon={<Undo2 className="h-3.5 w-3.5" />} @@ -544,7 +544,7 @@ export function SteerPanel() { } return ( - <div className="space-y-4" data-testid="gsd-surface-gsd-steer"> + <div className="space-y-4" data-testid="sf-surface-sf-steer"> <PanelHeader title="Steer" icon={<Navigation className="h-3.5 w-3.5" />} @@ -614,7 +614,7 @@ export function HooksPanel() { const busy = state.phase === "loading" return ( - <div className="space-y-4" data-testid="gsd-surface-gsd-hooks"> + <div className="space-y-4" data-testid="sf-surface-sf-hooks"> <PanelHeader title="Hooks" icon={<Layers className="h-3.5 w-3.5" />} @@ -706,7 +706,7 @@ export function InspectPanel() { const busy = state.phase === "loading" return ( - <div className="space-y-4" data-testid="gsd-surface-gsd-inspect"> + <div className="space-y-4" data-testid="sf-surface-sf-inspect"> <PanelHeader title="Inspect Database" icon={<Database className="h-3.5 w-3.5" />} @@ -833,7 +833,7 @@ export function ExportPanel() { } return ( - <div className="space-y-4" data-testid="gsd-surface-gsd-export"> + <div className="space-y-4" data-testid="sf-surface-sf-export"> <PanelHeader title="Export" icon={<Download className="h-3.5 w-3.5" />} @@ -934,7 +934,7 @@ export function CleanupPanel() { } return ( - <div className="space-y-4" data-testid="gsd-surface-gsd-cleanup"> + <div className="space-y-4" data-testid="sf-surface-sf-cleanup"> <PanelHeader title="Cleanup" icon={<Trash2 className="h-3.5 w-3.5" />} @@ -1078,7 +1078,7 @@ export function QueuePanel() { const active = workspaceIndex?.active return ( - <div className="space-y-4" 
data-testid="gsd-surface-gsd-queue"> + <div className="space-y-4" data-testid="sf-surface-sf-queue"> <PanelHeader title="Queue" icon={<ListChecks className="h-3.5 w-3.5" />} @@ -1187,7 +1187,7 @@ export function StatusPanel() { const doneSlices = milestones.reduce((sum: number, m: WorkspaceMilestoneTarget) => sum + m.slices.filter((s) => s.done).length, 0) return ( - <div className="space-y-4" data-testid="gsd-surface-gsd-status"> + <div className="space-y-4" data-testid="sf-surface-sf-status"> <PanelHeader title="Status" icon={<Terminal className="h-3.5 w-3.5" />} @@ -1257,7 +1257,7 @@ export function StatusPanel() { )} {milestones.length === 0 && ( - <PanelEmpty message="No plan loaded — run /gsd to initialize" /> + <PanelEmpty message="No plan loaded — run /sf to initialize" /> )} </div> ) diff --git a/web/components/sf/roadmap.tsx b/web/components/sf/roadmap.tsx index 738c39395..63dc4c09e 100644 --- a/web/components/sf/roadmap.tsx +++ b/web/components/sf/roadmap.tsx @@ -64,7 +64,7 @@ export function Roadmap() { {workspace.bootStatus === "ready" && milestones.length === 0 && ( <div className="py-8 text-center text-sm text-muted-foreground"> - No milestones found. Create a milestone with <code className="rounded bg-muted px-1.5 py-0.5 font-mono text-xs">/gsd</code> to get started. + No milestones found. Create a milestone with <code className="rounded bg-muted px-1.5 py-0.5 font-mono text-xs">/sf</code> to get started. </div> )} diff --git a/web/components/sf/settings-panels.tsx b/web/components/sf/settings-panels.tsx index c62ba4caf..aecb8fee5 100644 --- a/web/components/sf/settings-panels.tsx +++ b/web/components/sf/settings-panels.tsx @@ -1106,7 +1106,7 @@ export function ExperimentalPanel() { const [saveError, setSaveError] = useState<string | null>(null) // Trigger a settings load if data hasn't been fetched yet (e.g. navigating - // directly to the Experimental tab without going through gsd-prefs first). 
+ // directly to the Experimental tab without going through sf-prefs first). useEffect(() => { if (!data && !busy && state.phase === "idle") { refresh() @@ -1223,7 +1223,7 @@ export function ExperimentalPanel() { {data && ( <p className="text-[11px] text-muted-foreground"> Changes are written to{" "} - <span className="font-mono">{prefs?.path ?? "~/.gsd/PREFERENCES.md"}</span> + <span className="font-mono">{prefs?.path ?? "~/.sf/PREFERENCES.md"}</span> {" "}and take effect on the next session. </p> )} @@ -1235,6 +1235,6 @@ export function ExperimentalPanel() { // LEGACY EXPORTS // ═══════════════════════════════════════════════════════════════════════ -// Legacy exports for backward compatibility with gsd-prefs mega-scroll +// Legacy exports for backward compatibility with sf-prefs mega-scroll export const TerminalSizePanel = GeneralPanel export const EditorSizePanel = () => null diff --git a/web/components/sf/sidebar.tsx b/web/components/sf/sidebar.tsx index 4e4fd1a6c..7618e7eaa 100644 --- a/web/components/sf/sidebar.tsx +++ b/web/components/sf/sidebar.tsx @@ -122,7 +122,7 @@ export function NavRail({ activeView, onViewChange, isConnecting = false }: NavR ))} <div className="mt-auto flex flex-col gap-1"> <button - onClick={() => window.dispatchEvent(new CustomEvent("gsd:open-projects"))} + onClick={() => window.dispatchEvent(new CustomEvent("sf:open-projects"))} disabled={isConnecting} className={cn( "flex h-10 w-10 items-center justify-center rounded-md transition-colors", @@ -242,7 +242,7 @@ function ExitDialog({ <DialogTitle>Stop the SF web server?</DialogTitle> <DialogDescription> This will shut down the server process and close this tab. Run{" "} - <code className="rounded bg-muted px-1 py-0.5 font-mono text-xs">gsd --web</code> again to restart. + <code className="rounded bg-muted px-1 py-0.5 font-mono text-xs">sf --web</code> again to restart. 
</DialogDescription> </DialogHeader> <DialogFooter> @@ -331,10 +331,10 @@ export function MilestoneExplorer({ isConnecting = false, width, onCollapse }: { const openTaskFile = (absolutePath: string | undefined) => { if (!absolutePath || !projectCwd) return - const gsdPrefix = `${projectCwd}/.gsd/` + const gsdPrefix = `${projectCwd}/.sf/` if (!absolutePath.startsWith(gsdPrefix)) return const relativePath = absolutePath.slice(gsdPrefix.length) - window.dispatchEvent(new CustomEvent("sf:open-file", { detail: { root: "gsd", path: relativePath } })) + window.dispatchEvent(new CustomEvent("sf:open-file", { detail: { root: "sf", path: relativePath } })) } const workflowAction = deriveWorkflowAction({ @@ -761,7 +761,7 @@ function MobileNavPanel({ activeView, onViewChange, isConnecting = false }: NavR </div> <div className="border-t border-border px-2 py-2 space-y-1"> <button - onClick={() => window.dispatchEvent(new CustomEvent("gsd:open-projects"))} + onClick={() => window.dispatchEvent(new CustomEvent("sf:open-projects"))} disabled={isConnecting} className="flex w-full items-center gap-3 rounded-md px-3 py-3 text-sm text-muted-foreground hover:bg-accent/50 hover:text-foreground transition-colors min-h-[44px]" > diff --git a/web/components/sf/visualizer-view.tsx b/web/components/sf/visualizer-view.tsx index 589cfd2d2..c20edb7da 100644 --- a/web/components/sf/visualizer-view.tsx +++ b/web/components/sf/visualizer-view.tsx @@ -1053,8 +1053,8 @@ function ExportTab({ data }: { data: VisualizerData }) { return lines.join("\n") }, [data]) - const handleMarkdown = () => downloadBlob(generateMarkdown(), "gsd-report.md", "text/markdown") - const handleJSON = () => downloadBlob(JSON.stringify(data, null, 2), "gsd-report.json", "application/json") + const handleMarkdown = () => downloadBlob(generateMarkdown(), "sf-report.md", "text/markdown") + const handleJSON = () => downloadBlob(JSON.stringify(data, null, 2), "sf-report.json", "application/json") return ( <div 
className="space-y-6"> diff --git a/web/lib/auth.ts b/web/lib/auth.ts index d3e7beca6..47d6d46f0 100644 --- a/web/lib/auth.ts +++ b/web/lib/auth.ts @@ -20,7 +20,7 @@ * appended as a `?_token=` query parameter instead. */ -const AUTH_STORAGE_KEY = "gsd-auth-token" +const AUTH_STORAGE_KEY = "sf-auth-token" let cachedToken: string | null = null @@ -35,7 +35,7 @@ export function getAuthToken(): string | null { if (typeof window === "undefined") return null - // 1. Try the URL fragment (initial page load from gsd --web) + // 1. Try the URL fragment (initial page load from sf --web) const hash = window.location.hash if (hash) { const match = hash.match(/token=([a-fA-F0-9]+)/) diff --git a/web/lib/browser-slash-command-dispatch.ts b/web/lib/browser-slash-command-dispatch.ts index 2a8550509..227bdb4eb 100644 --- a/web/lib/browser-slash-command-dispatch.ts +++ b/web/lib/browser-slash-command-dispatch.ts @@ -14,26 +14,26 @@ export type BrowserSlashCommandSurface = | "session" | "export" // SF subcommand surfaces (S02) - | "gsd-status" - | "gsd-visualize" - | "gsd-forensics" - | "gsd-doctor" - | "gsd-skill-health" - | "gsd-knowledge" - | "gsd-capture" - | "gsd-triage" - | "gsd-quick" - | "gsd-history" - | "gsd-undo" - | "gsd-inspect" - | "gsd-prefs" - | "gsd-config" - | "gsd-hooks" - | "gsd-mode" - | "gsd-steer" - | "gsd-export" - | "gsd-cleanup" - | "gsd-queue" + | "sf-status" + | "sf-visualize" + | "sf-forensics" + | "sf-doctor" + | "sf-skill-health" + | "sf-knowledge" + | "sf-capture" + | "sf-triage" + | "sf-quick" + | "sf-history" + | "sf-undo" + | "sf-inspect" + | "sf-prefs" + | "sf-config" + | "sf-hooks" + | "sf-mode" + | "sf-steer" + | "sf-export" + | "sf-cleanup" + | "sf-queue" export type BrowserSlashCommandLocalAction = "clear_terminal" | "refresh_workspace" | "gsd_help" @@ -114,27 +114,27 @@ const SURFACE_COMMANDS = new Map<string, BrowserSlashCommandSurface>([ // --- SF subcommand dispatch (S02) --- const SF_SURFACE_SUBCOMMANDS = new Map<string, 
BrowserSlashCommandSurface>([ - ["status", "gsd-status"], - ["visualize", "gsd-visualize"], - ["forensics", "gsd-forensics"], - ["doctor", "gsd-doctor"], - ["skill-health", "gsd-skill-health"], - ["knowledge", "gsd-knowledge"], - ["capture", "gsd-capture"], - ["triage", "gsd-triage"], - ["quick", "gsd-quick"], - ["history", "gsd-history"], - ["undo", "gsd-undo"], - ["inspect", "gsd-inspect"], + ["status", "sf-status"], + ["visualize", "sf-visualize"], + ["forensics", "sf-forensics"], + ["doctor", "sf-doctor"], + ["skill-health", "sf-skill-health"], + ["knowledge", "sf-knowledge"], + ["capture", "sf-capture"], + ["triage", "sf-triage"], + ["quick", "sf-quick"], + ["history", "sf-history"], + ["undo", "sf-undo"], + ["inspect", "sf-inspect"], ["model", "model"], - ["prefs", "gsd-prefs"], - ["config", "gsd-config"], - ["hooks", "gsd-hooks"], - ["mode", "gsd-mode"], - ["steer", "gsd-steer"], - ["export", "gsd-export"], - ["cleanup", "gsd-cleanup"], - ["queue", "gsd-queue"], + ["prefs", "sf-prefs"], + ["config", "sf-config"], + ["hooks", "sf-hooks"], + ["mode", "sf-mode"], + ["steer", "sf-steer"], + ["export", "sf-export"], + ["cleanup", "sf-cleanup"], + ["queue", "sf-queue"], ]) const SF_PASSTHROUGH_SUBCOMMANDS = new Set<string>([ @@ -149,7 +149,7 @@ const SF_PASSTHROUGH_SUBCOMMANDS = new Set<string>([ "remote", ]) -export const SF_HELP_TEXT = `Available /gsd subcommands: +export const SF_HELP_TEXT = `Available /sf subcommands: Workflow: next · auto · stop · pause · skip · queue · quick · capture · triage Diagnostics: status · visualize · forensics · doctor · skill-health · inspect @@ -157,7 +157,7 @@ Context: knowledge · history · undo · discuss Settings: model · prefs · config · hooks · mode · steer Advanced: export · cleanup · run-hook · migrate · remote -Type /gsd <subcommand> to run. Use /gsd help for this message.` +Type /sf <subcommand> to run. 
Use /sf help for this message.` function dispatchGSDSubcommand( input: string, @@ -169,12 +169,12 @@ function dispatchGSDSubcommand( const subcommand = spaceIndex === -1 ? trimmedArgs : trimmedArgs.slice(0, spaceIndex) const subArgs = spaceIndex === -1 ? "" : trimmedArgs.slice(spaceIndex + 1).trim() - // Bare `/gsd` — equivalent to `/gsd next`, pass through to bridge + // Bare `/sf` — equivalent to `/sf next`, pass through to bridge if (!subcommand) { return { kind: "prompt", input, - slashCommandName: "gsd", + slashCommandName: "sf", command: { type: getPromptCommandType(options), message: input, @@ -182,22 +182,22 @@ function dispatchGSDSubcommand( } } - // `/gsd help` — render inline help locally + // `/sf help` — render inline help locally if (subcommand === "help") { return { kind: "local", input, - commandName: "gsd", + commandName: "sf", action: "gsd_help", } } - // `/gsd visualize` — navigate to the visualizer view directly + // `/sf visualize` — navigate to the visualizer view directly if (subcommand === "visualize") { return { kind: "view-navigate", input, - commandName: "gsd", + commandName: "sf", view: "visualize", } } @@ -208,7 +208,7 @@ function dispatchGSDSubcommand( return { kind: "surface", input, - commandName: "gsd", + commandName: "sf", surface, args: subArgs, } @@ -219,7 +219,7 @@ function dispatchGSDSubcommand( return { kind: "prompt", input, - slashCommandName: "gsd", + slashCommandName: "sf", command: { type: getPromptCommandType(options), message: input, @@ -231,7 +231,7 @@ function dispatchGSDSubcommand( return { kind: "prompt", input, - slashCommandName: "gsd", + slashCommandName: "sf", command: { type: getPromptCommandType(options), message: input, @@ -343,8 +343,8 @@ export function dispatchBrowserSlashCommand( } // SF subcommand dispatch — must precede SURFACE_COMMANDS to avoid - // `/gsd export` colliding with the built-in `/export` surface. 
- if (parsed.name === "gsd") { + // `/sf export` colliding with the built-in `/export` surface. + if (parsed.name === "sf") { return dispatchGSDSubcommand(trimmed, parsed.args, options) } diff --git a/web/lib/command-surface-contract.ts b/web/lib/command-surface-contract.ts index 595c3b08f..42133e1f4 100644 --- a/web/lib/command-surface-contract.ts +++ b/web/lib/command-surface-contract.ts @@ -42,26 +42,26 @@ export type CommandSurfaceSection = | "integrations" | "experimental" // SF subcommand surfaces (S02) - | "gsd-status" - | "gsd-visualize" - | "gsd-forensics" - | "gsd-doctor" - | "gsd-skill-health" - | "gsd-knowledge" - | "gsd-capture" - | "gsd-triage" - | "gsd-quick" - | "gsd-history" - | "gsd-undo" - | "gsd-inspect" - | "gsd-prefs" - | "gsd-config" - | "gsd-hooks" - | "gsd-mode" - | "gsd-steer" - | "gsd-export" - | "gsd-cleanup" - | "gsd-queue" + | "sf-status" + | "sf-visualize" + | "sf-forensics" + | "sf-doctor" + | "sf-skill-health" + | "sf-knowledge" + | "sf-capture" + | "sf-triage" + | "sf-quick" + | "sf-history" + | "sf-undo" + | "sf-inspect" + | "sf-prefs" + | "sf-config" + | "sf-hooks" + | "sf-mode" + | "sf-steer" + | "sf-export" + | "sf-cleanup" + | "sf-queue" export type CommandSurfaceSource = "slash" | "sidebar" | "surface" export type CommandSurfacePendingAction = | "loading_models" @@ -347,7 +347,7 @@ export type CommandSurfaceTarget = | { kind: "fork"; entryId?: string } | { kind: "session"; outputPath?: string } | { kind: "compact"; customInstructions: string } - | { kind: "gsd"; surface: string; subcommand: string; args: string } + | { kind: "sf"; surface: string; subcommand: string; args: string } // ─── Diagnostics panel state ────────────────────────────────────────────────── @@ -672,26 +672,26 @@ export function commandSurfaceSectionForRequest(request: CommandSurfaceOpenReque case "compact": return "compact" // SF subcommand surfaces (S02) - case "gsd-status": return "gsd-status" - case "gsd-visualize": return "gsd-visualize" - case 
"gsd-forensics": return "gsd-forensics" - case "gsd-doctor": return "gsd-doctor" - case "gsd-skill-health": return "gsd-skill-health" - case "gsd-knowledge": return "gsd-knowledge" - case "gsd-capture": return "gsd-capture" - case "gsd-triage": return "gsd-triage" - case "gsd-quick": return "gsd-quick" - case "gsd-history": return "gsd-history" - case "gsd-undo": return "gsd-undo" - case "gsd-inspect": return "gsd-inspect" - case "gsd-prefs": return "gsd-prefs" - case "gsd-config": return "gsd-config" - case "gsd-hooks": return "gsd-hooks" - case "gsd-mode": return "gsd-mode" - case "gsd-steer": return "gsd-steer" - case "gsd-export": return "gsd-export" - case "gsd-cleanup": return "gsd-cleanup" - case "gsd-queue": return "gsd-queue" + case "sf-status": return "sf-status" + case "sf-visualize": return "sf-visualize" + case "sf-forensics": return "sf-forensics" + case "sf-doctor": return "sf-doctor" + case "sf-skill-health": return "sf-skill-health" + case "sf-knowledge": return "sf-knowledge" + case "sf-capture": return "sf-capture" + case "sf-triage": return "sf-triage" + case "sf-quick": return "sf-quick" + case "sf-history": return "sf-history" + case "sf-undo": return "sf-undo" + case "sf-inspect": return "sf-inspect" + case "sf-prefs": return "sf-prefs" + case "sf-config": return "sf-config" + case "sf-hooks": return "sf-hooks" + case "sf-mode": return "sf-mode" + case "sf-steer": return "sf-steer" + case "sf-export": return "sf-export" + case "sf-cleanup": return "sf-cleanup" + case "sf-queue": return "sf-queue" default: return null } @@ -819,9 +819,9 @@ export function buildCommandSurfaceTarget(request: CommandSurfaceOpenRequest): C } // SF subcommand surfaces — generic target (S02) - if (request.surface?.startsWith("gsd-")) { - const subcommand = request.surface.slice(4) // "gsd-forensics" -> "forensics" - return { kind: "gsd", surface: request.surface, subcommand, args: request.args ?? 
"" } + if (request.surface?.startsWith("sf-")) { + const subcommand = request.surface.slice(3) // "sf-forensics" -> "forensics" + return { kind: "sf", surface: request.surface, subcommand, args: request.args ?? "" } } return buildSettingsTarget(section) diff --git a/web/lib/pty-chat-parser.ts b/web/lib/pty-chat-parser.ts index b0d50b47f..e82e2ba35 100644 --- a/web/lib/pty-chat-parser.ts +++ b/web/lib/pty-chat-parser.ts @@ -132,7 +132,7 @@ const SYSTEM_LINE_PATTERNS = [ /^\[thinking[.\u2026]*/i, /^\[done\]/i, /^\[error/i, - /^gsd\s+v[\d.]+/i, // version banner + /^sf\s+v[\d.]+/i, // version banner /^✓\s/, // short success lines /^✗\s/, // short failure lines ] diff --git a/web/lib/pty-manager.ts b/web/lib/pty-manager.ts index fd1e6992d..f296ef891 100644 --- a/web/lib/pty-manager.ts +++ b/web/lib/pty-manager.ts @@ -120,7 +120,7 @@ interface TerminalSpawnSpec { } const ALLOWED_TERMINAL_COMMANDS = new Set([ - "gsd", + "sf", process.env.SHELL || "/bin/zsh", "/bin/bash", "/bin/zsh", @@ -142,7 +142,7 @@ function resolveTerminalSpawnSpec(cwd: string, command?: string, commandArgs: st }; } - if (command === "gsd") { + if (command === "sf") { try { const cliEntry = resolveGsdCliEntry({ packageRoot: process.env.SF_WEB_PACKAGE_ROOT || process.cwd(), @@ -156,11 +156,11 @@ function resolveTerminalSpawnSpec(cwd: string, command?: string, commandArgs: st return { executable: cliEntry.command, args: cliEntry.args, - label: "gsd", + label: "sf", }; } catch (error) { console.warn( - "[pty] Falling back to PATH-resolved gsd:", + "[pty] Falling back to PATH-resolved sf:", error instanceof Error ?
error.message : String(error), ); } diff --git a/web/lib/use-editor-font-size.ts b/web/lib/use-editor-font-size.ts index c1a69ee71..8a0ef467a 100644 --- a/web/lib/use-editor-font-size.ts +++ b/web/lib/use-editor-font-size.ts @@ -2,7 +2,7 @@ import { useState, useEffect, useCallback } from "react" -const STORAGE_KEY = "gsd-editor-font-size" +const STORAGE_KEY = "sf-editor-font-size" const DEFAULT_SIZE = 14 const CHANGE_EVENT = "editor-font-size-changed" @@ -10,7 +10,7 @@ const CHANGE_EVENT = "editor-font-size-changed" * Persists editor font size to localStorage and syncs across components/tabs. * * Observability: - * - `localStorage.getItem('gsd-editor-font-size')` → current persisted value + * - `localStorage.getItem('sf-editor-font-size')` → current persisted value * - Window event `editor-font-size-changed` fires on every local change * - `storage` events sync across tabs */ diff --git a/web/lib/use-terminal-font-size.ts b/web/lib/use-terminal-font-size.ts index 926967be7..b3d21aeec 100644 --- a/web/lib/use-terminal-font-size.ts +++ b/web/lib/use-terminal-font-size.ts @@ -2,7 +2,7 @@ import { useState, useEffect, useCallback } from "react" -const STORAGE_KEY = "gsd-terminal-font-size" +const STORAGE_KEY = "sf-terminal-font-size" const DEFAULT_SIZE = 13 const CHANGE_EVENT = "terminal-font-size-changed" @@ -10,7 +10,7 @@ const CHANGE_EVENT = "terminal-font-size-changed" * Persists terminal font size to localStorage and syncs across components/tabs. 
* * Observability: - * - `localStorage.getItem('gsd-terminal-font-size')` → current persisted value + * - `localStorage.getItem('sf-terminal-font-size')` → current persisted value * - Window event `terminal-font-size-changed` fires on every local change * - `storage` events sync across tabs */ diff --git a/web/lib/use-user-mode.ts b/web/lib/use-user-mode.ts index cd9497141..a56ac1bc5 100644 --- a/web/lib/use-user-mode.ts +++ b/web/lib/use-user-mode.ts @@ -8,7 +8,7 @@ export type UserMode = "expert" | "vibe-coder" // ─── Storage ──────────────────────────────────────────────────────── -const STORAGE_KEY = "gsd-user-mode" +const STORAGE_KEY = "sf-user-mode" const DEFAULT_MODE: UserMode = "expert" const listeners = new Set<() => void>() diff --git a/web/lib/workflow-actions.ts b/web/lib/workflow-actions.ts index a4c7e3f5b..3ab07c689 100644 --- a/web/lib/workflow-actions.ts +++ b/web/lib/workflow-actions.ts @@ -37,8 +37,8 @@ export function deriveWorkflowAction(input: WorkflowActionInput): WorkflowAction // Suppress the action bar to avoid duplicate/confusing buttons. 
if ( projectDetectionKind && - projectDetectionKind !== "active-gsd" && - projectDetectionKind !== "empty-gsd" + projectDetectionKind !== "active-sf" && + projectDetectionKind !== "empty-sf" ) { return { primary: null, secondaries: [], disabled: true, disabledReason: "Project setup pending", isNewMilestone: false } } @@ -64,42 +64,42 @@ export function deriveWorkflowAction(input: WorkflowActionInput): WorkflowAction let isNewMilestone = false if (autoActive && !autoPaused) { - primary = { label: "Stop Auto", command: "/gsd stop", variant: "destructive" } + primary = { label: "Stop Auto", command: "/sf stop", variant: "destructive" } } else if (autoPaused) { - primary = { label: "Resume Auto", command: "/gsd auto", variant: "default" } + primary = { label: "Resume Auto", command: "/sf auto", variant: "default" } } else { // Auto is not active if (phase === "complete") { // All milestones done — surface a distinct "New Milestone" action - primary = { label: "New Milestone", command: "/gsd", variant: "default" } + primary = { label: "New Milestone", command: "/sf", variant: "default" } isNewMilestone = true } else if (phase === "planning") { - primary = { label: "Plan", command: "/gsd", variant: "default" } + primary = { label: "Plan", command: "/sf", variant: "default" } } else if (phase === "executing" || phase === "summarizing") { - primary = { label: "Start Auto", command: "/gsd auto", variant: "default" } + primary = { label: "Start Auto", command: "/sf auto", variant: "default" } } else if (phase === "pre-planning" && !hasMilestones) { - primary = { label: "Initialize Project", command: "/gsd", variant: "default" } + primary = { label: "Initialize Project", command: "/sf", variant: "default" } } else if (phase === "blocked") { - primary = { label: "Blocked", command: "/gsd", variant: "default" } + primary = { label: "Blocked", command: "/sf", variant: "default" } disabled = true disabledReason = "Project is blocked — check blockers" } else if (phase === 
"paused") { - primary = { label: "Resume", command: "/gsd auto", variant: "default" } + primary = { label: "Resume", command: "/sf auto", variant: "default" } } else if (phase === "validating-milestone") { - primary = { label: "Validate", command: "/gsd", variant: "default" } + primary = { label: "Validate", command: "/sf", variant: "default" } } else if (phase === "completing-milestone") { - primary = { label: "Complete Milestone", command: "/gsd", variant: "default" } + primary = { label: "Complete Milestone", command: "/sf", variant: "default" } } else if (phase === "needs-discussion") { - primary = { label: "Discuss", command: "/gsd", variant: "default" } + primary = { label: "Discuss", command: "/sf", variant: "default" } } else if (phase === "replanning-slice") { - primary = { label: "Replan", command: "/gsd", variant: "default" } + primary = { label: "Replan", command: "/sf", variant: "default" } } else { - primary = { label: "Continue", command: "/gsd", variant: "default" } + primary = { label: "Continue", command: "/sf", variant: "default" } } // Add "Step" secondary when auto is not active (not for new milestone — no step concept there) - if (primary.command !== "/gsd next" && !isNewMilestone) { - secondaries.push({ label: "Step", command: "/gsd next" }) + if (primary.command !== "/sf next" && !isNewMilestone) { + secondaries.push({ label: "Step", command: "/sf next" }) } } diff --git a/web/package-lock.json b/web/package-lock.json index 3e618e460..d53dd0999 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -1,11 +1,11 @@ { - "name": "gsd-web", + "name": "sf-web", "version": "0.1.0", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "gsd-web", + "name": "sf-web", "version": "0.1.0", "dependencies": { "@hookform/resolvers": "^3.9.1", diff --git a/web/package.json b/web/package.json index 1763afe22..e24081820 100644 --- a/web/package.json +++ b/web/package.json @@ -1,5 +1,5 @@ { - "name": "gsd-web", + "name": "sf-web", 
"version": "0.1.0", "private": true, "type": "module",