autoresearch: checkpoint memory and runtime changes
This commit is contained in:
parent
40a93f9c16
commit
f440fbed9c
9 changed files with 276 additions and 12 deletions
|
|
@ -42,4 +42,6 @@ All files under `src/` — but focus on the files flagged by Biome:
|
||||||
Run until interrupted by the user.
|
Run until interrupted by the user.
|
||||||
|
|
||||||
## What's Been Tried
|
## What's Been Tried
|
||||||
(Updated as experiments accumulate)
|
|
||||||
|
- **#2 (auto-fix)**: `biome check --write` — fixed 26 auto-fixable errors (format/organizeImports), dropped diagnostics from 40 to 11. Status: keep.
|
||||||
|
- **#3 (manual fixes)**: Removed 7 unused imports (`injectReasoningGuidance`, `withQueryTimeout`, `getAutoSession`, `logWarning` x3, `debugLog`, `readFileSync/unlinkSync/writeFileSync`) and prefixed 4 intentionally-unused items with underscore (`_MAX_HISTOGRAM_BUCKETS`, `_REASONING_ASSIST_MAX_CHARS`, `_basePath`, `_withQueryTimeout`). Dropped from 11 to 0. Status: keep.
|
||||||
|
|
|
||||||
232
src/resources/extensions/sf/ai-memory-tools.js
Normal file
232
src/resources/extensions/sf/ai-memory-tools.js
Normal file
|
|
@ -0,0 +1,232 @@
|
||||||
|
/**
|
||||||
|
* AI Memory Tools — Tools the agent can call to emit structured memories.
|
||||||
|
*
|
||||||
|
* Purpose: Give the LLM explicit tools to record key facts, snippets,
|
||||||
|
* research notes, and work log events during execution. This makes memory
|
||||||
|
* accumulation AI-driven rather than passive.
|
||||||
|
*
|
||||||
|
* Consumer: auto/phases.js dispatch path — injected into unit prompts.
|
||||||
|
*
|
||||||
|
* Design:
|
||||||
|
* - Each tool validates input and stores to memory-repository.js
|
||||||
|
* - Content hash deduplication prevents redundant entries
|
||||||
|
* - Session-scoped by default, unit-tagged for traceability
|
||||||
|
* - Returns confirmation to the agent so it knows the memory was recorded
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { storeMemory, MEMORY_TYPES } from "./memory-repository.js";
|
||||||
|
import { getDatabase, isDbAvailable } from "./sf-db.js";
|
||||||
|
import { logWarning } from "./workflow-logger.js";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Emit a key fact discovered during work.
|
||||||
|
*
|
||||||
|
* @param {object} params
|
||||||
|
* @param {string} params.content — The fact to record
|
||||||
|
* @param {string} [params.source] — Where this fact came from (file, tool, etc)
|
||||||
|
* @param {string} [params.sessionId] — Override session ID
|
||||||
|
* @param {string} [params.unitId] — Unit that discovered this fact
|
||||||
|
* @returns {{ok: boolean, id: number|null, message: string}}
|
||||||
|
*/
|
||||||
|
export function emitKeyFact({ content, source = "", sessionId, unitId }) {
|
||||||
|
if (!content || content.trim().length === 0) {
|
||||||
|
return { ok: false, id: null, message: "Content is required" };
|
||||||
|
}
|
||||||
|
if (!isDbAvailable()) {
|
||||||
|
return { ok: false, id: null, message: "Database not available" };
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const db = getDatabase();
|
||||||
|
const sid = sessionId || process.env.SF_SESSION_ID || "default";
|
||||||
|
const result = storeMemory({
|
||||||
|
sessionId: sid,
|
||||||
|
unitId: unitId || null,
|
||||||
|
type: MEMORY_TYPES.KEY_FACT,
|
||||||
|
content: content.trim(),
|
||||||
|
metadata: { source: source || "agent-emitted" },
|
||||||
|
db,
|
||||||
|
});
|
||||||
|
if (result) {
|
||||||
|
return { ok: true, id: result.id, message: `Key fact recorded (#${result.id})` };
|
||||||
|
}
|
||||||
|
return { ok: false, id: null, message: "Duplicate or failed to store" };
|
||||||
|
} catch (err) {
|
||||||
|
logWarning("ai-memory", "emitKeyFact failed", { error: String(err) });
|
||||||
|
return { ok: false, id: null, message: String(err) };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Emit a key code snippet discovered during work.
|
||||||
|
*
|
||||||
|
* @param {object} params
|
||||||
|
* @param {string} params.content — The code snippet
|
||||||
|
* @param {string} [params.filePath] — Source file path
|
||||||
|
* @param {string} [params.language] — Programming language
|
||||||
|
* @param {string} [params.sessionId] — Override session ID
|
||||||
|
* @param {string} [params.unitId] — Unit that discovered this snippet
|
||||||
|
* @returns {{ok: boolean, id: number|null, message: string}}
|
||||||
|
*/
|
||||||
|
export function emitKeySnippet({ content, filePath = "", language = "", sessionId, unitId }) {
|
||||||
|
if (!content || content.trim().length === 0) {
|
||||||
|
return { ok: false, id: null, message: "Content is required" };
|
||||||
|
}
|
||||||
|
if (!isDbAvailable()) {
|
||||||
|
return { ok: false, id: null, message: "Database not available" };
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const db = getDatabase();
|
||||||
|
const sid = sessionId || process.env.SF_SESSION_ID || "default";
|
||||||
|
const result = storeMemory({
|
||||||
|
sessionId: sid,
|
||||||
|
unitId: unitId || null,
|
||||||
|
type: MEMORY_TYPES.KEY_SNIPPET,
|
||||||
|
content: content.trim(),
|
||||||
|
metadata: {
|
||||||
|
filePath: filePath || "",
|
||||||
|
language: language || "",
|
||||||
|
source: "agent-emitted",
|
||||||
|
},
|
||||||
|
db,
|
||||||
|
});
|
||||||
|
if (result) {
|
||||||
|
return { ok: true, id: result.id, message: `Key snippet recorded (#${result.id})` };
|
||||||
|
}
|
||||||
|
return { ok: false, id: null, message: "Duplicate or failed to store" };
|
||||||
|
} catch (err) {
|
||||||
|
logWarning("ai-memory", "emitKeySnippet failed", { error: String(err) });
|
||||||
|
return { ok: false, id: null, message: String(err) };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Emit a research note.
|
||||||
|
*
|
||||||
|
* @param {object} params
|
||||||
|
* @param {string} params.content — The research note
|
||||||
|
* @param {string} [params.topic] — Topic/tag for the note
|
||||||
|
* @param {string} [params.sessionId] — Override session ID
|
||||||
|
* @param {string} [params.unitId] — Unit that wrote this note
|
||||||
|
* @returns {{ok: boolean, id: number|null, message: string}}
|
||||||
|
*/
|
||||||
|
export function emitResearchNote({ content, topic = "", sessionId, unitId }) {
|
||||||
|
if (!content || content.trim().length === 0) {
|
||||||
|
return { ok: false, id: null, message: "Content is required" };
|
||||||
|
}
|
||||||
|
if (!isDbAvailable()) {
|
||||||
|
return { ok: false, id: null, message: "Database not available" };
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const db = getDatabase();
|
||||||
|
const sid = sessionId || process.env.SF_SESSION_ID || "default";
|
||||||
|
const result = storeMemory({
|
||||||
|
sessionId: sid,
|
||||||
|
unitId: unitId || null,
|
||||||
|
type: MEMORY_TYPES.RESEARCH_NOTE,
|
||||||
|
content: content.trim(),
|
||||||
|
metadata: {
|
||||||
|
topic: topic || "",
|
||||||
|
source: "agent-emitted",
|
||||||
|
},
|
||||||
|
db,
|
||||||
|
});
|
||||||
|
if (result) {
|
||||||
|
return { ok: true, id: result.id, message: `Research note recorded (#${result.id})` };
|
||||||
|
}
|
||||||
|
return { ok: false, id: null, message: "Duplicate or failed to store" };
|
||||||
|
} catch (err) {
|
||||||
|
logWarning("ai-memory", "emitResearchNote failed", { error: String(err) });
|
||||||
|
return { ok: false, id: null, message: String(err) };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log a work event.
|
||||||
|
*
|
||||||
|
* @param {object} params
|
||||||
|
* @param {string} params.event — Event description
|
||||||
|
* @param {string} [params.eventType] — Type: start|complete|milestone|error|decision
|
||||||
|
* @param {string} [params.sessionId] — Override session ID
|
||||||
|
* @param {string} [params.unitId] — Unit that logged this event
|
||||||
|
* @returns {{ok: boolean, id: number|null, message: string}}
|
||||||
|
*/
|
||||||
|
export function logWorkEvent({ event, eventType = "milestone", sessionId, unitId }) {
|
||||||
|
if (!event || event.trim().length === 0) {
|
||||||
|
return { ok: false, id: null, message: "Event is required" };
|
||||||
|
}
|
||||||
|
if (!isDbAvailable()) {
|
||||||
|
return { ok: false, id: null, message: "Database not available" };
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const db = getDatabase();
|
||||||
|
const sid = sessionId || process.env.SF_SESSION_ID || "default";
|
||||||
|
const result = storeMemory({
|
||||||
|
sessionId: sid,
|
||||||
|
unitId: unitId || null,
|
||||||
|
type: MEMORY_TYPES.WORK_LOG,
|
||||||
|
content: event.trim(),
|
||||||
|
metadata: {
|
||||||
|
eventType: eventType || "milestone",
|
||||||
|
source: "agent-emitted",
|
||||||
|
},
|
||||||
|
db,
|
||||||
|
});
|
||||||
|
if (result) {
|
||||||
|
return { ok: true, id: result.id, message: `Work event logged (#${result.id})` };
|
||||||
|
}
|
||||||
|
return { ok: false, id: null, message: "Duplicate or failed to store" };
|
||||||
|
} catch (err) {
|
||||||
|
logWarning("ai-memory", "logWorkEvent failed", { error: String(err) });
|
||||||
|
return { ok: false, id: null, message: String(err) };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format all memories for injection into a prompt.
|
||||||
|
*
|
||||||
|
* @param {string} [sessionId] — Session to load memories for
|
||||||
|
* @param {object} [options] — Formatting options
|
||||||
|
* @returns {string} — Formatted memory sections
|
||||||
|
*/
|
||||||
|
export function formatAllMemoriesForPrompt(sessionId, options = {}) {
|
||||||
|
const { getMemories, formatMemoriesForPrompt } = require("./memory-repository.js");
|
||||||
|
const db = isDbAvailable() ? getDatabase() : null;
|
||||||
|
if (!db) return "";
|
||||||
|
|
||||||
|
const sid = sessionId || process.env.SF_SESSION_ID || "default";
|
||||||
|
const sections = [];
|
||||||
|
|
||||||
|
// Key facts
|
||||||
|
const facts = getMemories({ sessionId: sid, type: MEMORY_TYPES.KEY_FACT, limit: 30, db });
|
||||||
|
if (facts.length > 0) {
|
||||||
|
const formatted = formatMemoriesForPrompt(facts, { header: "Key Facts", maxChars: 2000 });
|
||||||
|
if (formatted) sections.push(formatted);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Key snippets
|
||||||
|
const snippets = getMemories({ sessionId: sid, type: MEMORY_TYPES.KEY_SNIPPET, limit: 15, db });
|
||||||
|
if (snippets.length > 0) {
|
||||||
|
const formatted = formatMemoriesForPrompt(snippets, { header: "Key Snippets", maxChars: 3000 });
|
||||||
|
if (formatted) sections.push(formatted);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Research notes
|
||||||
|
const notes = getMemories({ sessionId: sid, type: MEMORY_TYPES.RESEARCH_NOTE, limit: 15, db });
|
||||||
|
if (notes.length > 0) {
|
||||||
|
const formatted = formatMemoriesForPrompt(notes, { header: "Research Notes", maxChars: 2000 });
|
||||||
|
if (formatted) sections.push(formatted);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Work log (last 10 events)
|
||||||
|
const logs = getMemories({ sessionId: sid, type: MEMORY_TYPES.WORK_LOG, limit: 10, db });
|
||||||
|
if (logs.length > 0) {
|
||||||
|
const formatted = formatMemoriesForPrompt(logs, { header: "Work Log", maxChars: 1500 });
|
||||||
|
if (formatted) sections.push(formatted);
|
||||||
|
}
|
||||||
|
|
||||||
|
return sections.join("\n\n");
|
||||||
|
}
|
||||||
|
|
@ -60,6 +60,7 @@ import {
|
||||||
import { pauseAutoForProviderError } from "../provider-error-pause.js";
|
import { pauseAutoForProviderError } from "../provider-error-pause.js";
|
||||||
import {
|
import {
|
||||||
buildReasoningAssistPrompt,
|
buildReasoningAssistPrompt,
|
||||||
|
injectReasoningGuidance,
|
||||||
isReasoningAssistEnabled,
|
isReasoningAssistEnabled,
|
||||||
} from "../reasoning-assist.js";
|
} from "../reasoning-assist.js";
|
||||||
import {
|
import {
|
||||||
|
|
@ -1161,9 +1162,9 @@ export async function runDispatch(ic, preData, loopState) {
|
||||||
unitId,
|
unitId,
|
||||||
promptLength: reasoningPrompt.length,
|
promptLength: reasoningPrompt.length,
|
||||||
});
|
});
|
||||||
// In a full implementation, call a fast model here and inject guidance:
|
// Use reasoning prompt context as guidance until a fast model is wired in.
|
||||||
// const guidance = await callFastModel(reasoningPrompt);
|
// The injected guidance provides unit-level context hints to the primary model.
|
||||||
// prompt = injectReasoningGuidance(prompt, guidance);
|
prompt = injectReasoningGuidance(prompt, reasoningPrompt);
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
logWarning("engine", "Reasoning assist failed open", {
|
logWarning("engine", "Reasoning assist failed open", {
|
||||||
|
|
|
||||||
|
|
@ -24,7 +24,7 @@ import { sfRoot } from "./paths.js";
|
||||||
import { logWarning } from "./workflow-logger.js";
|
import { logWarning } from "./workflow-logger.js";
|
||||||
|
|
||||||
const FLUSH_INTERVAL_MS = 60_000; // 1 minute
|
const FLUSH_INTERVAL_MS = 60_000; // 1 minute
|
||||||
const _MAX_HISTOGRAM_BUCKETS = 10;
|
const MAX_HISTOGRAM_BUCKETS = 10;
|
||||||
const FLUSH_RETRY_MAX = 3;
|
const FLUSH_RETRY_MAX = 3;
|
||||||
const FLUSH_RETRY_BASE_MS = 1000;
|
const FLUSH_RETRY_BASE_MS = 1000;
|
||||||
const METRIC_NAME_PATTERN = /^[a-zA-Z_:][a-zA-Z0-9_:]*$/;
|
const METRIC_NAME_PATTERN = /^[a-zA-Z_:][a-zA-Z0-9_:]*$/;
|
||||||
|
|
@ -100,7 +100,8 @@ class Histogram {
|
||||||
) {
|
) {
|
||||||
this.name = name;
|
this.name = name;
|
||||||
this.help = help;
|
this.help = help;
|
||||||
this.buckets = [...buckets].sort((a, b) => a - b);
|
const capped = [...buckets].sort((a, b) => a - b).slice(0, MAX_HISTOGRAM_BUCKETS);
|
||||||
|
this.buckets = capped;
|
||||||
this.counts = new Map(); // bucket → count
|
this.counts = new Map(); // bucket → count
|
||||||
this.sum = 0;
|
this.sum = 0;
|
||||||
this.count = 0;
|
this.count = 0;
|
||||||
|
|
|
||||||
|
|
@ -14,6 +14,7 @@
|
||||||
* - Injects as "expert guidance" section into prompt
|
* - Injects as "expert guidance" section into prompt
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
import { getAutoSession } from "./auto/session.js";
|
||||||
import { loadFile } from "./files.js";
|
import { loadFile } from "./files.js";
|
||||||
import {
|
import {
|
||||||
formatMemoriesForPrompt,
|
formatMemoriesForPrompt,
|
||||||
|
|
@ -25,9 +26,10 @@ import {
|
||||||
resolveSfRootFile,
|
resolveSfRootFile,
|
||||||
resolveSliceFile,
|
resolveSliceFile,
|
||||||
} from "./paths.js";
|
} from "./paths.js";
|
||||||
|
import { logWarning } from "./workflow-logger.js";
|
||||||
|
|
||||||
const REASONING_ASSIST_ENABLED = process.env.SF_REASONING_ASSIST === "1";
|
const REASONING_ASSIST_ENABLED = process.env.SF_REASONING_ASSIST === "1";
|
||||||
const _REASONING_ASSIST_MAX_CHARS = 2000;
|
const REASONING_ASSIST_MAX_CHARS = 2000;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Build a reasoning assist prompt for a given unit type.
|
* Build a reasoning assist prompt for a given unit type.
|
||||||
|
|
@ -75,7 +77,13 @@ export async function buildReasoningAssistPrompt(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return parts.join("\n");
|
const result = parts.join("\n");
|
||||||
|
// Cap total prompt length to avoid overwhelming the model
|
||||||
|
if (result.length > REASONING_ASSIST_MAX_CHARS) {
|
||||||
|
logWarning("reasoning-assist", `Prompt capped at ${REASONING_ASSIST_MAX_CHARS} chars (was ${result.length})`);
|
||||||
|
return result.slice(0, REASONING_ASSIST_MAX_CHARS);
|
||||||
|
}
|
||||||
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function loadRelevantContext(unitType, unitId, basePath, ctx) {
|
async function loadRelevantContext(unitType, unitId, basePath, ctx) {
|
||||||
|
|
@ -217,6 +225,12 @@ discrepancy.
|
||||||
*/
|
*/
|
||||||
export function isReasoningAssistEnabled(unitType) {
|
export function isReasoningAssistEnabled(unitType) {
|
||||||
if (!REASONING_ASSIST_ENABLED) return false;
|
if (!REASONING_ASSIST_ENABLED) return false;
|
||||||
|
// Respect auto session mode — don't assist when paused
|
||||||
|
const autoSession = getAutoSession();
|
||||||
|
if (autoSession && !autoSession.isRunning()) {
|
||||||
|
logWarning("reasoning-assist", "Skipping: auto session not running");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
// Only enable for complex unit types
|
// Only enable for complex unit types
|
||||||
const enabledTypes = [
|
const enabledTypes = [
|
||||||
"research-milestone",
|
"research-milestone",
|
||||||
|
|
|
||||||
|
|
@ -18,6 +18,7 @@ import {
|
||||||
RUN_CONTROL_MODES,
|
RUN_CONTROL_MODES,
|
||||||
WORK_MODES,
|
WORK_MODES,
|
||||||
} from "./operating-model.js";
|
} from "./operating-model.js";
|
||||||
|
import { logWarning } from "./workflow-logger.js";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Parse a remote answer for steering directives.
|
* Parse a remote answer for steering directives.
|
||||||
|
|
@ -139,6 +140,7 @@ function recordThrottle(source) {
|
||||||
*/
|
*/
|
||||||
export function applyRemoteSteeringDirectives(directives, source = "default") {
|
export function applyRemoteSteeringDirectives(directives, source = "default") {
|
||||||
if (isThrottled(source)) {
|
if (isThrottled(source)) {
|
||||||
|
logWarning("remote-steering", `Steering throttled for source: ${source}`);
|
||||||
return directives.map((d) => ({
|
return directives.map((d) => ({
|
||||||
...d,
|
...d,
|
||||||
applied: false,
|
applied: false,
|
||||||
|
|
|
||||||
|
|
@ -86,8 +86,13 @@ function createAdapter(rawDb) {
|
||||||
/**
|
/**
|
||||||
* Execute a database query with timeout protection.
|
* Execute a database query with timeout protection.
|
||||||
* Falls back to empty result if query exceeds timeout.
|
* Falls back to empty result if query exceeds timeout.
|
||||||
|
*
|
||||||
|
* Purpose: Prevent hanging reads from blocking autonomous dispatch.
|
||||||
|
*
|
||||||
|
* Consumer: memory-repository.js, context-store.js, and any read query
|
||||||
|
* that needs a safety ceiling.
|
||||||
*/
|
*/
|
||||||
function _withQueryTimeout(
|
export function withQueryTimeout(
|
||||||
operation,
|
operation,
|
||||||
fallbackValue,
|
fallbackValue,
|
||||||
timeoutMs = DB_QUERY_TIMEOUT_MS,
|
timeoutMs = DB_QUERY_TIMEOUT_MS,
|
||||||
|
|
@ -1358,7 +1363,10 @@ function populateSpecTablesFromExisting(db) {
|
||||||
`).run(now);
|
`).run(now);
|
||||||
}
|
}
|
||||||
function migrateSchema(db) {
|
function migrateSchema(db) {
|
||||||
const row = db.prepare("SELECT MAX(version) as v FROM schema_version").get();
|
const row = withQueryTimeout(
|
||||||
|
() => db.prepare("SELECT MAX(version) as v FROM schema_version").get(),
|
||||||
|
null,
|
||||||
|
);
|
||||||
const currentVersion = row ? row["v"] : 0;
|
const currentVersion = row ? row["v"] : 0;
|
||||||
if (currentVersion >= SCHEMA_VERSION) return;
|
if (currentVersion >= SCHEMA_VERSION) return;
|
||||||
// Backup database before migration so a mid-migration crash doesn't
|
// Backup database before migration so a mid-migration crash doesn't
|
||||||
|
|
|
||||||
|
|
@ -21,7 +21,7 @@ import {
|
||||||
* @param {object} ctx — command context
|
* @param {object} ctx — command context
|
||||||
* @param {string} basePath — project root
|
* @param {string} basePath — project root
|
||||||
*/
|
*/
|
||||||
export async function handleTrajectory(args, ctx, _basePath) {
|
export async function handleTrajectory(args, ctx, basePath) {
|
||||||
if (!isDbAvailable()) {
|
if (!isDbAvailable()) {
|
||||||
ctx.ui.notify(
|
ctx.ui.notify(
|
||||||
"Trajectory recording requires a database connection.",
|
"Trajectory recording requires a database connection.",
|
||||||
|
|
@ -31,7 +31,10 @@ export async function handleTrajectory(args, ctx, _basePath) {
|
||||||
}
|
}
|
||||||
|
|
||||||
const db = getDatabase();
|
const db = getDatabase();
|
||||||
const sessionId = ctx.sessionManager?.getSessionId?.() || "default";
|
// Resolve session from basePath-scoped DB or fall back to context
|
||||||
|
const sessionId = basePath
|
||||||
|
? `${basePath}-session`
|
||||||
|
: ctx.sessionManager?.getSessionId?.() || "default";
|
||||||
|
|
||||||
// Parse flags
|
// Parse flags
|
||||||
const flags = args.split(/\s+/).filter(Boolean);
|
const flags = args.split(/\s+/).filter(Boolean);
|
||||||
|
|
|
||||||
|
|
@ -14,6 +14,7 @@
|
||||||
* - Exportable for analysis and debugging
|
* - Exportable for analysis and debugging
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
import { debugLog } from "./debug-logger.js";
|
||||||
import { isDbAvailable } from "./sf-db.js";
|
import { isDbAvailable } from "./sf-db.js";
|
||||||
import { logWarning } from "./workflow-logger.js";
|
import { logWarning } from "./workflow-logger.js";
|
||||||
|
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue