fix: resolve all 301 error handling anti-patterns across codebase

Systematic cleanup of every error handling anti-pattern detected by the
automated scanner. 289 issues fixed via code changes, 12 approved with
specific technical justifications.

Changes across 90 files:
- GENERIC_CATCH (141): Added instanceof Error type discrimination
- LARGE_TRY_BLOCK (82): Extracted helper methods to narrow try scope to ≤10 lines
- NO_LOGGING_IN_CATCH (65): Added logger/console calls for error visibility
- CATCH_AND_CONTINUE_CRITICAL_PATH (10): Added throw/return or approved overrides
- ERROR_STRING_MATCHING (2): Approved with rationale (no typed error classes)
- ERROR_MESSAGE_GUESSING (1): Replaced chained .includes() with documented pattern array
- PROMISE_CATCH_NO_LOGGING (1): Added logging to .catch() handler

Also fixes a detector bug where nested try/catch inside a catch block
corrupted brace-depth tracking, causing false positives.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Alex Newman
2026-04-19 19:57:00 -07:00
parent c9adb1c77b
commit a0dd516cd5
91 changed files with 4846 additions and 3414 deletions
+46 -41
View File
@@ -118,35 +118,36 @@ export function getBranchInfo(): BranchInfo {
};
}
// Get current branch
let branch: string;
let status: string;
try {
// Get current branch
const branch = execGit(['rev-parse', '--abbrev-ref', 'HEAD']);
// Check if dirty (has uncommitted changes)
const status = execGit(['status', '--porcelain']);
const isDirty = status.length > 0;
// Determine if on beta branch
const isBeta = branch.startsWith('beta');
return {
branch,
isBeta,
isGitRepo: true,
isDirty,
canSwitch: true // We can always switch (will discard local changes)
};
branch = execGit(['rev-parse', '--abbrev-ref', 'HEAD']);
status = execGit(['status', '--porcelain']);
} catch (error) {
logger.error('BRANCH', 'Failed to get branch info', {}, error as Error);
const errorMessage = error instanceof Error ? error.message : String(error);
logger.error('WORKER', 'Failed to get branch info', {}, error instanceof Error ? error : new Error(errorMessage));
return {
branch: null,
isBeta: false,
isGitRepo: true,
isDirty: false,
canSwitch: false,
error: (error as Error).message
error: errorMessage
};
}
// Determine branch state from git results
const isDirty = status.length > 0;
const isBeta = branch.startsWith('beta');
return {
branch,
isBeta,
isGitRepo: true,
isDirty,
canSwitch: true // We can always switch (will discard local changes)
};
}
/**
@@ -243,7 +244,8 @@ export async function switchBranch(targetBranch: string): Promise<SwitchResult>
}
} catch (recoveryError) {
// [POSSIBLY RELEVANT]: Recovery checkout failed, user needs manual intervention - already logging main error above
logger.error('BRANCH', 'Recovery checkout also failed', { originalBranch: info.branch }, recoveryError as Error);
const recoveryErrorMessage = recoveryError instanceof Error ? recoveryError.message : String(recoveryError);
logger.error('WORKER', 'Recovery checkout also failed', { originalBranch: info.branch }, recoveryError instanceof Error ? recoveryError : new Error(recoveryErrorMessage));
}
return {
@@ -266,17 +268,20 @@ export async function pullUpdates(): Promise<SwitchResult> {
};
}
// SECURITY: Validate branch name before use
if (!isValidBranchName(info.branch)) {
return {
success: false,
error: `Invalid current branch name: ${info.branch}`
};
}
logger.info('BRANCH', 'Pulling updates', { branch: info.branch });
// Prepare install marker path
const installMarker = join(INSTALLED_PLUGIN_PATH, '.install-version');
try {
// SECURITY: Validate branch name before use
if (!isValidBranchName(info.branch)) {
return {
success: false,
error: `Invalid current branch name: ${info.branch}`
};
}
logger.info('BRANCH', 'Pulling updates', { branch: info.branch });
// Discard local changes first
execGit(['checkout', '--', '.']);
@@ -285,26 +290,26 @@ export async function pullUpdates(): Promise<SwitchResult> {
execGit(['pull', 'origin', info.branch]);
// Clear install marker and reinstall
const installMarker = join(INSTALLED_PLUGIN_PATH, '.install-version');
if (existsSync(installMarker)) {
unlinkSync(installMarker);
}
execNpm(['install'], NPM_INSTALL_TIMEOUT_MS);
logger.success('BRANCH', 'Updates pulled', { branch: info.branch });
return {
success: true,
branch: info.branch,
message: `Updated ${info.branch}. Worker will restart automatically.`
};
} catch (error) {
logger.error('BRANCH', 'Pull failed', {}, error as Error);
const errorMessage = error instanceof Error ? error.message : String(error);
logger.error('WORKER', 'Pull failed', {}, error instanceof Error ? error : new Error(errorMessage));
return {
success: false,
error: `Pull failed: ${(error as Error).message}`
error: `Pull failed: ${errorMessage}`
};
}
logger.success('BRANCH', 'Updates pulled', { branch: info.branch });
return {
success: true,
branch: info.branch,
message: `Updated ${info.branch}. Worker will restart automatically.`
};
}
/**
+229 -210
View File
@@ -22,6 +22,7 @@ import { USER_SETTINGS_PATH } from '../../shared/paths.js';
import { estimateTokens } from '../../shared/timeline-formatting.js';
import type { ActiveSession, ConversationMessage } from '../worker-types.js';
import { ModeManager } from '../domain/ModeManager.js';
import type { ModeConfig } from '../domain/types.js';
import {
processAgentResponse,
shouldFallbackToClaude,
@@ -135,228 +136,246 @@ export class GeminiAgent {
* Uses multi-turn conversation to maintain context across messages
*/
async startSession(session: ActiveSession, worker?: WorkerRef): Promise<void> {
// --- Configuration & validation (no try needed - throws clear errors) ---
const { apiKey, model, rateLimitingEnabled } = this.getGeminiConfig();
if (!apiKey) {
throw new Error('Gemini API key not configured. Set CLAUDE_MEM_GEMINI_API_KEY in settings or GEMINI_API_KEY environment variable.');
}
// Generate synthetic memorySessionId (Gemini is stateless, doesn't return session IDs)
if (!session.memorySessionId) {
const syntheticMemorySessionId = `gemini-${session.contentSessionId}-${Date.now()}`;
session.memorySessionId = syntheticMemorySessionId;
this.dbManager.getSessionStore().updateMemorySessionId(session.sessionDbId, syntheticMemorySessionId);
logger.info('SESSION', `MEMORY_ID_GENERATED | sessionDbId=${session.sessionDbId} | provider=Gemini`);
}
// Load active mode and build initial prompt
const mode = ModeManager.getInstance().getActiveMode();
const initPrompt = session.lastPromptNumber === 1
? buildInitPrompt(session.project, session.contentSessionId, session.userPrompt, mode)
: buildContinuationPrompt(session.userPrompt, session.lastPromptNumber, session.contentSessionId, mode);
// --- Init query: API call + response processing ---
session.conversationHistory.push({ role: 'user', content: initPrompt });
let initResponse: { content: string; tokensUsed?: number };
try {
// Get Gemini configuration
const { apiKey, model, rateLimitingEnabled } = this.getGeminiConfig();
if (!apiKey) {
throw new Error('Gemini API key not configured. Set CLAUDE_MEM_GEMINI_API_KEY in settings or GEMINI_API_KEY environment variable.');
}
// Generate synthetic memorySessionId (Gemini is stateless, doesn't return session IDs)
if (!session.memorySessionId) {
const syntheticMemorySessionId = `gemini-${session.contentSessionId}-${Date.now()}`;
session.memorySessionId = syntheticMemorySessionId;
this.dbManager.getSessionStore().updateMemorySessionId(session.sessionDbId, syntheticMemorySessionId);
logger.info('SESSION', `MEMORY_ID_GENERATED | sessionDbId=${session.sessionDbId} | provider=Gemini`);
}
// Load active mode
const mode = ModeManager.getInstance().getActiveMode();
// Build initial prompt
const initPrompt = session.lastPromptNumber === 1
? buildInitPrompt(session.project, session.contentSessionId, session.userPrompt, mode)
: buildContinuationPrompt(session.userPrompt, session.lastPromptNumber, session.contentSessionId, mode);
// Add to conversation history and query Gemini with full context
session.conversationHistory.push({ role: 'user', content: initPrompt });
const initResponse = await this.queryGeminiMultiTurn(session.conversationHistory, apiKey, model, rateLimitingEnabled);
if (initResponse.content) {
// Add response to conversation history
session.conversationHistory.push({ role: 'assistant', content: initResponse.content });
// Track token usage
const tokensUsed = initResponse.tokensUsed || 0;
session.cumulativeInputTokens += Math.floor(tokensUsed * 0.7); // Rough estimate
session.cumulativeOutputTokens += Math.floor(tokensUsed * 0.3);
// Process response using shared ResponseProcessor (no original timestamp for init - not from queue)
await processAgentResponse(
initResponse.content,
session,
this.dbManager,
this.sessionManager,
worker,
tokensUsed,
null,
'Gemini',
undefined,
model
);
initResponse = await this.queryGeminiMultiTurn(session.conversationHistory, apiKey, model, rateLimitingEnabled);
} catch (error: unknown) {
if (error instanceof Error) {
logger.error('SDK', 'Gemini init query failed', { sessionId: session.sessionDbId, model }, error);
} else {
logger.error('SDK', 'Empty Gemini init response - session may lack context', {
sessionId: session.sessionDbId,
model
});
logger.error('SDK', 'Gemini init query failed with non-Error', { sessionId: session.sessionDbId, model }, new Error(String(error)));
}
return this.handleGeminiError(error, session, worker);
}
// Process pending messages
// Track cwd from messages for CLAUDE.md generation
let lastCwd: string | undefined;
if (initResponse.content) {
session.conversationHistory.push({ role: 'assistant', content: initResponse.content });
const tokensUsed = initResponse.tokensUsed || 0;
session.cumulativeInputTokens += Math.floor(tokensUsed * 0.7); // Rough estimate
session.cumulativeOutputTokens += Math.floor(tokensUsed * 0.3);
await processAgentResponse(initResponse.content, session, this.dbManager, this.sessionManager, worker, tokensUsed, null, 'Gemini', undefined, model);
} else {
logger.error('SDK', 'Empty Gemini init response - session may lack context', { sessionId: session.sessionDbId, model });
}
for await (const message of this.sessionManager.getMessageIterator(session.sessionDbId)) {
// CLAIM-CONFIRM: Track message ID for confirmProcessed() after successful storage
// The message is now in 'processing' status in DB until ResponseProcessor calls confirmProcessed()
session.processingMessageIds.push(message._persistentId);
// Capture subagent identity from the claimed message so ResponseProcessor
// can label observation rows with the originating Claude Code subagent.
// Always overwrite (even with null) so a main-session message after a subagent
// message clears the stale identity; otherwise mixed batches could mislabel.
session.pendingAgentId = message.agentId ?? null;
session.pendingAgentType = message.agentType ?? null;
// Capture cwd from each message for worktree support
if (message.cwd) {
lastCwd = message.cwd;
}
// Capture earliest timestamp BEFORE processing (will be cleared after)
// This ensures backlog messages get their original timestamps, not current time
const originalTimestamp = session.earliestPendingTimestamp;
if (message.type === 'observation') {
// Update last prompt number
if (message.prompt_number !== undefined) {
session.lastPromptNumber = message.prompt_number;
}
// CRITICAL: Check memorySessionId BEFORE making expensive LLM call
// This prevents wasting tokens when we won't be able to store the result anyway
if (!session.memorySessionId) {
throw new Error('Cannot process observations: memorySessionId not yet captured. This session may need to be reinitialized.');
}
// Build observation prompt
const obsPrompt = buildObservationPrompt({
id: 0,
tool_name: message.tool_name!,
tool_input: JSON.stringify(message.tool_input),
tool_output: JSON.stringify(message.tool_response),
created_at_epoch: originalTimestamp ?? Date.now(),
cwd: message.cwd
});
// Add to conversation history and query Gemini with full context
session.conversationHistory.push({ role: 'user', content: obsPrompt });
const obsResponse = await this.queryGeminiMultiTurn(session.conversationHistory, apiKey, model, rateLimitingEnabled);
let tokensUsed = 0;
if (obsResponse.content) {
// Add response to conversation history
session.conversationHistory.push({ role: 'assistant', content: obsResponse.content });
tokensUsed = obsResponse.tokensUsed || 0;
session.cumulativeInputTokens += Math.floor(tokensUsed * 0.7);
session.cumulativeOutputTokens += Math.floor(tokensUsed * 0.3);
}
// Process response using shared ResponseProcessor
if (obsResponse.content) {
await processAgentResponse(
obsResponse.content,
session,
this.dbManager,
this.sessionManager,
worker,
tokensUsed,
originalTimestamp,
'Gemini',
lastCwd,
model
);
} else {
logger.warn('SDK', 'Empty Gemini observation response, skipping processing to preserve message', {
sessionId: session.sessionDbId,
messageId: session.processingMessageIds[session.processingMessageIds.length - 1]
});
// Don't confirm - leave message for stale recovery
}
} else if (message.type === 'summarize') {
// CRITICAL: Check memorySessionId BEFORE making expensive LLM call
if (!session.memorySessionId) {
throw new Error('Cannot process summary: memorySessionId not yet captured. This session may need to be reinitialized.');
}
// Build summary prompt
const summaryPrompt = buildSummaryPrompt({
id: session.sessionDbId,
memory_session_id: session.memorySessionId,
project: session.project,
user_prompt: session.userPrompt,
last_assistant_message: message.last_assistant_message || ''
}, mode);
// Add to conversation history and query Gemini with full context
session.conversationHistory.push({ role: 'user', content: summaryPrompt });
const summaryResponse = await this.queryGeminiMultiTurn(session.conversationHistory, apiKey, model, rateLimitingEnabled);
let tokensUsed = 0;
if (summaryResponse.content) {
// Add response to conversation history
session.conversationHistory.push({ role: 'assistant', content: summaryResponse.content });
tokensUsed = summaryResponse.tokensUsed || 0;
session.cumulativeInputTokens += Math.floor(tokensUsed * 0.7);
session.cumulativeOutputTokens += Math.floor(tokensUsed * 0.3);
}
// Process response using shared ResponseProcessor
if (summaryResponse.content) {
await processAgentResponse(
summaryResponse.content,
session,
this.dbManager,
this.sessionManager,
worker,
tokensUsed,
originalTimestamp,
'Gemini',
lastCwd,
model
);
} else {
logger.warn('SDK', 'Empty Gemini summary response, skipping processing to preserve message', {
sessionId: session.sessionDbId,
messageId: session.processingMessageIds[session.processingMessageIds.length - 1]
});
// Don't confirm - leave message for stale recovery
}
}
// --- Message processing loop: iterate pending messages ---
try {
await this.processMessageLoop(session, worker, apiKey, model, rateLimitingEnabled, mode);
} catch (error: unknown) {
if (error instanceof Error) {
logger.error('SDK', 'Gemini message loop failed', { sessionId: session.sessionDbId, model }, error);
} else {
logger.error('SDK', 'Gemini message loop failed with non-Error', { sessionId: session.sessionDbId, model }, new Error(String(error)));
}
return this.handleGeminiError(error, session, worker);
}
// Mark session complete
const sessionDuration = Date.now() - session.startTime;
logger.success('SDK', 'Gemini agent completed', {
// Mark session complete
const sessionDuration = Date.now() - session.startTime;
logger.success('SDK', 'Gemini agent completed', {
sessionId: session.sessionDbId,
duration: `${(sessionDuration / 1000).toFixed(1)}s`,
historyLength: session.conversationHistory.length
});
}
/**
* Process pending messages from the session queue.
* Extracted from startSession to keep try blocks focused.
*/
private async processMessageLoop(
session: ActiveSession,
worker: WorkerRef | undefined,
apiKey: string,
model: GeminiModel,
rateLimitingEnabled: boolean,
mode: ModeConfig
): Promise<void> {
// Track cwd from messages for CLAUDE.md generation
let lastCwd: string | undefined;
for await (const message of this.sessionManager.getMessageIterator(session.sessionDbId)) {
// CLAIM-CONFIRM: Track message ID for confirmProcessed() after successful storage
// The message is now in 'processing' status in DB until ResponseProcessor calls confirmProcessed()
session.processingMessageIds.push(message._persistentId);
// Capture subagent identity from the claimed message so ResponseProcessor
// can label observation rows with the originating Claude Code subagent.
// Always overwrite (even with null) so a main-session message after a subagent
// message clears the stale identity; otherwise mixed batches could mislabel.
session.pendingAgentId = message.agentId ?? null;
session.pendingAgentType = message.agentType ?? null;
// Capture cwd from each message for worktree support
if (message.cwd) {
lastCwd = message.cwd;
}
// Capture earliest timestamp BEFORE processing (will be cleared after)
// This ensures backlog messages get their original timestamps, not current time
const originalTimestamp = session.earliestPendingTimestamp;
if (message.type === 'observation') {
await this.processObservationMessage(session, message, worker, apiKey, model, rateLimitingEnabled, originalTimestamp, lastCwd);
} else if (message.type === 'summarize') {
await this.processSummaryMessage(session, message, worker, apiKey, model, rateLimitingEnabled, mode, originalTimestamp, lastCwd);
}
}
}
/**
* Process a single observation message via Gemini API.
*/
private async processObservationMessage(
session: ActiveSession,
message: { type: string; prompt_number?: number; tool_name?: string; tool_input?: unknown; tool_response?: unknown; cwd?: string },
worker: WorkerRef | undefined,
apiKey: string,
model: GeminiModel,
rateLimitingEnabled: boolean,
originalTimestamp: number | null,
lastCwd: string | undefined
): Promise<void> {
// Update last prompt number
if (message.prompt_number !== undefined) {
session.lastPromptNumber = message.prompt_number;
}
// CRITICAL: Check memorySessionId BEFORE making expensive LLM call
// This prevents wasting tokens when we won't be able to store the result anyway
if (!session.memorySessionId) {
throw new Error('Cannot process observations: memorySessionId not yet captured. This session may need to be reinitialized.');
}
// Build observation prompt
const obsPrompt = buildObservationPrompt({
id: 0,
tool_name: message.tool_name!,
tool_input: JSON.stringify(message.tool_input),
tool_output: JSON.stringify(message.tool_response),
created_at_epoch: originalTimestamp ?? Date.now(),
cwd: message.cwd
});
session.conversationHistory.push({ role: 'user', content: obsPrompt });
const obsResponse = await this.queryGeminiMultiTurn(session.conversationHistory, apiKey, model, rateLimitingEnabled);
let tokensUsed = 0;
if (obsResponse.content) {
session.conversationHistory.push({ role: 'assistant', content: obsResponse.content });
tokensUsed = obsResponse.tokensUsed || 0;
session.cumulativeInputTokens += Math.floor(tokensUsed * 0.7);
session.cumulativeOutputTokens += Math.floor(tokensUsed * 0.3);
}
if (obsResponse.content) {
await processAgentResponse(obsResponse.content, session, this.dbManager, this.sessionManager, worker, tokensUsed, originalTimestamp, 'Gemini', lastCwd, model);
} else {
logger.warn('SDK', 'Empty Gemini observation response, skipping processing to preserve message', {
sessionId: session.sessionDbId,
duration: `${(sessionDuration / 1000).toFixed(1)}s`,
messageId: session.processingMessageIds[session.processingMessageIds.length - 1]
});
// Don't confirm - leave message for stale recovery
}
}
/**
* Process a single summary message via Gemini API.
*/
private async processSummaryMessage(
session: ActiveSession,
message: { type: string; last_assistant_message?: string },
worker: WorkerRef | undefined,
apiKey: string,
model: GeminiModel,
rateLimitingEnabled: boolean,
mode: ModeConfig,
originalTimestamp: number | null,
lastCwd: string | undefined
): Promise<void> {
// CRITICAL: Check memorySessionId BEFORE making expensive LLM call
if (!session.memorySessionId) {
throw new Error('Cannot process summary: memorySessionId not yet captured. This session may need to be reinitialized.');
}
// Build summary prompt
const summaryPrompt = buildSummaryPrompt({
id: session.sessionDbId,
memory_session_id: session.memorySessionId,
project: session.project,
user_prompt: session.userPrompt,
last_assistant_message: message.last_assistant_message || ''
}, mode);
session.conversationHistory.push({ role: 'user', content: summaryPrompt });
const summaryResponse = await this.queryGeminiMultiTurn(session.conversationHistory, apiKey, model, rateLimitingEnabled);
let tokensUsed = 0;
if (summaryResponse.content) {
session.conversationHistory.push({ role: 'assistant', content: summaryResponse.content });
tokensUsed = summaryResponse.tokensUsed || 0;
session.cumulativeInputTokens += Math.floor(tokensUsed * 0.7);
session.cumulativeOutputTokens += Math.floor(tokensUsed * 0.3);
}
if (summaryResponse.content) {
await processAgentResponse(summaryResponse.content, session, this.dbManager, this.sessionManager, worker, tokensUsed, originalTimestamp, 'Gemini', lastCwd, model);
} else {
logger.warn('SDK', 'Empty Gemini summary response, skipping processing to preserve message', {
sessionId: session.sessionDbId,
messageId: session.processingMessageIds[session.processingMessageIds.length - 1]
});
// Don't confirm - leave message for stale recovery
}
}
/**
* Handle errors from Gemini API calls with abort detection and Claude fallback.
* Shared by init query and message processing try blocks.
*/
private handleGeminiError(error: unknown, session: ActiveSession, worker?: WorkerRef): Promise<void> | never {
if (isAbortError(error)) {
logger.warn('SDK', 'Gemini agent aborted', { sessionId: session.sessionDbId });
throw error;
}
// Check if we should fall back to Claude
if (shouldFallbackToClaude(error) && this.fallbackAgent) {
logger.warn('SDK', 'Gemini API failed, falling back to Claude SDK', {
sessionDbId: session.sessionDbId,
error: error instanceof Error ? error.message : String(error),
historyLength: session.conversationHistory.length
});
} catch (error: unknown) {
if (isAbortError(error)) {
logger.warn('SDK', 'Gemini agent aborted', { sessionId: session.sessionDbId });
throw error;
}
// Check if we should fall back to Claude
if (shouldFallbackToClaude(error) && this.fallbackAgent) {
logger.warn('SDK', 'Gemini API failed, falling back to Claude SDK', {
sessionDbId: session.sessionDbId,
error: error instanceof Error ? error.message : String(error),
historyLength: session.conversationHistory.length
});
// Fall back to Claude - it will use the same session with shared conversationHistory
// Note: With claim-and-delete queue pattern, messages are already deleted on claim
return this.fallbackAgent.startSession(session, worker);
}
logger.failure('SDK', 'Gemini agent error', { sessionDbId: session.sessionDbId }, error as Error);
throw error;
// Fall back to Claude - it will use the same session with shared conversationHistory
// Note: With claim-and-delete queue pattern, messages are already deleted on claim
return this.fallbackAgent.startSession(session, worker);
}
logger.failure('SDK', 'Gemini agent error', { sessionDbId: session.sessionDbId }, error as Error);
throw error;
}
/**
+253 -196
View File
@@ -17,6 +17,7 @@ import { SettingsDefaultsManager } from '../../shared/SettingsDefaultsManager.js
import { USER_SETTINGS_PATH } from '../../shared/paths.js';
import { logger } from '../../utils/logger.js';
import { ModeManager } from '../domain/ModeManager.js';
import type { ModeConfig } from '../domain/types.js';
import type { ActiveSession, ConversationMessage } from '../worker-types.js';
import { DatabaseManager } from './DatabaseManager.js';
import { SessionManager } from './SessionManager.js';
@@ -84,212 +85,268 @@ export class OpenRouterAgent {
* Uses multi-turn conversation to maintain context across messages
*/
async startSession(session: ActiveSession, worker?: WorkerRef): Promise<void> {
// Get OpenRouter configuration (pure lookup, no external I/O)
const { apiKey, model, siteUrl, appName } = this.getOpenRouterConfig();
if (!apiKey) {
throw new Error('OpenRouter API key not configured. Set CLAUDE_MEM_OPENROUTER_API_KEY in settings or OPENROUTER_API_KEY environment variable.');
}
// Generate synthetic memorySessionId (OpenRouter is stateless, doesn't return session IDs)
if (!session.memorySessionId) {
const syntheticMemorySessionId = `openrouter-${session.contentSessionId}-${Date.now()}`;
session.memorySessionId = syntheticMemorySessionId;
this.dbManager.getSessionStore().updateMemorySessionId(session.sessionDbId, syntheticMemorySessionId);
logger.info('SESSION', `MEMORY_ID_GENERATED | sessionDbId=${session.sessionDbId} | provider=OpenRouter`);
}
// Load active mode
const mode = ModeManager.getInstance().getActiveMode();
// Build initial prompt
const initPrompt = session.lastPromptNumber === 1
? buildInitPrompt(session.project, session.contentSessionId, session.userPrompt, mode)
: buildContinuationPrompt(session.userPrompt, session.lastPromptNumber, session.contentSessionId, mode);
// Send init prompt to OpenRouter
session.conversationHistory.push({ role: 'user', content: initPrompt });
try {
// Get OpenRouter configuration
const { apiKey, model, siteUrl, appName } = this.getOpenRouterConfig();
if (!apiKey) {
throw new Error('OpenRouter API key not configured. Set CLAUDE_MEM_OPENROUTER_API_KEY in settings or OPENROUTER_API_KEY environment variable.');
}
// Generate synthetic memorySessionId (OpenRouter is stateless, doesn't return session IDs)
if (!session.memorySessionId) {
const syntheticMemorySessionId = `openrouter-${session.contentSessionId}-${Date.now()}`;
session.memorySessionId = syntheticMemorySessionId;
this.dbManager.getSessionStore().updateMemorySessionId(session.sessionDbId, syntheticMemorySessionId);
logger.info('SESSION', `MEMORY_ID_GENERATED | sessionDbId=${session.sessionDbId} | provider=OpenRouter`);
}
// Load active mode
const mode = ModeManager.getInstance().getActiveMode();
// Build initial prompt
const initPrompt = session.lastPromptNumber === 1
? buildInitPrompt(session.project, session.contentSessionId, session.userPrompt, mode)
: buildContinuationPrompt(session.userPrompt, session.lastPromptNumber, session.contentSessionId, mode);
// Add to conversation history and query OpenRouter with full context
session.conversationHistory.push({ role: 'user', content: initPrompt });
const initResponse = await this.queryOpenRouterMultiTurn(session.conversationHistory, apiKey, model, siteUrl, appName);
if (initResponse.content) {
// Add response to conversation history
// session.conversationHistory.push({ role: 'assistant', content: initResponse.content });
// Track token usage
const tokensUsed = initResponse.tokensUsed || 0;
session.cumulativeInputTokens += Math.floor(tokensUsed * 0.7); // Rough estimate
session.cumulativeOutputTokens += Math.floor(tokensUsed * 0.3);
// Process response using shared ResponseProcessor (no original timestamp for init - not from queue)
await processAgentResponse(
initResponse.content,
session,
this.dbManager,
this.sessionManager,
worker,
tokensUsed,
null,
'OpenRouter',
undefined, // No lastCwd yet - before message processing
model
);
} else {
logger.error('SDK', 'Empty OpenRouter init response - session may lack context', {
sessionId: session.sessionDbId,
model
});
}
// Track lastCwd from messages for CLAUDE.md generation
let lastCwd: string | undefined;
// Process pending messages
for await (const message of this.sessionManager.getMessageIterator(session.sessionDbId)) {
// CLAIM-CONFIRM: Track message ID for confirmProcessed() after successful storage
// The message is now in 'processing' status in DB until ResponseProcessor calls confirmProcessed()
session.processingMessageIds.push(message._persistentId);
// Capture subagent identity from the claimed message so ResponseProcessor
// can label observation rows with the originating Claude Code subagent.
// Always overwrite (even with null) so a main-session message after a subagent
// message clears the stale identity; otherwise mixed batches could mislabel.
session.pendingAgentId = message.agentId ?? null;
session.pendingAgentType = message.agentType ?? null;
// Capture cwd from messages for proper worktree support
if (message.cwd) {
lastCwd = message.cwd;
}
// Capture earliest timestamp BEFORE processing (will be cleared after)
const originalTimestamp = session.earliestPendingTimestamp;
if (message.type === 'observation') {
// Update last prompt number
if (message.prompt_number !== undefined) {
session.lastPromptNumber = message.prompt_number;
}
// CRITICAL: Check memorySessionId BEFORE making expensive LLM call
// This prevents wasting tokens when we won't be able to store the result anyway
if (!session.memorySessionId) {
throw new Error('Cannot process observations: memorySessionId not yet captured. This session may need to be reinitialized.');
}
// Build observation prompt
const obsPrompt = buildObservationPrompt({
id: 0,
tool_name: message.tool_name!,
tool_input: JSON.stringify(message.tool_input),
tool_output: JSON.stringify(message.tool_response),
created_at_epoch: originalTimestamp ?? Date.now(),
cwd: message.cwd
});
// Add to conversation history and query OpenRouter with full context
session.conversationHistory.push({ role: 'user', content: obsPrompt });
const obsResponse = await this.queryOpenRouterMultiTurn(session.conversationHistory, apiKey, model, siteUrl, appName);
let tokensUsed = 0;
if (obsResponse.content) {
// Add response to conversation history
// session.conversationHistory.push({ role: 'assistant', content: obsResponse.content });
tokensUsed = obsResponse.tokensUsed || 0;
session.cumulativeInputTokens += Math.floor(tokensUsed * 0.7);
session.cumulativeOutputTokens += Math.floor(tokensUsed * 0.3);
}
// Process response using shared ResponseProcessor
await processAgentResponse(
obsResponse.content || '',
session,
this.dbManager,
this.sessionManager,
worker,
tokensUsed,
originalTimestamp,
'OpenRouter',
lastCwd,
model
);
} else if (message.type === 'summarize') {
// CRITICAL: Check memorySessionId BEFORE making expensive LLM call
if (!session.memorySessionId) {
throw new Error('Cannot process summary: memorySessionId not yet captured. This session may need to be reinitialized.');
}
// Build summary prompt
const summaryPrompt = buildSummaryPrompt({
id: session.sessionDbId,
memory_session_id: session.memorySessionId,
project: session.project,
user_prompt: session.userPrompt,
last_assistant_message: message.last_assistant_message || ''
}, mode);
// Add to conversation history and query OpenRouter with full context
session.conversationHistory.push({ role: 'user', content: summaryPrompt });
const summaryResponse = await this.queryOpenRouterMultiTurn(session.conversationHistory, apiKey, model, siteUrl, appName);
let tokensUsed = 0;
if (summaryResponse.content) {
// Add response to conversation history
// session.conversationHistory.push({ role: 'assistant', content: summaryResponse.content });
tokensUsed = summaryResponse.tokensUsed || 0;
session.cumulativeInputTokens += Math.floor(tokensUsed * 0.7);
session.cumulativeOutputTokens += Math.floor(tokensUsed * 0.3);
}
// Process response using shared ResponseProcessor
await processAgentResponse(
summaryResponse.content || '',
session,
this.dbManager,
this.sessionManager,
worker,
tokensUsed,
originalTimestamp,
'OpenRouter',
lastCwd,
model
);
}
}
// Mark session complete
const sessionDuration = Date.now() - session.startTime;
logger.success('SDK', 'OpenRouter agent completed', {
sessionId: session.sessionDbId,
duration: `${(sessionDuration / 1000).toFixed(1)}s`,
historyLength: session.conversationHistory.length,
model
});
await this.handleInitResponse(initResponse, session, worker, model);
} catch (error: unknown) {
if (isAbortError(error)) {
logger.warn('SDK', 'OpenRouter agent aborted', { sessionId: session.sessionDbId });
throw error;
if (error instanceof Error) {
logger.error('SDK', 'OpenRouter init failed', { sessionId: session.sessionDbId, model }, error);
} else {
logger.error('SDK', 'OpenRouter init failed with non-Error', { sessionId: session.sessionDbId, model }, new Error(String(error)));
}
await this.handleSessionError(error, session, worker);
return;
}
// Check if we should fall back to Claude
if (shouldFallbackToClaude(error) && this.fallbackAgent) {
logger.warn('SDK', 'OpenRouter API failed, falling back to Claude SDK', {
sessionDbId: session.sessionDbId,
error: error instanceof Error ? error.message : String(error),
historyLength: session.conversationHistory.length
});
// Track lastCwd from messages for CLAUDE.md generation
let lastCwd: string | undefined;
// Fall back to Claude - it will use the same session with shared conversationHistory
// Note: With claim-and-delete queue pattern, messages are already deleted on claim
return this.fallbackAgent.startSession(session, worker);
// Process pending messages
try {
for await (const message of this.sessionManager.getMessageIterator(session.sessionDbId)) {
lastCwd = await this.processOneMessage(session, message, lastCwd, apiKey, model, siteUrl, appName, worker, mode);
}
} catch (error: unknown) {
if (error instanceof Error) {
logger.error('SDK', 'OpenRouter message processing failed', { sessionId: session.sessionDbId, model }, error);
} else {
logger.error('SDK', 'OpenRouter message processing failed with non-Error', { sessionId: session.sessionDbId, model }, new Error(String(error)));
}
await this.handleSessionError(error, session, worker);
return;
}
logger.failure('SDK', 'OpenRouter agent error', { sessionDbId: session.sessionDbId }, error as Error);
// Mark session complete
const sessionDuration = Date.now() - session.startTime;
logger.success('SDK', 'OpenRouter agent completed', {
sessionId: session.sessionDbId,
duration: `${(sessionDuration / 1000).toFixed(1)}s`,
historyLength: session.conversationHistory.length,
model
});
}
/**
* Prepare common message metadata before processing.
* Tracks message IDs and captures subagent identity.
*/
private prepareMessageMetadata(session: ActiveSession, message: { _persistentId: number; agentId?: string | null; agentType?: string | null }): void {
// CLAIM-CONFIRM: Track message ID for confirmProcessed() after successful storage
session.processingMessageIds.push(message._persistentId);
// Capture subagent identity from the claimed message so ResponseProcessor
// can label observation rows with the originating Claude Code subagent.
// Always overwrite (even with null) so a main-session message after a subagent
// message clears the stale identity; otherwise mixed batches could mislabel.
session.pendingAgentId = message.agentId ?? null;
session.pendingAgentType = message.agentType ?? null;
}
/**
* Handle the init response from OpenRouter: update token counts and process or log empty.
*/
private async handleInitResponse(
initResponse: { content: string; tokensUsed?: number },
session: ActiveSession,
worker: WorkerRef | undefined,
model: string
): Promise<void> {
if (initResponse.content) {
const tokensUsed = initResponse.tokensUsed || 0;
session.cumulativeInputTokens += Math.floor(tokensUsed * 0.7);
session.cumulativeOutputTokens += Math.floor(tokensUsed * 0.3);
await processAgentResponse(
initResponse.content, session, this.dbManager, this.sessionManager,
worker, tokensUsed, null, 'OpenRouter', undefined, model
);
} else {
logger.error('SDK', 'Empty OpenRouter init response - session may lack context', {
sessionId: session.sessionDbId, model
});
}
}
/**
* Process one message from the iterator: prepare metadata, dispatch to observation or summary handler.
* Returns the updated lastCwd value.
*/
private async processOneMessage(
session: ActiveSession,
message: { _persistentId: number; agentId?: string | null; agentType?: string | null; type?: string; cwd?: string; prompt_number?: number; tool_name?: string; tool_input?: unknown; tool_response?: unknown; last_assistant_message?: string },
lastCwd: string | undefined,
apiKey: string,
model: string,
siteUrl: string | undefined,
appName: string | undefined,
worker: WorkerRef | undefined,
mode: ModeConfig
): Promise<string | undefined> {
this.prepareMessageMetadata(session, message);
if (message.cwd) {
lastCwd = message.cwd;
}
const originalTimestamp = session.earliestPendingTimestamp;
if (message.type === 'observation') {
await this.processObservationMessage(
session, message, originalTimestamp, lastCwd,
apiKey, model, siteUrl, appName, worker, mode
);
} else if (message.type === 'summarize') {
await this.processSummaryMessage(
session, message, originalTimestamp, lastCwd,
apiKey, model, siteUrl, appName, worker, mode
);
}
return lastCwd;
}
/**
* Process a single observation message: build prompt, call OpenRouter, store result.
*/
private async processObservationMessage(
session: ActiveSession,
message: { prompt_number?: number; tool_name?: string; tool_input?: unknown; tool_response?: unknown; cwd?: string },
originalTimestamp: number | null,
lastCwd: string | undefined,
apiKey: string,
model: string,
siteUrl: string | undefined,
appName: string | undefined,
worker: WorkerRef | undefined,
_mode: ModeConfig
): Promise<void> {
if (message.prompt_number !== undefined) {
session.lastPromptNumber = message.prompt_number;
}
// CRITICAL: Check memorySessionId BEFORE making expensive LLM call
if (!session.memorySessionId) {
throw new Error('Cannot process observations: memorySessionId not yet captured. This session may need to be reinitialized.');
}
const obsPrompt = buildObservationPrompt({
id: 0,
tool_name: message.tool_name!,
tool_input: JSON.stringify(message.tool_input),
tool_output: JSON.stringify(message.tool_response),
created_at_epoch: originalTimestamp ?? Date.now(),
cwd: message.cwd
});
session.conversationHistory.push({ role: 'user', content: obsPrompt });
const obsResponse = await this.queryOpenRouterMultiTurn(session.conversationHistory, apiKey, model, siteUrl, appName);
let tokensUsed = 0;
if (obsResponse.content) {
tokensUsed = obsResponse.tokensUsed || 0;
session.cumulativeInputTokens += Math.floor(tokensUsed * 0.7);
session.cumulativeOutputTokens += Math.floor(tokensUsed * 0.3);
}
await processAgentResponse(
obsResponse.content || '', session, this.dbManager, this.sessionManager,
worker, tokensUsed, originalTimestamp, 'OpenRouter', lastCwd, model
);
}
/**
* Process a single summary message: build prompt, call OpenRouter, store result.
*/
private async processSummaryMessage(
session: ActiveSession,
message: { last_assistant_message?: string },
originalTimestamp: number | null,
lastCwd: string | undefined,
apiKey: string,
model: string,
siteUrl: string | undefined,
appName: string | undefined,
worker: WorkerRef | undefined,
mode: ModeConfig
): Promise<void> {
// CRITICAL: Check memorySessionId BEFORE making expensive LLM call
if (!session.memorySessionId) {
throw new Error('Cannot process summary: memorySessionId not yet captured. This session may need to be reinitialized.');
}
const summaryPrompt = buildSummaryPrompt({
id: session.sessionDbId,
memory_session_id: session.memorySessionId,
project: session.project,
user_prompt: session.userPrompt,
last_assistant_message: message.last_assistant_message || ''
}, mode);
session.conversationHistory.push({ role: 'user', content: summaryPrompt });
const summaryResponse = await this.queryOpenRouterMultiTurn(session.conversationHistory, apiKey, model, siteUrl, appName);
let tokensUsed = 0;
if (summaryResponse.content) {
tokensUsed = summaryResponse.tokensUsed || 0;
session.cumulativeInputTokens += Math.floor(tokensUsed * 0.7);
session.cumulativeOutputTokens += Math.floor(tokensUsed * 0.3);
}
await processAgentResponse(
summaryResponse.content || '', session, this.dbManager, this.sessionManager,
worker, tokensUsed, originalTimestamp, 'OpenRouter', lastCwd, model
);
}
/**
* Handle errors from session processing: abort re-throw, fallback to Claude, or log and re-throw.
*/
private async handleSessionError(error: unknown, session: ActiveSession, worker?: WorkerRef): Promise<never | void> {
if (isAbortError(error)) {
logger.warn('SDK', 'OpenRouter agent aborted', { sessionId: session.sessionDbId });
throw error;
}
if (shouldFallbackToClaude(error) && this.fallbackAgent) {
logger.warn('SDK', 'OpenRouter API failed, falling back to Claude SDK', {
sessionDbId: session.sessionDbId,
error: error instanceof Error ? error.message : String(error),
historyLength: session.conversationHistory.length
});
// Fall back to Claude - it will use the same session with shared conversationHistory
// Note: With claim-and-delete queue pattern, messages are already deleted on claim
await this.fallbackAgent.startSession(session, worker);
return;
}
logger.failure('SDK', 'OpenRouter agent error', { sessionDbId: session.sessionDbId }, error as Error);
throw error;
}
/**
+5 -1
View File
@@ -55,7 +55,11 @@ export class PaginationHelper {
// Return as JSON string
return JSON.stringify(strippedPaths);
} catch (err) {
logger.debug('WORKER', 'File paths is plain string, using as-is', {}, err as Error);
if (err instanceof Error) {
logger.debug('WORKER', 'File paths is plain string, using as-is', {}, err);
} else {
logger.debug('WORKER', 'File paths is plain string, using as-is', { rawError: String(err) });
}
return filePathsStr;
}
}
+9 -2
View File
@@ -395,8 +395,11 @@ export function createPidCapturingSpawn(sessionDbId: number) {
try {
existing.process.kill('SIGTERM');
exited = existing.process.exitCode !== null;
} catch {
} catch (error: unknown) {
// Already dead — safe to unregister immediately
if (error instanceof Error) {
logger.warn('WORKER', `Failed to kill duplicate process PID ${existing.pid}, likely already dead`, { existingPid: existing.pid, sessionDbId }, error);
}
exited = true;
}
@@ -495,7 +498,11 @@ export function startOrphanReaper(getActiveSessionIds: () => Set<number>, interv
logger.info('PROCESS', `Reaper cleaned up ${killed} orphaned processes`, { killed });
}
} catch (error) {
logger.error('PROCESS', 'Reaper error', {}, error as Error);
if (error instanceof Error) {
logger.error('WORKER', 'Reaper error', {}, error);
} else {
logger.error('WORKER', 'Reaper error', { rawError: String(error) });
}
}
}, intervalMs);
+5 -1
View File
@@ -480,7 +480,11 @@ export class SDKAgent {
if (claudePath) return claudePath;
} catch (error) {
// [ANTI-PATTERN IGNORED]: Fallback behavior - which/where failed, continue to throw clear error
logger.debug('SDK', 'Claude executable auto-detection failed', {}, error as Error);
if (error instanceof Error) {
logger.debug('SDK', 'Claude executable auto-detection failed', {}, error);
} else {
logger.debug('SDK', 'Claude executable auto-detection failed with non-Error', {}, new Error(String(error)));
}
}
throw new Error('Claude executable not found. Please either:\n1. Add "claude" to your system PATH, or\n2. Set CLAUDE_CODE_PATH in ~/.claude-mem/settings.json');
+58 -44
View File
@@ -67,6 +67,23 @@ export class SearchManager {
return await this.chromaSync.queryChroma(query, limit, whereFilter);
}
private async searchChromaForTimeline(query: string, ninetyDaysAgo: number): Promise<ObservationSearchResult[]> {
const chromaResults = await this.queryChroma(query, 100);
logger.debug('SEARCH', 'Chroma returned semantic matches for timeline', { matchCount: chromaResults?.ids?.length ?? 0 });
if (chromaResults?.ids && chromaResults.ids.length > 0) {
const recentIds = chromaResults.ids.filter((_id, idx) => {
const meta = chromaResults.metadatas[idx];
return meta && meta.created_at_epoch > ninetyDaysAgo;
});
if (recentIds.length > 0) {
return this.sessionStore.getObservationsByIds(recentIds, { orderBy: 'date_desc', limit: 1 });
}
}
return [];
}
/**
* Helper to normalize query parameters from URL-friendly format
* Converts comma-separated strings to arrays and flattens date params
@@ -439,24 +456,13 @@ export class SearchManager {
let results: ObservationSearchResult[] = [];
if (this.chromaSync) {
logger.debug('SEARCH', 'Using hybrid semantic search for timeline query', {});
const ninetyDaysAgo = Date.now() - SEARCH_CONSTANTS.RECENCY_WINDOW_MS;
try {
logger.debug('SEARCH', 'Using hybrid semantic search for timeline query', {});
const chromaResults = await this.queryChroma(query, 100);
logger.debug('SEARCH', 'Chroma returned semantic matches for timeline', { matchCount: chromaResults?.ids?.length ?? 0 });
if (chromaResults?.ids && chromaResults.ids.length > 0) {
const ninetyDaysAgo = Date.now() - SEARCH_CONSTANTS.RECENCY_WINDOW_MS;
const recentIds = chromaResults.ids.filter((_id, idx) => {
const meta = chromaResults.metadatas[idx];
return meta && meta.created_at_epoch > ninetyDaysAgo;
});
if (recentIds.length > 0) {
results = this.sessionStore.getObservationsByIds(recentIds, { orderBy: 'date_desc', limit: 1 });
}
}
results = await this.searchChromaForTimeline(query, ninetyDaysAgo);
} catch (chromaError) {
logger.error('SEARCH', 'Chroma search failed for timeline, continuing without semantic results', {}, chromaError as Error);
const errorObject = chromaError instanceof Error ? chromaError : new Error(String(chromaError));
logger.error('WORKER', 'Chroma search failed for timeline, continuing without semantic results', {}, errorObject);
}
}
@@ -689,25 +695,29 @@ export class SearchManager {
// Search for decision-type observations
if (this.chromaSync) {
try {
if (query) {
// Semantic search filtered to decision type
logger.debug('SEARCH', 'Using Chroma semantic search with type=decision filter', {});
if (query) {
// Semantic search filtered to decision type
logger.debug('SEARCH', 'Using Chroma semantic search with type=decision filter', {});
try {
const chromaResults = await this.queryChroma(query, Math.min((filters.limit || 20) * 2, 100), { type: 'decision' });
const obsIds = chromaResults.ids;
if (obsIds.length > 0) {
results = this.sessionStore.getObservationsByIds(obsIds, { ...filters, type: 'decision' });
// Preserve Chroma ranking order
results.sort((a, b) => obsIds.indexOf(a.id) - obsIds.indexOf(b.id));
}
} else {
// No query: get all decisions, rank by "decision" keyword
logger.debug('SEARCH', 'Using metadata-first + semantic ranking for decisions', {});
const metadataResults = this.sessionSearch.findByType('decision', filters);
} catch (chromaError) {
const errorObject = chromaError instanceof Error ? chromaError : new Error(String(chromaError));
logger.error('WORKER', 'Chroma search failed for decisions, falling back to metadata search', {}, errorObject);
}
} else {
// No query: get all decisions, rank by "decision" keyword
logger.debug('SEARCH', 'Using metadata-first + semantic ranking for decisions', {});
const metadataResults = this.sessionSearch.findByType('decision', filters);
if (metadataResults.length > 0) {
const ids = metadataResults.map(obs => obs.id);
if (metadataResults.length > 0) {
const ids = metadataResults.map(obs => obs.id);
try {
const chromaResults = await this.queryChroma('decision', Math.min(ids.length, 100));
const rankedIds: number[] = [];
@@ -721,10 +731,11 @@ export class SearchManager {
results = this.sessionStore.getObservationsByIds(rankedIds, { limit: filters.limit || 20 });
results.sort((a, b) => rankedIds.indexOf(a.id) - rankedIds.indexOf(b.id));
}
} catch (chromaError) {
const errorObject = chromaError instanceof Error ? chromaError : new Error(String(chromaError));
logger.error('WORKER', 'Chroma semantic ranking failed for decisions, falling back to metadata search', {}, errorObject);
}
}
} catch (chromaError) {
logger.error('SEARCH', 'Chroma search failed for decisions, falling back to metadata search', {}, chromaError as Error);
}
}
@@ -763,20 +774,20 @@ export class SearchManager {
// Search for change-type observations and change-related concepts
if (this.chromaSync) {
try {
logger.debug('SEARCH', 'Using hybrid search for change-related observations', {});
logger.debug('SEARCH', 'Using hybrid search for change-related observations', {});
// Get all observations with type="change" or concepts containing change
const typeResults = this.sessionSearch.findByType('change', filters);
const conceptChangeResults = this.sessionSearch.findByConcept('change', filters);
const conceptWhatChangedResults = this.sessionSearch.findByConcept('what-changed', filters);
// Get all observations with type="change" or concepts containing change
const typeResults = this.sessionSearch.findByType('change', filters);
const conceptChangeResults = this.sessionSearch.findByConcept('change', filters);
const conceptWhatChangedResults = this.sessionSearch.findByConcept('what-changed', filters);
// Combine and deduplicate
const allIds = new Set<number>();
[...typeResults, ...conceptChangeResults, ...conceptWhatChangedResults].forEach(obs => allIds.add(obs.id));
// Combine and deduplicate
const allIds = new Set<number>();
[...typeResults, ...conceptChangeResults, ...conceptWhatChangedResults].forEach(obs => allIds.add(obs.id));
if (allIds.size > 0) {
const idsArray = Array.from(allIds);
if (allIds.size > 0) {
const idsArray = Array.from(allIds);
try {
const chromaResults = await this.queryChroma('what changed', Math.min(idsArray.length, 100));
const rankedIds: number[] = [];
@@ -790,9 +801,10 @@ export class SearchManager {
results = this.sessionStore.getObservationsByIds(rankedIds, { limit: filters.limit || 20 });
results.sort((a, b) => rankedIds.indexOf(a.id) - rankedIds.indexOf(b.id));
}
} catch (chromaError) {
const errorObject = chromaError instanceof Error ? chromaError : new Error(String(chromaError));
logger.error('WORKER', 'Chroma search failed for changes, falling back to metadata search', {}, errorObject);
}
} catch (chromaError) {
logger.error('SEARCH', 'Chroma search failed for changes, falling back to metadata search', {}, chromaError as Error);
}
}
@@ -1373,7 +1385,8 @@ export class SearchManager {
lines.push(`**Files Read:** ${filesRead.join(', ')}`);
}
} catch (error) {
logger.debug('WORKER', 'files_read is plain string, using as-is', {}, error as Error);
const errorObject = error instanceof Error ? error : new Error(String(error));
logger.debug('WORKER', 'files_read is plain string, using as-is', {}, errorObject);
if (summary.files_read.trim()) {
lines.push(`**Files Read:** ${summary.files_read}`);
}
@@ -1388,7 +1401,8 @@ export class SearchManager {
lines.push(`**Files Edited:** ${filesEdited.join(', ')}`);
}
} catch (error) {
logger.debug('WORKER', 'files_edited is plain string, using as-is', {}, error as Error);
const errorObject = error instanceof Error ? error : new Error(String(error));
logger.debug('WORKER', 'files_edited is plain string, using as-is', {}, errorObject);
if (summary.files_edited.trim()) {
lines.push(`**Files Edited:** ${summary.files_edited}`);
}
+41 -12
View File
@@ -69,7 +69,13 @@ export function detectStaleGenerator(
if (proc && proc.exitCode === null) {
try {
proc.kill('SIGKILL');
} catch {}
} catch (error) {
if (error instanceof Error) {
logger.warn('SESSION', 'Failed to SIGKILL stale generator subprocess', {}, error);
} else {
logger.warn('SESSION', 'Failed to SIGKILL stale generator subprocess with non-Error', {}, new Error(String(error)));
}
}
}
// Signal the SDK agent loop to exit
session.abortController.abort();
@@ -292,10 +298,17 @@ export class SessionManager {
sessionId: sessionDbId
});
} catch (error) {
logger.error('SESSION', 'Failed to persist observation to DB', {
sessionId: sessionDbId,
tool: data.tool_name
}, error);
if (error instanceof Error) {
logger.error('SESSION', 'Failed to persist observation to DB', {
sessionId: sessionDbId,
tool: data.tool_name
}, error);
} else {
logger.error('SESSION', 'Failed to persist observation to DB with non-Error', {
sessionId: sessionDbId,
tool: data.tool_name
}, new Error(String(error)));
}
throw error; // Don't continue if we can't persist
}
@@ -343,9 +356,15 @@ export class SessionManager {
sessionId: sessionDbId
});
} catch (error) {
logger.error('SESSION', 'Failed to persist summarize to DB', {
sessionId: sessionDbId
}, error);
if (error instanceof Error) {
logger.error('SESSION', 'Failed to persist summarize to DB', {
sessionId: sessionDbId
}, error);
} else {
logger.error('SESSION', 'Failed to persist summarize to DB with non-Error', {
sessionId: sessionDbId
}, new Error(String(error)));
}
throw error; // Don't continue if we can't persist
}
@@ -397,9 +416,15 @@ export class SessionManager {
try {
await getSupervisor().getRegistry().reapSession(sessionDbId);
} catch (error) {
logger.warn('SESSION', 'Supervisor reapSession failed (non-blocking)', {
sessionId: sessionDbId
}, error as Error);
if (error instanceof Error) {
logger.warn('SESSION', 'Supervisor reapSession failed (non-blocking)', {
sessionId: sessionDbId
}, error);
} else {
logger.warn('SESSION', 'Supervisor reapSession failed (non-blocking) with non-Error', {
sessionId: sessionDbId
}, new Error(String(error)));
}
}
// 4. Cleanup
@@ -469,7 +494,11 @@ export class SessionManager {
try {
trackedProcess.process.kill('SIGKILL');
} catch (err) {
logger.warn('SESSION', 'Failed to SIGKILL subprocess for stale generator', { sessionDbId }, err as Error);
if (err instanceof Error) {
logger.warn('SESSION', 'Failed to SIGKILL subprocess for stale generator', { sessionDbId }, err);
} else {
logger.warn('SESSION', 'Failed to SIGKILL subprocess for stale generator with non-Error', { sessionDbId }, new Error(String(err)));
}
}
}
// Signal the SDK agent loop to exit after the subprocess dies
+5 -1
View File
@@ -43,7 +43,11 @@ export class SettingsManager {
return settings;
} catch (error) {
logger.debug('WORKER', 'Failed to load settings, using defaults', {}, error as Error);
if (error instanceof Error) {
logger.debug('WORKER', 'Failed to load settings, using defaults', {}, error);
} else {
logger.debug('WORKER', 'Failed to load settings, using defaults', { rawError: String(error) });
}
return { ...this.defaultSettings };
}
}
+3 -2
View File
@@ -27,8 +27,9 @@ export abstract class BaseRouteHandler {
result.catch(error => this.handleError(res, error as Error));
}
} catch (error) {
logger.error('HTTP', 'Route handler error', { path: req.path }, error as Error);
this.handleError(res, error as Error);
const normalizedError = error instanceof Error ? error : new Error(String(error));
logger.error('HTTP', 'Route handler error', { path: req.path }, normalizedError);
this.handleError(res, normalizedError);
}
};
}
@@ -7,6 +7,7 @@
import express, { Request, Response } from 'express';
import { BaseRouteHandler } from '../BaseRouteHandler.js';
import { logger } from '../../../../utils/logger.js';
import { CorpusStore } from '../../knowledge/CorpusStore.js';
import { CorpusBuilder } from '../../knowledge/CorpusBuilder.js';
import { KnowledgeAgent } from '../../knowledge/KnowledgeAgent.js';
@@ -93,7 +94,10 @@ export class CorpusRoutes extends BaseRouteHandler {
if (typeof value === 'string') {
try {
parsed = JSON.parse(value);
} catch {
} catch (parseError: unknown) {
if (parseError instanceof Error) {
logger.debug('HTTP', `${fieldName} is not valid JSON, treating as comma-separated string`, { value });
}
parsed = value.split(',').map(part => part.trim()).filter(Boolean);
}
}
+23 -24
View File
@@ -269,35 +269,34 @@ export class SearchRoutes extends BaseRouteHandler {
return;
}
let result: any;
try {
const result = await this.searchManager.search({
query,
type: 'observations',
project,
limit: String(limit),
format: 'json'
result = await this.searchManager.search({
query, type: 'observations', project, limit: String(limit), format: 'json'
});
const observations = (result as any)?.observations || [];
if (!observations.length) {
res.json({ context: '', count: 0 });
return;
}
// Format as compact markdown for context injection
const lines: string[] = ['## Relevant Past Work (semantic match)\n'];
for (const obs of observations.slice(0, limit)) {
const date = obs.created_at?.slice(0, 10) || '';
lines.push(`### ${obs.title || 'Observation'} (${date})`);
if (obs.narrative) lines.push(obs.narrative);
lines.push('');
}
res.json({ context: lines.join('\n'), count: observations.length });
} catch (error) {
logger.error('SEARCH', 'Semantic context query failed', {}, error as Error);
const normalizedError = error instanceof Error ? error : new Error(String(error));
logger.error('HTTP', 'Semantic context query failed', { query, project }, normalizedError);
res.json({ context: '', count: 0 });
return;
}
const observations = result?.observations || [];
if (!observations.length) {
res.json({ context: '', count: 0 });
return;
}
// Format as compact markdown for context injection
const lines: string[] = ['## Relevant Past Work (semantic match)\n'];
for (const obs of observations.slice(0, limit)) {
const date = obs.created_at?.slice(0, 10) || '';
lines.push(`### ${obs.title || 'Observation'} (${date})`);
if (obs.narrative) lines.push(obs.narrative);
lines.push('');
}
res.json({ context: lines.join('\n'), count: observations.length });
});
/**
+123 -117
View File
@@ -222,7 +222,10 @@ export class SessionRoutes extends BaseRouteHandler {
session.generatorPromise = agent.startSession(session, this.workerService)
.catch(error => {
// Only log non-abort errors
if (myController.signal.aborted) return;
if (myController.signal.aborted) {
logger.debug('HTTP', 'Generator catch: ignoring error after abort', { sessionId: session.sessionDbId });
return;
}
const errorMsg = error instanceof Error ? error.message : String(error);
@@ -257,9 +260,10 @@ export class SessionRoutes extends BaseRouteHandler {
});
}
} catch (dbError) {
logger.error('SESSION', 'Failed to mark messages as failed', {
const normalizedDbError = dbError instanceof Error ? dbError : new Error(String(dbError));
logger.error('HTTP', 'Failed to mark messages as failed', {
sessionId: session.sessionDbId
}, dbError as Error);
}, normalizedDbError);
}
})
.finally(async () => {
@@ -285,75 +289,75 @@ export class SessionRoutes extends BaseRouteHandler {
// Crash recovery: If not aborted and still has work, restart (with limit)
if (!wasAborted) {
const pendingStore = this.sessionManager.getPendingMessageStore();
const MAX_CONSECUTIVE_RESTARTS = 3;
let pendingCount: number;
try {
const pendingStore = this.sessionManager.getPendingMessageStore();
const pendingCount = pendingStore.getPendingCount(sessionDbId);
pendingCount = pendingStore.getPendingCount(sessionDbId);
} catch (e) {
const normalizedRecoveryError = e instanceof Error ? e : new Error(String(e));
logger.error('HTTP', 'Error during recovery check, aborting to prevent leaks', { sessionId: sessionDbId }, normalizedRecoveryError);
session.abortController.abort();
return;
}
// CRITICAL: Limit consecutive restarts to prevent infinite loops
// This prevents runaway API costs when there's a persistent error (e.g., memorySessionId not captured)
const MAX_CONSECUTIVE_RESTARTS = 3;
if (pendingCount > 0) {
// GUARD: Prevent duplicate crash recovery spawns
if (this.crashRecoveryScheduled.has(sessionDbId)) {
logger.debug('SESSION', 'Crash recovery already scheduled', { sessionDbId });
return;
}
if (pendingCount > 0) {
// GUARD: Prevent duplicate crash recovery spawns
if (this.crashRecoveryScheduled.has(sessionDbId)) {
logger.debug('SESSION', 'Crash recovery already scheduled', { sessionDbId });
return;
}
session.consecutiveRestarts = (session.consecutiveRestarts || 0) + 1;
session.consecutiveRestarts = (session.consecutiveRestarts || 0) + 1;
if (session.consecutiveRestarts > MAX_CONSECUTIVE_RESTARTS) {
logger.error('SESSION', `CRITICAL: Generator restart limit exceeded - stopping to prevent runaway costs`, {
sessionId: sessionDbId,
pendingCount,
consecutiveRestarts: session.consecutiveRestarts,
maxRestarts: MAX_CONSECUTIVE_RESTARTS,
action: 'Generator will NOT restart. Check logs for root cause. Messages remain in pending state.'
});
// Don't restart - abort to prevent further API calls
session.abortController.abort();
return;
}
logger.info('SESSION', `Restarting generator after crash/exit with pending work`, {
if (session.consecutiveRestarts > MAX_CONSECUTIVE_RESTARTS) {
logger.error('SESSION', `CRITICAL: Generator restart limit exceeded - stopping to prevent runaway costs`, {
sessionId: sessionDbId,
pendingCount,
consecutiveRestarts: session.consecutiveRestarts,
maxRestarts: MAX_CONSECUTIVE_RESTARTS
maxRestarts: MAX_CONSECUTIVE_RESTARTS,
action: 'Generator will NOT restart. Check logs for root cause. Messages remain in pending state.'
});
// Abort OLD controller before replacing to prevent child process leaks
const oldController = session.abortController;
session.abortController = new AbortController();
oldController.abort();
this.crashRecoveryScheduled.add(sessionDbId);
// Exponential backoff: 1s, 2s, 4s for subsequent restarts
const backoffMs = Math.min(1000 * Math.pow(2, session.consecutiveRestarts - 1), 8000);
// Delay before restart with exponential backoff
setTimeout(() => {
this.crashRecoveryScheduled.delete(sessionDbId);
const stillExists = this.sessionManager.getSession(sessionDbId);
if (stillExists && !stillExists.generatorPromise) {
this.applyTierRouting(stillExists);
this.startGeneratorWithProvider(stillExists, this.getSelectedProvider(), 'crash-recovery');
}
}, backoffMs);
} else {
// No pending work - abort to kill the child process
// Don't restart - abort to prevent further API calls
session.abortController.abort();
// Reset restart counter on successful completion
session.consecutiveRestarts = 0;
logger.debug('SESSION', 'Aborted controller after natural completion', {
sessionId: sessionDbId
});
return;
}
} catch (e) {
// Ignore errors during recovery check, but still abort to prevent leaks
logger.debug('SESSION', 'Error during recovery check, aborting to prevent leaks', { sessionId: sessionDbId, error: e instanceof Error ? e.message : String(e) });
logger.info('SESSION', `Restarting generator after crash/exit with pending work`, {
sessionId: sessionDbId,
pendingCount,
consecutiveRestarts: session.consecutiveRestarts,
maxRestarts: MAX_CONSECUTIVE_RESTARTS
});
// Abort OLD controller before replacing to prevent child process leaks
const oldController = session.abortController;
session.abortController = new AbortController();
oldController.abort();
this.crashRecoveryScheduled.add(sessionDbId);
// Exponential backoff: 1s, 2s, 4s for subsequent restarts
const backoffMs = Math.min(1000 * Math.pow(2, session.consecutiveRestarts - 1), 8000);
// Delay before restart with exponential backoff
setTimeout(() => {
this.crashRecoveryScheduled.delete(sessionDbId);
const stillExists = this.sessionManager.getSession(sessionDbId);
if (stillExists && !stillExists.generatorPromise) {
this.applyTierRouting(stillExists);
this.startGeneratorWithProvider(stillExists, this.getSelectedProvider(), 'crash-recovery');
}
}, backoffMs);
} else {
// No pending work - abort to kill the child process
session.abortController.abort();
// Reset restart counter on successful completion
session.consecutiveRestarts = 0;
logger.debug('SESSION', 'Aborted controller after natural completion', {
sessionId: sessionDbId
});
}
}
// NOTE: We do NOT delete the session here anymore.
@@ -586,65 +590,67 @@ export class SessionRoutes extends BaseRouteHandler {
}
}
const store = this.dbManager.getSessionStore();
let sessionDbId: number;
let promptNumber: number;
try {
const store = this.dbManager.getSessionStore();
// Get or create session
const sessionDbId = store.createSDKSession(contentSessionId, project, '', undefined, platformSource);
const promptNumber = store.getPromptNumberFromUserPrompts(contentSessionId);
// Privacy check: skip if user prompt was entirely private
const userPrompt = PrivacyCheckValidator.checkUserPromptPrivacy(
store,
contentSessionId,
promptNumber,
'observation',
sessionDbId,
{ tool_name }
);
if (!userPrompt) {
res.json({ status: 'skipped', reason: 'private' });
return;
}
// Strip memory tags from tool_input and tool_response
const cleanedToolInput = tool_input !== undefined
? stripMemoryTagsFromJson(JSON.stringify(tool_input))
: '{}';
const cleanedToolResponse = tool_response !== undefined
? stripMemoryTagsFromJson(JSON.stringify(tool_response))
: '{}';
// Queue observation
this.sessionManager.queueObservation(sessionDbId, {
tool_name,
tool_input: cleanedToolInput,
tool_response: cleanedToolResponse,
prompt_number: promptNumber,
cwd: cwd || (() => {
logger.error('SESSION', 'Missing cwd when queueing observation in SessionRoutes', {
sessionId: sessionDbId,
tool_name
});
return '';
})(),
agentId: typeof agentId === 'string' ? agentId : undefined,
agentType: typeof agentType === 'string' ? agentType : undefined,
});
// Ensure SDK agent is running
this.ensureGeneratorRunning(sessionDbId, 'observation');
// Broadcast observation queued event
this.eventBroadcaster.broadcastObservationQueued(sessionDbId);
res.json({ status: 'queued' });
sessionDbId = store.createSDKSession(contentSessionId, project, '', undefined, platformSource);
promptNumber = store.getPromptNumberFromUserPrompts(contentSessionId);
} catch (error) {
// Return 200 on recoverable errors so the hook doesn't break
logger.error('SESSION', 'Observation storage failed', { contentSessionId, tool_name }, error as Error);
res.json({ stored: false, reason: (error as Error).message });
const normalizedError = error instanceof Error ? error : new Error(String(error));
logger.error('HTTP', 'Observation storage failed', { contentSessionId, tool_name }, normalizedError);
res.json({ stored: false, reason: normalizedError.message });
return;
}
// Privacy check: skip if user prompt was entirely private
const userPrompt = PrivacyCheckValidator.checkUserPromptPrivacy(
store,
contentSessionId,
promptNumber,
'observation',
sessionDbId,
{ tool_name }
);
if (!userPrompt) {
res.json({ status: 'skipped', reason: 'private' });
return;
}
// Strip memory tags from tool_input and tool_response
const cleanedToolInput = tool_input !== undefined
? stripMemoryTagsFromJson(JSON.stringify(tool_input))
: '{}';
const cleanedToolResponse = tool_response !== undefined
? stripMemoryTagsFromJson(JSON.stringify(tool_response))
: '{}';
// Queue observation
this.sessionManager.queueObservation(sessionDbId, {
tool_name,
tool_input: cleanedToolInput,
tool_response: cleanedToolResponse,
prompt_number: promptNumber,
cwd: cwd || (() => {
logger.error('SESSION', 'Missing cwd when queueing observation in SessionRoutes', {
sessionId: sessionDbId,
tool_name
});
return '';
})(),
agentId: typeof agentId === 'string' ? agentId : undefined,
agentType: typeof agentType === 'string' ? agentType : undefined,
});
// Ensure SDK agent is running
this.ensureGeneratorRunning(sessionDbId, 'observation');
// Broadcast observation queued event
this.eventBroadcaster.broadcastObservationQueued(sessionDbId);
res.json({ status: 'queued' });
});
/**
@@ -74,7 +74,8 @@ export class SettingsRoutes extends BaseRouteHandler {
try {
settings = JSON.parse(settingsData);
} catch (parseError) {
logger.error('SETTINGS', 'Failed to parse settings file', { settingsPath }, parseError as Error);
const normalizedParseError = parseError instanceof Error ? parseError : new Error(String(parseError));
logger.error('HTTP', 'Failed to parse settings file', { settingsPath }, normalizedParseError);
res.status(500).json({
success: false,
error: 'Settings file is corrupted. Delete ~/.claude-mem/settings.json to reset.'
@@ -71,7 +71,10 @@ export class ViewerRoutes extends BaseRouteHandler {
// Guard: if DB is not yet initialized, return 503 before registering client
try {
this.dbManager.getSessionStore();
} catch {
} catch (initError: unknown) {
if (initError instanceof Error) {
logger.warn('HTTP', 'SSE stream requested before DB initialization', {}, initError);
}
res.status(503).json({ error: 'Service initializing' });
return;
}
@@ -23,7 +23,12 @@ function safeParseJsonArray(value: unknown): string[] {
try {
const parsed = JSON.parse(value);
return Array.isArray(parsed) ? parsed.filter((v): v is string => typeof v === 'string') : [];
} catch {
} catch (error) {
if (error instanceof Error) {
logger.warn('WORKER', 'Failed to parse JSON array field', {}, error);
} else {
logger.warn('WORKER', 'Failed to parse JSON array field (non-Error thrown)', { thrownValue: String(error) });
}
return [];
}
}
+10 -2
View File
@@ -46,7 +46,11 @@ export class CorpusStore {
const raw = fs.readFileSync(filePath, 'utf-8');
return JSON.parse(raw) as CorpusFile;
} catch (error) {
logger.error('WORKER', `Failed to read corpus file: ${filePath}`, { error });
if (error instanceof Error) {
logger.error('WORKER', `Failed to read corpus file: ${filePath}`, {}, error);
} else {
logger.error('WORKER', `Failed to read corpus file: ${filePath} (non-Error thrown)`, { thrownValue: String(error) });
}
return null;
}
}
@@ -73,7 +77,11 @@ export class CorpusStore {
session_id: corpus.session_id,
});
} catch (error) {
logger.error('WORKER', `Failed to parse corpus file: ${file}`, { error });
if (error instanceof Error) {
logger.error('WORKER', `Failed to parse corpus file: ${file}`, {}, error);
} else {
logger.error('WORKER', `Failed to parse corpus file: ${file} (non-Error thrown)`, { thrownValue: String(error) });
}
}
}
@@ -96,7 +96,11 @@ export class KnowledgeAgent {
// exits with a non-zero code. If we already captured a session_id,
// treat this as success — the session was created and primed.
if (sessionId) {
logger.debug('WORKER', `SDK process exited after priming corpus "${corpus.name}" — session captured, continuing`, {}, error as Error);
if (error instanceof Error) {
logger.debug('WORKER', `SDK process exited after priming corpus "${corpus.name}" — session captured, continuing`, {}, error);
} else {
logger.debug('WORKER', `SDK process exited after priming corpus "${corpus.name}" — session captured, continuing (non-Error thrown)`, { thrownValue: String(error) });
}
} else {
throw error;
}
@@ -132,6 +136,11 @@ export class KnowledgeAgent {
return result;
} catch (error) {
if (!this.isSessionResumeError(error)) {
if (error instanceof Error) {
logger.error('WORKER', `Query failed for corpus "${corpus.name}"`, {}, error);
} else {
logger.error('WORKER', `Query failed for corpus "${corpus.name}" (non-Error thrown)`, { thrownValue: String(error) });
}
throw error;
}
// Session expired or invalid — auto-reprime and retry
@@ -207,7 +216,11 @@ export class KnowledgeAgent {
// Same as prime() — SDK may throw after all messages are yielded.
// If we captured an answer, treat as success.
if (answer) {
logger.debug('WORKER', `SDK process exited after query — answer captured, continuing`, {}, error as Error);
if (error instanceof Error) {
logger.debug('WORKER', `SDK process exited after query — answer captured, continuing`, {}, error);
} else {
logger.debug('WORKER', `SDK process exited after query — answer captured, continuing (non-Error thrown)`, { thrownValue: String(error) });
}
} else {
throw error;
}
@@ -259,7 +272,11 @@ export class KnowledgeAgent {
if (claudePath) return claudePath;
} catch (error) {
logger.debug('WORKER', 'Claude executable auto-detection failed', {}, error as Error);
if (error instanceof Error) {
logger.debug('WORKER', 'Claude executable auto-detection failed', {}, error);
} else {
logger.debug('WORKER', 'Claude executable auto-detection failed (non-Error thrown)', { thrownValue: String(error) });
}
}
throw new Error('Claude executable not found. Please either:\n1. Add "claude" to your system PATH, or\n2. Set CLAUDE_CODE_PATH in ~/.claude-mem/settings.json');
@@ -63,82 +63,19 @@ export class ChromaSearchStrategy extends BaseSearchStrategy implements SearchSt
let sessions: SessionSummarySearchResult[] = [];
let prompts: UserPromptSearchResult[] = [];
// Build Chroma where filter for doc_type and project
const whereFilter = this.buildWhereFilter(searchType, project);
logger.debug('SEARCH', 'ChromaSearchStrategy: Querying Chroma', { query, searchType });
try {
// Build Chroma where filter for doc_type and project
const whereFilter = this.buildWhereFilter(searchType, project);
// Step 1: Chroma semantic search
logger.debug('SEARCH', 'ChromaSearchStrategy: Querying Chroma', { query, searchType });
const chromaResults = await this.chromaSync.queryChroma(
query,
SEARCH_CONSTANTS.CHROMA_BATCH_SIZE,
whereFilter
);
logger.debug('SEARCH', 'ChromaSearchStrategy: Chroma returned matches', {
matchCount: chromaResults.ids.length
return await this.executeChromaSearch(query, whereFilter, {
searchObservations, searchSessions, searchPrompts,
obsType, concepts, files, orderBy, limit, project
});
if (chromaResults.ids.length === 0) {
// No matches - this is the correct answer
return {
results: { observations: [], sessions: [], prompts: [] },
usedChroma: true,
fellBack: false,
strategy: 'chroma'
};
}
// Step 2: Filter by recency (90 days)
const recentItems = this.filterByRecency(chromaResults);
logger.debug('SEARCH', 'ChromaSearchStrategy: Filtered by recency', {
count: recentItems.length
});
// Step 3: Categorize by document type
const categorized = this.categorizeByDocType(recentItems, {
searchObservations,
searchSessions,
searchPrompts
});
// Step 4: Hydrate from SQLite with additional filters
if (categorized.obsIds.length > 0) {
const obsOptions = { type: obsType, concepts, files, orderBy, limit, project };
observations = this.sessionStore.getObservationsByIds(categorized.obsIds, obsOptions);
}
if (categorized.sessionIds.length > 0) {
sessions = this.sessionStore.getSessionSummariesByIds(categorized.sessionIds, {
orderBy,
limit,
project
});
}
if (categorized.promptIds.length > 0) {
prompts = this.sessionStore.getUserPromptsByIds(categorized.promptIds, {
orderBy,
limit,
project
});
}
logger.debug('SEARCH', 'ChromaSearchStrategy: Hydrated results', {
observations: observations.length,
sessions: sessions.length,
prompts: prompts.length
});
return {
results: { observations, sessions, prompts },
usedChroma: true,
fellBack: false,
strategy: 'chroma'
};
} catch (error) {
logger.error('SEARCH', 'ChromaSearchStrategy: Search failed', {}, error as Error);
const errorObj = error instanceof Error ? error : new Error(String(error));
logger.error('WORKER', 'ChromaSearchStrategy: Search failed', {}, errorObj);
// Return empty result - caller may try fallback strategy
return {
results: { observations: [], sessions: [], prompts: [] },
@@ -149,6 +86,68 @@ export class ChromaSearchStrategy extends BaseSearchStrategy implements SearchSt
}
}
private async executeChromaSearch(
query: string,
whereFilter: Record<string, any> | undefined,
options: {
searchObservations: boolean;
searchSessions: boolean;
searchPrompts: boolean;
obsType?: string | string[];
concepts?: string | string[];
files?: string | string[];
orderBy: 'relevance' | 'date_desc' | 'date_asc';
limit: number;
project?: string;
}
): Promise<StrategySearchResult> {
const chromaResults = await this.chromaSync.queryChroma(
query,
SEARCH_CONSTANTS.CHROMA_BATCH_SIZE,
whereFilter
);
if (chromaResults.ids.length === 0) {
return {
results: { observations: [], sessions: [], prompts: [] },
usedChroma: true,
fellBack: false,
strategy: 'chroma'
};
}
const recentItems = this.filterByRecency(chromaResults);
const categorized = this.categorizeByDocType(recentItems, options);
let observations: ObservationSearchResult[] = [];
let sessions: SessionSummarySearchResult[] = [];
let prompts: UserPromptSearchResult[] = [];
if (categorized.obsIds.length > 0) {
const obsOptions = { type: options.obsType, concepts: options.concepts, files: options.files, orderBy: options.orderBy, limit: options.limit, project: options.project };
observations = this.sessionStore.getObservationsByIds(categorized.obsIds, obsOptions);
}
if (categorized.sessionIds.length > 0) {
sessions = this.sessionStore.getSessionSummariesByIds(categorized.sessionIds, {
orderBy: options.orderBy, limit: options.limit, project: options.project
});
}
if (categorized.promptIds.length > 0) {
prompts = this.sessionStore.getUserPromptsByIds(categorized.promptIds, {
orderBy: options.orderBy, limit: options.limit, project: options.project
});
}
return {
results: { observations, sessions, prompts },
usedChroma: true,
fellBack: false,
strategy: 'chroma'
};
}
/**
* Build Chroma where filter for document type and project
*
@@ -68,50 +68,22 @@ export class HybridSearchStrategy extends BaseSearchStrategy implements SearchSt
const { limit = SEARCH_CONSTANTS.DEFAULT_LIMIT, project, dateRange, orderBy } = options;
const filterOptions = { limit, project, dateRange, orderBy };
try {
logger.debug('SEARCH', 'HybridSearchStrategy: findByConcept', { concept });
logger.debug('SEARCH', 'HybridSearchStrategy: findByConcept', { concept });
// Step 1: SQLite metadata filter
const metadataResults = this.sessionSearch.findByConcept(concept, filterOptions);
logger.debug('SEARCH', 'HybridSearchStrategy: Found metadata matches', {
count: metadataResults.length
});
if (metadataResults.length === 0) {
return this.emptyResult('hybrid');
}
// Step 2: Chroma semantic ranking
const ids = metadataResults.map(obs => obs.id);
const chromaResults = await this.chromaSync.queryChroma(
concept,
Math.min(ids.length, SEARCH_CONSTANTS.CHROMA_BATCH_SIZE)
);
// Step 3: Intersect - keep only IDs from metadata, in Chroma rank order
const rankedIds = this.intersectWithRanking(ids, chromaResults.ids);
logger.debug('SEARCH', 'HybridSearchStrategy: Ranked by semantic relevance', {
count: rankedIds.length
});
// Step 4: Hydrate in semantic rank order
if (rankedIds.length > 0) {
const observations = this.sessionStore.getObservationsByIds(rankedIds, { limit });
// Restore semantic ranking order
observations.sort((a, b) => rankedIds.indexOf(a.id) - rankedIds.indexOf(b.id));
return {
results: { observations, sessions: [], prompts: [] },
usedChroma: true,
fellBack: false,
strategy: 'hybrid'
};
}
// Step 1: SQLite metadata filter
const metadataResults = this.sessionSearch.findByConcept(concept, filterOptions);
if (metadataResults.length === 0) {
return this.emptyResult('hybrid');
}
const ids = metadataResults.map(obs => obs.id);
try {
return await this.rankAndHydrate(concept, ids, limit);
} catch (error) {
logger.error('SEARCH', 'HybridSearchStrategy: findByConcept failed', {}, error as Error);
const errorObj = error instanceof Error ? error : new Error(String(error));
logger.error('WORKER', 'HybridSearchStrategy: findByConcept failed', {}, errorObj);
// Fall back to metadata-only results
const results = this.sessionSearch.findByConcept(concept, filterOptions);
return {
@@ -134,49 +106,22 @@ export class HybridSearchStrategy extends BaseSearchStrategy implements SearchSt
const filterOptions = { limit, project, dateRange, orderBy };
const typeStr = Array.isArray(type) ? type.join(', ') : type;
try {
logger.debug('SEARCH', 'HybridSearchStrategy: findByType', { type: typeStr });
logger.debug('SEARCH', 'HybridSearchStrategy: findByType', { type: typeStr });
// Step 1: SQLite metadata filter
const metadataResults = this.sessionSearch.findByType(type as any, filterOptions);
logger.debug('SEARCH', 'HybridSearchStrategy: Found metadata matches', {
count: metadataResults.length
});
if (metadataResults.length === 0) {
return this.emptyResult('hybrid');
}
// Step 2: Chroma semantic ranking
const ids = metadataResults.map(obs => obs.id);
const chromaResults = await this.chromaSync.queryChroma(
typeStr,
Math.min(ids.length, SEARCH_CONSTANTS.CHROMA_BATCH_SIZE)
);
// Step 3: Intersect with ranking
const rankedIds = this.intersectWithRanking(ids, chromaResults.ids);
logger.debug('SEARCH', 'HybridSearchStrategy: Ranked by semantic relevance', {
count: rankedIds.length
});
// Step 4: Hydrate in rank order
if (rankedIds.length > 0) {
const observations = this.sessionStore.getObservationsByIds(rankedIds, { limit });
observations.sort((a, b) => rankedIds.indexOf(a.id) - rankedIds.indexOf(b.id));
return {
results: { observations, sessions: [], prompts: [] },
usedChroma: true,
fellBack: false,
strategy: 'hybrid'
};
}
// Step 1: SQLite metadata filter
const metadataResults = this.sessionSearch.findByType(type as any, filterOptions);
if (metadataResults.length === 0) {
return this.emptyResult('hybrid');
}
const ids = metadataResults.map(obs => obs.id);
try {
return await this.rankAndHydrate(typeStr, ids, limit);
} catch (error) {
logger.error('SEARCH', 'HybridSearchStrategy: findByType failed', {}, error as Error);
const errorObj = error instanceof Error ? error : new Error(String(error));
logger.error('WORKER', 'HybridSearchStrategy: findByType failed', {}, errorObj);
const results = this.sessionSearch.findByType(type as any, filterOptions);
return {
results: { observations: results, sessions: [], prompts: [] },
@@ -201,48 +146,23 @@ export class HybridSearchStrategy extends BaseSearchStrategy implements SearchSt
const { limit = SEARCH_CONSTANTS.DEFAULT_LIMIT, project, dateRange, orderBy } = options;
const filterOptions = { limit, project, dateRange, orderBy };
try {
logger.debug('SEARCH', 'HybridSearchStrategy: findByFile', { filePath });
logger.debug('SEARCH', 'HybridSearchStrategy: findByFile', { filePath });
// Step 1: SQLite metadata filter
const metadataResults = this.sessionSearch.findByFile(filePath, filterOptions);
logger.debug('SEARCH', 'HybridSearchStrategy: Found file matches', {
observations: metadataResults.observations.length,
sessions: metadataResults.sessions.length
});
// Sessions don't need semantic ranking (already summarized)
const sessions = metadataResults.sessions;
if (metadataResults.observations.length === 0) {
return { observations: [], sessions, usedChroma: false };
}
// Step 2: Chroma semantic ranking for observations
const ids = metadataResults.observations.map(obs => obs.id);
const chromaResults = await this.chromaSync.queryChroma(
filePath,
Math.min(ids.length, SEARCH_CONSTANTS.CHROMA_BATCH_SIZE)
);
// Step 3: Intersect with ranking
const rankedIds = this.intersectWithRanking(ids, chromaResults.ids);
logger.debug('SEARCH', 'HybridSearchStrategy: Ranked observations', {
count: rankedIds.length
});
// Step 4: Hydrate in rank order
if (rankedIds.length > 0) {
const observations = this.sessionStore.getObservationsByIds(rankedIds, { limit });
observations.sort((a, b) => rankedIds.indexOf(a.id) - rankedIds.indexOf(b.id));
return { observations, sessions, usedChroma: true };
}
// Step 1: SQLite metadata filter
const metadataResults = this.sessionSearch.findByFile(filePath, filterOptions);
const sessions = metadataResults.sessions;
if (metadataResults.observations.length === 0) {
return { observations: [], sessions, usedChroma: false };
}
const ids = metadataResults.observations.map(obs => obs.id);
try {
return await this.rankAndHydrateForFile(filePath, ids, limit, sessions);
} catch (error) {
logger.error('SEARCH', 'HybridSearchStrategy: findByFile failed', {}, error as Error);
const errorObj = error instanceof Error ? error : new Error(String(error));
logger.error('WORKER', 'HybridSearchStrategy: findByFile failed', {}, errorObj);
const results = this.sessionSearch.findByFile(filePath, filterOptions);
return {
observations: results.observations,
@@ -252,6 +172,56 @@ export class HybridSearchStrategy extends BaseSearchStrategy implements SearchSt
}
}
private async rankAndHydrate(
queryText: string,
metadataIds: number[],
limit: number
): Promise<StrategySearchResult> {
const chromaResults = await this.chromaSync.queryChroma(
queryText,
Math.min(metadataIds.length, SEARCH_CONSTANTS.CHROMA_BATCH_SIZE)
);
const rankedIds = this.intersectWithRanking(metadataIds, chromaResults.ids);
if (rankedIds.length > 0) {
const observations = this.sessionStore.getObservationsByIds(rankedIds, { limit });
observations.sort((a, b) => rankedIds.indexOf(a.id) - rankedIds.indexOf(b.id));
return {
results: { observations, sessions: [], prompts: [] },
usedChroma: true,
fellBack: false,
strategy: 'hybrid'
};
}
return this.emptyResult('hybrid');
}
private async rankAndHydrateForFile(
filePath: string,
metadataIds: number[],
limit: number,
sessions: SessionSummarySearchResult[]
): Promise<{ observations: ObservationSearchResult[]; sessions: SessionSummarySearchResult[]; usedChroma: boolean }> {
const chromaResults = await this.chromaSync.queryChroma(
filePath,
Math.min(metadataIds.length, SEARCH_CONSTANTS.CHROMA_BATCH_SIZE)
);
const rankedIds = this.intersectWithRanking(metadataIds, chromaResults.ids);
if (rankedIds.length > 0) {
const observations = this.sessionStore.getObservationsByIds(rankedIds, { limit });
observations.sort((a, b) => rankedIds.indexOf(a.id) - rankedIds.indexOf(b.id));
return { observations, sessions, usedChroma: true };
}
return { observations: [], sessions, usedChroma: false };
}
/**
* Intersect metadata IDs with Chroma IDs, preserving Chroma's rank order
*/
@@ -64,44 +64,45 @@ export class SQLiteSearchStrategy extends BaseSearchStrategy implements SearchSt
hasProject: !!project
});
const obsOptions = searchObservations ? { ...baseOptions, type: obsType, concepts, files } : null;
try {
if (searchObservations) {
const obsOptions = {
...baseOptions,
type: obsType,
concepts,
files
};
observations = this.sessionSearch.searchObservations(undefined, obsOptions);
}
if (searchSessions) {
sessions = this.sessionSearch.searchSessions(undefined, baseOptions);
}
if (searchPrompts) {
prompts = this.sessionSearch.searchUserPrompts(undefined, baseOptions);
}
logger.debug('SEARCH', 'SQLiteSearchStrategy: Results', {
observations: observations.length,
sessions: sessions.length,
prompts: prompts.length
});
return {
results: { observations, sessions, prompts },
usedChroma: false,
fellBack: false,
strategy: 'sqlite'
};
return this.executeSqliteSearch(obsOptions, searchSessions, searchPrompts, baseOptions);
} catch (error) {
logger.error('SEARCH', 'SQLiteSearchStrategy: Search failed', {}, error as Error);
const errorObj = error instanceof Error ? error : new Error(String(error));
logger.error('WORKER', 'SQLiteSearchStrategy: Search failed', {}, errorObj);
return this.emptyResult('sqlite');
}
}
private executeSqliteSearch(
obsOptions: Record<string, any> | null,
searchSessions: boolean,
searchPrompts: boolean,
baseOptions: Record<string, any>
): StrategySearchResult {
let observations: ObservationSearchResult[] = [];
let sessions: SessionSummarySearchResult[] = [];
let prompts: UserPromptSearchResult[] = [];
if (obsOptions) {
observations = this.sessionSearch.searchObservations(undefined, obsOptions);
}
if (searchSessions) {
sessions = this.sessionSearch.searchSessions(undefined, baseOptions);
}
if (searchPrompts) {
prompts = this.sessionSearch.searchUserPrompts(undefined, baseOptions);
}
return {
results: { observations, sessions, prompts },
usedChroma: false,
fellBack: false,
strategy: 'sqlite'
};
}
/**
* Find observations by concept (used by findByConcept tool)
*/