fix: resolve all CodeRabbit review comments

Critical fixes:
- GeminiCliHooksInstaller: wrap install/uninstall prep in try/catch to maintain
  numeric return contract
- McpIntegrations: move mkdirSync inside try block for Goose installer

Major fixes:
- import-xml-observations: skip entries with invalid dates before calling toISOString()
- runtime.ts: guard response.json() parsing
- WorktreeAdoption: delay adoptedSqliteIds mutation until SQL succeeds
- CursorHooksInstaller: move mkdirSync inside try block
- McpIntegrations: throw on failed claude-mem block replacement
- OpenClawInstaller: propagate parse failure instead of returning {}
- OpenClawInstaller: move mkdirSync inside try block
- WindsurfHooksInstaller: validate hooks.json shape with optional chaining and nullish defaults
- timeline/queries: pass normalized Error directly to logger
- ChromaSync: use composite dedup key (entityType:id) to prevent cross-type collisions
- EnvManager: wrap preflight directory/file ops in try/catch with ENV logging

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Alex Newman
2026-04-19 20:17:57 -07:00
parent 30a0ab4ddb
commit d2eb89d27f
11 changed files with 104 additions and 84 deletions
+3
View File
@@ -78,6 +78,9 @@ function buildTimestampMap(): TimestampMapping {
if (timestamp && sessionId) { if (timestamp && sessionId) {
// Round timestamp to second for matching with XML timestamps // Round timestamp to second for matching with XML timestamps
const roundedTimestamp = new Date(timestamp); const roundedTimestamp = new Date(timestamp);
if (Number.isNaN(roundedTimestamp.getTime())) {
continue;
}
roundedTimestamp.setMilliseconds(0); roundedTimestamp.setMilliseconds(0);
const key = roundedTimestamp.toISOString(); const key = roundedTimestamp.toISOString();
+8 -1
View File
@@ -179,7 +179,14 @@ export async function runSearchCommand(queryParts: string[]): Promise<void> {
process.exit(1); process.exit(1);
} }
const data = await response.json(); let data: unknown;
try {
data = await response.json();
} catch (error: unknown) {
const message = error instanceof Error ? error.message : String(error);
console.error(pc.red(`Search failed: invalid JSON response (${message})`));
process.exit(1);
}
if (typeof data === 'object' && data !== null) { if (typeof data === 'object' && data !== null) {
console.log(JSON.stringify(data, null, 2)); console.log(JSON.stringify(data, null, 2));
@@ -254,10 +254,10 @@ export async function adoptMergedWorktrees(opts: {
worktreeProject, worktreeProject,
parentProject parentProject
) as Array<{ id: number }>; ) as Array<{ id: number }>;
for (const r of rows) adoptedSqliteIds.push(r.id);
const obsChanges = updateObs.run(parentProject, worktreeProject).changes; const obsChanges = updateObs.run(parentProject, worktreeProject).changes;
const sumChanges = updateSum.run(parentProject, worktreeProject).changes; const sumChanges = updateSum.run(parentProject, worktreeProject).changes;
for (const r of rows) adoptedSqliteIds.push(r.id);
result.adoptedObservations += obsChanges; result.adoptedObservations += obsChanges;
result.adoptedSummaries += sumChanges; result.adoptedSummaries += sumChanges;
}; };
@@ -316,9 +316,6 @@ export async function installCursorHooks(target: CursorInstallTarget): Promise<n
const workspaceRoot = process.cwd(); const workspaceRoot = process.cwd();
// Create target directory
mkdirSync(targetDir, { recursive: true });
// Generate hooks.json with unified CLI commands // Generate hooks.json with unified CLI commands
const hooksJsonPath = path.join(targetDir, 'hooks.json'); const hooksJsonPath = path.join(targetDir, 'hooks.json');
@@ -360,6 +357,8 @@ export async function installCursorHooks(target: CursorInstallTarget): Promise<n
}; };
try { try {
// Create target directory inside try to catch EACCES/EPERM
mkdirSync(targetDir, { recursive: true });
await writeHooksJsonAndSetupProject(hooksJsonPath, hooksJson, workerServicePath, target, targetDir, workspaceRoot); await writeHooksJsonAndSetupProject(hooksJsonPath, hooksJson, workerServicePath, target, targetDir, workspaceRoot);
return 0; return 0;
} catch (error) { } catch (error) {
@@ -291,6 +291,7 @@ export async function installGeminiCliHooks(): Promise<number> {
console.log(` Using Bun runtime: ${bunPath}`); console.log(` Using Bun runtime: ${bunPath}`);
console.log(` Worker service: ${workerServicePath}`); console.log(` Worker service: ${workerServicePath}`);
try {
// Build hook commands for all mapped events // Build hook commands for all mapped events
const hooksConfig: GeminiHooksConfig = {}; const hooksConfig: GeminiHooksConfig = {};
for (const geminiEvent of Object.keys(GEMINI_EVENT_TO_INTERNAL_EVENT)) { for (const geminiEvent of Object.keys(GEMINI_EVENT_TO_INTERNAL_EVENT)) {
@@ -302,7 +303,6 @@ export async function installGeminiCliHooks(): Promise<number> {
const existingSettings = readGeminiSettings(); const existingSettings = readGeminiSettings();
const mergedSettings = mergeHooksIntoSettings(existingSettings, hooksConfig); const mergedSettings = mergeHooksIntoSettings(existingSettings, hooksConfig);
try {
writeGeminiHooksAndSetupContext(mergedSettings); writeGeminiHooksAndSetupContext(mergedSettings);
return 0; return 0;
} catch (error) { } catch (error) {
@@ -358,6 +358,7 @@ export function uninstallGeminiCliHooks(): number {
return 0; return 0;
} }
try {
const settings = readGeminiSettings(); const settings = readGeminiSettings();
if (!settings.hooks) { if (!settings.hooks) {
console.log(' No hooks found in Gemini CLI settings — nothing to uninstall.'); console.log(' No hooks found in Gemini CLI settings — nothing to uninstall.');
@@ -388,7 +389,6 @@ export function uninstallGeminiCliHooks(): number {
delete settings.hooks; delete settings.hooks;
} }
try {
writeSettingsAndCleanupGeminiContext(settings, removedCount); writeSettingsAndCleanupGeminiContext(settings, removedCount);
return 0; return 0;
} catch (error) { } catch (error) {
+4 -3
View File
@@ -288,9 +288,9 @@ export async function installGooseMcpIntegration(): Promise<number> {
const configPath = getGooseConfigPath(); const configPath = getGooseConfigPath();
const configDirectory = path.dirname(configPath); const configDirectory = path.dirname(configPath);
mkdirSync(configDirectory, { recursive: true });
try { try {
mkdirSync(configDirectory, { recursive: true });
mergeGooseYamlConfig(configPath, mcpServerPath); mergeGooseYamlConfig(configPath, mcpServerPath);
return 0; return 0;
} catch (error) { } catch (error) {
@@ -308,9 +308,10 @@ function mergeGooseYamlConfig(configPath: string, mcpServerPath: string): void {
const claudeMemPattern = /( {2}claude-mem:\n(?:.*\n)*?(?= {2}\S|\n\n|^\S|$))/m; const claudeMemPattern = /( {2}claude-mem:\n(?:.*\n)*?(?= {2}\S|\n\n|^\S|$))/m;
const newEntry = buildGooseClaudeMemEntryYaml(mcpServerPath) + '\n'; const newEntry = buildGooseClaudeMemEntryYaml(mcpServerPath) + '\n';
if (claudeMemPattern.test(yamlContent)) { if (!claudeMemPattern.test(yamlContent)) {
yamlContent = yamlContent.replace(claudeMemPattern, newEntry); throw new Error('Found mcpServers/claude-mem markers but could not locate a replaceable claude-mem block');
} }
yamlContent = yamlContent.replace(claudeMemPattern, newEntry);
writeFileSync(configPath, yamlContent); writeFileSync(configPath, yamlContent);
console.log(` Updated existing claude-mem entry in: ${configPath}`); console.log(` Updated existing claude-mem entry in: ${configPath}`);
} else if (yamlContent.includes('mcpServers:')) { } else if (yamlContent.includes('mcpServers:')) {
@@ -147,12 +147,9 @@ function readOpenClawConfig(): Record<string, any> {
try { try {
return JSON.parse(readFileSync(configFilePath, 'utf-8')); return JSON.parse(readFileSync(configFilePath, 'utf-8'));
} catch (error) { } catch (error) {
if (error instanceof Error) { const normalizedError = error instanceof Error ? error : new Error(String(error));
logger.error('WORKER', 'Failed to parse openclaw.json, using empty config', { path: configFilePath }, error); logger.error('WORKER', 'Failed to parse openclaw.json', { path: configFilePath }, normalizedError);
} else { throw normalizedError;
logger.error('WORKER', 'Failed to parse openclaw.json, using empty config', { path: configFilePath }, new Error(String(error)));
}
return {};
} }
} }
@@ -255,9 +252,6 @@ export function installOpenClawPlugin(): number {
const extensionDirectory = getOpenClawClaudeMemExtensionDirectory(); const extensionDirectory = getOpenClawClaudeMemExtensionDirectory();
const destinationDistDirectory = path.join(extensionDirectory, 'dist'); const destinationDistDirectory = path.join(extensionDirectory, 'dist');
// Create the extension directory structure
mkdirSync(destinationDistDirectory, { recursive: true });
// Locate optional assets before entering the try block // Locate optional assets before entering the try block
const manifestPath = findPluginManifestPath(); const manifestPath = findPluginManifestPath();
const skillsDirectory = findPluginSkillsDirectory(); const skillsDirectory = findPluginSkillsDirectory();
@@ -271,6 +265,8 @@ export function installOpenClawPlugin(): number {
}; };
try { try {
// Create the extension directory structure inside try to catch EACCES/ENOSPC
mkdirSync(destinationDistDirectory, { recursive: true });
copyPluginFilesAndRegister(preBuiltDistDirectory, destinationDistDirectory, extensionDirectory, manifestPath, skillsDirectory, extensionPackageJson); copyPluginFilesAndRegister(preBuiltDistDirectory, destinationDistDirectory, extensionDirectory, manifestPath, skillsDirectory, extensionPackageJson);
return 0; return 0;
} catch (error) { } catch (error) {
@@ -441,11 +441,13 @@ export function uninstallWindsurfHooks(): number {
} }
function removeClaudeMemHookEntries(): void { function removeClaudeMemHookEntries(): void {
const config: WindsurfHooksJson = JSON.parse(readFileSync(WINDSURF_HOOKS_JSON_PATH, 'utf-8')); const parsed = JSON.parse(readFileSync(WINDSURF_HOOKS_JSON_PATH, 'utf-8')) as Partial<WindsurfHooksJson>;
const config: WindsurfHooksJson = { hooks: parsed.hooks ?? {} };
for (const eventName of WINDSURF_HOOK_EVENTS) { for (const eventName of WINDSURF_HOOK_EVENTS) {
if (config.hooks[eventName]) { const eventHooks = config.hooks[eventName] ?? [];
config.hooks[eventName] = config.hooks[eventName].filter( if (eventHooks.length > 0) {
config.hooks[eventName] = eventHooks.filter(
(hook) => !hook.command.includes('worker-service') || !hook.command.includes('windsurf'), (hook) => !hook.command.includes('worker-service') || !hook.command.includes('windsurf'),
); );
if (config.hooks[eventName].length === 0) { if (config.hooks[eventName].length === 0) {
@@ -487,21 +489,18 @@ export function checkWindsurfHooksStatus(): number {
console.log(`User-level: Installed`); console.log(`User-level: Installed`);
console.log(` Config: ${WINDSURF_HOOKS_JSON_PATH}`); console.log(` Config: ${WINDSURF_HOOKS_JSON_PATH}`);
let parsedConfig: WindsurfHooksJson | null = null; let parsedConfig: Partial<WindsurfHooksJson> | null = null;
try { try {
parsedConfig = JSON.parse(readFileSync(WINDSURF_HOOKS_JSON_PATH, 'utf-8')); parsedConfig = JSON.parse(readFileSync(WINDSURF_HOOKS_JSON_PATH, 'utf-8'));
} catch (error) { } catch (error) {
if (error instanceof Error) { const normalizedError = error instanceof Error ? error : new Error(String(error));
logger.error('WORKER', 'Unable to parse hooks.json', { path: WINDSURF_HOOKS_JSON_PATH }, error); logger.error('WORKER', 'Unable to parse hooks.json', { path: WINDSURF_HOOKS_JSON_PATH }, normalizedError);
} else {
logger.error('WORKER', 'Unable to parse hooks.json', { path: WINDSURF_HOOKS_JSON_PATH }, new Error(String(error)));
}
console.log(` Mode: Unable to parse hooks.json`); console.log(` Mode: Unable to parse hooks.json`);
} }
if (parsedConfig) { if (parsedConfig) {
const registeredEvents = WINDSURF_HOOK_EVENTS.filter( const registeredEvents = WINDSURF_HOOK_EVENTS.filter(
(event) => parsedConfig!.hooks[event]?.some( (event) => (parsedConfig?.hooks?.[event] ?? []).some(
(hook) => hook.command.includes('worker-service') && hook.command.includes('windsurf') (hook) => hook.command.includes('worker-service') && hook.command.includes('windsurf')
) )
); );
+4 -2
View File
@@ -112,7 +112,8 @@ export function getTimelineAroundObservation(
startEpoch = beforeRecords.length > 0 ? beforeRecords[beforeRecords.length - 1].created_at_epoch : anchorEpoch; startEpoch = beforeRecords.length > 0 ? beforeRecords[beforeRecords.length - 1].created_at_epoch : anchorEpoch;
endEpoch = afterRecords.length > 0 ? afterRecords[afterRecords.length - 1].created_at_epoch : anchorEpoch; endEpoch = afterRecords.length > 0 ? afterRecords[afterRecords.length - 1].created_at_epoch : anchorEpoch;
} catch (err) { } catch (err) {
logger.error('DB', 'Error getting boundary observations', undefined, { error: err instanceof Error ? err : new Error(String(err)), project }); const normalizedError = err instanceof Error ? err : new Error(String(err));
logger.error('DB', 'Error getting boundary observations', { project }, normalizedError);
return { observations: [], sessions: [], prompts: [] }; return { observations: [], sessions: [], prompts: [] };
} }
} else { } else {
@@ -144,7 +145,8 @@ export function getTimelineAroundObservation(
startEpoch = beforeRecords.length > 0 ? beforeRecords[beforeRecords.length - 1].created_at_epoch : anchorEpoch; startEpoch = beforeRecords.length > 0 ? beforeRecords[beforeRecords.length - 1].created_at_epoch : anchorEpoch;
endEpoch = afterRecords.length > 0 ? afterRecords[afterRecords.length - 1].created_at_epoch : anchorEpoch; endEpoch = afterRecords.length > 0 ? afterRecords[afterRecords.length - 1].created_at_epoch : anchorEpoch;
} catch (err) { } catch (err) {
logger.error('DB', 'Error getting boundary timestamps', undefined, { error: err instanceof Error ? err : new Error(String(err)), project }); const normalizedError = err instanceof Error ? err : new Error(String(err));
logger.error('DB', 'Error getting boundary timestamps', { project }, normalizedError);
return { observations: [], sessions: [], prompts: [] }; return { observations: [], sessions: [], prompts: [] };
} }
} }
+9 -3
View File
@@ -809,7 +809,7 @@ export class ChromaSync {
// chroma_query_documents returns nested arrays (one per query text) // chroma_query_documents returns nested arrays (one per query text)
// We always pass a single query text, so we access [0] // We always pass a single query text, so we access [0]
const ids: number[] = []; const ids: number[] = [];
const seen = new Set<number>(); const seen = new Set<string>();
const docIds = results?.ids?.[0] || []; const docIds = results?.ids?.[0] || [];
const rawMetadatas = results?.metadatas?.[0] || []; const rawMetadatas = results?.metadatas?.[0] || [];
const rawDistances = results?.distances?.[0] || []; const rawDistances = results?.distances?.[0] || [];
@@ -828,16 +828,22 @@ export class ChromaSync {
const promptMatch = docId.match(/prompt_(\d+)/); const promptMatch = docId.match(/prompt_(\d+)/);
let sqliteId: number | null = null; let sqliteId: number | null = null;
let entityType: string | null = null;
if (obsMatch) { if (obsMatch) {
sqliteId = parseInt(obsMatch[1], 10); sqliteId = parseInt(obsMatch[1], 10);
entityType = 'observation';
} else if (summaryMatch) { } else if (summaryMatch) {
sqliteId = parseInt(summaryMatch[1], 10); sqliteId = parseInt(summaryMatch[1], 10);
entityType = 'session_summary';
} else if (promptMatch) { } else if (promptMatch) {
sqliteId = parseInt(promptMatch[1], 10); sqliteId = parseInt(promptMatch[1], 10);
entityType = 'user_prompt';
} }
if (sqliteId !== null && !seen.has(sqliteId)) { if (sqliteId !== null && entityType) {
seen.add(sqliteId); const dedupeKey = `${entityType}:${sqliteId}`;
if (seen.has(dedupeKey)) continue;
seen.add(dedupeKey);
ids.push(sqliteId); ids.push(sqliteId);
metadatas.push(rawMetadatas[i] ?? null); metadatas.push(rawMetadatas[i] ?? null);
distances.push(rawDistances[i] ?? 0); distances.push(rawDistances[i] ?? 0);
+8 -1
View File
@@ -131,6 +131,8 @@ export function loadClaudeMemEnv(): ClaudeMemEnv {
* Save credentials to ~/.claude-mem/.env * Save credentials to ~/.claude-mem/.env
*/ */
export function saveClaudeMemEnv(env: ClaudeMemEnv): void { export function saveClaudeMemEnv(env: ClaudeMemEnv): void {
let existing: Record<string, string> = {};
try {
// Ensure directory exists with restricted permissions (owner only) // Ensure directory exists with restricted permissions (owner only)
if (!existsSync(DATA_DIR)) { if (!existsSync(DATA_DIR)) {
mkdirSync(DATA_DIR, { recursive: true, mode: 0o700 }); mkdirSync(DATA_DIR, { recursive: true, mode: 0o700 });
@@ -140,9 +142,14 @@ export function saveClaudeMemEnv(env: ClaudeMemEnv): void {
chmodSync(DATA_DIR, 0o700); chmodSync(DATA_DIR, 0o700);
// Load existing to preserve any extra keys // Load existing to preserve any extra keys
const existing = existsSync(ENV_FILE_PATH) existing = existsSync(ENV_FILE_PATH)
? parseEnvFile(readFileSync(ENV_FILE_PATH, 'utf-8')) ? parseEnvFile(readFileSync(ENV_FILE_PATH, 'utf-8'))
: {}; : {};
} catch (error) {
const normalizedError = error instanceof Error ? error : new Error(String(error));
logger.error('ENV', 'Failed to set up env directory or read existing env', {}, normalizedError);
throw normalizedError;
}
// Update with new values // Update with new values
const updated: Record<string, string> = { ...existing }; const updated: Record<string, string> = { ...existing };