Merge main into feat/chroma-http-server

Resolve conflicts between Chroma HTTP server PR and main branch changes
(folder CLAUDE.md, exclusion settings, Zscaler SSL, transport cleanup).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Alex Newman
2026-02-13 21:02:54 -05:00
257 changed files with 18546 additions and 5184 deletions
-2
View File
@@ -1,8 +1,6 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Dec 10, 2025
| ID | Time | T | Title | Read |
-7
View File
@@ -1,7 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
*No recent activity*
</claude-mem-context>
+3 -2
View File
@@ -5,10 +5,10 @@
*/
import path from 'path';
import { homedir } from 'os';
import { existsSync, readFileSync } from 'fs';
import { SessionStore } from '../sqlite/SessionStore.js';
import { logger } from '../../utils/logger.js';
import { CLAUDE_CONFIG_DIR } from '../../shared/paths.js';
import type {
ContextConfig,
Observation,
@@ -203,7 +203,8 @@ export function getPriorSessionMessages(
const priorSessionId = priorSessionObs.memory_session_id;
const dashedCwd = cwdToDashed(cwd);
const transcriptPath = path.join(homedir(), '.claude', 'projects', dashedCwd, `${priorSessionId}.jsonl`);
// Use CLAUDE_CONFIG_DIR to support custom Claude config directories
const transcriptPath = path.join(CLAUDE_CONFIG_DIR, 'projects', dashedCwd, `${priorSessionId}.jsonl`);
return extractPriorMessages(transcriptPath);
}
-65
View File
@@ -1,65 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Jan 3, 2026
**MarkdownFormatter.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36562 | 9:49 PM | 🟣 | Phase 4 Context Generation Tests Completed | ~524 |
| #36561 | " | 🟣 | Phase 4 Context Generation Test Suite Completion | ~606 |
| #36557 | 9:47 PM | 🟣 | MarkdownFormatter Test Suite Created | ~520 |
| #36553 | 9:43 PM | 🔵 | MarkdownFormatter Rendering Functions | ~445 |
| #36552 | " | 🔵 | Context Generation API Documentation for Phase 4 | ~496 |
| #36292 | 8:04 PM | 🔄 | Phase 4 Module Inventory: 12 Files Created in Context Architecture | ~571 |
**ColorFormatter.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36390 | 8:50 PM | 🔄 | Comprehensive Monolith Refactor with Modular Architecture | ~724 |
### Jan 4, 2026
**ColorFormatter.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36949 | 2:45 AM | 🟣 | Added Timestamp to Empty State Context Header | ~268 |
| #36947 | 2:44 AM | 🔵 | ColorFormatter Header Rendering Location Found | ~235 |
| #36946 | " | 🟣 | Context Header Timestamp Display | ~322 |
| #36944 | " | 🔵 | ColorFormatter Architecture - Terminal Context Display | ~374 |
**MarkdownFormatter.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36948 | 2:44 AM | 🔴 | Add Timestamp to Empty State Context Header | ~270 |
| #36945 | " | 🟣 | Context Header Now Displays Current Date and Time | ~303 |
| #36943 | 2:43 AM | 🔵 | MarkdownFormatter Structure for Context Injection | ~346 |
| #36942 | " | 🔵 | Recent Context Feature Architecture | ~300 |
### Jan 5, 2026
**ColorFormatter.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #38048 | 9:45 PM | 🔴 | PR #558 - Comprehensive Bug Fix and Test Quality Improvement | ~585 |
| #37582 | 4:53 PM | 🔴 | Updated ColorFormatter Second mem-search Reference - Phase 2 Complete | ~398 |
| #37581 | " | 🔴 | Updated ColorFormatter First mem-search Reference | ~362 |
| #37577 | 4:52 PM | 🔵 | ColorFormatter Contains Outdated mem-search References | ~395 |
| #37530 | 4:43 PM | 🔵 | Issue #544 Confirmed in ColorFormatter Second Location | ~344 |
**MarkdownFormatter.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #37617 | 5:32 PM | ⚖️ | PR #558 Review Requirements Categorized by Priority | ~637 |
| #37613 | 5:31 PM | 🔵 | PR #558 Review Feedback Analysis | ~544 |
| #37586 | 4:54 PM | 🔴 | Phase 2 Committed - mem-search Hint Messages Fixed | ~375 |
| #37583 | 4:53 PM | 🔴 | Phase 2 Complete - All mem-search References Updated | ~394 |
| #37580 | " | 🔴 | Updated MarkdownFormatter Second mem-search Reference | ~360 |
| #37579 | " | 🔴 | Updated MarkdownFormatter First mem-search Reference | ~350 |
| #37576 | 4:52 PM | 🔵 | MarkdownFormatter Contains Outdated mem-search References | ~372 |
| #37555 | 4:49 PM | 🔵 | Issue #544 Message Locations and Fix Pattern Documented | ~463 |
| #37545 | 4:47 PM | ✅ | Issue #544 Analysis Report Created for mem-search Skill Messaging Problem | ~480 |
| #37529 | 4:42 PM | 🔵 | Issue #544 Misleading mem-search Skill Reference Located | ~368 |
</claude-mem-context>
-26
View File
@@ -1,26 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Jan 3, 2026
**FooterRenderer.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36390 | 8:50 PM | 🔄 | Comprehensive Monolith Refactor with Modular Architecture | ~724 |
| #36283 | 8:02 PM | 🔄 | Phase 4: FooterRenderer Extracted with Conditional Display Logic | ~464 |
**TimelineRenderer.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36292 | 8:04 PM | 🔄 | Phase 4 Module Inventory: 12 Files Created in Context Architecture | ~571 |
| #36281 | 8:01 PM | 🔄 | Phase 4: TimelineRenderer Extracted with Dual Format Support | ~531 |
### Jan 5, 2026
**FooterRenderer.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #37545 | 4:47 PM | ✅ | Issue #544 Analysis Report Created for mem-search Skill Messaging Problem | ~480 |
</claude-mem-context>
-37
View File
@@ -1,37 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Jan 4, 2026
**FolderDiscovery.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #37021 | 4:59 PM | ✅ | Deleted Redundant Folder Index Service Directory | ~299 |
| #37011 | 4:50 PM | 🔵 | FolderDiscovery extracts folders from observations and applies depth, exclusion, and activity filters | ~433 |
**ClaudeMdGenerator.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #37012 | 4:51 PM | 🔵 | ClaudeMdGenerator writes tag-wrapped timeline markdown while preserving manual content | ~446 |
| #36981 | 4:25 PM | 🔵 | ClaudeMdGenerator creates and updates CLAUDE.md files with timeline content | ~336 |
**types.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #37010 | 4:50 PM | 🔵 | Type definitions specify folder-index configuration schema and timeline data structures | ~349 |
**FolderTimelineCompiler.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #37009 | 4:50 PM | 🔵 | FolderTimelineCompiler queries database and groups activity chronologically by date | ~419 |
| #37002 | 4:45 PM | 🔴 | Fixed session file deduplication and summary selection in FolderTimelineCompiler | ~306 |
| #37001 | " | 🔴 | Fixed FolderTimelineCompiler to generate concise summaries and deduplicate files | ~284 |
**FolderIndexOrchestrator.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #37008 | 4:50 PM | 🔵 | FolderIndexOrchestrator implements event-driven regeneration triggered by observation saves | ~418 |
| #36983 | 4:26 PM | 🔵 | FolderIndexOrchestrator coordinates automatic CLAUDE.md regeneration after observation saves | ~367 |
</claude-mem-context>
-2
View File
@@ -1,8 +1,6 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Jan 4, 2026
| ID | Time | T | Title | Read |
+9 -6
View File
@@ -10,9 +10,9 @@
*/
import path from 'path';
import { homedir } from 'os';
import { readFileSync } from 'fs';
import { logger } from '../../utils/logger.js';
import { MARKETPLACE_ROOT } from '../../shared/paths.js';
/**
* Check if a port is in use by querying the health endpoint
@@ -29,17 +29,21 @@ export async function isPortInUse(port: number): Promise<boolean> {
}
/**
* Wait for the worker to become fully ready (passes readiness check)
* Wait for the worker HTTP server to become responsive (liveness check)
* Uses /api/health instead of /api/readiness because:
* - /api/health returns 200 as soon as HTTP server is listening
* - /api/readiness waits for full initialization (MCP connection can take 5+ minutes)
* See: https://github.com/thedotmack/claude-mem/issues/811
* @param port Worker port to check
* @param timeoutMs Maximum time to wait in milliseconds
* @returns true if worker became ready, false if timeout
* @returns true if worker became responsive, false if timeout
*/
export async function waitForHealth(port: number, timeoutMs: number = 30000): Promise<boolean> {
const start = Date.now();
while (Date.now() - start < timeoutMs) {
try {
// Note: Removed AbortSignal.timeout to avoid Windows Bun cleanup issue (libuv assertion)
const response = await fetch(`http://127.0.0.1:${port}/api/readiness`);
const response = await fetch(`http://127.0.0.1:${port}/api/health`);
if (response.ok) return true;
} catch (error) {
// [ANTI-PATTERN IGNORED]: Retry loop - expected failures during startup, will retry
@@ -96,8 +100,7 @@ export async function httpShutdown(port: number): Promise<boolean> {
* This is the "expected" version that should be running
*/
export function getInstalledPluginVersion(): string {
const marketplaceRoot = path.join(homedir(), '.claude', 'plugins', 'marketplaces', 'thedotmack');
const packageJsonPath = path.join(marketplaceRoot, 'package.json');
const packageJsonPath = path.join(MARKETPLACE_ROOT, 'package.json');
const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf-8'));
return packageJson.version;
}
+197 -49
View File
@@ -22,6 +22,17 @@ const execAsync = promisify(exec);
const DATA_DIR = path.join(homedir(), '.claude-mem');
const PID_FILE = path.join(DATA_DIR, 'worker.pid');
// Orphaned process cleanup patterns and thresholds
// These are claude-mem processes that can accumulate if not properly terminated
const ORPHAN_PROCESS_PATTERNS = [
'mcp-server.cjs', // Main MCP server process
'worker-service.cjs', // Background worker daemon
'chroma-mcp' // ChromaDB MCP subprocess
];
// Only kill processes older than this to avoid killing the current session
const ORPHAN_MAX_AGE_MINUTES = 30;
export interface PidInfo {
pid: number;
port: number;
@@ -66,7 +77,11 @@ export function removePidFile(): void {
}
/**
* Get platform-adjusted timeout (Windows socket cleanup is slower)
* Get platform-adjusted timeout for worker-side socket operations (2.0x on Windows).
*
* Note: Two platform multiplier functions exist intentionally:
* - getTimeout() in hook-constants.ts uses 1.5x for hook-side operations (fast path)
* - getPlatformTimeout() here uses 2.0x for worker-side socket operations (slower path)
*/
export function getPlatformTimeout(baseMs: number): number {
const WINDOWS_MULTIPLIER = 2.0;
@@ -90,7 +105,7 @@ export async function getChildProcesses(parentPid: number): Promise<number[]> {
try {
// PowerShell Get-Process instead of WMIC (deprecated in Windows 11)
const cmd = `powershell -NoProfile -NonInteractive -Command "Get-Process | Where-Object { \\$_.ParentProcessId -eq ${parentPid} } | Select-Object -ExpandProperty Id"`;
const cmd = `powershell -NoProfile -NonInteractive -Command "Get-Process | Where-Object { $_.ParentProcessId -eq ${parentPid} } | Select-Object -ExpandProperty Id"`;
const { stdout } = await execAsync(cmd, { timeout: HOOK_TIMEOUTS.POWERSHELL_COMMAND });
// PowerShell outputs just numbers (one per line), simpler than WMIC's "ProcessId=1234" format
return stdout
@@ -162,55 +177,119 @@ export async function waitForProcessesExit(pids: number[], timeoutMs: number): P
}
/**
* Clean up orphaned chroma-mcp processes from previous worker sessions
* Prevents process accumulation and memory leaks
* Parse process elapsed time from ps etime format: [[DD-]HH:]MM:SS
* Returns age in minutes, or -1 if parsing fails
*/
/**
 * Parse process elapsed time from ps etime format: [[DD-]HH:]MM:SS
 * Returns age in minutes, or -1 if parsing fails
 *
 * Seconds are intentionally discarded — callers only need minute
 * granularity for orphan-age comparisons.
 */
export function parseElapsedTime(etime: string): number {
  if (!etime || etime.trim() === '') return -1;
  const value = etime.trim();

  // DD-HH:MM:SS format
  const withDays = /^(\d+)-(\d+):(\d+):(\d+)$/.exec(value);
  if (withDays !== null) {
    const days = parseInt(withDays[1], 10);
    const hours = parseInt(withDays[2], 10);
    const minutes = parseInt(withDays[3], 10);
    return days * 24 * 60 + hours * 60 + minutes;
  }

  // HH:MM:SS format
  const withHours = /^(\d+):(\d+):(\d+)$/.exec(value);
  if (withHours !== null) {
    return parseInt(withHours[1], 10) * 60 + parseInt(withHours[2], 10);
  }

  // MM:SS format
  const minutesOnly = /^(\d+):(\d+)$/.exec(value);
  if (minutesOnly !== null) {
    return parseInt(minutesOnly[1], 10);
  }

  // Unrecognized layout — signal failure to the caller
  return -1;
}
/**
* Clean up orphaned claude-mem processes from previous worker sessions
*
* Targets mcp-server.cjs, worker-service.cjs, and chroma-mcp processes
* that survived a previous daemon crash. Only kills processes older than
* ORPHAN_MAX_AGE_MINUTES to avoid killing the current session.
*
* The periodic ProcessRegistry reaper handles in-session orphans;
* this function handles cross-session orphans at startup.
*/
export async function cleanupOrphanedProcesses(): Promise<void> {
const isWindows = process.platform === 'win32';
const pids: number[] = [];
const currentPid = process.pid;
const pidsToKill: number[] = [];
try {
if (isWindows) {
// Windows: Use PowerShell Get-CimInstance instead of WMIC (deprecated in Windows 11)
const cmd = `powershell -NoProfile -NonInteractive -Command "Get-CimInstance Win32_Process | Where-Object { \\$_.Name -like '*python*' -and \\$_.CommandLine -like '*chroma-mcp*' } | Select-Object -ExpandProperty ProcessId"`;
// Windows: Use PowerShell Get-CimInstance with JSON output for age filtering
const patternConditions = ORPHAN_PROCESS_PATTERNS
.map(p => `$_.CommandLine -like '*${p}*'`)
.join(' -or ');
const cmd = `powershell -NoProfile -NonInteractive -Command "Get-CimInstance Win32_Process | Where-Object { (${patternConditions}) -and $_.ProcessId -ne ${currentPid} } | Select-Object ProcessId, CreationDate | ConvertTo-Json"`;
const { stdout } = await execAsync(cmd, { timeout: HOOK_TIMEOUTS.POWERSHELL_COMMAND });
if (!stdout.trim()) {
logger.debug('SYSTEM', 'No orphaned chroma-mcp processes found (Windows)');
if (!stdout.trim() || stdout.trim() === 'null') {
logger.debug('SYSTEM', 'No orphaned claude-mem processes found (Windows)');
return;
}
// PowerShell outputs just numbers (one per line), simpler than WMIC's "ProcessId=1234" format
const lines = stdout
.split('\n')
.map(line => line.trim())
.filter(line => line.length > 0 && /^\d+$/.test(line));
const processes = JSON.parse(stdout);
const processList = Array.isArray(processes) ? processes : [processes];
const now = Date.now();
for (const line of lines) {
const pid = parseInt(line, 10);
// SECURITY: Validate PID is positive integer before adding to list
if (!isNaN(pid) && Number.isInteger(pid) && pid > 0) {
pids.push(pid);
for (const proc of processList) {
const pid = proc.ProcessId;
// SECURITY: Validate PID is positive integer and not current process
if (!Number.isInteger(pid) || pid <= 0 || pid === currentPid) continue;
// Parse Windows WMI date format: /Date(1234567890123)/
const creationMatch = proc.CreationDate?.match(/\/Date\((\d+)\)\//);
if (creationMatch) {
const creationTime = parseInt(creationMatch[1], 10);
const ageMinutes = (now - creationTime) / (1000 * 60);
if (ageMinutes >= ORPHAN_MAX_AGE_MINUTES) {
pidsToKill.push(pid);
logger.debug('SYSTEM', 'Found orphaned process', { pid, ageMinutes: Math.round(ageMinutes) });
}
}
}
} else {
// Unix: Use ps aux | grep
const { stdout } = await execAsync('ps aux | grep "chroma-mcp" | grep -v grep || true');
// Unix: Use ps with elapsed time for age-based filtering
const patternRegex = ORPHAN_PROCESS_PATTERNS.join('|');
const { stdout } = await execAsync(
`ps -eo pid,etime,command | grep -E "${patternRegex}" | grep -v grep || true`
);
if (!stdout.trim()) {
logger.debug('SYSTEM', 'No orphaned chroma-mcp processes found (Unix)');
logger.debug('SYSTEM', 'No orphaned claude-mem processes found (Unix)');
return;
}
const lines = stdout.trim().split('\n');
for (const line of lines) {
const parts = line.trim().split(/\s+/);
if (parts.length > 1) {
const pid = parseInt(parts[1], 10);
// SECURITY: Validate PID is positive integer before adding to list
if (!isNaN(pid) && Number.isInteger(pid) && pid > 0) {
pids.push(pid);
}
// Parse: " 1234 01:23:45 /path/to/process"
const match = line.trim().match(/^(\d+)\s+(\S+)\s+(.*)$/);
if (!match) continue;
const pid = parseInt(match[1], 10);
const etime = match[2];
// SECURITY: Validate PID is positive integer and not current process
if (!Number.isInteger(pid) || pid <= 0 || pid === currentPid) continue;
const ageMinutes = parseElapsedTime(etime);
if (ageMinutes >= ORPHAN_MAX_AGE_MINUTES) {
pidsToKill.push(pid);
logger.debug('SYSTEM', 'Found orphaned process', { pid, ageMinutes, command: match[3].substring(0, 80) });
}
}
}
@@ -220,19 +299,20 @@ export async function cleanupOrphanedProcesses(): Promise<void> {
return;
}
if (pids.length === 0) {
if (pidsToKill.length === 0) {
return;
}
logger.info('SYSTEM', 'Cleaning up orphaned chroma-mcp processes', {
logger.info('SYSTEM', 'Cleaning up orphaned claude-mem processes', {
platform: isWindows ? 'Windows' : 'Unix',
count: pids.length,
pids
count: pidsToKill.length,
pids: pidsToKill,
maxAgeMinutes: ORPHAN_MAX_AGE_MINUTES
});
// Kill all found processes
if (isWindows) {
for (const pid of pids) {
for (const pid of pidsToKill) {
// SECURITY: Double-check PID validation before using in taskkill command
if (!Number.isInteger(pid) || pid <= 0) {
logger.warn('SYSTEM', 'Skipping invalid PID', { pid });
@@ -246,7 +326,7 @@ export async function cleanupOrphanedProcesses(): Promise<void> {
}
}
} else {
for (const pid of pids) {
for (const pid of pidsToKill) {
try {
process.kill(pid, 'SIGKILL');
} catch (error) {
@@ -256,16 +336,16 @@ export async function cleanupOrphanedProcesses(): Promise<void> {
}
}
logger.info('SYSTEM', 'Orphaned processes cleaned up', { count: pids.length });
logger.info('SYSTEM', 'Orphaned processes cleaned up', { count: pidsToKill.length });
}
/**
* Spawn a detached daemon process
* Returns the child PID or undefined if spawn failed
*
* On Windows, uses WMIC to spawn a truly independent process that
* survives parent exit without console popups. WMIC creates processes
* that are not associated with the parent's console.
* On Windows, uses PowerShell Start-Process with -WindowStyle Hidden to spawn
* a truly independent process without console popups. Unlike WMIC, PowerShell
* inherits environment variables from the parent process.
*
* On Unix, uses standard detached spawn.
*
@@ -285,28 +365,46 @@ export function spawnDaemon(
};
if (isWindows) {
// Use WMIC to spawn a process that's independent of the parent console
// This avoids the console popup that occurs with detached: true
// Paths must be individually quoted for WMIC when they contain spaces
// Use PowerShell Start-Process to spawn a hidden, independent process
// Unlike WMIC, PowerShell inherits environment variables from parent
// -WindowStyle Hidden prevents console popup
const execPath = process.execPath;
const script = scriptPath;
// WMIC command format: wmic process call create "\"path1\" \"path2\" args"
const command = `wmic process call create "\\"${execPath}\\" \\"${script}\\" --daemon"`;
const psCommand = `Start-Process -FilePath '${execPath}' -ArgumentList '${script}','--daemon' -WindowStyle Hidden`;
try {
execSync(command, {
execSync(`powershell -NoProfile -Command "${psCommand}"`, {
stdio: 'ignore',
windowsHide: true
windowsHide: true,
env
});
// WMIC returns immediately, we can't get the spawned PID easily
// Worker will write its own PID file after listen()
return 0;
} catch {
return undefined;
}
}
// Unix: standard detached spawn
// Unix: Use setsid to create a new session, fully detaching from the
// controlling terminal. This prevents SIGHUP from reaching the daemon
// even if the in-process SIGHUP handler somehow fails (belt-and-suspenders).
// Fall back to standard detached spawn if setsid is not available.
const setsidPath = '/usr/bin/setsid';
if (existsSync(setsidPath)) {
const child = spawn(setsidPath, [process.execPath, scriptPath, '--daemon'], {
detached: true,
stdio: 'ignore',
env
});
if (child.pid === undefined) {
return undefined;
}
child.unref();
return child.pid;
}
// Fallback: standard detached spawn (macOS, systems without setsid)
const child = spawn(process.execPath, [scriptPath, '--daemon'], {
detached: true,
stdio: 'ignore',
@@ -322,6 +420,56 @@ export function spawnDaemon(
return child.pid;
}
/**
 * Check if a process with the given PID is alive.
 *
 * Uses the process.kill(pid, 0) idiom: signal 0 doesn't send a signal,
 * it just checks if the process exists and is reachable.
 *
 * EPERM is treated as "alive" because it means the process exists but
 * belongs to a different user/session (common in multi-user setups).
 * PID 0 (Windows WMIC sentinel for unknown PID) is treated as alive.
 */
export function isProcessAlive(pid: number): boolean {
  // PID 0 is the Windows WMIC sentinel value — process was spawned but PID unknown
  if (pid === 0) return true;

  // Reject fractional, negative, NaN, and non-numeric PIDs outright
  if (!Number.isInteger(pid) || pid < 0) return false;

  try {
    // Signal 0 = existence probe only; throws if the PID is unknown
    process.kill(pid, 0);
    return true;
  } catch (error: unknown) {
    // EPERM = process exists but different user/session — treat as alive.
    // Anything else (notably ESRCH = no such process) means it's dead.
    return (error as NodeJS.ErrnoException).code === 'EPERM';
  }
}
/**
 * Read the PID file and remove it if the recorded process is dead (stale).
 *
 * This is a cheap operation: one filesystem read + one signal-0 check.
 * Called at the top of ensureWorkerStarted() to clean up after WSL2
 * hibernate, OOM kills, or other ungraceful worker deaths.
 */
export function cleanStalePidFile(): void {
  const info = readPidFile();
  // Nothing recorded, or the recorded worker is still running — leave it alone
  if (!info) return;
  if (isProcessAlive(info.pid)) return;

  logger.info('SYSTEM', 'Removing stale PID file (worker process is dead)', {
    pid: info.pid,
    port: info.port,
    startedAt: info.startedAt
  });
  removePidFile();
}
/**
* Create signal handler factory for graceful shutdown
* Returns a handler function that can be passed to process.on('SIGTERM') etc.
-7
View File
@@ -1,7 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
*No recent activity*
</claude-mem-context>
@@ -16,6 +16,7 @@ import { exec } from 'child_process';
import { promisify } from 'util';
import { logger } from '../../utils/logger.js';
import { getWorkerPort } from '../../shared/worker-utils.js';
import { DATA_DIR, MARKETPLACE_ROOT, CLAUDE_CONFIG_DIR } from '../../shared/paths.js';
import {
readCursorRegistry as readCursorRegistryFromFile,
writeCursorRegistry as writeCursorRegistryToFile,
@@ -27,7 +28,6 @@ import type { CursorInstallTarget, CursorHooksJson, CursorMcpConfig, Platform }
const execAsync = promisify(exec);
// Standard paths
const DATA_DIR = path.join(homedir(), '.claude-mem');
const CURSOR_REGISTRY_FILE = path.join(DATA_DIR, 'cursor-projects.json');
// ============================================================================
@@ -128,12 +128,12 @@ export async function updateCursorContextForProject(projectName: string, port: n
/**
* Find cursor-hooks directory
* Searches in order: marketplace install, source repo
* Checks for both bash (common.sh) and PowerShell (common.ps1) scripts
* Checks for hooks.json (unified CLI mode) or legacy shell scripts
*/
export function findCursorHooksDir(): string | null {
const possiblePaths = [
// Marketplace install location
path.join(homedir(), '.claude', 'plugins', 'marketplaces', 'thedotmack', 'cursor-hooks'),
path.join(MARKETPLACE_ROOT, 'cursor-hooks'),
// Development/source location (relative to built worker-service.cjs in plugin/scripts/)
path.join(path.dirname(__filename), '..', '..', 'cursor-hooks'),
// Alternative dev location
@@ -141,8 +141,10 @@ export function findCursorHooksDir(): string | null {
];
for (const p of possiblePaths) {
// Check for either bash or PowerShell common script
if (existsSync(path.join(p, 'common.sh')) || existsSync(path.join(p, 'common.ps1'))) {
// Check for hooks.json (unified CLI mode) or legacy shell scripts
if (existsSync(path.join(p, 'hooks.json')) ||
existsSync(path.join(p, 'common.sh')) ||
existsSync(path.join(p, 'common.ps1'))) {
return p;
}
}
@@ -156,7 +158,7 @@ export function findCursorHooksDir(): string | null {
export function findMcpServerPath(): string | null {
const possiblePaths = [
// Marketplace install location
path.join(homedir(), '.claude', 'plugins', 'marketplaces', 'thedotmack', 'plugin', 'scripts', 'mcp-server.cjs'),
path.join(MARKETPLACE_ROOT, 'plugin', 'scripts', 'mcp-server.cjs'),
// Development/source location (relative to built worker-service.cjs in plugin/scripts/)
path.join(path.dirname(__filename), 'mcp-server.cjs'),
// Alternative dev location
@@ -178,7 +180,7 @@ export function findMcpServerPath(): string | null {
export function findWorkerServicePath(): string | null {
const possiblePaths = [
// Marketplace install location
path.join(homedir(), '.claude', 'plugins', 'marketplaces', 'thedotmack', 'plugin', 'scripts', 'worker-service.cjs'),
path.join(MARKETPLACE_ROOT, 'plugin', 'scripts', 'worker-service.cjs'),
// Development/source location (relative to built worker-service.cjs in plugin/scripts/)
path.join(path.dirname(__filename), 'worker-service.cjs'),
// Alternative dev location
@@ -193,6 +195,37 @@ export function findWorkerServicePath(): string | null {
return null;
}
/**
 * Find the Bun executable path
 * Required because worker-service.cjs uses bun:sqlite which is Bun-specific
 * Searches common installation locations across platforms
 *
 * @returns Absolute path to the first bun binary found, or the bare
 *          string 'bun' so that PATH resolution can be attempted later.
 */
export function findBunPath(): string {
  const home = homedir();

  // Ordered by likelihood: user install first, then system-wide locations
  const candidates: string[] = [
    path.join(home, '.bun', 'bin', 'bun'),
    '/usr/local/bin/bun',
    '/usr/bin/bun',
  ];

  if (process.platform === 'win32') {
    candidates.push(
      path.join(home, '.bun', 'bin', 'bun.exe'),
      path.join(process.env.LOCALAPPDATA || '', 'bun', 'bun.exe'),
    );
  }

  const found = candidates.find(candidate => candidate && existsSync(candidate));
  if (found) {
    return found;
  }

  // Fallback to 'bun' and hope it's in PATH
  // This allows the installation to proceed even if we can't find bun
  // The user will get a clear error when the hook runs if bun isn't available
  return 'bun';
}
/**
* Get the target directory for Cursor hooks based on install target
*/
@@ -312,15 +345,21 @@ export async function installCursorHooks(_sourceDir: string, target: CursorInsta
// Generate hooks.json with unified CLI commands
const hooksJsonPath = path.join(targetDir, 'hooks.json');
// Find bun executable - required because worker-service.cjs uses bun:sqlite
const bunPath = findBunPath();
const escapedBunPath = bunPath.replace(/\\/g, '\\\\');
// Use the absolute path to worker-service.cjs
// Escape backslashes for JSON on Windows
const escapedWorkerPath = workerServicePath.replace(/\\/g, '\\\\');
// Helper to create hook command using unified CLI
// Helper to create hook command using unified CLI with bun runtime
const makeHookCommand = (command: string) => {
return `node "${escapedWorkerPath}" hook cursor ${command}`;
return `"${escapedBunPath}" "${escapedWorkerPath}" hook cursor ${command}`;
};
console.log(` Using Bun runtime: ${bunPath}`);
const hooksJson: CursorHooksJson = {
version: 1,
hooks: {
@@ -356,7 +395,7 @@ export async function installCursorHooks(_sourceDir: string, target: CursorInsta
Installation complete!
Hooks installed to: ${targetDir}/hooks.json
Using unified CLI: node worker-service.cjs hook cursor <command>
Using unified CLI: bun worker-service.cjs hook cursor <command>
Next steps:
1. Start claude-mem worker: claude-mem start
@@ -532,7 +571,7 @@ export function checkCursorHooksStatus(): number {
const firstCommand = hooksContent?.hooks?.beforeSubmitPrompt?.[0]?.command || '';
if (firstCommand.includes('worker-service.cjs') && firstCommand.includes('hook cursor')) {
console.log(` Mode: Unified CLI (node worker-service.cjs)`);
console.log(` Mode: Unified CLI (bun worker-service.cjs)`);
} else {
// Detect legacy shell scripts
const bashScripts = ['session-init.sh', 'context-inject.sh', 'save-observation.sh'];
@@ -596,8 +635,8 @@ export async function detectClaudeCode(): Promise<boolean> {
logger.debug('SYSTEM', 'Claude CLI not in PATH', {}, error as Error);
}
// Check for Claude Code plugin directory
const pluginDir = path.join(homedir(), '.claude', 'plugins');
// Check for Claude Code plugin directory (respects CLAUDE_CONFIG_DIR)
const pluginDir = path.join(CLAUDE_CONFIG_DIR, 'plugins');
if (existsSync(pluginDir)) {
return true;
}
-97
View File
@@ -1,97 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Dec 17, 2025
**ProcessManager.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #28932 | 7:30 PM | 🔵 | ProcessManager Architecture and Platform-Specific Process Spawning | ~523 |
| #28929 | " | 🔵 | ProcessManager Usage Across Codebase | ~319 |
| #28747 | 6:25 PM | 🔵 | Branch Diff Analysis - 26 Files Modified | ~374 |
| #28730 | 6:21 PM | 🔵 | Worker Wrapper Solves Windows Zombie Port Problem | ~416 |
| #28729 | " | 🔵 | Windows Worker Wrapper Architecture | ~222 |
| #28721 | 6:18 PM | 🔵 | Final Solution - Worker Wrapper Architecture Successfully Deployed | ~474 |
| #28719 | " | 🔵 | Initial Windows Worker Problem Analysis - Three Interconnected Issues | ~564 |
| #28714 | 6:15 PM | 🔴 | Windows Zombie Port Problem Resolved with Wrapper Process Architecture | ~421 |
| #28711 | 6:13 PM | 🔵 | Overview of Changes Between main and HEAD Branch | ~347 |
| #28660 | 5:31 PM | 🔵 | Branch Modifies 26 Files with Net Addition of 346 Lines | ~445 |
| #28644 | 5:24 PM | ✅ | Modified 27 files with 693 additions and 239 deletions for Windows support | ~447 |
### Dec 18, 2025
**ProcessManager.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #29622 | 5:41 PM | 🔵 | Validation Patterns Across HTTP Routes and Core Services | ~488 |
### Dec 20, 2025
**ProcessManager.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #31066 | 7:53 PM | 🔵 | Comprehensive KISS Principle Audit of Hooks and Worker Services | ~788 |
| #31020 | 7:28 PM | 🔄 | Inlined single-use timeout constants in ProcessManager | ~390 |
| #31013 | 7:27 PM | 🔵 | Comment Analysis Identified Stale FTS5 References and Documentation Gaps | ~681 |
| #31012 | 7:25 PM | 🔴 | Silent Failure Review Identified Regression in getWorkerPort() Error Handling | ~659 |
| #31010 | " | ⚖️ | PR #400 Approved After Comprehensive Code Review | ~594 |
| #31000 | 7:22 PM | 🔄 | ProcessManager timeout constants inlined to literal values | ~356 |
| #30993 | 7:20 PM | 🔴 | ProcessManager getPidInfo() enhanced with error logging | ~290 |
| #30990 | 7:19 PM | 🔵 | PR 400 achieves net deletion of 395 lines across 31 files | ~338 |
| #30988 | " | 🔵 | PR 400 modifies 31 files across hooks, services, and utilities | ~316 |
| #30986 | " | 🔵 | PR #400 File Scope: 31 Files Across Hooks, Services, and Utilities | ~526 |
| #30953 | 7:02 PM | 🔄 | Removed Single-Use Timeout Constants in ProcessManager | ~306 |
| #30949 | " | 🔴 | Fixed undefined constant in ProcessManager waitForExit | ~245 |
| #30948 | 7:01 PM | 🔵 | Windows Process Shutdown Strategy in ProcessManager | ~302 |
| #30907 | 6:46 PM | 🔴 | ProcessManager PID File Corruption Now Logs Warnings | ~326 |
| #30905 | 6:45 PM | 🔴 | ProcessManager getPidInfo Error Visibility | ~333 |
| #30902 | " | 🔴 | Added logging to PID file error handling in ProcessManager | ~260 |
| #30901 | 6:44 PM | 🔵 | Windows Graceful Shutdown via HTTP and Wrapper IPC | ~269 |
| #30900 | " | 🔵 | Platform-Specific Worker Script Selection | ~262 |
| #30899 | " | 🔵 | getPidInfo Usage Pattern in ProcessManager | ~206 |
| #30898 | " | 🔵 | ProcessManager PID File Management Implementation | ~249 |
| #30774 | 5:58 PM | 🔵 | ProcessManager Handles Cross-Platform Worker Lifecycle with Windows Workarounds | ~559 |
| #32307 | 5:56 PM | 🔵 | Worker Service Code Audit: 14 Issues Found Across Validation, Data Structures, and Complexity | ~793 |
| #30673 | 5:08 PM | 🔴 | Windows Worker Stop/Restart Reliability Improvements | ~376 |
| #30663 | 5:07 PM | 🔵 | Cross-Platform Support Across 12 Files | ~307 |
### Dec 24, 2025
**ProcessManager.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #32071 | 3:24 PM | ⚖️ | Worker Startup Architecture Redesigned | ~380 |
| #32070 | " | 🔵 | ProcessManager Worker Spawning Architecture | ~428 |
| #32059 | 3:17 PM | ⚖️ | Worker Startup Refactored with File-Based Locking for Concurrent Hooks | ~552 |
### Dec 26, 2025
**ProcessManager.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #32855 | 7:04 PM | 🔄 | Consolidated worker process management into single service | ~322 |
| #32837 | 6:25 PM | 🔵 | Deleted ProcessManager.ts contained comprehensive PID file infrastructure | ~430 |
| #32814 | 6:05 PM | ✅ | Increased All Timeout Limits to Maximum Values for Slow Systems | ~385 |
### Dec 28, 2025
**ProcessManager.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #33370 | 3:47 PM | 🔵 | ToxMox Wrapper Architecture Deleted December 26, Six Days After Implementation | ~506 |
| #33369 | 3:46 PM | 🔵 | ToxMox December 17 Commit Introduced Wrapper Architecture to Fix Windows Zombie Port Bug | ~632 |
| #33368 | 3:45 PM | 🔵 | ToxMox December 20 Commit Improved Windows Worker Restart Reliability and Logging | ~487 |
| #33294 | 3:08 PM | ✅ | ToxMox Contributions Documented in Comprehensive Markdown Report | ~603 |
| #33284 | 3:07 PM | 🔄 | Consolidated Worker Lifecycle Management (-580 Lines) | ~327 |
| #33270 | 2:59 PM | ⚖️ | Self-Spawn Pattern Chosen for Worker Lifecycle | ~418 |
### Jan 6, 2026
**ProcessManager.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #38108 | 12:15 AM | 🔵 | Complete Windows Zombie Port Bug Technical Deep Dive | ~935 |
| #38105 | 12:14 AM | 🔵 | Windows Console Popup Flash Issue Documented and Fixed | ~455 |
</claude-mem-context>
-7
View File
@@ -1,7 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
*No recent activity*
</claude-mem-context>
+58 -7
View File
@@ -3,6 +3,15 @@ import { PendingMessageStore, PersistentPendingMessage } from '../sqlite/Pending
import type { PendingMessageWithId } from '../worker-types.js';
import { logger } from '../../utils/logger.js';
const IDLE_TIMEOUT_MS = 3 * 60 * 1000; // 3 minutes
export interface CreateIteratorOptions {
sessionDbId: number;
signal: AbortSignal;
/** Called when idle timeout occurs - should trigger abort to kill subprocess */
onIdleTimeout?: () => void;
}
export class SessionQueueProcessor {
constructor(
private store: PendingMessageStore,
@@ -14,8 +23,15 @@ export class SessionQueueProcessor {
* Uses atomic claim-and-delete to prevent duplicates.
* The queue is a pure buffer: claim it, delete it, process in memory.
* Waits for 'message' event when queue is empty.
*
* CRITICAL: Calls onIdleTimeout callback after 3 minutes of inactivity.
* The callback should trigger abortController.abort() to kill the SDK subprocess.
* Just returning from the iterator is NOT enough - the subprocess stays alive!
*/
async *createIterator(sessionDbId: number, signal: AbortSignal): AsyncIterableIterator<PendingMessageWithId> {
async *createIterator(options: CreateIteratorOptions): AsyncIterableIterator<PendingMessageWithId> {
const { sessionDbId, signal, onIdleTimeout } = options;
let lastActivityTime = Date.now();
while (!signal.aborted) {
try {
// Atomically claim AND DELETE next message from DB
@@ -23,11 +39,29 @@ export class SessionQueueProcessor {
const persistentMessage = this.store.claimAndDelete(sessionDbId);
if (persistentMessage) {
// Reset activity time when we successfully yield a message
lastActivityTime = Date.now();
// Yield the message for processing (it's already deleted from queue)
yield this.toPendingMessageWithId(persistentMessage);
} else {
// Queue empty - wait for wake-up event
await this.waitForMessage(signal);
// Queue empty - wait for wake-up event or timeout
const receivedMessage = await this.waitForMessage(signal, IDLE_TIMEOUT_MS);
if (!receivedMessage && !signal.aborted) {
// Timeout occurred - check if we've been idle too long
const idleDuration = Date.now() - lastActivityTime;
if (idleDuration >= IDLE_TIMEOUT_MS) {
logger.info('SESSION', 'Idle timeout reached, triggering abort to kill subprocess', {
sessionDbId,
idleDurationMs: idleDuration,
thresholdMs: IDLE_TIMEOUT_MS
});
onIdleTimeout?.();
return;
}
// Reset timer on spurious wakeup - queue is empty but duration check failed
lastActivityTime = Date.now();
}
}
} catch (error) {
if (signal.aborted) return;
@@ -47,25 +81,42 @@ export class SessionQueueProcessor {
};
}
private waitForMessage(signal: AbortSignal): Promise<void> {
return new Promise<void>((resolve) => {
/**
* Wait for a message event or timeout.
* @param signal - AbortSignal to cancel waiting
* @param timeoutMs - Maximum time to wait before returning
* @returns true if a message was received, false if timeout occurred
*/
private waitForMessage(signal: AbortSignal, timeoutMs: number = IDLE_TIMEOUT_MS): Promise<boolean> {
return new Promise<boolean>((resolve) => {
let timeoutId: ReturnType<typeof setTimeout> | undefined;
const onMessage = () => {
cleanup();
resolve();
resolve(true); // Message received
};
const onAbort = () => {
cleanup();
resolve(); // Resolve to let the loop check signal.aborted and exit
resolve(false); // Aborted, let loop check signal.aborted
};
const onTimeout = () => {
cleanup();
resolve(false); // Timeout occurred
};
const cleanup = () => {
if (timeoutId !== undefined) {
clearTimeout(timeoutId);
}
this.events.off('message', onMessage);
signal.removeEventListener('abort', onAbort);
};
this.events.once('message', onMessage);
signal.addEventListener('abort', onAbort, { once: true });
timeoutId = setTimeout(onTimeout, timeoutMs);
});
}
}
-7
View File
@@ -1,7 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
*No recent activity*
</claude-mem-context>
+21 -4
View File
@@ -30,6 +30,19 @@ export interface RouteHandler {
setupRoutes(app: Application): void;
}
/**
* AI provider status for health endpoint
*/
export interface AiStatus {
provider: string;
authMethod: string;
lastInteraction: {
timestamp: number;
success: boolean;
error?: string;
} | null;
}
/**
* Options for initializing the server
*/
@@ -42,6 +55,10 @@ export interface ServerOptions {
onShutdown: () => Promise<void>;
/** Restart function for admin endpoints */
onRestart: () => Promise<void>;
/** Filesystem path to the worker entry point */
workerPath: string;
/** Callback to get current AI provider status */
getAiStatus: () => AiStatus;
}
/**
@@ -140,20 +157,20 @@ export class Server {
* Setup core system routes (health, readiness, version, admin)
*/
private setupCoreRoutes(): void {
// Test build ID for debugging which build is running
const TEST_BUILD_ID = 'TEST-008-wrapper-ipc';
// Health check endpoint - always responds, even during initialization
this.app.get('/api/health', (_req: Request, res: Response) => {
res.status(200).json({
status: 'ok',
build: TEST_BUILD_ID,
version: BUILT_IN_VERSION,
workerPath: this.options.workerPath,
uptime: Date.now() - this.startTime,
managed: process.env.CLAUDE_MEM_MANAGED === 'true',
hasIpc: typeof process.send === 'function',
platform: process.platform,
pid: process.pid,
initialized: this.options.getInitializationComplete(),
mcpReady: this.options.getMcpReady(),
ai: this.options.getAiStatus(),
});
});
+88 -2
View File
@@ -1,7 +1,93 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Dec 8, 2025
*No recent activity*
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #22310 | 9:46 PM | 🟣 | Complete Hook Lifecycle Documentation Generated | ~603 |
| #22305 | 9:45 PM | 🔵 | Session Summary Storage and Status Lifecycle | ~472 |
| #22304 | " | 🔵 | Session Creation Idempotency and Observation Storage | ~481 |
| #22303 | " | 🔵 | SessionStore CRUD Operations for Hook Integration | ~392 |
| #22300 | 9:44 PM | 🔵 | SessionStore Database Management and Schema Migrations | ~455 |
| #22299 | " | 🔵 | Database Schema and Entity Types | ~460 |
| #21976 | 5:24 PM | 🟣 | storeObservation Saves tool_use_id to Database | ~298 |
### Dec 10, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #23808 | 10:42 PM | 🔵 | migrations.ts Already Migrated to bun:sqlite | ~312 |
| #23807 | " | 🔵 | SessionSearch.ts Already Migrated to bun:sqlite | ~321 |
| #23805 | " | 🔵 | Database.ts Already Migrated to bun:sqlite | ~290 |
| #23784 | 9:59 PM | ✅ | SessionStore.ts db.pragma() Converted to db.query().all() Pattern | ~198 |
| #23783 | 9:58 PM | ✅ | SessionStore.ts Migration004 Multi-Statement db.exec() Converted to db.run() | ~220 |
| #23782 | " | ✅ | SessionStore.ts initializeSchema() db.exec() Converted to db.run() | ~197 |
| #23781 | " | ✅ | SessionStore.ts Constructor PRAGMA Calls Converted to db.run() | ~215 |
| #23780 | " | ✅ | SessionStore.ts Type Annotation Updated | ~183 |
| #23779 | " | ✅ | SessionStore.ts Import Updated to bun:sqlite | ~237 |
| #23778 | 9:57 PM | ✅ | Database.ts Import Updated to bun:sqlite | ~177 |
| #23777 | " | 🔵 | SessionStore.ts Current Implementation - better-sqlite3 Import and API Usage | ~415 |
| #23776 | " | 🔵 | migrations.ts Current Implementation - better-sqlite3 Import | ~285 |
| #23775 | " | 🔵 | Database.ts Current Implementation - better-sqlite3 Import | ~286 |
| #23774 | " | 🔵 | SessionSearch.ts Current Implementation - better-sqlite3 Import | ~309 |
| #23671 | 8:36 PM | 🔵 | getUserPromptsByIds Method Implementation with Filtering and Ordering | ~326 |
| #23670 | " | 🔵 | getUserPromptsByIds Method Location in SessionStore | ~145 |
| #23635 | 8:10 PM | 🔴 | Fixed SessionStore.ts Concepts Filter SQL Parameter Bug | ~297 |
| #23634 | " | 🔵 | SessionStore.ts Concepts Filter Bug Confirmed at Line 849 | ~356 |
| #23522 | 5:27 PM | 🔵 | Complete TypeScript Type Definitions for Database Entities | ~433 |
| #23521 | " | 🔵 | Database Schema Structure with 7 Migration Versions | ~461 |
### Dec 18, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #29868 | 8:19 PM | 🔵 | SessionStore Architecture Review for Mode Metadata Addition | ~350 |
| #29243 | 12:13 AM | 🔵 | Observations Table Schema Migration: Text Field Made Nullable | ~496 |
| #29241 | 12:12 AM | 🔵 | Migration001: Core Schema for Sessions, Memories, Overviews, Diagnostics, Transcripts | ~555 |
| #29238 | 12:11 AM | 🔵 | Observation Type Schema Evolution: Five to Six Types | ~331 |
| #29237 | " | 🔵 | SQLite SessionStore with Schema Migrations and WAL Mode | ~520 |
### Dec 21, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #31622 | 8:26 PM | 🔄 | Completed SessionStore logging standardization | ~270 |
| #31621 | " | 🔄 | Standardized error logging for boundary timestamps query | ~253 |
| #31620 | " | 🔄 | Standardized error logging in getTimelineAroundObservation | ~252 |
| #31619 | " | 🔄 | Replaced console.log with logger.debug in SessionStore | ~263 |
### Dec 27, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #33213 | 9:04 PM | 🔵 | SessionStore Implements KISS Session ID Threading via INSERT OR IGNORE Pattern | ~673 |
### Dec 28, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #33548 | 10:59 PM | ✅ | Reverted memory_session_id NULL Initialization to contentSessionId Placeholder | ~421 |
| #33546 | 10:57 PM | 🔴 | Fixed createSDKSession to Initialize memory_session_id as NULL | ~406 |
| #33545 | " | 🔵 | createSDKSession Sets memory_session_id Equal to content_session_id Initially | ~378 |
| #33544 | " | 🔵 | SessionStore Migration 17 Already Renamed Session ID Columns | ~451 |
### Jan 2, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36028 | 9:20 PM | 🔄 | Try-Catch Block Removed from Database Migration | ~291 |
### Jan 3, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36653 | 11:03 PM | 🔵 | storeObservation Method Signature Shows Parameter Named memorySessionId | ~474 |
| #36652 | " | 🔵 | createSDKSession Implementation Confirms NULL Initialization With Security Rationale | ~488 |
| #36650 | 11:02 PM | 🔵 | Phase 1 Analysis Reveals Implementation-Test Mismatch on NULL vs Placeholder Initialization | ~687 |
| #36649 | " | 🔵 | SessionStore Implementation Reveals NULL-Based Memory Session ID Initialization Pattern | ~770 |
| #36175 | 6:52 PM | ✅ | MigrationRunner Re-exported from Migrations.ts | ~405 |
| #36172 | " | 🔵 | Migrations.ts Contains Legacy Migration System | ~650 |
| #36163 | 6:48 PM | 🔵 | SessionStore Method Inventory and Extraction Boundaries | ~692 |
| #36162 | 6:47 PM | 🔵 | SessionStore Architecture and Migration History | ~593 |
</claude-mem-context>
+62 -6
View File
@@ -77,12 +77,13 @@ export class PendingMessageStore {
}
/**
* Atomically claim and DELETE the next pending message.
* Finds oldest pending -> returns it -> deletes from queue.
* The queue is a pure buffer: claim it, delete it, process in memory.
* Atomically claim the next pending message by marking it as 'processing'.
* CRITICAL FIX: Does NOT delete - message stays in DB until confirmProcessed() is called.
* This prevents message loss if the generator crashes mid-processing.
* Uses a transaction to prevent race conditions.
*/
claimAndDelete(sessionDbId: number): PersistentPendingMessage | null {
const now = Date.now();
const claimTx = this.db.transaction((sessionId: number) => {
const peekStmt = this.db.prepare(`
SELECT * FROM pending_messages
@@ -93,9 +94,14 @@ export class PendingMessageStore {
const msg = peekStmt.get(sessionId) as PersistentPendingMessage | null;
if (msg) {
// Delete immediately - no "processing" state needed
const deleteStmt = this.db.prepare('DELETE FROM pending_messages WHERE id = ?');
deleteStmt.run(msg.id);
// CRITICAL FIX: Mark as 'processing' instead of deleting
// Message will be deleted by confirmProcessed() after successful store
const updateStmt = this.db.prepare(`
UPDATE pending_messages
SET status = 'processing', started_processing_at_epoch = ?
WHERE id = ?
`);
updateStmt.run(now, msg.id);
// Log claim with minimal info (avoid logging full payload)
logger.info('QUEUE', `CLAIMED | sessionDbId=${sessionId} | messageId=${msg.id} | type=${msg.message_type}`, {
@@ -108,6 +114,39 @@ export class PendingMessageStore {
return claimTx(sessionDbId) as PersistentPendingMessage | null;
}
/**
* Confirm a message was successfully processed - DELETE it from the queue.
* CRITICAL: Only call this AFTER the observation/summary has been stored to DB.
* This prevents message loss on generator crash.
*/
confirmProcessed(messageId: number): void {
const stmt = this.db.prepare('DELETE FROM pending_messages WHERE id = ?');
const result = stmt.run(messageId);
if (result.changes > 0) {
logger.debug('QUEUE', `CONFIRMED | messageId=${messageId} | deleted from queue`);
}
}
/**
* Reset stale 'processing' messages back to 'pending' for retry.
* Called on worker startup and periodically to recover from crashes.
* @param thresholdMs Messages processing longer than this are considered stale (default: 5 minutes)
* @returns Number of messages reset
*/
resetStaleProcessingMessages(thresholdMs: number = 5 * 60 * 1000): number {
const cutoff = Date.now() - thresholdMs;
const stmt = this.db.prepare(`
UPDATE pending_messages
SET status = 'pending', started_processing_at_epoch = NULL
WHERE status = 'processing' AND started_processing_at_epoch < ?
`);
const result = stmt.run(cutoff);
if (result.changes > 0) {
logger.info('QUEUE', `RESET_STALE | count=${result.changes} | thresholdMs=${thresholdMs}`);
}
return result.changes;
}
/**
* Get all pending messages for session (ordered by creation time)
*/
@@ -204,6 +243,23 @@ export class PendingMessageStore {
return result.changes;
}
/**
* Mark all pending and processing messages for a session as failed (abandoned).
* Used when SDK session is terminated and no fallback agent is available:
* prevents the session from appearing in getSessionsWithPendingMessages forever.
* @returns Number of messages marked failed
*/
markAllSessionMessagesAbandoned(sessionDbId: number): number {
const now = Date.now();
const stmt = this.db.prepare(`
UPDATE pending_messages
SET status = 'failed', failed_at_epoch = ?
WHERE session_db_id = ? AND status IN ('pending', 'processing')
`);
const result = stmt.run(now, sessionDbId);
return result.changes;
}
/**
* Abort a specific message (delete from queue)
*/
+268 -19
View File
@@ -47,6 +47,7 @@ export class SessionStore {
this.renameSessionIdColumns();
this.repairSessionIdColumnRename();
this.addFailedAtEpochColumn();
this.addOnUpdateCascadeToForeignKeys();
}
/**
@@ -98,10 +99,10 @@ export class SessionStore {
memory_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT NOT NULL,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery')),
type TEXT NOT NULL,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(memory_session_id) REFERENCES sdk_sessions(memory_session_id) ON DELETE CASCADE
FOREIGN KEY(memory_session_id) REFERENCES sdk_sessions(memory_session_id) ON DELETE CASCADE ON UPDATE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_observations_sdk_session ON observations(memory_session_id);
@@ -123,7 +124,7 @@ export class SessionStore {
notes TEXT,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(memory_session_id) REFERENCES sdk_sessions(memory_session_id) ON DELETE CASCADE
FOREIGN KEY(memory_session_id) REFERENCES sdk_sessions(memory_session_id) ON DELETE CASCADE ON UPDATE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_session_summaries_sdk_session ON session_summaries(memory_session_id);
@@ -341,7 +342,7 @@ export class SessionStore {
memory_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery', 'change')),
type TEXT NOT NULL,
title TEXT,
subtitle TEXT,
facts TEXT,
@@ -645,11 +646,191 @@ export class SessionStore {
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(20, new Date().toISOString());
}
/**
* Add ON UPDATE CASCADE to FK constraints on observations and session_summaries (migration 21)
*
* Both tables have FK(memory_session_id) -> sdk_sessions(memory_session_id) with ON DELETE CASCADE
* but missing ON UPDATE CASCADE. This causes FK constraint violations when code updates
* sdk_sessions.memory_session_id while child rows still reference the old value.
*
* SQLite doesn't support ALTER TABLE for FK changes, so we recreate both tables.
*/
private addOnUpdateCascadeToForeignKeys(): void {
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(21) as SchemaVersion | undefined;
if (applied) return;
logger.debug('DB', 'Adding ON UPDATE CASCADE to FK constraints on observations and session_summaries');
// PRAGMA foreign_keys must be set outside a transaction
this.db.run('PRAGMA foreign_keys = OFF');
this.db.run('BEGIN TRANSACTION');
try {
// ==========================================
// 1. Recreate observations table
// ==========================================
// Drop FTS triggers first (they reference the observations table)
this.db.run('DROP TRIGGER IF EXISTS observations_ai');
this.db.run('DROP TRIGGER IF EXISTS observations_ad');
this.db.run('DROP TRIGGER IF EXISTS observations_au');
this.db.run(`
CREATE TABLE observations_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
memory_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT,
type TEXT NOT NULL,
title TEXT,
subtitle TEXT,
facts TEXT,
narrative TEXT,
concepts TEXT,
files_read TEXT,
files_modified TEXT,
prompt_number INTEGER,
discovery_tokens INTEGER DEFAULT 0,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(memory_session_id) REFERENCES sdk_sessions(memory_session_id) ON DELETE CASCADE ON UPDATE CASCADE
)
`);
this.db.run(`
INSERT INTO observations_new
SELECT id, memory_session_id, project, text, type, title, subtitle, facts,
narrative, concepts, files_read, files_modified, prompt_number,
discovery_tokens, created_at, created_at_epoch
FROM observations
`);
this.db.run('DROP TABLE observations');
this.db.run('ALTER TABLE observations_new RENAME TO observations');
// Recreate indexes
this.db.run(`
CREATE INDEX idx_observations_sdk_session ON observations(memory_session_id);
CREATE INDEX idx_observations_project ON observations(project);
CREATE INDEX idx_observations_type ON observations(type);
CREATE INDEX idx_observations_created ON observations(created_at_epoch DESC);
`);
// Recreate FTS triggers only if observations_fts exists
// (SessionSearch.ensureFTSTables creates it on first use with IF NOT EXISTS)
const hasFTS = (this.db.prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='observations_fts'").all() as { name: string }[]).length > 0;
if (hasFTS) {
this.db.run(`
CREATE TRIGGER IF NOT EXISTS observations_ai AFTER INSERT ON observations BEGIN
INSERT INTO observations_fts(rowid, title, subtitle, narrative, text, facts, concepts)
VALUES (new.id, new.title, new.subtitle, new.narrative, new.text, new.facts, new.concepts);
END;
CREATE TRIGGER IF NOT EXISTS observations_ad AFTER DELETE ON observations BEGIN
INSERT INTO observations_fts(observations_fts, rowid, title, subtitle, narrative, text, facts, concepts)
VALUES('delete', old.id, old.title, old.subtitle, old.narrative, old.text, old.facts, old.concepts);
END;
CREATE TRIGGER IF NOT EXISTS observations_au AFTER UPDATE ON observations BEGIN
INSERT INTO observations_fts(observations_fts, rowid, title, subtitle, narrative, text, facts, concepts)
VALUES('delete', old.id, old.title, old.subtitle, old.narrative, old.text, old.facts, old.concepts);
INSERT INTO observations_fts(rowid, title, subtitle, narrative, text, facts, concepts)
VALUES (new.id, new.title, new.subtitle, new.narrative, new.text, new.facts, new.concepts);
END;
`);
}
// ==========================================
// 2. Recreate session_summaries table
// ==========================================
this.db.run(`
CREATE TABLE session_summaries_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
memory_session_id TEXT NOT NULL,
project TEXT NOT NULL,
request TEXT,
investigated TEXT,
learned TEXT,
completed TEXT,
next_steps TEXT,
files_read TEXT,
files_edited TEXT,
notes TEXT,
prompt_number INTEGER,
discovery_tokens INTEGER DEFAULT 0,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(memory_session_id) REFERENCES sdk_sessions(memory_session_id) ON DELETE CASCADE ON UPDATE CASCADE
)
`);
this.db.run(`
INSERT INTO session_summaries_new
SELECT id, memory_session_id, project, request, investigated, learned,
completed, next_steps, files_read, files_edited, notes,
prompt_number, discovery_tokens, created_at, created_at_epoch
FROM session_summaries
`);
// Drop session_summaries FTS triggers before dropping the table
this.db.run('DROP TRIGGER IF EXISTS session_summaries_ai');
this.db.run('DROP TRIGGER IF EXISTS session_summaries_ad');
this.db.run('DROP TRIGGER IF EXISTS session_summaries_au');
this.db.run('DROP TABLE session_summaries');
this.db.run('ALTER TABLE session_summaries_new RENAME TO session_summaries');
// Recreate indexes
this.db.run(`
CREATE INDEX idx_session_summaries_sdk_session ON session_summaries(memory_session_id);
CREATE INDEX idx_session_summaries_project ON session_summaries(project);
CREATE INDEX idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`);
// Recreate session_summaries FTS triggers if FTS table exists
const hasSummariesFTS = (this.db.prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='session_summaries_fts'").all() as { name: string }[]).length > 0;
if (hasSummariesFTS) {
this.db.run(`
CREATE TRIGGER IF NOT EXISTS session_summaries_ai AFTER INSERT ON session_summaries BEGIN
INSERT INTO session_summaries_fts(rowid, request, investigated, learned, completed, next_steps, notes)
VALUES (new.id, new.request, new.investigated, new.learned, new.completed, new.next_steps, new.notes);
END;
CREATE TRIGGER IF NOT EXISTS session_summaries_ad AFTER DELETE ON session_summaries BEGIN
INSERT INTO session_summaries_fts(session_summaries_fts, rowid, request, investigated, learned, completed, next_steps, notes)
VALUES('delete', old.id, old.request, old.investigated, old.learned, old.completed, old.next_steps, old.notes);
END;
CREATE TRIGGER IF NOT EXISTS session_summaries_au AFTER UPDATE ON session_summaries BEGIN
INSERT INTO session_summaries_fts(session_summaries_fts, rowid, request, investigated, learned, completed, next_steps, notes)
VALUES('delete', old.id, old.request, old.investigated, old.learned, old.completed, old.next_steps, old.notes);
INSERT INTO session_summaries_fts(rowid, request, investigated, learned, completed, next_steps, notes)
VALUES (new.id, new.request, new.investigated, new.learned, new.completed, new.next_steps, new.notes);
END;
`);
}
// Record migration
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(21, new Date().toISOString());
this.db.run('COMMIT');
this.db.run('PRAGMA foreign_keys = ON');
logger.debug('DB', 'Successfully added ON UPDATE CASCADE to FK constraints');
} catch (error) {
this.db.run('ROLLBACK');
this.db.run('PRAGMA foreign_keys = ON');
throw error;
}
}
/**
* Update the memory session ID for a session
* Called by SDKAgent when it captures the session ID from the first SDK message
* Also used to RESET to null on stale resume failures (worker-service.ts)
*/
updateMemorySessionId(sessionDbId: number, memorySessionId: string): void {
updateMemorySessionId(sessionDbId: number, memorySessionId: string | null): void {
this.db.prepare(`
UPDATE sdk_sessions
SET memory_session_id = ?
@@ -657,6 +838,37 @@ export class SessionStore {
`).run(memorySessionId, sessionDbId);
}
/**
* Ensures memory_session_id is registered in sdk_sessions before FK-constrained INSERT.
* This fixes Issue #846 where observations fail after worker restart because the
* SDK generates a new memory_session_id but it's not registered in the parent table
* before child records try to reference it.
*
* @param sessionDbId - The database ID of the session
* @param memorySessionId - The memory session ID to ensure is registered
*/
ensureMemorySessionIdRegistered(sessionDbId: number, memorySessionId: string): void {
const session = this.db.prepare(`
SELECT id, memory_session_id FROM sdk_sessions WHERE id = ?
`).get(sessionDbId) as { id: number; memory_session_id: string | null } | undefined;
if (!session) {
throw new Error(`Session ${sessionDbId} not found in sdk_sessions`);
}
if (session.memory_session_id !== memorySessionId) {
this.db.prepare(`
UPDATE sdk_sessions SET memory_session_id = ? WHERE id = ?
`).run(memorySessionId, sessionDbId);
logger.info('DB', 'Registered memory_session_id before storage (FK fix)', {
sessionDbId,
oldId: session.memory_session_id,
newId: memorySessionId
});
}
}
/**
* Get recent session summaries for a project
*/
@@ -1151,31 +1363,40 @@ export class SessionStore {
* - Prompt #2+: session_id exists → INSERT ignored, fetch existing ID
* - Result: Same database ID returned for all prompts in conversation
*
* WHY THIS MATTERS:
* - NO "does session exist?" checks needed anywhere
* - NO risk of creating duplicate sessions
* - ALL hooks automatically connected via session_id
* - SAVE hook observations go to correct session (same session_id)
* - SDKAgent continuation prompt has correct context (same session_id)
*
* This is KISS in action: Trust the database UNIQUE constraint and
* INSERT OR IGNORE to handle both creation and lookup elegantly.
* Pure get-or-create: never modifies memory_session_id.
* Multi-terminal isolation is handled by ON UPDATE CASCADE at the schema level.
*/
createSDKSession(contentSessionId: string, project: string, userPrompt: string): number {
const now = new Date();
const nowEpoch = now.getTime();
// Pure INSERT OR IGNORE - no updates, no complexity
// Session reuse: Return existing session ID if already created for this contentSessionId.
const existing = this.db.prepare(`
SELECT id FROM sdk_sessions WHERE content_session_id = ?
`).get(contentSessionId) as { id: number } | undefined;
if (existing) {
// Backfill project if session was created by another hook with empty project
if (project) {
this.db.prepare(`
UPDATE sdk_sessions SET project = ?
WHERE content_session_id = ? AND (project IS NULL OR project = '')
`).run(project, contentSessionId);
}
return existing.id;
}
// New session - insert fresh row
// NOTE: memory_session_id starts as NULL. It is captured by SDKAgent from the first SDK
// response and stored via updateMemorySessionId(). CRITICAL: memory_session_id must NEVER
// equal contentSessionId - that would inject memory messages into the user's transcript!
// response and stored via ensureMemorySessionIdRegistered(). CRITICAL: memory_session_id
// must NEVER equal contentSessionId - that would inject memory messages into the user's transcript!
this.db.prepare(`
INSERT OR IGNORE INTO sdk_sessions
INSERT INTO sdk_sessions
(content_session_id, memory_session_id, project, user_prompt, started_at, started_at_epoch, status)
VALUES (?, NULL, ?, ?, ?, ?, 'active')
`).run(contentSessionId, project, userPrompt, now.toISOString(), nowEpoch);
// Return existing or new ID
// Return new ID
const row = this.db.prepare('SELECT id FROM sdk_sessions WHERE content_session_id = ?')
.get(contentSessionId) as { id: number };
return row.id;
@@ -1907,6 +2128,34 @@ export class SessionStore {
return stmt.get(id) || null;
}
/**
* Get or create a manual session for storing user-created observations
* Manual sessions use a predictable ID format: "manual-{project}"
*/
getOrCreateManualSession(project: string): string {
const memorySessionId = `manual-${project}`;
const contentSessionId = `manual-content-${project}`;
const existing = this.db.prepare(
'SELECT memory_session_id FROM sdk_sessions WHERE memory_session_id = ?'
).get(memorySessionId) as { memory_session_id: string } | undefined;
if (existing) {
return memorySessionId;
}
// Create new manual session
const now = new Date();
this.db.prepare(`
INSERT INTO sdk_sessions (memory_session_id, content_session_id, project, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, 'active')
`).run(memorySessionId, contentSessionId, project, now.toISOString(), now.getTime());
logger.info('SESSION', 'Created manual session', { memorySessionId, project });
return memorySessionId;
}
/**
* Close the database connection
*/
-14
View File
@@ -1,14 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Jan 3, 2026
**bulk.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36670 | 11:37 PM | ✅ | Resolved merge conflicts by accepting branch changes for 39 files | ~435 |
| #36469 | 9:04 PM | 🔵 | Bulk Import with Duplicate Detection | ~451 |
| #36390 | 8:50 PM | 🔄 | Comprehensive Monolith Refactor with Modular Architecture | ~724 |
</claude-mem-context>
+1 -1
View File
@@ -259,7 +259,7 @@ export const migration004: Migration = {
memory_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT NOT NULL,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery')),
type TEXT NOT NULL,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(memory_session_id) REFERENCES sdk_sessions(memory_session_id) ON DELETE CASCADE
-15
View File
@@ -1,15 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Jan 3, 2026
**runner.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36487 | 9:13 PM | 🔴 | Fixed Foreign Key Constraint Issues in Observations Test Suite | ~677 |
| #36390 | 8:50 PM | 🔄 | Comprehensive Monolith Refactor with Modular Architecture | ~724 |
| #36353 | 8:42 PM | 🔵 | Multiple observation table definitions found across codebase | ~280 |
| #36323 | 8:25 PM | 🔵 | Message Queue Architecture Scope Expanded | ~302 |
</claude-mem-context>
+2 -2
View File
@@ -82,7 +82,7 @@ export class MigrationRunner {
memory_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT NOT NULL,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery')),
type TEXT NOT NULL,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(memory_session_id) REFERENCES sdk_sessions(memory_session_id) ON DELETE CASCADE
@@ -325,7 +325,7 @@ export class MigrationRunner {
memory_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery', 'change')),
type TEXT NOT NULL,
title TEXT,
subtitle TEXT,
facts TEXT,
@@ -1,33 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Jan 3, 2026
**files.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36670 | 11:37 PM | ✅ | Resolved merge conflicts by accepting branch changes for 39 files | ~435 |
| #36453 | 9:02 PM | 🔵 | Session File Aggregation | ~384 |
| #36390 | 8:50 PM | 🔄 | Comprehensive Monolith Refactor with Modular Architecture | ~724 |
**store.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36483 | 9:11 PM | 🟣 | Observations Module Test Suite Implemented | ~716 |
| #36445 | 9:01 PM | 🔵 | Observation Storage with Timestamp Override | ~444 |
**types.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36470 | 9:06 PM | 🔵 | SQLite Module API Documentation Verified for Test Implementation | ~765 |
| #36447 | 9:02 PM | 🔵 | Observation Type Definitions | ~459 |
### Jan 4, 2026
**types.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36770 | 12:42 AM | 🔵 | Export Script Type Duplication Analysis Complete | ~555 |
</claude-mem-context>
-32
View File
@@ -1,32 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Jan 3, 2026
**get.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36670 | 11:37 PM | ✅ | Resolved merge conflicts by accepting branch changes for 39 files | ~435 |
| #36464 | 9:04 PM | 🔵 | User Prompt Retrieval Functions | ~471 |
| #36390 | 8:50 PM | 🔄 | Comprehensive Monolith Refactor with Modular Architecture | ~724 |
**store.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36485 | 9:12 PM | 🟣 | Prompts Module Test Suite Implemented | ~680 |
| #36466 | 9:04 PM | 🔵 | User Prompt Storage | ~363 |
**types.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36470 | 9:06 PM | 🔵 | SQLite Module API Documentation Verified for Test Implementation | ~765 |
### Jan 4, 2026
**types.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36770 | 12:42 AM | 🔵 | Export Script Type Duplication Analysis Complete | ~555 |
</claude-mem-context>
-7
View File
@@ -1,7 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
*No recent activity*
</claude-mem-context>
+25 -12
View File
@@ -14,12 +14,8 @@ import { logger } from '../../../utils/logger.js';
* - Prompt #2+: session_id exists -> INSERT ignored, fetch existing ID
* - Result: Same database ID returned for all prompts in conversation
*
* WHY THIS MATTERS:
* - NO "does session exist?" checks needed anywhere
* - NO risk of creating duplicate sessions
* - ALL hooks automatically connected via session_id
* - SAVE hook observations go to correct session (same session_id)
* - SDKAgent continuation prompt has correct context (same session_id)
* Pure get-or-create: never modifies memory_session_id.
* Multi-terminal isolation is handled by ON UPDATE CASCADE at the schema level.
*/
export function createSDKSession(
db: Database,
@@ -30,17 +26,33 @@ export function createSDKSession(
const now = new Date();
const nowEpoch = now.getTime();
// Pure INSERT OR IGNORE - no updates, no complexity
// Check for existing session
const existing = db.prepare(`
SELECT id FROM sdk_sessions WHERE content_session_id = ?
`).get(contentSessionId) as { id: number } | undefined;
if (existing) {
// Backfill project if session was created by another hook with empty project
if (project) {
db.prepare(`
UPDATE sdk_sessions SET project = ?
WHERE content_session_id = ? AND (project IS NULL OR project = '')
`).run(project, contentSessionId);
}
return existing.id;
}
// New session - insert fresh row
// NOTE: memory_session_id starts as NULL. It is captured by SDKAgent from the first SDK
// response and stored via updateMemorySessionId(). CRITICAL: memory_session_id must NEVER
// equal contentSessionId - that would inject memory messages into the user's transcript!
// response and stored via ensureMemorySessionIdRegistered(). CRITICAL: memory_session_id
// must NEVER equal contentSessionId - that would inject memory messages into the user's transcript!
db.prepare(`
INSERT OR IGNORE INTO sdk_sessions
INSERT INTO sdk_sessions
(content_session_id, memory_session_id, project, user_prompt, started_at, started_at_epoch, status)
VALUES (?, NULL, ?, ?, ?, ?, 'active')
`).run(contentSessionId, project, userPrompt, now.toISOString(), nowEpoch);
// Return existing or new ID
// Return new ID
const row = db.prepare('SELECT id FROM sdk_sessions WHERE content_session_id = ?')
.get(contentSessionId) as { id: number };
return row.id;
@@ -49,11 +61,12 @@ export function createSDKSession(
/**
* Update the memory session ID for a session
* Called by SDKAgent when it captures the session ID from the first SDK message
* Also used to RESET to null on stale resume failures (worker-service.ts)
*/
export function updateMemorySessionId(
db: Database,
sessionDbId: number,
memorySessionId: string
memorySessionId: string | null
): void {
db.prepare(`
UPDATE sdk_sessions
-32
View File
@@ -1,32 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Jan 3, 2026
**get.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36670 | 11:37 PM | ✅ | Resolved merge conflicts by accepting branch changes for 39 files | ~435 |
| #36390 | 8:50 PM | 🔄 | Comprehensive Monolith Refactor with Modular Architecture | ~724 |
**store.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36484 | 9:11 PM | 🟣 | Summaries Module Test Suite Implemented | ~708 |
| #36461 | 9:03 PM | 🔵 | Summary Storage with Timestamp Override | ~439 |
**types.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36470 | 9:06 PM | 🔵 | SQLite Module API Documentation Verified for Test Implementation | ~765 |
| #36457 | 9:03 PM | 🔵 | Summary Type Hierarchy | ~426 |
### Jan 4, 2026
**types.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36770 | 12:42 AM | 🔵 | Export Script Type Duplication Analysis Complete | ~555 |
</claude-mem-context>
-7
View File
@@ -1,7 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
*No recent activity*
</claude-mem-context>
-68
View File
@@ -1,68 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Nov 3, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #3465 | 6:26 PM | ⚖️ | PR preparation for hybrid search feature ready for submission | ~521 |
| #3460 | 6:18 PM | ✅ | Suppressed stderr output from Chroma MCP transport | ~231 |
| #3350 | 3:33 PM | ✅ | Document splitting strategy improves semantic search precision by vectorizing field-level content | ~701 |
| #3346 | " | 🟣 | ChromaSync service provides automatic real-time vector database synchronization | ~699 |
| #3345 | " | 🟣 | Completed ChromaDB hybrid search integration with semantic search across all content types | ~762 |
| #3323 | 3:01 PM | 🟣 | Integrated user prompt backfill into ChromaSync.backfill() | ~257 |
| #3322 | " | 🟣 | Implemented real-time user prompt sync to ChromaDB | ~275 |
| #3321 | " | ✅ | Added StoredUserPrompt interface to ChromaSync | ~179 |
### Nov 4, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #3645 | 3:03 PM | 🔵 | Observation Counter Removal Validated Safe for Chroma Integration | ~504 |
| #3643 | " | 🔵 | Chroma Document ID Structure and Granular Field Splitting | ~410 |
| #3642 | " | 🔵 | Observation Counter Independence from Chroma Import Process | ~440 |
### Nov 11, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #6992 | 6:28 PM | ⚖️ | Comprehensive Windows Issue Investigation and Fix Strategy | ~631 |
| #6986 | 6:26 PM | 🔵 | ChromaSync UVX Connection Configuration Analysis | ~333 |
| #6953 | 5:49 PM | 🔵 | ChromaSync Relies on uvx Python Package Runner Instead of npx | ~326 |
| #6952 | 5:48 PM | 🔵 | ChromaSync Uses uvx Command for MCP Server on All Platforms | ~368 |
### Dec 5, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #20401 | 7:18 PM | 🔵 | ChromaSync service synchronizes observations and summaries to vector database for semantic search | ~521 |
### Dec 13, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #25190 | 8:04 PM | 🔴 | Enhanced close() Method to Terminate Transport Subprocess | ~417 |
| #25189 | 8:03 PM | 🔄 | Store Transport Reference in ensureConnection Method | ~284 |
| #25188 | " | 🔄 | Added Transport Reference to ChromaSync Class | ~268 |
| #25187 | " | 🔵 | ChromaSync Has close() Method But May Not Be Called | ~277 |
| #25186 | " | 🔵 | ChromaSync Process Spawning via StdioClientTransport | ~355 |
| #25117 | 7:39 PM | 🟣 | Automatic Collection Migration for Embedding Function Changes | ~493 |
| #25116 | " | 🔄 | Collection Name Changed to Lazy Initialization | ~126 |
| #25115 | " | 🔵 | ChromaSync Service Current Implementation Analysis | ~454 |
| #25092 | 7:20 PM | 🟣 | ChromaSync Now Reads Embedding Function from Settings | ~394 |
| #25090 | 7:19 PM | 🔵 | Located Hardcoded Embedding Function in ChromaSync | ~345 |
### Dec 17, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #28547 | 4:49 PM | 🔴 | Fixed Windows subprocess zombie process issue in ChromaSync | ~368 |
| #28546 | " | ✅ | Added child_process import to ChromaSync | ~215 |
| #28545 | 4:48 PM | 🟣 | Subprocess PID Extraction for Windows Process Management | ~385 |
| #28544 | " | ✅ | Child Process PID Tracking Added to ChromaSync | ~239 |
| #28543 | " | 🔵 | ChromaSync Service Architecture | ~337 |
| #28542 | " | 🟣 | Windows Console Window Hiding for Chroma MCP Transport | ~308 |
| #28468 | 4:25 PM | 🔵 | ChromaSync Fail-Fast MCP Vector Database Integration | ~501 |
</claude-mem-context>
+65
View File
@@ -0,0 +1,65 @@
import { DEFAULT_CONFIG_PATH, DEFAULT_STATE_PATH, expandHomePath, loadTranscriptWatchConfig, writeSampleConfig } from './config.js';
import { TranscriptWatcher } from './watcher.js';
/**
 * Read the value that follows a named CLI flag (e.g. `--config <path>`).
 * Returns null when the flag is absent or has no trailing value.
 */
function getArgValue(args: string[], name: string): string | null {
  const flagPosition = args.indexOf(name);
  if (flagPosition < 0) {
    return null;
  }
  const value = args[flagPosition + 1];
  return value === undefined ? null : value;
}
/**
 * Load the transcript-watch config, auto-creating the sample config on first
 * run (when the file does not exist yet). Any error other than a missing
 * config file is rethrown unchanged.
 */
function loadConfigOrCreateSample(configPath: string) {
  try {
    return loadTranscriptWatchConfig(configPath);
  } catch (error) {
    // loadTranscriptWatchConfig signals a missing file via its message text;
    // only that case triggers sample creation.
    if (error instanceof Error && error.message.includes('not found')) {
      writeSampleConfig(configPath);
      console.log(`Created sample config: ${expandHomePath(configPath)}`);
      return loadTranscriptWatchConfig(configPath);
    }
    throw error;
  }
}

/**
 * Entry point for `claude-mem transcript <init|watch|validate>`.
 *
 * @param subcommand - 'init' (write sample config), 'watch' (tail transcripts
 *   until SIGINT/SIGTERM), or 'validate' (load config and report OK).
 * @param args - remaining CLI args; supports `--config <path>`.
 * @returns process exit code (1 for unknown subcommand). The 'watch' branch
 *   never resolves normally; it blocks until a signal calls process.exit().
 */
export async function runTranscriptCommand(subcommand: string | undefined, args: string[]): Promise<number> {
  const configPath = getArgValue(args, '--config') ?? DEFAULT_CONFIG_PATH;
  switch (subcommand) {
    case 'init': {
      writeSampleConfig(configPath);
      console.log(`Created sample config: ${expandHomePath(configPath)}`);
      return 0;
    }
    case 'watch': {
      const config = loadConfigOrCreateSample(configPath);
      const statePath = expandHomePath(config.stateFile ?? DEFAULT_STATE_PATH);
      const watcher = new TranscriptWatcher(config, statePath);
      await watcher.start();
      console.log('Transcript watcher running. Press Ctrl+C to stop.');
      const shutdown = () => {
        watcher.stop();
        process.exit(0);
      };
      process.on('SIGINT', shutdown);
      process.on('SIGTERM', shutdown);
      // Keep the process alive forever; shutdown happens via the signal
      // handlers above, so this promise intentionally never settles.
      return await new Promise(() => undefined);
    }
    case 'validate': {
      loadConfigOrCreateSample(configPath);
      console.log(`Config OK: ${expandHomePath(configPath)}`);
      return 0;
    }
    default:
      console.log('Usage: claude-mem transcript <init|watch|validate> [--config <path>]');
      return 1;
  }
}
+137
View File
@@ -0,0 +1,137 @@
import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';
import { homedir } from 'os';
import { join, dirname } from 'path';
import type { TranscriptSchema, TranscriptWatchConfig } from './types.js';
export const DEFAULT_CONFIG_PATH = join(homedir(), '.claude-mem', 'transcript-watch.json');
export const DEFAULT_STATE_PATH = join(homedir(), '.claude-mem', 'transcript-watch-state.json');
// Built-in sample schema for OpenAI Codex CLI transcripts. Each entry maps a
// JSONL event shape (selected by `match`) to a claude-mem pipeline action;
// field specs are JSON paths into the parsed JSONL line.
const CODEX_SAMPLE_SCHEMA: TranscriptSchema = {
  name: 'codex',
  version: '0.2',
  description: 'Schema for Codex session JSONL files under ~/.codex/sessions.',
  events: [
    // Session header line: carries the session id and working directory.
    {
      name: 'session-meta',
      match: { path: 'type', equals: 'session_meta' },
      action: 'session_context',
      fields: {
        sessionId: 'payload.id',
        cwd: 'payload.cwd'
      }
    },
    // Per-turn context line: may update the working directory mid-session.
    {
      name: 'turn-context',
      match: { path: 'type', equals: 'turn_context' },
      action: 'session_context',
      fields: {
        cwd: 'payload.cwd'
      }
    },
    // First user message initializes the session with its prompt.
    {
      name: 'user-message',
      match: { path: 'payload.type', equals: 'user_message' },
      action: 'session_init',
      fields: {
        prompt: 'payload.message'
      }
    },
    {
      name: 'assistant-message',
      match: { path: 'payload.type', equals: 'agent_message' },
      action: 'assistant_message',
      fields: {
        message: 'payload.message'
      }
    },
    // Tool invocations. web_search_call lines carry no name, so toolName
    // coalesces to the literal 'web_search'; input may live under several keys.
    {
      name: 'tool-use',
      match: { path: 'payload.type', in: ['function_call', 'custom_tool_call', 'web_search_call'] },
      action: 'tool_use',
      fields: {
        toolId: 'payload.call_id',
        toolName: {
          coalesce: [
            'payload.name',
            { value: 'web_search' }
          ]
        },
        toolInput: {
          coalesce: [
            'payload.arguments',
            'payload.input',
            'payload.action'
          ]
        }
      }
    },
    // Tool outputs are matched back to their call via payload.call_id.
    {
      name: 'tool-result',
      match: { path: 'payload.type', in: ['function_call_output', 'custom_tool_call_output'] },
      action: 'tool_result',
      fields: {
        toolId: 'payload.call_id',
        toolResponse: 'payload.output'
      }
    },
    // NOTE(review): only 'turn_aborted' ends a session here — presumably
    // normal completion is detected elsewhere; confirm against Codex logs.
    {
      name: 'session-end',
      match: { path: 'payload.type', equals: 'turn_aborted' },
      action: 'session_end'
    }
  ]
};
// Default config written by `claude-mem transcript init`: watches Codex
// session JSONL files and mirrors generated context into ~/.codex/AGENTS.md
// at session boundaries.
export const SAMPLE_CONFIG: TranscriptWatchConfig = {
  version: 1,
  schemas: {
    codex: CODEX_SAMPLE_SCHEMA
  },
  watches: [
    {
      name: 'codex',
      path: '~/.codex/sessions/**/*.jsonl',
      schema: 'codex',
      // Skip pre-existing transcript history the first time a file is seen.
      startAtEnd: true,
      context: {
        mode: 'agents',
        path: '~/.codex/AGENTS.md',
        updateOn: ['session_start', 'session_end']
      }
    }
  ],
  stateFile: DEFAULT_STATE_PATH
};
/**
 * Expand a leading tilde to the current user's home directory.
 *
 * Only a bare `~` or a `~/`-prefixed path is expanded. Paths like
 * `~otheruser/x` (another user's home in shell syntax) are returned
 * unchanged rather than being wrongly rewritten to `<home>/otheruser/x`.
 *
 * @param inputPath - possibly tilde-prefixed path; falsy values pass through.
 * @returns the expanded absolute path, or the input unchanged.
 */
export function expandHomePath(inputPath: string): string {
  if (!inputPath) return inputPath;
  if (inputPath === '~') {
    return homedir();
  }
  // Accept both separators so Windows-style "~\dir" also expands.
  if (inputPath.startsWith('~/') || inputPath.startsWith('~\\')) {
    return join(homedir(), inputPath.slice(1));
  }
  return inputPath;
}
export function loadTranscriptWatchConfig(path = DEFAULT_CONFIG_PATH): TranscriptWatchConfig {
const resolvedPath = expandHomePath(path);
if (!existsSync(resolvedPath)) {
throw new Error(`Transcript watch config not found: ${resolvedPath}`);
}
const raw = readFileSync(resolvedPath, 'utf-8');
const parsed = JSON.parse(raw) as TranscriptWatchConfig;
if (!parsed.version || !parsed.watches) {
throw new Error(`Invalid transcript watch config: ${resolvedPath}`);
}
if (!parsed.stateFile) {
parsed.stateFile = DEFAULT_STATE_PATH;
}
return parsed;
}
/**
 * Write the built-in SAMPLE_CONFIG to disk as pretty-printed JSON, creating
 * the parent directory if needed. Overwrites any existing file at the path.
 */
export function writeSampleConfig(path = DEFAULT_CONFIG_PATH): void {
  const targetPath = expandHomePath(path);
  const parentDir = dirname(targetPath);
  if (!existsSync(parentDir)) {
    mkdirSync(parentDir, { recursive: true });
  }
  const serialized = JSON.stringify(SAMPLE_CONFIG, null, 2);
  writeFileSync(targetPath, serialized);
}
+151
View File
@@ -0,0 +1,151 @@
import type { FieldSpec, MatchRule, TranscriptSchema, WatchTarget } from './types.js';
// Ambient data available to field resolution beyond the JSONL entry itself:
// referenced by `$watch.*`, `$schema.*`, `$session.*`, `$cwd`, `$project`.
interface ResolveContext {
  watch: WatchTarget;      // the watch target that produced the entry
  schema: TranscriptSchema; // schema used to interpret the entry
  session?: Record<string, unknown>; // accumulated session state, when available
}
/**
 * Tokenize a dotted JSON path (optionally `$.`-prefixed) into property names
 * and numeric array indices, e.g. "payload.items[0].id" ->
 * ['payload', 'items', 0, 'id'].
 */
function parsePath(path: string): Array<string | number> {
  const normalized = path.trim().replace(/^\$\.?/, '');
  if (!normalized) return [];
  const tokens: Array<string | number> = [];
  for (const segment of normalized.split('.')) {
    // Within each segment, alternate between bare names and [N] index groups.
    const tokenPattern = /([^[\]]+)|\[(\d+)\]/g;
    let hit: RegExpExecArray | null;
    while ((hit = tokenPattern.exec(segment)) !== null) {
      const [, name, index] = hit;
      if (name) {
        tokens.push(name);
      } else if (index) {
        tokens.push(parseInt(index, 10));
      }
    }
  }
  return tokens;
}
/**
 * Walk `input` along a dotted/indexed JSON path and return the value found,
 * or undefined when the path is empty or any intermediate step is missing.
 */
export function getValueByPath(input: unknown, path: string): unknown {
  if (!path) return undefined;
  let cursor: any = input;
  for (const token of parsePath(path)) {
    // Stop early instead of throwing on null/undefined intermediates.
    if (cursor == null) return undefined;
    cursor = cursor[token as any];
  }
  return cursor;
}
// "Empty" for coalescing purposes: null, undefined, or the empty string.
// Note that 0 and false are deliberately NOT empty.
function isEmptyValue(value: unknown): boolean {
  return value == null || value === '';
}
/**
 * Resolve `$`-prefixed context variables against the watch/schema/session
 * context: `$cwd`, `$project`, `$watch.<key>`, `$schema.<key>`,
 * `$session.<key>`. Returns undefined for anything else.
 */
function resolveFromContext(path: string, ctx: ResolveContext): unknown {
  if (path === '$cwd') return ctx.watch.workspace;
  if (path === '$project') return ctx.watch.project;
  if (path.startsWith('$watch.')) {
    return (ctx.watch as any)[path.slice('$watch.'.length)];
  }
  if (path.startsWith('$schema.')) {
    return (ctx.schema as any)[path.slice('$schema.'.length)];
  }
  if (path.startsWith('$session.')) {
    const key = path.slice('$session.'.length);
    return ctx.session ? (ctx.session as any)[key] : undefined;
  }
  return undefined;
}
/**
 * Resolve a single FieldSpec against a transcript entry.
 *
 * Resolution order (first hit wins):
 * - string spec: context variable ($watch./$schema./$session./$cwd/$project)
 *   first, then treated as a JSON path into the entry (even if undefined);
 * - object spec: `coalesce` candidates in order (first NON-EMPTY value,
 *   where '' /null/undefined count as empty), then `path` (context first,
 *   then entry, non-empty only), then literal `value`, then `default`.
 *
 * @returns the resolved value, or undefined when nothing matched.
 */
export function resolveFieldSpec(
  spec: FieldSpec | undefined,
  entry: unknown,
  ctx: ResolveContext
): unknown {
  if (spec === undefined) return undefined;
  if (typeof spec === 'string') {
    const fromContext = resolveFromContext(spec, ctx);
    if (fromContext !== undefined) return fromContext;
    // Note: a string spec returns the entry lookup directly, even if undefined.
    return getValueByPath(entry, spec);
  }
  if (spec.coalesce && Array.isArray(spec.coalesce)) {
    // Recursive: each candidate may itself be a string or object spec.
    for (const candidate of spec.coalesce) {
      const value = resolveFieldSpec(candidate, entry, ctx);
      if (!isEmptyValue(value)) return value;
    }
  }
  if (spec.path) {
    const fromContext = resolveFromContext(spec.path, ctx);
    if (fromContext !== undefined) return fromContext;
    const value = getValueByPath(entry, spec.path);
    if (!isEmptyValue(value)) return value;
  }
  if (spec.value !== undefined) return spec.value;
  if (spec.default !== undefined) return spec.default;
  return undefined;
}
/**
 * Resolve every FieldSpec in an event's `fields` map against the entry.
 * Keys whose spec resolves to nothing are kept with value undefined.
 */
export function resolveFields(
  fields: Record<string, FieldSpec> | undefined,
  entry: unknown,
  ctx: ResolveContext
): Record<string, unknown> {
  if (!fields) return {};
  return Object.fromEntries(
    Object.entries(fields).map(([key, spec]) => [key, resolveFieldSpec(spec, entry, ctx)])
  );
}
/**
 * Test a transcript entry against a match rule.
 *
 * The value is read from rule.path, falling back to the schema-wide
 * eventTypePath and finally the literal 'type'. `exists` acts as a gate;
 * then the first concrete criterion present decides, in precedence order:
 * equals > in > contains > regex. A rule with no criteria matches anything.
 */
export function matchesRule(
  entry: unknown,
  rule: MatchRule | undefined,
  schema: TranscriptSchema
): boolean {
  if (!rule) return true;
  // The 'type' fallback guarantees a non-empty lookup path.
  const lookupPath = rule.path || schema.eventTypePath || 'type';
  const value = getValueByPath(entry, lookupPath);
  if (rule.exists && (value == null || value === '')) {
    return false;
  }
  if (rule.equals !== undefined) {
    return value === rule.equals;
  }
  if (Array.isArray(rule.in)) {
    return rule.in.includes(value);
  }
  if (rule.contains !== undefined) {
    return typeof value === 'string' && value.includes(rule.contains);
  }
  if (rule.regex) {
    try {
      return new RegExp(rule.regex).test(String(value ?? ''));
    } catch {
      // An invalid user-supplied pattern never matches.
      return false;
    }
  }
  return true;
}
+371
View File
@@ -0,0 +1,371 @@
import { sessionInitHandler } from '../../cli/handlers/session-init.js';
import { observationHandler } from '../../cli/handlers/observation.js';
import { fileEditHandler } from '../../cli/handlers/file-edit.js';
import { sessionCompleteHandler } from '../../cli/handlers/session-complete.js';
import { ensureWorkerRunning, getWorkerPort } from '../../shared/worker-utils.js';
import { logger } from '../../utils/logger.js';
import { getProjectContext, getProjectName } from '../../utils/project-name.js';
import { writeAgentsMd } from '../../utils/agents-md-utils.js';
import { resolveFieldSpec, resolveFields, matchesRule } from './field-utils.js';
import { expandHomePath } from './config.js';
import type { TranscriptSchema, WatchTarget, SchemaEvent } from './types.js';
// Per-session accumulator, keyed in the processor by `${watch.name}:${sessionId}`.
interface SessionState {
  sessionId: string;
  cwd?: string;               // working directory, from context events or watch config
  project?: string;           // project name; derived from cwd when not explicit
  lastUserMessage?: string;
  lastAssistantMessage?: string; // forwarded to the summarize endpoint on session end
  // Tool invocations awaiting their matching tool_result, keyed by tool call id.
  pendingTools: Map<string, { name?: string; input?: unknown }>;
}
// One tool invocation assembled from tool_use/tool_result event fields.
interface PendingTool {
  id?: string;
  name?: string;
  input?: unknown;
  response?: unknown;
}
/**
 * Translates schema-matched transcript events into claude-mem pipeline calls
 * (session init, observations, file edits, session completion, summaries,
 * and AGENTS.md context updates).
 *
 * Stateful: accumulates per-session context (cwd, project, last messages,
 * pending tool calls) across entries until a session_end event clears it.
 */
export class TranscriptEventProcessor {
  // Live session accumulators, keyed by `${watch.name}:${sessionId}`.
  private sessions = new Map<string, SessionState>();

  /**
   * Run one parsed JSONL entry through every schema event whose match rule
   * accepts it. An entry may match — and therefore trigger — multiple events.
   */
  async processEntry(
    entry: unknown,
    watch: WatchTarget,
    schema: TranscriptSchema,
    sessionIdOverride?: string | null
  ): Promise<void> {
    for (const event of schema.events) {
      if (!matchesRule(entry, event.match, schema)) continue;
      await this.handleEvent(entry, watch, schema, event, sessionIdOverride ?? undefined);
    }
  }

  private getSessionKey(watch: WatchTarget, sessionId: string): string {
    return `${watch.name}:${sessionId}`;
  }

  // Fetch or lazily create the accumulator for this (watch, session) pair.
  private getOrCreateSession(watch: WatchTarget, sessionId: string): SessionState {
    const key = this.getSessionKey(watch, sessionId);
    let session = this.sessions.get(key);
    if (!session) {
      session = {
        sessionId,
        pendingTools: new Map()
      };
      this.sessions.set(key, session);
    }
    return session;
  }

  /**
   * Determine the session id for an entry: event-level `sessionId` field spec
   * (or the schema-wide sessionIdPath) first, then the caller-supplied
   * override. Returns null when no usable id is available.
   */
  private resolveSessionId(
    entry: unknown,
    watch: WatchTarget,
    schema: TranscriptSchema,
    event: SchemaEvent,
    sessionIdOverride?: string
  ): string | null {
    const ctx = { watch, schema } as any;
    const fieldSpec = event.fields?.sessionId ?? (schema.sessionIdPath ? { path: schema.sessionIdPath } : undefined);
    const resolved = resolveFieldSpec(fieldSpec, entry, ctx);
    if (typeof resolved === 'string' && resolved.trim()) return resolved;
    if (typeof resolved === 'number') return String(resolved);
    if (sessionIdOverride && sessionIdOverride.trim()) return sessionIdOverride;
    return null;
  }

  /**
   * Working directory for this event: event field / schema cwdPath, then the
   * watch's configured workspace, then whatever the session already recorded.
   */
  private resolveCwd(
    entry: unknown,
    watch: WatchTarget,
    schema: TranscriptSchema,
    event: SchemaEvent,
    session: SessionState
  ): string | undefined {
    const ctx = { watch, schema, session } as any;
    const fieldSpec = event.fields?.cwd ?? (schema.cwdPath ? { path: schema.cwdPath } : undefined);
    const resolved = resolveFieldSpec(fieldSpec, entry, ctx);
    if (typeof resolved === 'string' && resolved.trim()) return resolved;
    if (watch.workspace) return watch.workspace;
    return session.cwd;
  }

  /**
   * Project name for this event: event field / schema projectPath, then the
   * watch's configured project, then a name derived from the session cwd,
   * then whatever the session already recorded.
   */
  private resolveProject(
    entry: unknown,
    watch: WatchTarget,
    schema: TranscriptSchema,
    event: SchemaEvent,
    session: SessionState
  ): string | undefined {
    const ctx = { watch, schema, session } as any;
    const fieldSpec = event.fields?.project ?? (schema.projectPath ? { path: schema.projectPath } : undefined);
    const resolved = resolveFieldSpec(fieldSpec, entry, ctx);
    if (typeof resolved === 'string' && resolved.trim()) return resolved;
    if (watch.project) return watch.project;
    if (session.cwd) return getProjectName(session.cwd);
    return session.project;
  }

  /**
   * Core dispatcher: resolve session identity and context for the entry,
   * then route it to the handler for the event's declared action.
   * Entries without a resolvable session id are dropped (debug-logged).
   */
  private async handleEvent(
    entry: unknown,
    watch: WatchTarget,
    schema: TranscriptSchema,
    event: SchemaEvent,
    sessionIdOverride?: string
  ): Promise<void> {
    const sessionId = this.resolveSessionId(entry, watch, schema, event, sessionIdOverride);
    if (!sessionId) {
      logger.debug('TRANSCRIPT', 'Skipping event without sessionId', { event: event.name, watch: watch.name });
      return;
    }
    const session = this.getOrCreateSession(watch, sessionId);
    // cwd/project updates happen for EVERY event, not just session_context.
    const cwd = this.resolveCwd(entry, watch, schema, event, session);
    if (cwd) session.cwd = cwd;
    const project = this.resolveProject(entry, watch, schema, event, session);
    if (project) session.project = project;
    const fields = resolveFields(event.fields, entry, { watch, schema, session });
    switch (event.action) {
      case 'session_context':
        this.applySessionContext(session, fields);
        break;
      case 'session_init':
        await this.handleSessionInit(session, fields);
        if (watch.context?.updateOn?.includes('session_start')) {
          await this.updateContext(session, watch);
        }
        break;
      case 'user_message':
        // When both are present, `prompt` wins (assigned last).
        if (typeof fields.message === 'string') session.lastUserMessage = fields.message;
        if (typeof fields.prompt === 'string') session.lastUserMessage = fields.prompt;
        break;
      case 'assistant_message':
        if (typeof fields.message === 'string') session.lastAssistantMessage = fields.message;
        break;
      case 'tool_use':
        await this.handleToolUse(session, fields);
        break;
      case 'tool_result':
        await this.handleToolResult(session, fields);
        break;
      case 'observation':
        await this.sendObservation(session, fields);
        break;
      case 'file_edit':
        await this.sendFileEdit(session, fields);
        break;
      case 'session_end':
        await this.handleSessionEnd(session, watch);
        break;
      default:
        break;
    }
  }

  // Record cwd/project carried by session_context events onto the session.
  private applySessionContext(session: SessionState, fields: Record<string, unknown>): void {
    const cwd = typeof fields.cwd === 'string' ? fields.cwd : undefined;
    const project = typeof fields.project === 'string' ? fields.project : undefined;
    if (cwd) session.cwd = cwd;
    if (project) session.project = project;
  }

  // Forward session start (with the initiating prompt) to the init handler.
  private async handleSessionInit(session: SessionState, fields: Record<string, unknown>): Promise<void> {
    const prompt = typeof fields.prompt === 'string' ? fields.prompt : '';
    const cwd = session.cwd ?? process.cwd();
    if (prompt) {
      session.lastUserMessage = prompt;
    }
    await sessionInitHandler.execute({
      sessionId: session.sessionId,
      cwd,
      prompt,
      platform: 'transcript'
    });
  }

  /**
   * Handle a tool invocation event. The call is parked in pendingTools until
   * its result arrives. Two special cases:
   * - apply_patch: file paths are extracted from the patch text and emitted
   *   as file_edit events immediately;
   * - events that already carry a response emit an observation right away.
   */
  private async handleToolUse(session: SessionState, fields: Record<string, unknown>): Promise<void> {
    const toolId = typeof fields.toolId === 'string' ? fields.toolId : undefined;
    const toolName = typeof fields.toolName === 'string' ? fields.toolName : undefined;
    const toolInput = this.maybeParseJson(fields.toolInput);
    const toolResponse = this.maybeParseJson(fields.toolResponse);
    const pending: PendingTool = { id: toolId, name: toolName, input: toolInput, response: toolResponse };
    if (toolId) {
      session.pendingTools.set(toolId, { name: pending.name, input: pending.input });
    }
    if (toolName === 'apply_patch' && typeof toolInput === 'string') {
      const files = this.parseApplyPatchFiles(toolInput);
      for (const filePath of files) {
        await this.sendFileEdit(session, {
          filePath,
          edits: [{ type: 'apply_patch', patch: toolInput }]
        });
      }
    }
    if (toolResponse !== undefined && toolName) {
      await this.sendObservation(session, {
        toolName,
        toolInput,
        toolResponse
      });
    }
  }

  /**
   * Handle a tool result: reunite it with the pending tool_use (by id) to
   * recover the tool's name/input, then emit an observation. Results whose
   * tool name cannot be determined are dropped silently.
   */
  private async handleToolResult(session: SessionState, fields: Record<string, unknown>): Promise<void> {
    const toolId = typeof fields.toolId === 'string' ? fields.toolId : undefined;
    const toolName = typeof fields.toolName === 'string' ? fields.toolName : undefined;
    const toolResponse = this.maybeParseJson(fields.toolResponse);
    let toolInput: unknown = this.maybeParseJson(fields.toolInput);
    let name = toolName;
    if (toolId && session.pendingTools.has(toolId)) {
      const pending = session.pendingTools.get(toolId)!;
      toolInput = pending.input ?? toolInput;
      name = name ?? pending.name;
      session.pendingTools.delete(toolId);
    }
    if (name) {
      await this.sendObservation(session, {
        toolName: name,
        toolInput,
        toolResponse
      });
    }
  }

  // Forward a tool observation to the observation pipeline. No-op without a name.
  private async sendObservation(session: SessionState, fields: Record<string, unknown>): Promise<void> {
    const toolName = typeof fields.toolName === 'string' ? fields.toolName : undefined;
    if (!toolName) return;
    await observationHandler.execute({
      sessionId: session.sessionId,
      cwd: session.cwd ?? process.cwd(),
      toolName,
      toolInput: this.maybeParseJson(fields.toolInput),
      toolResponse: this.maybeParseJson(fields.toolResponse),
      platform: 'transcript'
    });
  }

  // Forward a file-edit record to the file-edit pipeline. No-op without a path.
  private async sendFileEdit(session: SessionState, fields: Record<string, unknown>): Promise<void> {
    const filePath = typeof fields.filePath === 'string' ? fields.filePath : undefined;
    if (!filePath) return;
    await fileEditHandler.execute({
      sessionId: session.sessionId,
      cwd: session.cwd ?? process.cwd(),
      filePath,
      edits: Array.isArray(fields.edits) ? fields.edits : undefined,
      platform: 'transcript'
    });
  }

  // Best-effort JSON decode: only attempts strings that look like objects or
  // arrays; anything unparseable is returned unchanged.
  private maybeParseJson(value: unknown): unknown {
    if (typeof value !== 'string') return value;
    const trimmed = value.trim();
    if (!trimmed) return value;
    if (!(trimmed.startsWith('{') || trimmed.startsWith('['))) return value;
    try {
      return JSON.parse(trimmed);
    } catch {
      return value;
    }
  }

  /**
   * Extract the set of file paths touched by an apply_patch payload.
   * Recognizes the patch-envelope markers (*** Update/Add/Delete File,
   * *** Move to) plus unified-diff "+++ b/<path>" headers; '/dev/null'
   * targets are ignored and duplicates removed.
   */
  private parseApplyPatchFiles(patch: string): string[] {
    const files: string[] = [];
    const lines = patch.split('\n');
    for (const line of lines) {
      const trimmed = line.trim();
      if (trimmed.startsWith('*** Update File: ')) {
        files.push(trimmed.replace('*** Update File: ', '').trim());
      } else if (trimmed.startsWith('*** Add File: ')) {
        files.push(trimmed.replace('*** Add File: ', '').trim());
      } else if (trimmed.startsWith('*** Delete File: ')) {
        files.push(trimmed.replace('*** Delete File: ', '').trim());
      } else if (trimmed.startsWith('*** Move to: ')) {
        files.push(trimmed.replace('*** Move to: ', '').trim());
      } else if (trimmed.startsWith('+++ ')) {
        const path = trimmed.replace('+++ ', '').replace(/^b\//, '').trim();
        if (path && path !== '/dev/null') files.push(path);
      }
    }
    return Array.from(new Set(files));
  }

  /**
   * Session teardown: queue a summary, mark the session complete, refresh
   * AGENTS.md context, and discard the in-memory accumulator.
   * NOTE(review): updateContext runs here unconditionally — the
   * watch.context.updateOn 'session_end' setting is not consulted; confirm
   * whether that gating was intended (session_start IS gated in handleEvent).
   */
  private async handleSessionEnd(session: SessionState, watch: WatchTarget): Promise<void> {
    await this.queueSummary(session);
    await sessionCompleteHandler.execute({
      sessionId: session.sessionId,
      cwd: session.cwd ?? process.cwd(),
      platform: 'transcript'
    });
    await this.updateContext(session, watch);
    session.pendingTools.clear();
    const key = this.getSessionKey(watch, session.sessionId);
    this.sessions.delete(key);
  }

  // Best-effort: ask the worker service to summarize the session; failures
  // are logged as warnings, never thrown.
  private async queueSummary(session: SessionState): Promise<void> {
    const workerReady = await ensureWorkerRunning();
    if (!workerReady) return;
    const port = getWorkerPort();
    const lastAssistantMessage = session.lastAssistantMessage ?? '';
    try {
      await fetch(`http://127.0.0.1:${port}/api/sessions/summarize`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          contentSessionId: session.sessionId,
          last_assistant_message: lastAssistantMessage
        })
      });
    } catch (error) {
      logger.warn('TRANSCRIPT', 'Summary request failed', {
        error: error instanceof Error ? error.message : String(error)
      });
    }
  }

  /**
   * Fetch generated context from the worker and write it into the watch's
   * AGENTS.md (only for context mode 'agents'). Best-effort: any failure or
   * empty response leaves the file untouched.
   */
  private async updateContext(session: SessionState, watch: WatchTarget): Promise<void> {
    if (!watch.context) return;
    if (watch.context.mode !== 'agents') return;
    const workerReady = await ensureWorkerRunning();
    if (!workerReady) return;
    const cwd = session.cwd ?? watch.workspace;
    if (!cwd) return;
    const context = getProjectContext(cwd);
    const projectsParam = context.allProjects.join(',');
    const port = getWorkerPort();
    try {
      const response = await fetch(
        `http://127.0.0.1:${port}/api/context/inject?projects=${encodeURIComponent(projectsParam)}`
      );
      if (!response.ok) return;
      const content = (await response.text()).trim();
      if (!content) return;
      const agentsPath = expandHomePath(watch.context.path ?? `${cwd}/AGENTS.md`);
      writeAgentsMd(agentsPath, content);
      logger.debug('TRANSCRIPT', 'Updated AGENTS.md context', { agentsPath, watch: watch.name });
    } catch (error) {
      logger.warn('TRANSCRIPT', 'Failed to update AGENTS.md context', {
        error: error instanceof Error ? error.message : String(error)
      });
    }
  }
}
+40
View File
@@ -0,0 +1,40 @@
import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';
import { dirname } from 'path';
import { logger } from '../../utils/logger.js';
// Persisted watcher progress. `offsets` maps an identifier (presumably the
// watched file path — confirm against watcher.ts) to the byte offset already
// processed, so restarts resume instead of re-reading whole files.
export interface TranscriptWatchState {
  offsets: Record<string, number>;
}
/**
 * Load persisted watcher progress from disk. Any failure (missing file,
 * unreadable file, bad JSON, missing `offsets` key) yields a fresh empty
 * state so the watcher can always start; read errors are logged as warnings.
 */
export function loadWatchState(statePath: string): TranscriptWatchState {
  try {
    if (!existsSync(statePath)) {
      return { offsets: {} };
    }
    const parsed = JSON.parse(readFileSync(statePath, 'utf-8')) as TranscriptWatchState;
    return parsed.offsets ? parsed : { offsets: {} };
  } catch (error) {
    logger.warn('TRANSCRIPT', 'Failed to load watch state, starting fresh', {
      statePath,
      error: error instanceof Error ? error.message : String(error)
    });
    return { offsets: {} };
  }
}
/**
 * Persist watcher progress as pretty-printed JSON, creating the parent
 * directory on demand. Best-effort: write failures are logged as warnings
 * and never thrown, so a bad state path cannot kill the watcher.
 */
export function saveWatchState(statePath: string, state: TranscriptWatchState): void {
  try {
    const parentDir = dirname(statePath);
    if (!existsSync(parentDir)) {
      mkdirSync(parentDir, { recursive: true });
    }
    const serialized = JSON.stringify(state, null, 2);
    writeFileSync(statePath, serialized);
  } catch (error) {
    logger.warn('TRANSCRIPT', 'Failed to save watch state', {
      statePath,
      error: error instanceof Error ? error.message : String(error)
    });
  }
}
+70
View File
@@ -0,0 +1,70 @@
/**
 * Declarative extractor for one value from a JSONL entry: either a JSON path
 * string, or an object combining a `path`, a literal `value`, a `coalesce`
 * list (candidates tried in order, first non-empty wins), and/or a `default`
 * fallback. See resolveFieldSpec for the exact precedence.
 */
export type FieldSpec =
  | string
  | {
      path?: string;
      value?: unknown;
      coalesce?: FieldSpec[];
      default?: unknown;
    };

/**
 * Predicate applied to an entry to decide whether a SchemaEvent fires.
 * `path` selects the value to test (defaults to the schema's eventTypePath,
 * then 'type'); `exists` gates on non-emptiness; equals/in/contains/regex
 * are alternative criteria (see matchesRule for precedence).
 */
export interface MatchRule {
  path?: string;
  equals?: unknown;
  in?: unknown[];
  contains?: string;
  exists?: boolean;
  regex?: string;
}

// Pipeline action a matched event is routed to (see TranscriptEventProcessor).
export type EventAction =
  | 'session_init'
  | 'session_context'
  | 'user_message'
  | 'assistant_message'
  | 'tool_use'
  | 'tool_result'
  | 'observation'
  | 'file_edit'
  | 'session_end';

// One rule of a schema: when `match` accepts an entry, `fields` are resolved
// and the result is dispatched as `action`.
export interface SchemaEvent {
  name: string;
  match?: MatchRule;
  action: EventAction;
  fields?: Record<string, FieldSpec>;
}

/**
 * Describes how to interpret one transcript JSONL format.
 * The *Path properties are schema-wide default JSON paths that individual
 * events can override via their own field specs.
 */
export interface TranscriptSchema {
  name: string;
  version?: string;
  description?: string;
  eventTypePath?: string;
  sessionIdPath?: string;
  cwdPath?: string;
  projectPath?: string;
  events: SchemaEvent[];
}

// How generated context is mirrored to disk for a watch ('agents' writes an
// AGENTS.md file, optionally at session start/end boundaries).
export interface WatchContextConfig {
  mode: 'agents';
  path?: string;
  updateOn?: Array<'session_start' | 'session_end'>;
}

/**
 * One watched transcript source: a glob `path`, the schema to parse it with
 * (by name or inline), optional workspace/project overrides, optional
 * context mirroring, and tailing behavior knobs.
 */
export interface WatchTarget {
  name: string;
  path: string;
  schema: string | TranscriptSchema;
  workspace?: string;
  project?: string;
  context?: WatchContextConfig;
  rescanIntervalMs?: number;
  startAtEnd?: boolean; // skip pre-existing file content on first sight
}

// Top-level shape of ~/.claude-mem/transcript-watch.json.
export interface TranscriptWatchConfig {
  version: 1;
  schemas?: Record<string, TranscriptSchema>;
  watches: WatchTarget[];
  stateFile?: string;
}
+224
View File
@@ -0,0 +1,224 @@
import { existsSync, statSync, watch as fsWatch, createReadStream } from 'fs';
import { basename, join } from 'path';
import { globSync } from 'glob';
import { logger } from '../../utils/logger.js';
import { expandHomePath } from './config.js';
import { loadWatchState, saveWatchState, type TranscriptWatchState } from './state.js';
import type { TranscriptWatchConfig, TranscriptSchema, WatchTarget } from './types.js';
import { TranscriptEventProcessor } from './processor.js';
interface TailState {
  // Byte offset of the next unread byte in the file.
  offset: number;
  // Trailing fragment of an incomplete last line, carried to the next read.
  partial: string;
}

/**
 * Tails a single file, emitting each complete, trimmed, non-empty line via
 * the `onLine` callback and reporting byte-offset progress via `onOffset`.
 *
 * Fixes vs. the original implementation:
 * - Truncation handling: when the file shrinks below the stored offset the
 *   partial-line buffer is now cleared as well; previously a stale fragment
 *   from the old contents was glued onto the first line of the new contents.
 * - Read serialization: fs.watch can fire again while a previous read is
 *   still in flight, which previously allowed two overlapping reads from the
 *   same offset to emit duplicate lines. Reads are now queued.
 */
class FileTailer {
  private watcher: ReturnType<typeof fsWatch> | null = null;
  private tailState: TailState;
  // True while a read pass is running; used to serialize reads.
  private reading = false;
  // Set when a watch event arrives mid-read; triggers one follow-up pass.
  private readQueued = false;

  constructor(
    private filePath: string,
    initialOffset: number,
    private onLine: (line: string) => Promise<void>,
    private onOffset: (offset: number) => void
  ) {
    this.tailState = { offset: initialOffset, partial: '' };
  }

  /** Read anything already past the offset, then watch for future appends. */
  start(): void {
    this.readNewData().catch(() => undefined);
    this.watcher = fsWatch(this.filePath, { persistent: true }, () => {
      this.readNewData().catch(() => undefined);
    });
  }

  /** Stop watching. An in-flight read finishes but no new ones are scheduled. */
  close(): void {
    this.watcher?.close();
    this.watcher = null;
  }

  /**
   * Serialized entry point: if a read is already running, remember that new
   * data may exist and run exactly one more pass when the current one ends.
   */
  private async readNewData(): Promise<void> {
    if (this.reading) {
      this.readQueued = true;
      return;
    }
    this.reading = true;
    try {
      await this.drainNewData();
    } finally {
      this.reading = false;
      if (this.readQueued) {
        this.readQueued = false;
        this.readNewData().catch(() => undefined);
      }
    }
  }

  /** Read from the stored offset to EOF and emit newly completed lines. */
  private async drainNewData(): Promise<void> {
    if (!existsSync(this.filePath)) return;
    let size = 0;
    try {
      size = statSync(this.filePath).size;
    } catch {
      // File vanished between existsSync and statSync; try again next event.
      return;
    }
    if (size < this.tailState.offset) {
      // Truncation/rotation: restart from the top and discard any partial
      // line buffered from the previous contents.
      this.tailState.offset = 0;
      this.tailState.partial = '';
    }
    if (size === this.tailState.offset) return;
    const stream = createReadStream(this.filePath, {
      start: this.tailState.offset,
      end: size - 1,
      encoding: 'utf8'
    });
    let data = '';
    for await (const chunk of stream) {
      data += chunk as string;
    }
    this.tailState.offset = size;
    this.onOffset(this.tailState.offset);
    const combined = this.tailState.partial + data;
    const lines = combined.split('\n');
    // The element after the last newline is an incomplete line; keep it.
    this.tailState.partial = lines.pop() ?? '';
    for (const line of lines) {
      const trimmed = line.trim();
      if (!trimmed) continue;
      await this.onLine(trimmed);
    }
  }
}
/**
 * Watches configured transcript sources (files, directories, or globs),
 * tails matching files, and routes each parsed JSONL line through a
 * TranscriptEventProcessor. Per-file byte offsets are persisted via the
 * state file so tailing resumes where it stopped across restarts.
 */
export class TranscriptWatcher {
  private processor = new TranscriptEventProcessor();
  // One active FileTailer per absolute file path.
  private tailers = new Map<string, FileTailer>();
  // Persisted offsets keyed by file path; loaded once in the constructor.
  private state: TranscriptWatchState;
  // Interval handles used to discover newly created files; cleared by stop().
  private rescanTimers: Array<NodeJS.Timeout> = [];

  constructor(private config: TranscriptWatchConfig, private statePath: string) {
    this.state = loadWatchState(statePath);
  }

  /** Begin tailing every watch target declared in the config. */
  async start(): Promise<void> {
    for (const watch of this.config.watches) {
      await this.setupWatch(watch);
    }
  }

  /** Close all tailers and cancel all rescan timers. */
  stop(): void {
    for (const tailer of this.tailers.values()) {
      tailer.close();
    }
    this.tailers.clear();
    for (const timer of this.rescanTimers) {
      clearInterval(timer);
    }
    this.rescanTimers = [];
  }

  /**
   * Resolve the target's schema and initial file set, attach a tailer per
   * file, then schedule a periodic rescan (default 5s) that attaches tailers
   * for files created after startup.
   */
  private async setupWatch(watch: WatchTarget): Promise<void> {
    const schema = this.resolveSchema(watch);
    if (!schema) {
      logger.warn('TRANSCRIPT', 'Missing schema for watch', { watch: watch.name });
      return;
    }
    const resolvedPath = expandHomePath(watch.path);
    const files = this.resolveWatchFiles(resolvedPath);
    for (const filePath of files) {
      await this.addTailer(filePath, watch, schema);
    }
    const rescanIntervalMs = watch.rescanIntervalMs ?? 5000;
    // NOTE(review): async setInterval callback — a rejection from addTailer
    // would surface as an unhandled rejection; confirm addTailer cannot throw.
    const timer = setInterval(async () => {
      const newFiles = this.resolveWatchFiles(resolvedPath);
      for (const filePath of newFiles) {
        if (!this.tailers.has(filePath)) {
          await this.addTailer(filePath, watch, schema);
        }
      }
    }, rescanIntervalMs);
    this.rescanTimers.push(timer);
  }

  /** Inline schema object, or a lookup into config.schemas by name. */
  private resolveSchema(watch: WatchTarget): TranscriptSchema | null {
    if (typeof watch.schema === 'string') {
      return this.config.schemas?.[watch.schema] ?? null;
    }
    return watch.schema;
  }

  /**
   * Expand a watch path into concrete files: glob patterns are expanded,
   * directories are scanned recursively for *.jsonl files, plain files pass
   * through unchanged, and a missing path resolves to an empty list.
   */
  private resolveWatchFiles(inputPath: string): string[] {
    if (this.hasGlob(inputPath)) {
      return globSync(inputPath, { nodir: true, absolute: true });
    }
    if (existsSync(inputPath)) {
      try {
        const stat = statSync(inputPath);
        if (stat.isDirectory()) {
          const pattern = join(inputPath, '**', '*.jsonl');
          return globSync(pattern, { nodir: true, absolute: true });
        }
        return [inputPath];
      } catch {
        // Race: path disappeared after existsSync — treat as no files.
        return [];
      }
    }
    return [];
  }

  /**
   * Treat the path as a glob when it contains glob metacharacters.
   * NOTE(review): parentheses match extglob syntax but also appear in
   * ordinary paths like "Program Files (x86)" — confirm this is intended.
   */
  private hasGlob(inputPath: string): boolean {
    return /[*?[\]{}()]/.test(inputPath);
  }

  /**
   * Attach a FileTailer for one file, resuming from the persisted offset
   * (or from the current EOF when startAtEnd is set and no offset is
   * stored). Every offset advance is written back to the state file.
   */
  private async addTailer(filePath: string, watch: WatchTarget, schema: TranscriptSchema): Promise<void> {
    if (this.tailers.has(filePath)) return;
    const sessionIdOverride = this.extractSessionIdFromPath(filePath);
    let offset = this.state.offsets[filePath] ?? 0;
    if (offset === 0 && watch.startAtEnd) {
      try {
        offset = statSync(filePath).size;
      } catch {
        // Unreadable stat: fall back to processing from the beginning.
        offset = 0;
      }
    }
    const tailer = new FileTailer(
      filePath,
      offset,
      async (line: string) => {
        await this.handleLine(line, watch, schema, filePath, sessionIdOverride);
      },
      (newOffset: number) => {
        this.state.offsets[filePath] = newOffset;
        saveWatchState(this.statePath, this.state);
      }
    );
    tailer.start();
    this.tailers.set(filePath, tailer);
    logger.info('TRANSCRIPT', 'Watching transcript file', {
      file: filePath,
      watch: watch.name,
      schema: schema.name
    });
  }

  /**
   * Parse one JSONL line and hand it to the processor. Unparseable lines are
   * logged at debug level and skipped — they must not stop the tail.
   */
  private async handleLine(
    line: string,
    watch: WatchTarget,
    schema: TranscriptSchema,
    filePath: string,
    sessionIdOverride?: string | null
  ): Promise<void> {
    try {
      const entry = JSON.parse(line);
      await this.processor.processEntry(entry, watch, schema, sessionIdOverride ?? undefined);
    } catch (error) {
      logger.debug('TRANSCRIPT', 'Failed to parse transcript line', {
        watch: watch.name,
        file: basename(filePath)
      }, error as Error);
    }
  }

  /** Extract the first UUID-shaped token from the file path, if any. */
  private extractSessionIdFromPath(filePath: string): string | null {
    const match = filePath.match(/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/i);
    return match ? match[0] : null;
  }
}
+519 -81
View File
@@ -10,12 +10,54 @@
*/
import path from 'path';
import { existsSync, writeFileSync, unlinkSync, statSync } from 'fs';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';
import { getWorkerPort, getWorkerHost } from '../shared/worker-utils.js';
import { HOOK_TIMEOUTS } from '../shared/hook-constants.js';
import { SettingsDefaultsManager } from '../shared/SettingsDefaultsManager.js';
import { getAuthMethodDescription } from '../shared/EnvManager.js';
import { logger } from '../utils/logger.js';
import { ChromaServerManager } from './sync/ChromaServerManager.js';
// Windows: avoid repeated spawn popups when startup fails (issue #921)
const WINDOWS_SPAWN_COOLDOWN_MS = 2 * 60 * 1000;
/** Absolute path of the lock file marking a recent worker spawn attempt. */
function getWorkerSpawnLockPath(): string {
  const dataDir = SettingsDefaultsManager.get('CLAUDE_MEM_DATA_DIR');
  return path.join(dataDir, '.worker-start-attempted');
}
/**
 * Windows-only guard: true when a spawn attempt was recorded within the
 * cooldown window, meaning we should not retry yet (avoids repeated spawn
 * popups when startup keeps failing — issue #921). Always false elsewhere.
 */
function shouldSkipSpawnOnWindows(): boolean {
  if (process.platform !== 'win32') {
    return false;
  }
  const lockPath = getWorkerSpawnLockPath();
  if (!existsSync(lockPath)) {
    return false;
  }
  try {
    const ageMs = Date.now() - statSync(lockPath).mtimeMs;
    return ageMs < WINDOWS_SPAWN_COOLDOWN_MS;
  } catch {
    // Unreadable lock file: err on the side of allowing the spawn.
    return false;
  }
}
/** Windows-only: record a spawn attempt by touching the lock file. */
function markWorkerSpawnAttempted(): void {
  if (process.platform !== 'win32') {
    return;
  }
  const lockPath = getWorkerSpawnLockPath();
  try {
    writeFileSync(lockPath, '', 'utf-8');
  } catch {
    // Best-effort: an unwritable lock file must not block startup.
  }
}
/** Windows-only: remove the spawn-attempt lock after a successful start. */
function clearWorkerSpawnAttempted(): void {
  if (process.platform !== 'win32') {
    return;
  }
  try {
    const lockPath = getWorkerSpawnLockPath();
    if (existsSync(lockPath)) {
      unlinkSync(lockPath);
    }
  } catch {
    // Best-effort cleanup — a leftover lock only delays the next spawn.
  }
}
// Version injected at build time by esbuild define
declare const __DEFAULT_PACKAGE_VERSION__: string;
const packageVersion = typeof __DEFAULT_PACKAGE_VERSION__ !== 'undefined' ? __DEFAULT_PACKAGE_VERSION__ : '0.0.0-dev';
@@ -27,6 +69,7 @@ import {
removePidFile,
getPlatformTimeout,
cleanupOrphanedProcesses,
cleanStalePidFile,
spawnDaemon,
createSignalHandler
} from './infrastructure/ProcessManager.js';
@@ -53,8 +96,8 @@ import { DatabaseManager } from './worker/DatabaseManager.js';
import { SessionManager } from './worker/SessionManager.js';
import { SSEBroadcaster } from './worker/SSEBroadcaster.js';
import { SDKAgent } from './worker/SDKAgent.js';
import { GeminiAgent } from './worker/GeminiAgent.js';
import { OpenRouterAgent } from './worker/OpenRouterAgent.js';
import { GeminiAgent, isGeminiSelected, isGeminiAvailable } from './worker/GeminiAgent.js';
import { OpenRouterAgent, isOpenRouterSelected, isOpenRouterAvailable } from './worker/OpenRouterAgent.js';
import { PaginationHelper } from './worker/PaginationHelper.js';
import { SettingsManager } from './worker/SettingsManager.js';
import { SearchManager } from './worker/SearchManager.js';
@@ -69,6 +112,7 @@ import { DataRoutes } from './worker/http/routes/DataRoutes.js';
import { SearchRoutes } from './worker/http/routes/SearchRoutes.js';
import { SettingsRoutes } from './worker/http/routes/SettingsRoutes.js';
import { LogsRoutes } from './worker/http/routes/LogsRoutes.js';
import { MemoryRoutes } from './worker/http/routes/MemoryRoutes.js';
// Process management for zombie cleanup (Issue #737)
import { startOrphanReaper, reapOrphanedProcesses } from './worker/ProcessRegistry.js';
@@ -131,6 +175,14 @@ export class WorkerService {
// Orphan reaper cleanup function (Issue #737)
private stopOrphanReaper: (() => void) | null = null;
// AI interaction tracking for health endpoint
private lastAiInteraction: {
timestamp: number;
success: boolean;
provider: string;
error?: string;
} | null = null;
constructor() {
// Initialize the promise that will resolve when background initialization completes
this.initializationComplete = new Promise((resolve) => {
@@ -154,6 +206,7 @@ export class WorkerService {
this.broadcastProcessingStatus();
});
// Initialize MCP client
// Empty capabilities object: this client only calls tools, doesn't expose any
this.mcpClient = new Client({
@@ -166,7 +219,24 @@ export class WorkerService {
getInitializationComplete: () => this.initializationCompleteFlag,
getMcpReady: () => this.mcpReady,
onShutdown: () => this.shutdown(),
onRestart: () => this.shutdown()
onRestart: () => this.shutdown(),
workerPath: __filename,
getAiStatus: () => {
let provider = 'claude';
if (isOpenRouterSelected() && isOpenRouterAvailable()) provider = 'openrouter';
else if (isGeminiSelected() && isGeminiAvailable()) provider = 'gemini';
return {
provider,
authMethod: getAuthMethodDescription(),
lastInteraction: this.lastAiInteraction
? {
timestamp: this.lastAiInteraction.timestamp,
success: this.lastAiInteraction.success,
...(this.lastAiInteraction.error && { error: this.lastAiInteraction.error }),
}
: null,
};
},
});
// Register route handlers
@@ -191,35 +261,74 @@ export class WorkerService {
this.isShuttingDown = shutdownRef.value;
handler('SIGINT');
});
// SIGHUP: sent by kernel when controlling terminal closes.
// Daemon mode: ignore it (survive parent shell exit).
// Interactive mode: treat like SIGTERM (graceful shutdown).
if (process.platform !== 'win32') {
if (process.argv.includes('--daemon')) {
process.on('SIGHUP', () => {
logger.debug('SYSTEM', 'Ignoring SIGHUP in daemon mode');
});
} else {
process.on('SIGHUP', () => {
this.isShuttingDown = shutdownRef.value;
handler('SIGHUP');
});
}
}
}
/**
* Register all route handlers with the server
*/
private registerRoutes(): void {
// Standard routes
this.server.registerRoutes(new ViewerRoutes(this.sseBroadcaster, this.dbManager, this.sessionManager));
this.server.registerRoutes(new SessionRoutes(this.sessionManager, this.dbManager, this.sdkAgent, this.geminiAgent, this.openRouterAgent, this.sessionEventBroadcaster, this));
this.server.registerRoutes(new DataRoutes(this.paginationHelper, this.dbManager, this.sessionManager, this.sseBroadcaster, this, this.startTime));
this.server.registerRoutes(new SettingsRoutes(this.settingsManager));
this.server.registerRoutes(new LogsRoutes());
// IMPORTANT: Middleware must be registered BEFORE routes (Express processes in order)
// Early handler for /api/context/inject to avoid 404 during startup
// Early handler for /api/context/inject — fail open if not yet initialized
this.server.app.get('/api/context/inject', async (req, res, next) => {
const timeoutMs = 300000; // 5 minute timeout for slow systems
const timeoutPromise = new Promise((_, reject) =>
setTimeout(() => reject(new Error('Initialization timeout')), timeoutMs)
);
await Promise.race([this.initializationComplete, timeoutPromise]);
if (!this.searchRoutes) {
res.status(503).json({ error: 'Search routes not initialized' });
if (!this.initializationCompleteFlag || !this.searchRoutes) {
logger.warn('SYSTEM', 'Context requested before initialization complete, returning empty');
res.status(200).json({ content: [{ type: 'text', text: '' }] });
return;
}
next(); // Delegate to SearchRoutes handler
});
// Guard ALL /api/* routes during initialization — wait for DB with timeout
// Exceptions: /api/health, /api/readiness, /api/version (handled by Server.ts core routes)
// and /api/context/inject (handled above with fail-open)
this.server.app.use('/api', async (req, res, next) => {
if (this.initializationCompleteFlag) {
next();
return;
}
const timeoutMs = 30000;
const timeoutPromise = new Promise<void>((_, reject) =>
setTimeout(() => reject(new Error('Database initialization timeout')), timeoutMs)
);
try {
await Promise.race([this.initializationComplete, timeoutPromise]);
next();
} catch (error) {
logger.error('HTTP', `Request to ${req.method} ${req.path} rejected — DB not initialized`, {}, error as Error);
res.status(503).json({
error: 'Service initializing',
message: 'Database is still initializing, please retry'
});
}
});
// Standard routes (registered AFTER guard middleware)
this.server.registerRoutes(new ViewerRoutes(this.sseBroadcaster, this.dbManager, this.sessionManager));
this.server.registerRoutes(new SessionRoutes(this.sessionManager, this.dbManager, this.sdkAgent, this.geminiAgent, this.openRouterAgent, this.sessionEventBroadcaster, this));
this.server.registerRoutes(new DataRoutes(this.paginationHelper, this.dbManager, this.sessionManager, this.sseBroadcaster, this, this.startTime));
this.server.registerRoutes(new SettingsRoutes(this.settingsManager));
this.server.registerRoutes(new LogsRoutes());
this.server.registerRoutes(new MemoryRoutes(this.dbManager, 'claude-mem'));
}
/**
@@ -293,13 +402,12 @@ export class WorkerService {
await this.dbManager.initialize();
// Recover stuck messages from previous crashes
// Reset any messages that were processing when worker died
const { PendingMessageStore } = await import('./sqlite/PendingMessageStore.js');
const pendingStore = new PendingMessageStore(this.dbManager.getSessionStore().db, 3);
const STUCK_THRESHOLD_MS = 5 * 60 * 1000;
const resetCount = pendingStore.resetStuckMessages(STUCK_THRESHOLD_MS);
const resetCount = pendingStore.resetStaleProcessingMessages(0); // 0 = reset ALL processing
if (resetCount > 0) {
logger.info('SYSTEM', `Recovered ${resetCount} stuck messages from previous session`, { thresholdMinutes: 5 });
logger.info('SYSTEM', `Reset ${resetCount} stale processing messages to pending`);
}
// Initialize search services
@@ -366,8 +474,24 @@ export class WorkerService {
}
}
/**
 * Select the agent for the currently configured provider: OpenRouter first,
 * then Gemini, then the Claude SDK agent as the default. Mirrors
 * SessionRoutes.getActiveAgent() so both code paths pick the same provider.
 */
private getActiveAgent(): SDKAgent | GeminiAgent | OpenRouterAgent {
  const useOpenRouter = isOpenRouterSelected() && isOpenRouterAvailable();
  if (useOpenRouter) {
    return this.openRouterAgent;
  }
  const useGemini = isGeminiSelected() && isGeminiAvailable();
  if (useGemini) {
    return this.geminiAgent;
  }
  return this.sdkAgent;
}
/**
* Start a session processor
* On SDK resume failure (terminated session), falls back to Gemini/OpenRouter if available,
* otherwise marks messages abandoned and removes session so queue does not grow unbounded.
*/
private startSessionProcessor(
session: ReturnType<typeof this.sessionManager.getSession>,
@@ -376,21 +500,203 @@ export class WorkerService {
if (!session) return;
const sid = session.sessionDbId;
logger.info('SYSTEM', `Starting generator (${source})`, { sessionId: sid });
const agent = this.getActiveAgent();
const providerName = agent.constructor.name;
session.generatorPromise = this.sdkAgent.startSession(session, this)
.catch(error => {
// Before starting generator, check if AbortController is already aborted
// This can happen after a previous generator was aborted but the session still has pending work
if (session.abortController.signal.aborted) {
logger.debug('SYSTEM', 'Replacing aborted AbortController before starting generator', {
sessionId: session.sessionDbId
});
session.abortController = new AbortController();
}
// Track whether generator failed with an unrecoverable error to prevent infinite restart loops
let hadUnrecoverableError = false;
let sessionFailed = false;
logger.info('SYSTEM', `Starting generator (${source}) using ${providerName}`, { sessionId: sid });
session.generatorPromise = agent.startSession(session, this)
.catch(async (error: unknown) => {
const errorMessage = (error as Error)?.message || '';
// Detect unrecoverable errors that should NOT trigger restart
// These errors will fail immediately on retry, causing infinite loops
const unrecoverablePatterns = [
'Claude executable not found',
'CLAUDE_CODE_PATH',
'ENOENT',
'spawn',
'Invalid API key',
];
if (unrecoverablePatterns.some(pattern => errorMessage.includes(pattern))) {
hadUnrecoverableError = true;
this.lastAiInteraction = {
timestamp: Date.now(),
success: false,
provider: providerName,
error: errorMessage,
};
logger.error('SDK', 'Unrecoverable generator error - will NOT restart', {
sessionId: session.sessionDbId,
project: session.project,
errorMessage
});
return;
}
// Fallback for terminated SDK sessions (provider abstraction)
if (this.isSessionTerminatedError(error)) {
logger.warn('SDK', 'SDK resume failed, falling back to standalone processing', {
sessionId: session.sessionDbId,
project: session.project,
reason: error instanceof Error ? error.message : String(error)
});
return this.runFallbackForTerminatedSession(session, error);
}
// Detect stale resume failures - SDK session context was lost
if ((errorMessage.includes('aborted by user') || errorMessage.includes('No conversation found'))
&& session.memorySessionId) {
logger.warn('SDK', 'Detected stale resume failure, clearing memorySessionId for fresh start', {
sessionId: session.sessionDbId,
memorySessionId: session.memorySessionId,
errorMessage
});
// Clear stale memorySessionId and force fresh init on next attempt
this.dbManager.getSessionStore().updateMemorySessionId(session.sessionDbId, null);
session.memorySessionId = null;
session.forceInit = true;
}
logger.error('SDK', 'Session generator failed', {
sessionId: session.sessionDbId,
project: session.project
project: session.project,
provider: providerName
}, error as Error);
sessionFailed = true;
this.lastAiInteraction = {
timestamp: Date.now(),
success: false,
provider: providerName,
error: errorMessage,
};
throw error;
})
.finally(() => {
session.generatorPromise = null;
// Record successful AI interaction if no error occurred
if (!sessionFailed && !hadUnrecoverableError) {
this.lastAiInteraction = {
timestamp: Date.now(),
success: true,
provider: providerName,
};
}
// Do NOT restart after unrecoverable errors - prevents infinite loops
if (hadUnrecoverableError) {
logger.warn('SYSTEM', 'Skipping restart due to unrecoverable error', {
sessionId: session.sessionDbId
});
this.broadcastProcessingStatus();
return;
}
// Check if there's pending work that needs processing with a fresh AbortController
const { PendingMessageStore } = require('./sqlite/PendingMessageStore.js');
const pendingStore = new PendingMessageStore(this.dbManager.getSessionStore().db, 3);
const pendingCount = pendingStore.getPendingCount(session.sessionDbId);
if (pendingCount > 0) {
logger.info('SYSTEM', 'Pending work remains after generator exit, restarting with fresh AbortController', {
sessionId: session.sessionDbId,
pendingCount
});
// Reset AbortController for restart
session.abortController = new AbortController();
// Restart processor
this.startSessionProcessor(session, 'pending-work-restart');
}
this.broadcastProcessingStatus();
});
}
/**
 * Heuristically detect errors that mean the Claude Code process/session is
 * gone and resume is impossible, so the caller should run graceful fallback
 * instead of leaving pending messages stuck forever.
 */
private isSessionTerminatedError(error: unknown): boolean {
  const text = (error instanceof Error ? error.message : String(error)).toLowerCase();
  const terminatedMarkers = [
    'process aborted by user',
    'processtransport',
    'not ready for writing',
    'session generator failed',
    'claude code process'
  ];
  return terminatedMarkers.some(marker => text.includes(marker));
}
/**
 * Recovery path when SDK resume fails because the Claude session/process is
 * gone: try GeminiAgent, then OpenRouterAgent, to drain the session's pending
 * messages. If neither fallback is available (or both fail), mark all pending
 * messages abandoned and remove the session so the queue cannot grow
 * unbounded.
 */
private async runFallbackForTerminatedSession(
  session: ReturnType<typeof this.sessionManager.getSession>,
  _originalError: unknown
): Promise<void> {
  if (!session) return;
  const sessionDbId = session.sessionDbId;
  // Fallback agents need memorySessionId for storeObservations
  if (!session.memorySessionId) {
    // Synthesize a unique id so downstream storage has something to key on.
    const syntheticId = `fallback-${sessionDbId}-${Date.now()}`;
    session.memorySessionId = syntheticId;
    this.dbManager.getSessionStore().updateMemorySessionId(sessionDbId, syntheticId);
  }
  // First choice: Gemini. On failure, fall through to OpenRouter.
  if (isGeminiAvailable()) {
    try {
      await this.geminiAgent.startSession(session, this);
      return;
    } catch (e) {
      logger.warn('SDK', 'Fallback Gemini failed, trying OpenRouter', {
        sessionId: sessionDbId,
        error: e instanceof Error ? e.message : String(e)
      });
    }
  }
  // Second choice: OpenRouter. On failure, fall through to abandonment.
  if (isOpenRouterAvailable()) {
    try {
      await this.openRouterAgent.startSession(session, this);
      return;
    } catch (e) {
      logger.warn('SDK', 'Fallback OpenRouter failed', {
        sessionId: sessionDbId,
        error: e instanceof Error ? e.message : String(e)
      });
    }
  }
  // No fallback or both failed: mark messages abandoned and remove session so queue doesn't grow
  const pendingStore = this.sessionManager.getPendingMessageStore();
  const abandoned = pendingStore.markAllSessionMessagesAbandoned(sessionDbId);
  if (abandoned > 0) {
    logger.warn('SDK', 'No fallback available; marked pending messages abandoned', {
      sessionId: sessionDbId,
      abandoned
    });
  }
  // Drop the session and tell subscribers it is complete (even though it
  // ended via fallback abandonment rather than normal completion).
  this.sessionManager.removeSessionImmediate(sessionDbId);
  this.sessionEventBroadcaster.broadcastSessionCompleted(sessionDbId);
}
/**
* Process pending session queues
*/
@@ -402,6 +708,46 @@ export class WorkerService {
}> {
const { PendingMessageStore } = await import('./sqlite/PendingMessageStore.js');
const pendingStore = new PendingMessageStore(this.dbManager.getSessionStore().db, 3);
const sessionStore = this.dbManager.getSessionStore();
// Clean up stale 'active' sessions before processing
// Sessions older than 6 hours without activity are likely orphaned
const STALE_SESSION_THRESHOLD_MS = 6 * 60 * 60 * 1000;
const staleThreshold = Date.now() - STALE_SESSION_THRESHOLD_MS;
try {
const staleSessionIds = sessionStore.db.prepare(`
SELECT id FROM sdk_sessions
WHERE status = 'active' AND started_at_epoch < ?
`).all(staleThreshold) as { id: number }[];
if (staleSessionIds.length > 0) {
const ids = staleSessionIds.map(r => r.id);
const placeholders = ids.map(() => '?').join(',');
sessionStore.db.prepare(`
UPDATE sdk_sessions
SET status = 'failed', completed_at_epoch = ?
WHERE id IN (${placeholders})
`).run(Date.now(), ...ids);
logger.info('SYSTEM', `Marked ${ids.length} stale sessions as failed`);
const msgResult = sessionStore.db.prepare(`
UPDATE pending_messages
SET status = 'failed', failed_at_epoch = ?
WHERE status = 'pending'
AND session_db_id IN (${placeholders})
`).run(Date.now(), ...ids);
if (msgResult.changes > 0) {
logger.info('SYSTEM', `Marked ${msgResult.changes} pending messages from stale sessions as failed`);
}
}
} catch (error) {
logger.error('SYSTEM', 'Failed to clean up stale sessions', {}, error as Error);
}
const orphanedSessionIds = pendingStore.getSessionsWithPendingMessages();
const result = {
@@ -486,6 +832,86 @@ export class WorkerService {
}
}
// ============================================================================
// Reusable Worker Startup Logic
// ============================================================================
/**
 * Ensure a healthy worker is listening on `port`, spawning one if needed.
 * Shared by the 'start' and 'hook' commands.
 *
 * Handles: stale PID files, version-mismatch auto-restart, a port already
 * occupied by a worker still starting up, and the Windows spawn-cooldown
 * lock (issue #921).
 *
 * @param port - The port the worker should run on
 * @returns true if a worker (existing or newly spawned) is healthy
 */
async function ensureWorkerStarted(port: number): Promise<boolean> {
  // Cheap hygiene first: drop a PID file whose process is gone.
  cleanStalePidFile();

  // Fast path: a worker already answers health checks.
  if (await waitForHealth(port, 1000)) {
    const version = await checkVersionMatch(port);
    if (version.matches) {
      logger.info('SYSTEM', 'Worker already running and healthy');
      return true;
    }
    // Version drift: shut the old worker down, then fall through to respawn.
    logger.info('SYSTEM', 'Worker version mismatch detected - auto-restarting', {
      pluginVersion: version.pluginVersion,
      workerVersion: version.workerVersion
    });
    await httpShutdown(port);
    const portFreed = await waitForPortFree(port, getPlatformTimeout(HOOK_TIMEOUTS.PORT_IN_USE_WAIT));
    if (!portFreed) {
      logger.error('SYSTEM', 'Port did not free up after shutdown for version mismatch restart', { port });
      return false;
    }
    removePidFile();
  }

  // Port held by something else: give a starting worker time to become healthy.
  if (await isPortInUse(port)) {
    logger.info('SYSTEM', 'Port in use, waiting for worker to become healthy');
    const becameHealthy = await waitForHealth(port, getPlatformTimeout(HOOK_TIMEOUTS.PORT_IN_USE_WAIT));
    if (becameHealthy) {
      logger.info('SYSTEM', 'Worker is now healthy');
      return true;
    }
    logger.error('SYSTEM', 'Port in use but worker not responding to health checks');
    return false;
  }

  // Windows: skip spawn if a recent attempt already failed (prevents repeated bun.exe popups, issue #921)
  if (shouldSkipSpawnOnWindows()) {
    logger.warn('SYSTEM', 'Worker unavailable on Windows — skipping spawn (recent attempt failed within cooldown)');
    return false;
  }

  // Spawn a fresh daemon. The worker writes its own PID file after listen()
  // succeeds — race-free, and correct on Windows where cmd.exe's PID is useless.
  logger.info('SYSTEM', 'Starting worker daemon');
  markWorkerSpawnAttempted();
  const daemonPid = spawnDaemon(__filename, port);
  if (daemonPid === undefined) {
    logger.error('SYSTEM', 'Failed to spawn worker daemon');
    return false;
  }

  const spawnedHealthy = await waitForHealth(port, getPlatformTimeout(HOOK_TIMEOUTS.POST_SPAWN_WAIT));
  if (!spawnedHealthy) {
    removePidFile();
    logger.error('SYSTEM', 'Worker failed to start (health check timeout)');
    return false;
  }

  clearWorkerSpawnAttempted();
  logger.info('SYSTEM', 'Worker started successfully');
  return true;
}
// ============================================================================
// CLI Entry Point
// ============================================================================
@@ -504,58 +930,12 @@ async function main() {
switch (command) {
case 'start': {
if (await waitForHealth(port, 1000)) {
const versionCheck = await checkVersionMatch(port);
if (!versionCheck.matches) {
logger.info('SYSTEM', 'Worker version mismatch detected - auto-restarting', {
pluginVersion: versionCheck.pluginVersion,
workerVersion: versionCheck.workerVersion
});
await httpShutdown(port);
const freed = await waitForPortFree(port, getPlatformTimeout(15000));
if (!freed) {
logger.error('SYSTEM', 'Port did not free up after shutdown for version mismatch restart', { port });
exitWithStatus('error', 'Port did not free after version mismatch restart');
}
removePidFile();
} else {
logger.info('SYSTEM', 'Worker already running and healthy');
exitWithStatus('ready');
}
const success = await ensureWorkerStarted(port);
if (success) {
exitWithStatus('ready');
} else {
exitWithStatus('error', 'Failed to start worker');
}
const portInUse = await isPortInUse(port);
if (portInUse) {
logger.info('SYSTEM', 'Port in use, waiting for worker to become healthy');
const healthy = await waitForHealth(port, getPlatformTimeout(15000));
if (healthy) {
logger.info('SYSTEM', 'Worker is now healthy');
exitWithStatus('ready');
}
logger.error('SYSTEM', 'Port in use but worker not responding to health checks');
exitWithStatus('error', 'Port in use but worker not responding');
}
logger.info('SYSTEM', 'Starting worker daemon');
const pid = spawnDaemon(__filename, port);
if (pid === undefined) {
logger.error('SYSTEM', 'Failed to spawn worker daemon');
exitWithStatus('error', 'Failed to spawn worker daemon');
}
// PID file is written by the worker itself after listen() succeeds
// This is race-free and works correctly on Windows where cmd.exe PID is useless
const healthy = await waitForHealth(port, getPlatformTimeout(30000));
if (!healthy) {
removePidFile();
logger.error('SYSTEM', 'Worker failed to start (health check timeout)');
exitWithStatus('error', 'Worker failed to start (health check timeout)');
}
logger.info('SYSTEM', 'Worker started successfully');
exitWithStatus('ready');
}
case 'stop': {
@@ -592,7 +972,7 @@ async function main() {
// PID file is written by the worker itself after listen() succeeds
// This is race-free and works correctly on Windows where cmd.exe PID is useless
const healthy = await waitForHealth(port, getPlatformTimeout(30000));
const healthy = await waitForHealth(port, getPlatformTimeout(HOOK_TIMEOUTS.POST_SPAWN_WAIT));
if (!healthy) {
removePidFile();
logger.error('SYSTEM', 'Worker failed to restart');
@@ -626,21 +1006,79 @@ async function main() {
}
case 'hook': {
// Auto-start worker if not running
const workerReady = await ensureWorkerStarted(port);
if (!workerReady) {
logger.warn('SYSTEM', 'Worker failed to start before hook, handler will retry');
}
// Existing logic unchanged
const platform = process.argv[3];
const event = process.argv[4];
if (!platform || !event) {
console.error('Usage: claude-mem hook <platform> <event>');
console.error('Platforms: claude-code, cursor, raw');
console.error('Events: context, session-init, observation, summarize, user-message');
console.error('Events: context, session-init, observation, summarize, session-complete');
process.exit(1);
}
// Check if worker is already running on port
const portInUse = await isPortInUse(port);
let startedWorkerInProcess = false;
if (!portInUse) {
// Port free - start worker IN THIS PROCESS (no spawn!)
// This process becomes the worker and stays alive
try {
logger.info('SYSTEM', 'Starting worker in-process for hook', { event });
const worker = new WorkerService();
await worker.start();
startedWorkerInProcess = true;
// Worker is now running in this process on the port
} catch (error) {
logger.failure('SYSTEM', 'Worker failed to start in hook', {}, error as Error);
removePidFile();
process.exit(0);
}
}
// If port in use, we'll use HTTP to the existing worker
const { hookCommand } = await import('../cli/hook-command.js');
await hookCommand(platform, event);
// If we started the worker in this process, skip process.exit() so we stay alive as the worker
await hookCommand(platform, event, { skipExit: startedWorkerInProcess });
// Note: if we started worker in-process, this process stays alive as the worker
// The break allows the event loop to continue serving requests
break;
}
case 'generate': {
const dryRun = process.argv.includes('--dry-run');
const { generateClaudeMd } = await import('../cli/claude-md-commands.js');
const result = await generateClaudeMd(dryRun);
process.exit(result);
}
case 'clean': {
const dryRun = process.argv.includes('--dry-run');
const { cleanClaudeMd } = await import('../cli/claude-md-commands.js');
const result = await cleanClaudeMd(dryRun);
process.exit(result);
}
case '--daemon':
default: {
// Prevent daemon from dying silently on unhandled errors.
// The HTTP server can continue serving even if a background task throws.
process.on('unhandledRejection', (reason) => {
logger.error('SYSTEM', 'Unhandled rejection in daemon', {
reason: reason instanceof Error ? reason.message : String(reason)
});
});
process.on('uncaughtException', (error) => {
logger.error('SYSTEM', 'Uncaught exception in daemon', {}, error as Error);
// Don't exit — keep the HTTP server running
});
const worker = new WorkerService();
worker.start().catch((error) => {
logger.failure('SYSTEM', 'Worker failed to start', {}, error as Error);
+5
View File
@@ -33,6 +33,11 @@ export interface ActiveSession {
earliestPendingTimestamp: number | null; // Original timestamp of earliest pending message (for accurate observation timestamps)
conversationHistory: ConversationMessage[]; // Shared conversation history for provider switching
currentProvider: 'claude' | 'gemini' | 'openrouter' | null; // Track which provider is currently running
consecutiveRestarts: number; // Track consecutive restart attempts to prevent infinite loops
forceInit?: boolean; // Force fresh SDK session (skip resume)
// CLAIM-CONFIRM FIX: Track IDs of messages currently being processed
// These IDs will be confirmed (deleted) after successful storage
processingMessageIds: number[];
}
export interface PendingMessage {
+3 -2
View File
@@ -7,11 +7,12 @@
import { execSync, spawnSync } from 'child_process';
import { existsSync, unlinkSync } from 'fs';
import { homedir } from 'os';
import { join } from 'path';
import { logger } from '../../utils/logger.js';
import { MARKETPLACE_ROOT } from '../../shared/paths.js';
const INSTALLED_PLUGIN_PATH = join(homedir(), '.claude', 'plugins', 'marketplaces', 'thedotmack');
// Alias for code clarity - this is the installed plugin path
const INSTALLED_PLUGIN_PATH = MARKETPLACE_ROOT;
/**
* Validate branch name to prevent command injection
-2
View File
@@ -1,8 +1,6 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Dec 10, 2025
| ID | Time | T | Title | Read |
+33 -6
View File
@@ -17,6 +17,7 @@ import { SessionManager } from './SessionManager.js';
import { logger } from '../../utils/logger.js';
import { buildInitPrompt, buildObservationPrompt, buildSummaryPrompt, buildContinuationPrompt } from '../../sdk/prompts.js';
import { SettingsDefaultsManager } from '../../shared/SettingsDefaultsManager.js';
import { getCredential } from '../../shared/EnvManager.js';
import type { ActiveSession, ConversationMessage } from '../worker-types.js';
import { ModeManager } from '../domain/ModeManager.js';
import {
@@ -37,7 +38,7 @@ export type GeminiModel =
| 'gemini-2.5-pro'
| 'gemini-2.0-flash'
| 'gemini-2.0-flash-lite'
| 'gemini-3-flash';
| 'gemini-3-flash-preview';
// Free tier RPM limits by model (requests per minute)
const GEMINI_RPM_LIMITS: Record<GeminiModel, number> = {
@@ -46,7 +47,7 @@ const GEMINI_RPM_LIMITS: Record<GeminiModel, number> = {
'gemini-2.5-pro': 5,
'gemini-2.0-flash': 15,
'gemini-2.0-flash-lite': 30,
'gemini-3-flash': 5,
'gemini-3-flash-preview': 5,
};
// Track last request time for rate limiting
@@ -133,6 +134,14 @@ export class GeminiAgent {
throw new Error('Gemini API key not configured. Set CLAUDE_MEM_GEMINI_API_KEY in settings or GEMINI_API_KEY environment variable.');
}
// Generate synthetic memorySessionId (Gemini is stateless, doesn't return session IDs)
if (!session.memorySessionId) {
const syntheticMemorySessionId = `gemini-${session.contentSessionId}-${Date.now()}`;
session.memorySessionId = syntheticMemorySessionId;
this.dbManager.getSessionStore().updateMemorySessionId(session.sessionDbId, syntheticMemorySessionId);
logger.info('SESSION', `MEMORY_ID_GENERATED | sessionDbId=${session.sessionDbId} | provider=Gemini`);
}
// Load active mode
const mode = ModeManager.getInstance().getActiveMode();
@@ -177,6 +186,10 @@ export class GeminiAgent {
let lastCwd: string | undefined;
for await (const message of this.sessionManager.getMessageIterator(session.sessionDbId)) {
// CLAIM-CONFIRM: Track message ID for confirmProcessed() after successful storage
// The message is now in 'processing' status in DB until ResponseProcessor calls confirmProcessed()
session.processingMessageIds.push(message._persistentId);
// Capture cwd from each message for worktree support
if (message.cwd) {
lastCwd = message.cwd;
@@ -191,6 +204,12 @@ export class GeminiAgent {
session.lastPromptNumber = message.prompt_number;
}
// CRITICAL: Check memorySessionId BEFORE making expensive LLM call
// This prevents wasting tokens when we won't be able to store the result anyway
if (!session.memorySessionId) {
throw new Error('Cannot process observations: memorySessionId not yet captured. This session may need to be reinitialized.');
}
// Build observation prompt
const obsPrompt = buildObservationPrompt({
id: 0,
@@ -229,6 +248,11 @@ export class GeminiAgent {
);
} else if (message.type === 'summarize') {
// CRITICAL: Check memorySessionId BEFORE making expensive LLM call
if (!session.memorySessionId) {
throw new Error('Cannot process summary: memorySessionId not yet captured. This session may need to be reinitialized.');
}
// Build summary prompt
const summaryPrompt = buildSummaryPrompt({
id: session.sessionDbId,
@@ -367,13 +391,15 @@ export class GeminiAgent {
/**
* Get Gemini configuration from settings or environment
* Issue #733: Uses centralized ~/.claude-mem/.env for credentials, not random project .env files
*/
private getGeminiConfig(): { apiKey: string; model: GeminiModel; rateLimitingEnabled: boolean } {
const settingsPath = path.join(homedir(), '.claude-mem', 'settings.json');
const settings = SettingsDefaultsManager.loadFromFile(settingsPath);
// API key: check settings first, then environment variable
const apiKey = settings.CLAUDE_MEM_GEMINI_API_KEY || process.env.GEMINI_API_KEY || '';
// API key: check settings first, then centralized claude-mem .env (NOT process.env)
// This prevents Issue #733 where random project .env files could interfere
const apiKey = settings.CLAUDE_MEM_GEMINI_API_KEY || getCredential('GEMINI_API_KEY') || '';
// Model: from settings or default, with validation
const defaultModel: GeminiModel = 'gemini-2.5-flash';
@@ -384,7 +410,7 @@ export class GeminiAgent {
'gemini-2.5-pro',
'gemini-2.0-flash',
'gemini-2.0-flash-lite',
'gemini-3-flash',
'gemini-3-flash-preview',
];
let model: GeminiModel;
@@ -407,11 +433,12 @@ export class GeminiAgent {
/**
* Check if Gemini is available (has API key configured)
* Issue #733: Uses centralized ~/.claude-mem/.env, not random project .env files
*/
export function isGeminiAvailable(): boolean {
const settingsPath = path.join(homedir(), '.claude-mem', 'settings.json');
const settings = SettingsDefaultsManager.loadFromFile(settingsPath);
return !!(settings.CLAUDE_MEM_GEMINI_API_KEY || process.env.GEMINI_API_KEY);
return !!(settings.CLAUDE_MEM_GEMINI_API_KEY || getCredential('GEMINI_API_KEY'));
}
/**
+30 -3
View File
@@ -17,6 +17,7 @@ import { logger } from '../../utils/logger.js';
import { buildInitPrompt, buildObservationPrompt, buildSummaryPrompt, buildContinuationPrompt } from '../../sdk/prompts.js';
import { SettingsDefaultsManager } from '../../shared/SettingsDefaultsManager.js';
import { USER_SETTINGS_PATH } from '../../shared/paths.js';
import { getCredential } from '../../shared/EnvManager.js';
import type { ActiveSession, ConversationMessage } from '../worker-types.js';
import { ModeManager } from '../domain/ModeManager.js';
import {
@@ -91,6 +92,14 @@ export class OpenRouterAgent {
throw new Error('OpenRouter API key not configured. Set CLAUDE_MEM_OPENROUTER_API_KEY in settings or OPENROUTER_API_KEY environment variable.');
}
// Generate synthetic memorySessionId (OpenRouter is stateless, doesn't return session IDs)
if (!session.memorySessionId) {
const syntheticMemorySessionId = `openrouter-${session.contentSessionId}-${Date.now()}`;
session.memorySessionId = syntheticMemorySessionId;
this.dbManager.getSessionStore().updateMemorySessionId(session.sessionDbId, syntheticMemorySessionId);
logger.info('SESSION', `MEMORY_ID_GENERATED | sessionDbId=${session.sessionDbId} | provider=OpenRouter`);
}
// Load active mode
const mode = ModeManager.getInstance().getActiveMode();
@@ -136,6 +145,10 @@ export class OpenRouterAgent {
// Process pending messages
for await (const message of this.sessionManager.getMessageIterator(session.sessionDbId)) {
// CLAIM-CONFIRM: Track message ID for confirmProcessed() after successful storage
// The message is now in 'processing' status in DB until ResponseProcessor calls confirmProcessed()
session.processingMessageIds.push(message._persistentId);
// Capture cwd from messages for proper worktree support
if (message.cwd) {
lastCwd = message.cwd;
@@ -149,6 +162,12 @@ export class OpenRouterAgent {
session.lastPromptNumber = message.prompt_number;
}
// CRITICAL: Check memorySessionId BEFORE making expensive LLM call
// This prevents wasting tokens when we won't be able to store the result anyway
if (!session.memorySessionId) {
throw new Error('Cannot process observations: memorySessionId not yet captured. This session may need to be reinitialized.');
}
// Build observation prompt
const obsPrompt = buildObservationPrompt({
id: 0,
@@ -187,6 +206,11 @@ export class OpenRouterAgent {
);
} else if (message.type === 'summarize') {
// CRITICAL: Check memorySessionId BEFORE making expensive LLM call
if (!session.memorySessionId) {
throw new Error('Cannot process summary: memorySessionId not yet captured. This session may need to be reinitialized.');
}
// Build summary prompt
const summaryPrompt = buildSummaryPrompt({
id: session.sessionDbId,
@@ -409,13 +433,15 @@ export class OpenRouterAgent {
/**
* Get OpenRouter configuration from settings or environment
* Issue #733: Uses centralized ~/.claude-mem/.env for credentials, not random project .env files
*/
private getOpenRouterConfig(): { apiKey: string; model: string; siteUrl?: string; appName?: string } {
const settingsPath = USER_SETTINGS_PATH;
const settings = SettingsDefaultsManager.loadFromFile(settingsPath);
// API key: check settings first, then environment variable
const apiKey = settings.CLAUDE_MEM_OPENROUTER_API_KEY || process.env.OPENROUTER_API_KEY || '';
// API key: check settings first, then centralized claude-mem .env (NOT process.env)
// This prevents Issue #733 where random project .env files could interfere
const apiKey = settings.CLAUDE_MEM_OPENROUTER_API_KEY || getCredential('OPENROUTER_API_KEY') || '';
// Model: from settings or default
const model = settings.CLAUDE_MEM_OPENROUTER_MODEL || 'xiaomi/mimo-v2-flash:free';
@@ -430,11 +456,12 @@ export class OpenRouterAgent {
/**
* Check if OpenRouter is available (has API key configured)
* Issue #733: Uses centralized ~/.claude-mem/.env, not random project .env files
*/
export function isOpenRouterAvailable(): boolean {
const settingsPath = USER_SETTINGS_PATH;
const settings = SettingsDefaultsManager.loadFromFile(settingsPath);
return !!(settings.CLAUDE_MEM_OPENROUTER_API_KEY || process.env.OPENROUTER_API_KEY);
return !!(settings.CLAUDE_MEM_OPENROUTER_API_KEY || getCredential('OPENROUTER_API_KEY'));
}
/**
+86
View File
@@ -121,6 +121,86 @@ export async function ensureProcessExit(tracked: TrackedProcess, timeoutMs: numb
unregisterProcess(pid);
}
/**
* Kill idle daemon children (claude processes spawned by worker-service)
*
* These are SDK-spawned claude processes that completed their work but
* didn't terminate properly. They remain as children of the worker-service
* daemon, consuming memory without doing useful work.
*
* Criteria for cleanup:
* - Process name is "claude"
* - Parent PID is the worker-service daemon (this process)
* - Process has 0% CPU (idle)
* - Process has been running for more than 2 minutes
*/
async function killIdleDaemonChildren(): Promise<number> {
if (process.platform === 'win32') {
// Windows: Different process model, skip for now
return 0;
}
const daemonPid = process.pid;
let killed = 0;
try {
const { stdout } = await execAsync(
'ps -eo pid,ppid,%cpu,etime,comm 2>/dev/null | grep "claude$" || true'
);
for (const line of stdout.trim().split('\n')) {
if (!line) continue;
const parts = line.trim().split(/\s+/);
if (parts.length < 5) continue;
const [pidStr, ppidStr, cpuStr, etime] = parts;
const pid = parseInt(pidStr, 10);
const ppid = parseInt(ppidStr, 10);
const cpu = parseFloat(cpuStr);
// Skip if not a child of this daemon
if (ppid !== daemonPid) continue;
// Skip if actively using CPU
if (cpu > 0) continue;
// Parse elapsed time to minutes
// Formats: MM:SS, HH:MM:SS, D-HH:MM:SS
let minutes = 0;
const dayMatch = etime.match(/^(\d+)-(\d+):(\d+):(\d+)$/);
const hourMatch = etime.match(/^(\d+):(\d+):(\d+)$/);
const minMatch = etime.match(/^(\d+):(\d+)$/);
if (dayMatch) {
minutes = parseInt(dayMatch[1], 10) * 24 * 60 +
parseInt(dayMatch[2], 10) * 60 +
parseInt(dayMatch[3], 10);
} else if (hourMatch) {
minutes = parseInt(hourMatch[1], 10) * 60 +
parseInt(hourMatch[2], 10);
} else if (minMatch) {
minutes = parseInt(minMatch[1], 10);
}
// Kill if idle for more than 2 minutes
if (minutes >= 2) {
logger.info('PROCESS', `Killing idle daemon child PID ${pid} (idle ${minutes}m)`, { pid, minutes });
try {
process.kill(pid, 'SIGKILL');
killed++;
} catch {
// Already dead or permission denied
}
}
}
} catch {
// No matches or command error
}
return killed;
}
/**
* Kill system-level orphans (ppid=1 on Unix)
* These are Claude processes whose parent died unexpectedly
@@ -179,6 +259,9 @@ export async function reapOrphanedProcesses(activeSessionIds: Set<number>): Prom
// System-level: find ppid=1 orphans
killed += await killSystemOrphans();
// Daemon children: find idle SDK processes that didn't terminate
killed += await killIdleDaemonChildren();
return killed;
}
@@ -187,6 +270,9 @@ export async function reapOrphanedProcesses(activeSessionIds: Set<number>): Prom
*
* The SDK's spawnClaudeCodeProcess option allows us to intercept subprocess
* creation and capture the PID before the SDK hides it.
*
* NOTE: Session isolation is handled via the `cwd` option in SDKAgent.ts,
* NOT via CLAUDE_CONFIG_DIR (which breaks authentication).
*/
export function createPidCapturingSpawn(sessionDbId: number) {
return (spawnOptions: {
+79 -18
View File
@@ -16,7 +16,8 @@ import { SessionManager } from './SessionManager.js';
import { logger } from '../../utils/logger.js';
import { buildInitPrompt, buildObservationPrompt, buildSummaryPrompt, buildContinuationPrompt } from '../../sdk/prompts.js';
import { SettingsDefaultsManager } from '../../shared/SettingsDefaultsManager.js';
import { USER_SETTINGS_PATH } from '../../shared/paths.js';
import { USER_SETTINGS_PATH, OBSERVER_SESSIONS_DIR, ensureDir } from '../../shared/paths.js';
import { buildIsolatedEnv, getAuthMethodDescription } from '../../shared/EnvManager.js';
import type { ActiveSession, SDKUserMessage } from '../worker-types.js';
import { ModeManager } from '../domain/ModeManager.js';
import { processAgentResponse, type WorkerRef } from './agents/index.js';
@@ -71,24 +72,42 @@ export class SDKAgent {
// CRITICAL: Only resume if:
// 1. memorySessionId exists (was captured from a previous SDK response)
// 2. lastPromptNumber > 1 (this is a continuation within the same SDK session)
// 3. forceInit is NOT set (stale session recovery clears this)
// On worker restart or crash recovery, memorySessionId may exist from a previous
// SDK session but we must NOT resume because the SDK context was lost.
// NEVER use contentSessionId for resume - that would inject messages into the user's transcript!
const hasRealMemorySessionId = !!session.memorySessionId;
const shouldResume = hasRealMemorySessionId && session.lastPromptNumber > 1 && !session.forceInit;
// Clear forceInit after using it
if (session.forceInit) {
logger.info('SDK', 'forceInit flag set, starting fresh SDK session', {
sessionDbId: session.sessionDbId,
previousMemorySessionId: session.memorySessionId
});
session.forceInit = false;
}
// Build isolated environment from ~/.claude-mem/.env
// This prevents Issue #733: random ANTHROPIC_API_KEY from project .env files
// being used instead of the configured auth method (CLI subscription or explicit API key)
const isolatedEnv = buildIsolatedEnv();
const authMethod = getAuthMethodDescription();
logger.info('SDK', 'Starting SDK query', {
sessionDbId: session.sessionDbId,
contentSessionId: session.contentSessionId,
memorySessionId: session.memorySessionId,
hasRealMemorySessionId,
resume_parameter: hasRealMemorySessionId ? session.memorySessionId : '(none - fresh start)',
lastPromptNumber: session.lastPromptNumber
shouldResume,
resume_parameter: shouldResume ? session.memorySessionId : '(none - fresh start)',
lastPromptNumber: session.lastPromptNumber,
authMethod
});
// Debug-level alignment logs for detailed tracing
if (session.lastPromptNumber > 1) {
const willResume = hasRealMemorySessionId;
logger.debug('SDK', `[ALIGNMENT] Resume Decision | contentSessionId=${session.contentSessionId} | memorySessionId=${session.memorySessionId} | prompt#=${session.lastPromptNumber} | hasRealMemorySessionId=${hasRealMemorySessionId} | willResume=${willResume} | resumeWith=${willResume ? session.memorySessionId : 'NONE'}`);
logger.debug('SDK', `[ALIGNMENT] Resume Decision | contentSessionId=${session.contentSessionId} | memorySessionId=${session.memorySessionId} | prompt#=${session.lastPromptNumber} | hasRealMemorySessionId=${hasRealMemorySessionId} | shouldResume=${shouldResume} | resumeWith=${shouldResume ? session.memorySessionId : 'NONE'}`);
} else {
// INIT prompt - never resume even if memorySessionId exists (stale from previous session)
const hasStaleMemoryId = hasRealMemorySessionId;
@@ -101,39 +120,58 @@ export class SDKAgent {
// Run Agent SDK query loop
// Only resume if we have a captured memory session ID
// Use custom spawn to capture PIDs for zombie process cleanup (Issue #737)
// Use dedicated cwd to isolate observer sessions from user's `claude --resume` list
ensureDir(OBSERVER_SESSIONS_DIR);
// CRITICAL: Pass isolated env to prevent Issue #733 (API key pollution from project .env files)
const queryResult = query({
prompt: messageGenerator,
options: {
model: modelId,
// Only resume if BOTH: (1) we have a memorySessionId AND (2) this isn't the first prompt
// On worker restart, memorySessionId may exist from a previous SDK session but we
// need to start fresh since the SDK context was lost
...(hasRealMemorySessionId && session.lastPromptNumber > 1 && { resume: session.memorySessionId }),
// Isolate observer sessions - they'll appear under project "observer-sessions"
// instead of polluting user's actual project resume lists
cwd: OBSERVER_SESSIONS_DIR,
// Only resume if shouldResume is true (memorySessionId exists, not first prompt, not forceInit)
...(shouldResume && { resume: session.memorySessionId }),
disallowedTools,
abortController: session.abortController,
pathToClaudeCodeExecutable: claudePath,
// Custom spawn function captures PIDs to fix zombie process accumulation
spawnClaudeCodeProcess: createPidCapturingSpawn(session.sessionDbId)
spawnClaudeCodeProcess: createPidCapturingSpawn(session.sessionDbId),
env: isolatedEnv // Use isolated credentials from ~/.claude-mem/.env, not process.env
}
});
// Process SDK messages
for await (const message of queryResult) {
// Capture memory session ID from first SDK message (any type has session_id)
// This enables resume for subsequent generator starts within the same user session
if (!session.memorySessionId && message.session_id) {
// Capture or update memory session ID from SDK message
// IMPORTANT: The SDK may return a DIFFERENT session_id on resume than what we sent!
// We must always sync the DB to match what the SDK actually uses.
//
// MULTI-TERMINAL COLLISION FIX (FK constraint bug):
// Use ensureMemorySessionIdRegistered() instead of updateMemorySessionId() because:
// 1. It's idempotent - safe to call multiple times
// 2. It verifies the update happened (SELECT before UPDATE)
// 3. Consistent with ResponseProcessor's usage pattern
// This ensures FK constraint compliance BEFORE any observations are stored.
if (message.session_id && message.session_id !== session.memorySessionId) {
const previousId = session.memorySessionId;
session.memorySessionId = message.session_id;
// Persist to database for cross-restart recovery
this.dbManager.getSessionStore().updateMemorySessionId(
// Persist to database IMMEDIATELY for FK constraint compliance
// This must happen BEFORE any observations referencing this ID are stored
this.dbManager.getSessionStore().ensureMemorySessionIdRegistered(
session.sessionDbId,
message.session_id
);
// Verify the update by reading back from DB
const verification = this.dbManager.getSessionStore().getSessionById(session.sessionDbId);
const dbVerified = verification?.memory_session_id === message.session_id;
logger.info('SESSION', `MEMORY_ID_CAPTURED | sessionDbId=${session.sessionDbId} | memorySessionId=${message.session_id} | dbVerified=${dbVerified}`, {
const logMessage = previousId
? `MEMORY_ID_CHANGED | sessionDbId=${session.sessionDbId} | from=${previousId} | to=${message.session_id} | dbVerified=${dbVerified}`
: `MEMORY_ID_CAPTURED | sessionDbId=${session.sessionDbId} | memorySessionId=${message.session_id} | dbVerified=${dbVerified}`;
logger.info('SESSION', logMessage, {
sessionId: session.sessionDbId,
memorySessionId: message.session_id
memorySessionId: message.session_id,
previousId
});
if (!dbVerified) {
logger.error('SESSION', `MEMORY_ID_MISMATCH | sessionDbId=${session.sessionDbId} | expected=${message.session_id} | got=${verification?.memory_session_id}`, {
@@ -141,7 +179,7 @@ export class SDKAgent {
});
}
// Debug-level alignment log for detailed tracing
logger.debug('SDK', `[ALIGNMENT] Captured | contentSessionId=${session.contentSessionId} → memorySessionId=${message.session_id} | Future prompts will resume with this ID`);
logger.debug('SDK', `[ALIGNMENT] ${previousId ? 'Updated' : 'Captured'} | contentSessionId=${session.contentSessionId} → memorySessionId=${message.session_id} | Future prompts will resume with this ID`);
}
// Handle assistant messages
@@ -151,6 +189,14 @@ export class SDKAgent {
? content.filter((c: any) => c.type === 'text').map((c: any) => c.text).join('\n')
: typeof content === 'string' ? content : '';
// Check for context overflow - prevents infinite retry loops
if (textContent.includes('prompt is too long') ||
textContent.includes('context window')) {
logger.error('SDK', 'Context overflow detected - terminating session');
session.abortController.abort();
return;
}
const responseSize = textContent.length;
// Capture token state BEFORE updating (for delta calculation)
@@ -195,6 +241,17 @@ export class SDKAgent {
}, truncatedResponse);
}
// Detect fatal context overflow and terminate gracefully (issue #870)
if (typeof textContent === 'string' && textContent.includes('Prompt is too long')) {
throw new Error('Claude session context overflow: prompt is too long');
}
// Detect invalid API key — SDK returns this as response text, not an error.
// Throw so it surfaces in health endpoint and prevents silent failures.
if (typeof textContent === 'string' && textContent.includes('Invalid API key')) {
throw new Error('Invalid API key: check your API key configuration in ~/.claude-mem/settings.json or ~/.claude-mem/.env');
}
// Parse and process response using shared ResponseProcessor
await processAgentResponse(
textContent,
@@ -297,6 +354,10 @@ export class SDKAgent {
// Consume pending messages from SessionManager (event-driven, no polling)
for await (const message of this.sessionManager.getMessageIterator(session.sessionDbId)) {
// CLAIM-CONFIRM: Track message ID for confirmProcessed() after successful storage
// The message is now in 'processing' status in DB until ResponseProcessor calls confirmProcessed()
session.processingMessageIds.push(message._persistentId);
// Capture cwd from each message for worktree support
if (message.cwd) {
cwdTracker.lastCwd = message.cwd;
+53 -6
View File
@@ -106,6 +106,15 @@ export class SessionManager {
memory_session_id: dbSession.memory_session_id
});
// Log warning if we're discarding a stale memory_session_id (Issue #817)
if (dbSession.memory_session_id) {
logger.warn('SESSION', `Discarding stale memory_session_id from previous worker instance (Issue #817)`, {
sessionDbId,
staleMemorySessionId: dbSession.memory_session_id,
reason: 'SDK context lost on worker restart - will capture new ID'
});
}
// Use currentUserPrompt if provided, otherwise fall back to database (first prompt)
const userPrompt = currentUserPrompt || dbSession.user_prompt;
@@ -124,11 +133,15 @@ export class SessionManager {
}
// Create active session
// Load memorySessionId from database if previously captured (enables resume across restarts)
// CRITICAL: Do NOT load memorySessionId from database here (Issue #817)
// When creating a new in-memory session, any database memory_session_id is STALE
// because the SDK context was lost when the worker restarted. The SDK agent will
// capture a new memorySessionId on the first response and persist it.
// Loading stale memory_session_id causes "No conversation found" crashes on resume.
session = {
sessionDbId,
contentSessionId: dbSession.content_session_id,
memorySessionId: dbSession.memory_session_id || null,
memorySessionId: null, // Always start fresh - SDK will capture new ID
project: dbSession.project,
userPrompt,
pendingMessages: [],
@@ -140,13 +153,16 @@ export class SessionManager {
cumulativeOutputTokens: 0,
earliestPendingTimestamp: null,
conversationHistory: [], // Initialize empty - will be populated by agents
currentProvider: null // Will be set when generator starts
currentProvider: null, // Will be set when generator starts
consecutiveRestarts: 0, // Track consecutive restart attempts to prevent infinite loops
processingMessageIds: [] // CLAIM-CONFIRM: Track message IDs for confirmProcessed()
};
logger.debug('SESSION', 'Creating new session object', {
logger.debug('SESSION', 'Creating new session object (memorySessionId cleared to prevent stale resume)', {
sessionDbId,
contentSessionId: dbSession.content_session_id,
memorySessionId: dbSession.memory_session_id || '(none - fresh session)',
dbMemorySessionId: dbSession.memory_session_id || '(none in DB)',
memorySessionId: '(cleared - will capture fresh from SDK)',
lastPromptNumber: promptNumber || this.dbManager.getSessionStore().getPromptNumberFromUserPrompts(dbSession.content_session_id)
});
@@ -303,6 +319,28 @@ export class SessionManager {
}
}
/**
* Remove session from in-memory maps and notify without awaiting generator.
* Used when SDK resume fails and we give up (no fallback): avoids deadlock
* from deleteSession() awaiting the same generator promise we're inside.
*/
removeSessionImmediate(sessionDbId: number): void {
const session = this.sessions.get(sessionDbId);
if (!session) return;
this.sessions.delete(sessionDbId);
this.sessionQueues.delete(sessionDbId);
logger.info('SESSION', 'Session removed (orphaned after SDK termination)', {
sessionId: sessionDbId,
project: session.project
});
if (this.onSessionDeletedCallback) {
this.onSessionDeletedCallback();
}
}
/**
* Shutdown all active sessions
*/
@@ -378,7 +416,16 @@ export class SessionManager {
const processor = new SessionQueueProcessor(this.getPendingStore(), emitter);
// Use the robust iterator - messages are deleted on claim (no tracking needed)
for await (const message of processor.createIterator(sessionDbId, session.abortController.signal)) {
// CRITICAL: Pass onIdleTimeout callback that triggers abort to kill the subprocess
// Without this, the iterator returns but the Claude subprocess stays alive as a zombie
for await (const message of processor.createIterator({
sessionDbId,
signal: session.abortController.signal,
onIdleTimeout: () => {
logger.info('SESSION', 'Triggering abort due to idle timeout to kill subprocess', { sessionDbId });
session.abortController.abort();
}
})) {
// Track earliest timestamp for accurate observation timestamps
// This ensures backlog messages get their original timestamps, not current time
if (session.earliestPendingTimestamp === null) {
-7
View File
@@ -1,7 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
*No recent activity*
</claude-mem-context>
+44 -14
View File
@@ -16,6 +16,8 @@ import { parseObservations, parseSummary, type ParsedObservation, type ParsedSum
import { updateCursorContextForProject } from '../../integrations/CursorHooksInstaller.js';
import { updateFolderClaudeMdFiles } from '../../../utils/claude-md-utils.js';
import { getWorkerPort } from '../../../shared/worker-utils.js';
import { SettingsDefaultsManager } from '../../../shared/SettingsDefaultsManager.js';
import { USER_SETTINGS_PATH } from '../../../shared/paths.js';
import type { ActiveSession } from '../../worker-types.js';
import type { DatabaseManager } from '../DatabaseManager.js';
import type { SessionManager } from '../SessionManager.js';
@@ -74,6 +76,14 @@ export async function processAgentResponse(
throw new Error('Cannot store observations: memorySessionId not yet captured');
}
// SAFETY NET (Issue #846 / Multi-terminal FK fix):
// The PRIMARY fix is in SDKAgent.ts where ensureMemorySessionIdRegistered() is called
// immediately when the SDK returns a memory_session_id. This call is a defensive safety net
// in case the DB was somehow not updated (race condition, crash, etc.).
// In multi-terminal scenarios, createSDKSession() now resets memory_session_id to NULL
// for each new generator, ensuring clean isolation.
sessionStore.ensureMemorySessionIdRegistered(session.sessionDbId, session.memorySessionId);
// Log pre-storage with session ID chain for verification
logger.info('DB', `STORING | sessionDbId=${session.sessionDbId} | memorySessionId=${session.memorySessionId} | obsCount=${observations.length} | hasSummary=${!!summaryForStore}`, {
sessionId: session.sessionDbId,
@@ -98,6 +108,18 @@ export async function processAgentResponse(
memorySessionId: session.memorySessionId
});
// CLAIM-CONFIRM: Now that storage succeeded, confirm all processing messages (delete from queue)
// This is the critical step that prevents message loss on generator crash
const pendingStore = sessionManager.getPendingMessageStore();
for (const messageId of session.processingMessageIds) {
pendingStore.confirmProcessed(messageId);
}
if (session.processingMessageIds.length > 0) {
logger.debug('QUEUE', `CONFIRMED_BATCH | sessionDbId=${session.sessionDbId} | count=${session.processingMessageIds.length} | ids=[${session.processingMessageIds.join(',')}]`);
}
// Clear the tracking array after confirmation
session.processingMessageIds = [];
// AFTER transaction commits - async operations (can fail safely without data loss)
await syncAndBroadcastObservations(
observations,
@@ -215,21 +237,29 @@ async function syncAndBroadcastObservations(
// Update folder CLAUDE.md files for touched folders (fire-and-forget)
// This runs per-observation batch to ensure folders are updated as work happens
const allFilePaths: string[] = [];
for (const obs of observations) {
allFilePaths.push(...(obs.files_modified || []));
allFilePaths.push(...(obs.files_read || []));
}
// Only runs if CLAUDE_MEM_FOLDER_CLAUDEMD_ENABLED is true (default: false)
const settings = SettingsDefaultsManager.loadFromFile(USER_SETTINGS_PATH);
// Handle both string 'true' and boolean true from JSON settings
const settingValue = settings.CLAUDE_MEM_FOLDER_CLAUDEMD_ENABLED;
const folderClaudeMdEnabled = settingValue === 'true' || settingValue === true;
if (allFilePaths.length > 0) {
updateFolderClaudeMdFiles(
allFilePaths,
session.project,
getWorkerPort(),
projectRoot
).catch(error => {
logger.warn('FOLDER_INDEX', 'CLAUDE.md update failed (non-critical)', { project: session.project }, error as Error);
});
if (folderClaudeMdEnabled) {
const allFilePaths: string[] = [];
for (const obs of observations) {
allFilePaths.push(...(obs.files_modified || []));
allFilePaths.push(...(obs.files_read || []));
}
if (allFilePaths.length > 0) {
updateFolderClaudeMdFiles(
allFilePaths,
session.project,
getWorkerPort(),
projectRoot
).catch(error => {
logger.warn('FOLDER_INDEX', 'CLAUDE.md update failed (non-critical)', { project: session.project }, error as Error);
});
}
}
}
-7
View File
@@ -1,7 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
*No recent activity*
</claude-mem-context>
-7
View File
@@ -1,7 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
*No recent activity*
</claude-mem-context>
+15 -2
View File
@@ -24,8 +24,21 @@ export function createMiddleware(
// JSON parsing with 50mb limit
middlewares.push(express.json({ limit: '50mb' }));
// CORS
middlewares.push(cors());
// CORS - restrict to localhost origins only
middlewares.push(cors({
origin: (origin, callback) => {
// Allow: requests without Origin header (hooks, curl, CLI tools)
// Allow: localhost and 127.0.0.1 origins
if (!origin ||
origin.startsWith('http://localhost:') ||
origin.startsWith('http://127.0.0.1:')) {
callback(null, true);
} else {
callback(new Error('CORS not allowed'));
}
},
credentials: false
}));
// HTTP request/response logging
middlewares.push((req: Request, res: Response, next: NextFunction) => {
-97
View File
@@ -1,97 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Dec 5, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #20734 | 9:08 PM | 🔵 | SearchRoutes Context Injection Endpoint with Dynamic Import | ~614 |
| #20548 | 8:21 PM | 🔵 | Context generator imported from services directory in worker | ~334 |
| #20547 | " | 🔵 | Context injection route implementation in SearchRoutes.ts | ~289 |
### Dec 7, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #21742 | 10:16 PM | 🔵 | SessionRoutes Analysis: Identified 10+ Scattered Broadcast Calls | ~540 |
### Dec 8, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #22301 | 9:44 PM | 🔵 | Privacy Validation in Observation Processing | ~399 |
| #22296 | 9:43 PM | 🔵 | SessionRoutes HTTP Endpoints and SDK Agent Lifecycle | ~442 |
| #22222 | 8:29 PM | 🔵 | Found waiting logic in SessionRoutes but it may not be working correctly | ~359 |
| #22005 | 5:40 PM | 🔵 | handleObservationsByClaudeId Current Implementation | ~443 |
| #22004 | " | 🔵 | Legacy Observation Handling Pattern Identified | ~337 |
| #22003 | " | 🔵 | SessionRoutes Architecture Confirmed | ~354 |
| #21969 | 5:22 PM | 🟣 | Worker Routes Pass tool_use_id to SessionManager Queue | ~290 |
| #21968 | " | ✅ | Worker Endpoint Extracts toolUseId from Observation Request | ~243 |
| #21962 | 5:21 PM | 🟣 | Implemented handleGetObservationsForToolUse Endpoint Handler | ~325 |
| #21961 | " | 🟣 | Added GET Endpoint for Fetching Observations by Tool Use ID | ~272 |
| #21951 | 5:18 PM | 🔵 | Worker SessionRoutes Architecture and Endpoints Reviewed | ~418 |
| #21948 | 5:09 PM | 🟣 | Implemented PreToolUse Endpoint Handler | ~334 |
| #21947 | 5:07 PM | 🟣 | Added PreToolUse Route Registration | ~287 |
### Dec 9, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #23143 | 6:42 PM | ✅ | Updated Skip Tools Logic to Use USER_SETTINGS_PATH Constant | ~150 |
| #23142 | " | ✅ | Fixed Settings Path Import in SessionRoutes | ~148 |
| #23140 | 6:41 PM | 🟣 | Implemented Skip Tools Filtering in Observations Endpoint | ~386 |
| #23138 | " | ✅ | Added SettingsDefaultsManager and Paths Imports to SessionRoutes | ~222 |
| #23136 | " | 🔵 | SessionRoutes handleObservationsByClaudeId Handler Structure | ~329 |
| #23007 | 4:02 PM | 🔵 | Settings Write Implementation Using Nested Schema | ~398 |
| #22859 | 2:28 PM | 🔴 | Fixed Python Version Validation to Support 3.10+ | ~322 |
| #22854 | 2:27 PM | 🔵 | Located Python Version Validation Regex in SettingsRoutes | ~316 |
### Dec 10, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #23593 | 5:52 PM | 🔵 | SearchRoutes Handler Pattern | ~268 |
| #23588 | 5:51 PM | 🔵 | Search Routes HTTP API Integration | ~281 |
### Dec 14, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #26253 | 8:31 PM | 🔵 | SearchRoutes Confirms Context Endpoints Use generateContext, Search Uses SearchManager | ~397 |
| #25689 | 4:23 PM | 🔵 | SessionRoutes queueSummarize receives messages but doesn't persist them to database | ~496 |
### Dec 15, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #27043 | 6:04 PM | 🔵 | Subagent confirms no version switcher UI exists, only orphaned backend infrastructure | ~539 |
| #27041 | 6:03 PM | 🔵 | Branch switching code isolated to two backend files, no frontend UI components | ~473 |
| #27037 | 6:02 PM | 🔵 | Branch switching functionality exists in SettingsRoutes with UI switcher removal intent | ~463 |
### Dec 16, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #27414 | 3:25 PM | 🔵 | Batch Observations Endpoint Already Implemented | ~330 |
### Dec 19, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #30077 | 8:05 PM | 🔵 | SessionRoutes HTTP API Manages SDK Agent Lifecycle and Message Queue | ~516 |
### Dec 26, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #32949 | 10:55 PM | 🔵 | Complete settings persistence flow for Xiaomi MIMO v2 Flash model | ~320 |
| #32939 | 10:53 PM | 🔵 | Settings API routes handle model configuration persistence | ~288 |
### Dec 30, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #34491 | 2:28 PM | 🔵 | SessionRoutes Implements Multi-Provider Agent Management | ~635 |
</claude-mem-context>
@@ -0,0 +1,93 @@
/**
* Memory Routes
*
* Handles manual memory/observation saving.
* POST /api/memory/save - Save a manual memory observation
*/
import express, { Request, Response } from 'express';
import { BaseRouteHandler } from '../BaseRouteHandler.js';
import { logger } from '../../../../utils/logger.js';
import type { DatabaseManager } from '../../DatabaseManager.js';
/**
 * HTTP routes for manually saving memories.
 *
 * Registers POST /api/memory/save, which persists a user-provided note as a
 * 'discovery' observation under a per-project "manual" session, then mirrors
 * it into ChromaDB in the background.
 */
export class MemoryRoutes extends BaseRouteHandler {
  constructor(
    private dbManager: DatabaseManager,
    private defaultProject: string
  ) {
    super();
  }

  /** Attach this handler's routes to the Express application. */
  setupRoutes(app: express.Application): void {
    app.post('/api/memory/save', this.handleSaveMemory.bind(this));
  }

  /**
   * POST /api/memory/save - Save a manual memory/observation
   * Body: { text: string, title?: string, project?: string }
   */
  private handleSaveMemory = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const { text, title, project } = req.body;

    // Reject missing, non-string, or whitespace-only text up front.
    if (typeof text !== 'string' || text.trim() === '') {
      this.badRequest(res, 'text is required and must be non-empty');
      return;
    }

    const resolvedProject = project || this.defaultProject;
    const store = this.dbManager.getSessionStore();
    const sync = this.dbManager.getChromaSync();

    // All manual memories for a project hang off one synthetic session.
    const manualSessionId = store.getOrCreateManualSession(resolvedProject);

    // Derive a title when none was supplied: first 60 chars, ellipsized.
    const autoTitle = text.substring(0, 60).trim() + (text.length > 60 ? '...' : '');

    const record = {
      type: 'discovery', // Use existing valid type
      title: title || autoTitle,
      subtitle: 'Manual memory',
      facts: [] as string[],
      narrative: text,
      concepts: [] as string[],
      files_read: [] as string[],
      files_modified: [] as string[]
    };

    // Persist to SQLite first; the Chroma sync below is best-effort.
    const stored = store.storeObservation(
      manualSessionId,
      resolvedProject,
      record,
      0, // promptNumber
      0  // discoveryTokens
    );

    logger.info('HTTP', 'Manual observation saved', {
      id: stored.id,
      project: resolvedProject,
      title: record.title
    });

    // Fire-and-forget vector sync; failures are logged, never surfaced.
    sync.syncObservation(
      stored.id,
      manualSessionId,
      resolvedProject,
      record,
      0,
      stored.createdAtEpoch,
      0
    ).catch(err => {
      logger.error('CHROMA', 'ChromaDB sync failed', { id: stored.id }, err as Error);
    });

    res.json({
      success: true,
      id: stored.id,
      title: record.title,
      project: resolvedProject,
      message: `Memory saved as observation #${stored.id}`
    });
  });
}
@@ -24,6 +24,8 @@ import { USER_SETTINGS_PATH } from '../../../../shared/paths.js';
export class SessionRoutes extends BaseRouteHandler {
private completionHandler: SessionCompletionHandler;
private spawnInProgress = new Map<number, boolean>();
private crashRecoveryScheduled = new Set<number>();
constructor(
private sessionManager: SessionManager,
@@ -91,10 +93,17 @@ export class SessionRoutes extends BaseRouteHandler {
const session = this.sessionManager.getSession(sessionDbId);
if (!session) return;
// GUARD: Prevent duplicate spawns
if (this.spawnInProgress.get(sessionDbId)) {
logger.debug('SESSION', 'Spawn already in progress, skipping', { sessionDbId, source });
return;
}
const selectedProvider = this.getSelectedProvider();
// Start generator if not running
if (!session.generatorPromise) {
this.spawnInProgress.set(sessionDbId, true);
this.startGeneratorWithProvider(session, selectedProvider, source);
return;
}
@@ -122,12 +131,26 @@ export class SessionRoutes extends BaseRouteHandler {
): void {
if (!session) return;
// Reset AbortController if it was previously aborted
// This fixes the bug where a session gets stuck in an infinite "Generator aborted" loop
// after its AbortController was aborted (e.g., from a previous generator exit)
if (session.abortController.signal.aborted) {
logger.debug('SESSION', 'Resetting aborted AbortController before starting generator', {
sessionId: session.sessionDbId
});
session.abortController = new AbortController();
}
const agent = provider === 'openrouter' ? this.openRouterAgent : (provider === 'gemini' ? this.geminiAgent : this.sdkAgent);
const agentName = provider === 'openrouter' ? 'OpenRouter' : (provider === 'gemini' ? 'Gemini' : 'Claude SDK');
// Use database count for accurate telemetry (in-memory array is always empty due to FK constraint fix)
const pendingStore = this.sessionManager.getPendingMessageStore();
const actualQueueDepth = pendingStore.getPendingCount(session.sessionDbId);
logger.info('SESSION', `Generator auto-starting (${source}) using ${agentName}`, {
sessionId: session.sessionDbId,
queueDepth: session.pendingMessages.length,
queueDepth: actualQueueDepth,
historyLength: session.conversationHistory.length
});
@@ -163,6 +186,7 @@ export class SessionRoutes extends BaseRouteHandler {
})
.finally(() => {
const sessionDbId = session.sessionDbId;
this.spawnInProgress.delete(sessionDbId);
const wasAborted = session.abortController.signal.aborted;
if (wasAborted) {
@@ -175,16 +199,43 @@ export class SessionRoutes extends BaseRouteHandler {
session.currentProvider = null;
this.workerService.broadcastProcessingStatus();
// Crash recovery: If not aborted and still has work, restart
// Crash recovery: If not aborted and still has work, restart (with limit)
if (!wasAborted) {
try {
const pendingStore = this.sessionManager.getPendingMessageStore();
const pendingCount = pendingStore.getPendingCount(sessionDbId);
// CRITICAL: Limit consecutive restarts to prevent infinite loops
// This prevents runaway API costs when there's a persistent error (e.g., memorySessionId not captured)
const MAX_CONSECUTIVE_RESTARTS = 3;
if (pendingCount > 0) {
// GUARD: Prevent duplicate crash recovery spawns
if (this.crashRecoveryScheduled.has(sessionDbId)) {
logger.debug('SESSION', 'Crash recovery already scheduled', { sessionDbId });
return;
}
session.consecutiveRestarts = (session.consecutiveRestarts || 0) + 1;
if (session.consecutiveRestarts > MAX_CONSECUTIVE_RESTARTS) {
logger.error('SESSION', `CRITICAL: Generator restart limit exceeded - stopping to prevent runaway costs`, {
sessionId: sessionDbId,
pendingCount,
consecutiveRestarts: session.consecutiveRestarts,
maxRestarts: MAX_CONSECUTIVE_RESTARTS,
action: 'Generator will NOT restart. Check logs for root cause. Messages remain in pending state.'
});
// Don't restart - abort to prevent further API calls
session.abortController.abort();
return;
}
logger.info('SESSION', `Restarting generator after crash/exit with pending work`, {
sessionId: sessionDbId,
pendingCount
pendingCount,
consecutiveRestarts: session.consecutiveRestarts,
maxRestarts: MAX_CONSECUTIVE_RESTARTS
});
// Abort OLD controller before replacing to prevent child process leaks
@@ -192,16 +243,24 @@ export class SessionRoutes extends BaseRouteHandler {
session.abortController = new AbortController();
oldController.abort();
// Small delay before restart
this.crashRecoveryScheduled.add(sessionDbId);
// Exponential backoff: 1s, 2s, 4s for subsequent restarts
const backoffMs = Math.min(1000 * Math.pow(2, session.consecutiveRestarts - 1), 8000);
// Delay before restart with exponential backoff
setTimeout(() => {
this.crashRecoveryScheduled.delete(sessionDbId);
const stillExists = this.sessionManager.getSession(sessionDbId);
if (stillExists && !stillExists.generatorPromise) {
this.startGeneratorWithProvider(stillExists, this.getSelectedProvider(), 'crash-recovery');
}
}, 1000);
}, backoffMs);
} else {
// No pending work - abort to kill the child process
session.abortController.abort();
// Reset restart counter on successful completion
session.consecutiveRestarts = 0;
logger.debug('SESSION', 'Aborted controller after natural completion', {
sessionId: sessionDbId
});
@@ -231,6 +290,7 @@ export class SessionRoutes extends BaseRouteHandler {
app.post('/api/sessions/init', this.handleSessionInitByClaudeId.bind(this));
app.post('/api/sessions/observations', this.handleObservationsByClaudeId.bind(this));
app.post('/api/sessions/summarize', this.handleSummarizeByClaudeId.bind(this));
app.post('/api/sessions/complete', this.handleCompleteByClaudeId.bind(this));
}
/**
@@ -291,8 +351,8 @@ export class SessionRoutes extends BaseRouteHandler {
});
}
// Start agent in background using the helper method
this.startGeneratorWithProvider(session, this.getSelectedProvider(), 'init');
// Idempotent: ensure generator is running (matches handleObservations / handleSummarize)
this.ensureGeneratorRunning(sessionDbId, 'init');
// Broadcast session started event
this.eventBroadcaster.broadcastSessionStarted(sessionDbId, session.project);
@@ -362,11 +422,15 @@ export class SessionRoutes extends BaseRouteHandler {
return;
}
// Use database count for accurate queue length (in-memory array is always empty due to FK constraint fix)
const pendingStore = this.sessionManager.getPendingMessageStore();
const queueLength = pendingStore.getPendingCount(sessionDbId);
res.json({
status: 'active',
sessionDbId,
project: session.project,
queueLength: session.pendingMessages.length,
queueLength,
uptime: Date.now() - session.startTime
});
});
@@ -531,6 +595,54 @@ export class SessionRoutes extends BaseRouteHandler {
res.json({ status: 'queued' });
});
/**
 * Complete session by contentSessionId (session-complete hook uses this)
 * POST /api/sessions/complete
 * Body: { contentSessionId }
 *
 * Evicts the session from the active-sessions map so the orphan reaper can
 * reclaim any remaining subprocesses.
 *
 * Fixes Issue #842: Sessions stay in map forever, reaper thinks all active.
 */
private handleCompleteByClaudeId = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
  const { contentSessionId } = req.body;

  logger.info('HTTP', '→ POST /api/sessions/complete', { contentSessionId });

  if (!contentSessionId) {
    this.badRequest(res, 'Missing contentSessionId');
    return;
  }

  // createSDKSession is idempotent, so this is purely an ID lookup; the
  // empty strings mean we are not trying to seed a brand-new session here.
  const sessionStore = this.dbManager.getSessionStore();
  const sessionDbId = sessionStore.createSDKSession(contentSessionId, '', '');

  const live = this.sessionManager.getSession(sessionDbId);
  if (!live) {
    // Not in memory: either already completed or never initialized.
    logger.debug('SESSION', 'session-complete: Session not in active map', {
      contentSessionId,
      sessionDbId
    });
    res.json({ status: 'skipped', reason: 'not_active' });
    return;
  }

  // Remove from the active sessions map (runs completion side effects).
  await this.completionHandler.completeByDbId(sessionDbId);

  logger.info('SESSION', 'Session completed via API', {
    contentSessionId,
    sessionDbId
  });

  res.json({ status: 'completed', sessionDbId });
});
/**
* Initialize session by contentSessionId (new-hook uses this)
* POST /api/sessions/init
@@ -121,6 +121,7 @@ export class SettingsRoutes extends BaseRouteHandler {
// Feature Toggles
'CLAUDE_MEM_CONTEXT_SHOW_LAST_SUMMARY',
'CLAUDE_MEM_CONTEXT_SHOW_LAST_MESSAGE',
'CLAUDE_MEM_FOLDER_CLAUDEMD_ENABLED',
];
for (const key of settingKeys) {
@@ -241,9 +242,9 @@ export class SettingsRoutes extends BaseRouteHandler {
// Validate CLAUDE_MEM_GEMINI_MODEL
if (settings.CLAUDE_MEM_GEMINI_MODEL) {
const validGeminiModels = ['gemini-2.5-flash-lite', 'gemini-2.5-flash', 'gemini-3-flash'];
const validGeminiModels = ['gemini-2.5-flash-lite', 'gemini-2.5-flash', 'gemini-3-flash-preview'];
if (!validGeminiModels.includes(settings.CLAUDE_MEM_GEMINI_MODEL)) {
return { valid: false, error: 'CLAUDE_MEM_GEMINI_MODEL must be one of: gemini-2.5-flash-lite, gemini-2.5-flash, gemini-3-flash' };
return { valid: false, error: 'CLAUDE_MEM_GEMINI_MODEL must be one of: gemini-2.5-flash-lite, gemini-2.5-flash, gemini-3-flash-preview' };
}
}
-7
View File
@@ -1,7 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
*No recent activity*
</claude-mem-context>
@@ -1,21 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Jan 3, 2026
**DateFilter.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36670 | 11:37 PM | ✅ | Resolved merge conflicts by accepting branch changes for 39 files | ~435 |
| #36523 | 9:34 PM | 🔴 | Fixed TypeScript Type Import Issues in Worker Services | ~386 |
| #36519 | " | 🔴 | Fixed Type Import Issues Preventing Worker Tests | ~308 |
| #36516 | 9:33 PM | 🔴 | Fixed TypeScript Type Import Issues in Worker Search Modules | ~377 |
| #36390 | 8:50 PM | 🔄 | Comprehensive Monolith Refactor with Modular Architecture | ~724 |
**ProjectFilter.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36529 | 9:34 PM | 🔵 | Search Module Architecture Discovery | ~302 |
</claude-mem-context>
@@ -1,7 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
*No recent activity*
</claude-mem-context>
@@ -167,6 +167,13 @@ export class ChromaSearchStrategy extends BaseSearchStrategy implements SearchSt
/**
* Filter results by recency (90-day window)
*
* IMPORTANT: ChromaSync.queryChroma() returns deduplicated `ids` (unique sqlite_ids)
* but the `metadatas` array may contain multiple entries per sqlite_id (e.g., one
* observation can have narrative + multiple facts as separate Chroma documents).
*
* This method iterates over the deduplicated `ids` and finds the first matching
* metadata for each ID to avoid array misalignment issues.
*/
private filterByRecency(chromaResults: {
ids: number[];
@@ -174,10 +181,19 @@ export class ChromaSearchStrategy extends BaseSearchStrategy implements SearchSt
}): Array<{ id: number; meta: ChromaMetadata }> {
const cutoff = Date.now() - SEARCH_CONSTANTS.RECENCY_WINDOW_MS;
return chromaResults.metadatas
.map((meta, idx) => ({
id: chromaResults.ids[idx],
meta
// Build a map from sqlite_id to first metadata for efficient lookup
const metadataByIdMap = new Map<number, ChromaMetadata>();
for (const meta of chromaResults.metadatas) {
if (meta?.sqlite_id !== undefined && !metadataByIdMap.has(meta.sqlite_id)) {
metadataByIdMap.set(meta.sqlite_id, meta);
}
}
// Iterate over deduplicated ids and get corresponding metadata
return chromaResults.ids
.map(id => ({
id,
meta: metadataByIdMap.get(id) as ChromaMetadata
}))
.filter(item => item.meta && item.meta.created_at_epoch > cutoff);
}
-103
View File
@@ -1,103 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Dec 7, 2025
**SessionCompletionHandler.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #21829 | 11:05 PM | 🔄 | Massive refactor adds 8,671 lines and removes 5,585 lines across 60 files | ~619 |
| #21825 | 11:00 PM | 🔵 | SessionCompletionHandler methods called 3 times in SessionRoutes | ~342 |
| #21824 | 10:59 PM | 🔵 | SessionEventBroadcaster methods called 7 times across SessionRoutes and SessionCompletionHandler | ~398 |
| #21822 | " | 🔵 | SessionEventBroadcaster instantiated in WorkerService and injected into routes and handlers | ~372 |
| #21818 | 10:58 PM | 🔵 | SessionCompletionHandler is instantiated in SessionRoutes | ~282 |
| #21817 | " | 🔵 | SessionCompletionHandler consolidates session completion logic | ~414 |
| #21807 | 10:49 PM | ⚖️ | KISS Audit Identified 587 Lines of Ceremonial Complexity | ~699 |
| #21794 | 10:46 PM | 🔵 | SessionCompletionHandler Consolidates Duplicate Completion Logic | ~341 |
| #21764 | 10:23 PM | ✅ | Phase 4 Build and Deployment Successful | ~376 |
| #21759 | 10:21 PM | 🟣 | SessionCompletionHandler Service Created | ~426 |
### Dec 11, 2025
**SessionCompletionHandler.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #23962 | 1:59 PM | 🔵 | Services Layer Implements Full Backend Architecture | ~490 |
### Dec 14, 2025
**SessionCompletionHandler.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #26088 | 7:32 PM | 🔵 | API Endpoint Architecture Discovery | ~416 |
### Dec 20, 2025
**SessionCompletionHandler.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #30725 | 5:12 PM | 🔵 | Revealed extensive work-in-progress changes across hook and worker systems | ~479 |
| #30569 | 4:56 PM | 🔄 | SessionCompletionHandler Broadcasting Implementation | ~264 |
| #30568 | " | 🔄 | SessionCompletionHandler Event Broadcasting Refactor | ~282 |
| #30566 | " | 🔵 | Session Completion Handler Consolidation | ~323 |
### Dec 24, 2025
**SessionCompletionHandler.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #32350 | 8:42 PM | 🔵 | Detailed Cleanup Hook Evolution Documentation Retrieved | ~597 |
| #32316 | 8:41 PM | 🔄 | Removed markSessionComplete method from DatabaseManager | ~251 |
| #32194 | 7:42 PM | 🔵 | Session completion handler implementation analysis | ~329 |
| #32193 | " | 🔵 | Session completion endpoint usage across codebase | ~278 |
| #32182 | 7:15 PM | 🔄 | Removed markSessionComplete database call from session completion flow | ~316 |
| #32179 | 7:11 PM | 🔄 | SessionCompletionHandler switched to direct SQL query | ~273 |
| #32153 | 6:40 PM | 🔵 | Session Identifier Architecture Across Codebase | ~529 |
### Dec 25, 2025
**SessionCompletionHandler.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #32597 | 8:40 PM | 🔵 | Identified session completion mechanism and potential method discrepancy | ~470 |
| #32456 | 5:41 PM | ✅ | Completed merge of main branch into feature/titans-phase1-3 | ~354 |
| #32198 | 7:41 PM | 🔄 | Removed redundant SessionEnd cleanup hook | ~317 |
### Dec 27, 2025
**SessionCompletionHandler.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #33099 | 7:10 PM | 🔵 | SessionCompletionHandler Manual Session Termination Flow | ~348 |
### Dec 28, 2025
**SessionCompletionHandler.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #33328 | 3:10 PM | 🟣 | Merged centralized logger and session continuity diagnostics to main | ~397 |
| #33280 | 3:07 PM | 🔄 | Logger coverage refactor for background services | ~428 |
### Dec 30, 2025
**SessionCompletionHandler.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #34388 | 1:40 PM | 🔵 | SessionCompletionHandler Relies on SessionManager Abort Without Process Cleanup | ~309 |
### Dec 31, 2025
**SessionCompletionHandler.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #34707 | 4:45 PM | 🔵 | SessionCompletionHandler Aborts SDK Agent During Cleanup | ~291 |
### Jan 2, 2026
**SessionCompletionHandler.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #35951 | 4:42 PM | 🔵 | Multi-Layer Service Architecture Discovery | ~395 |
</claude-mem-context>
-77
View File
@@ -1,77 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Dec 7, 2025
**PrivacyCheckValidator.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #21829 | 11:05 PM | 🔄 | Massive refactor adds 8,671 lines and removes 5,585 lines across 60 files | ~619 |
| #21820 | 10:59 PM | 🔵 | PrivacyCheckValidator used twice in SessionRoutes for observation and summarize endpoints | ~303 |
| #21814 | 10:58 PM | 🔵 | PrivacyCheckValidator centralizes user prompt privacy validation | ~359 |
| #21807 | 10:49 PM | ⚖️ | KISS Audit Identified 587 Lines of Ceremonial Complexity | ~699 |
| #21797 | 10:46 PM | 🔵 | PrivacyCheckValidator Implements Single Validation Method | ~349 |
| #21770 | 10:36 PM | 🟣 | Implemented PrivacyCheckValidator for Centralized Privacy Validation | ~318 |
### Dec 8, 2025
**PrivacyCheckValidator.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #22274 | 9:22 PM | 🔵 | Event-Driven Architecture for SDK Response Coordination Fully Mapped | ~1136 |
| #22270 | 9:12 PM | 🔵 | DRY violations identified in endless-mode-v7.1 branch | ~553 |
### Dec 9, 2025
**PrivacyCheckValidator.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #22808 | 2:01 PM | 🔵 | Logger Utility Pattern Identified | ~300 |
| #22750 | 1:27 PM | 🔵 | PrivacyCheckValidator Centralizes Privacy Logic | ~450 |
### Dec 11, 2025
**PrivacyCheckValidator.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #23962 | 1:59 PM | 🔵 | Services Layer Implements Full Backend Architecture | ~490 |
### Dec 20, 2025
**PrivacyCheckValidator.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #30609 | 5:01 PM | 🔄 | Phase 4: Eliminated Over-Engineering in Hook/Worker System | ~504 |
| #30598 | 5:00 PM | 🔄 | Removed PrivacyCheckValidator module | ~201 |
| #30549 | 4:53 PM | 🔵 | PrivacyCheckValidator for User Prompt Filtering | ~325 |
### Dec 24, 2025
**PrivacyCheckValidator.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #32153 | 6:40 PM | 🔵 | Session Identifier Architecture Across Codebase | ~529 |
### Dec 25, 2025
**PrivacyCheckValidator.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #32580 | 8:22 PM | 🔵 | Grep for resetStuckMessages and processing | ~242 |
### Dec 28, 2025
**PrivacyCheckValidator.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #33439 | 10:15 PM | 🔄 | Extended Session ID Renaming to Additional Codebase Components | ~352 |
### Jan 2, 2026
**PrivacyCheckValidator.ts**
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #35951 | 4:42 PM | 🔵 | Multi-Layer Service Architecture Discovery | ~395 |
</claude-mem-context>