Merge pull request #179 from thedotmack/refactor/mcp-to-worker

refactor: Major architectural restructuring - hooks to HTTP clients, modular worker service
This commit is contained in:
Alex Newman
2025-12-08 15:15:17 -05:00
committed by GitHub
59 changed files with 8680 additions and 5596 deletions
+961 -3
View File
File diff suppressed because it is too large.
+3 -2
View File
@@ -31,7 +31,7 @@
},
"scripts": {
"build": "node scripts/build-hooks.js",
"test": "node --test tests/",
"test": "vitest",
"test:parser": "npx tsx src/sdk/parser.test.ts",
"test:context": "echo '{\"session_id\":\"test-'$(date +%s)'\",\"cwd\":\"'$(pwd)'\",\"source\":\"startup\"}' | node plugin/scripts/context-hook.js 2>/dev/null",
"test:context:verbose": "echo '{\"session_id\":\"test-'$(date +%s)'\",\"cwd\":\"'$(pwd)'\",\"source\":\"startup\"}' | node plugin/scripts/context-hook.js",
@@ -67,6 +67,7 @@
"@types/react-dom": "^18.3.0",
"esbuild": "^0.25.12",
"tsx": "^4.20.6",
"typescript": "^5.3.0"
"typescript": "^5.3.0",
"vitest": "^4.0.15"
}
}
+1 -1
View File
@@ -2,7 +2,7 @@
"mcpServers": {
"claude-mem-search": {
"type": "stdio",
"command": "${CLAUDE_PLUGIN_ROOT}/scripts/search-server.cjs"
"command": "${CLAUDE_PLUGIN_ROOT}/scripts/mcp-server.cjs"
}
}
}
+2 -2
View File
@@ -8,12 +8,12 @@
{
"type": "command",
"command": "node \"${CLAUDE_PLUGIN_ROOT}/../scripts/smart-install.js\" && node ${CLAUDE_PLUGIN_ROOT}/scripts/context-hook.js",
"timeout": 300
"timeout": 5
},
{
"type": "command",
"command": "node ${CLAUDE_PLUGIN_ROOT}/scripts/user-message-hook.js",
"timeout": 10
"timeout": 5
}
]
}
+4 -410
View File
@@ -1,411 +1,5 @@
#!/usr/bin/env node
import{stdin as f}from"process";import w from"better-sqlite3";import{join as m,dirname as k,basename as W}from"path";import{homedir as I}from"os";import{existsSync as K,mkdirSync as x}from"fs";import{fileURLToPath as U}from"url";function M(){return typeof __dirname<"u"?__dirname:k(U(import.meta.url))}var q=M(),u=process.env.CLAUDE_MEM_DATA_DIR||m(I(),".claude-mem"),R=process.env.CLAUDE_CONFIG_DIR||m(I(),".claude"),J=m(u,"archives"),Q=m(u,"logs"),z=m(u,"trash"),Z=m(u,"backups"),ee=m(u,"settings.json"),O=m(u,"claude-mem.db"),se=m(u,"vector-db"),te=m(R,"settings.json"),re=m(R,"commands"),ne=m(R,"CLAUDE.md");function A(c){x(c,{recursive:!0})}var h=(n=>(n[n.DEBUG=0]="DEBUG",n[n.INFO=1]="INFO",n[n.WARN=2]="WARN",n[n.ERROR=3]="ERROR",n[n.SILENT=4]="SILENT",n))(h||{}),N=class{level;useColor;constructor(){let e=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=h[e]??1,this.useColor=process.stdout.isTTY??!1}correlationId(e,s){return`obs-${e}-${s}`}sessionId(e){return`session-${e}`}formatData(e){if(e==null)return"";if(typeof e=="string")return e;if(typeof e=="number"||typeof e=="boolean")return e.toString();if(typeof e=="object"){if(e instanceof Error)return this.level===0?`${e.message}
${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Object.keys(e);return s.length===0?"{}":s.length<=3?JSON.stringify(e):`{${s.length} keys: ${s.slice(0,3).join(", ")}...}`}return String(e)}formatTool(e,s){if(!s)return e;try{let t=typeof s=="string"?JSON.parse(s):s;if(e==="Bash"&&t.command){let r=t.command.length>50?t.command.substring(0,50)+"...":t.command;return`${e}(${r})`}if(e==="Read"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Edit"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Write"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}return e}catch{return e}}log(e,s,t,r,n){if(e<this.level)return;let o=new Date().toISOString().replace("T"," ").substring(0,23),i=h[e].padEnd(5),d=s.padEnd(6),_="";r?.correlationId?_=`[${r.correlationId}] `:r?.sessionId&&(_=`[session-${r.sessionId}] `);let T="";n!=null&&(this.level===0&&typeof n=="object"?T=`
`+JSON.stringify(n,null,2):T=" "+this.formatData(n));let E="";if(r){let{sessionId:l,sdkSessionId:S,correlationId:p,...a}=r;Object.keys(a).length>0&&(E=` {${Object.entries(a).map(([y,D])=>`${y}=${D}`).join(", ")}}`)}let b=`[${o}] [${i}] [${d}] ${_}${t}${E}${T}`;e===3?console.error(b):console.log(b)}debug(e,s,t,r){this.log(0,e,s,t,r)}info(e,s,t,r){this.log(1,e,s,t,r)}warn(e,s,t,r){this.log(2,e,s,t,r)}error(e,s,t,r){this.log(3,e,s,t,r)}dataIn(e,s,t,r){this.info(e,`\u2192 ${s}`,t,r)}dataOut(e,s,t,r){this.info(e,`\u2190 ${s}`,t,r)}success(e,s,t,r){this.info(e,`\u2713 ${s}`,t,r)}failure(e,s,t,r){this.error(e,`\u2717 ${s}`,t,r)}timing(e,s,t,r){this.info(e,`\u23F1 ${s}`,r,{duration:`${t}ms`})}},L=new N;var g=class{db;constructor(){A(u),this.db=new w(O),this.db.pragma("journal_mode = WAL"),this.db.pragma("synchronous = NORMAL"),this.db.pragma("foreign_keys = ON"),this.initializeSchema(),this.ensureWorkerPortColumn(),this.ensurePromptTrackingColumns(),this.removeSessionSummariesUniqueConstraint(),this.addObservationHierarchicalFields(),this.makeObservationsTextNullable(),this.createUserPromptsTable(),this.ensureDiscoveryTokensColumn()}initializeSchema(){try{this.db.exec(`
CREATE TABLE IF NOT EXISTS schema_versions (
id INTEGER PRIMARY KEY,
version INTEGER UNIQUE NOT NULL,
applied_at TEXT NOT NULL
)
`);let e=this.db.prepare("SELECT version FROM schema_versions ORDER BY version").all();(e.length>0?Math.max(...e.map(t=>t.version)):0)===0&&(console.error("[SessionStore] Initializing fresh database with migration004..."),this.db.exec(`
CREATE TABLE IF NOT EXISTS sdk_sessions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
claude_session_id TEXT UNIQUE NOT NULL,
sdk_session_id TEXT UNIQUE,
project TEXT NOT NULL,
user_prompt TEXT,
started_at TEXT NOT NULL,
started_at_epoch INTEGER NOT NULL,
completed_at TEXT,
completed_at_epoch INTEGER,
status TEXT CHECK(status IN ('active', 'completed', 'failed')) NOT NULL DEFAULT 'active'
);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_claude_id ON sdk_sessions(claude_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_sdk_id ON sdk_sessions(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_project ON sdk_sessions(project);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_status ON sdk_sessions(status);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_started ON sdk_sessions(started_at_epoch DESC);
CREATE TABLE IF NOT EXISTS observations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT NOT NULL,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery')),
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_observations_sdk_session ON observations(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_observations_project ON observations(project);
CREATE INDEX IF NOT EXISTS idx_observations_type ON observations(type);
CREATE INDEX IF NOT EXISTS idx_observations_created ON observations(created_at_epoch DESC);
CREATE TABLE IF NOT EXISTS session_summaries (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT UNIQUE NOT NULL,
project TEXT NOT NULL,
request TEXT,
investigated TEXT,
learned TEXT,
completed TEXT,
next_steps TEXT,
files_read TEXT,
files_edited TEXT,
notes TEXT,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_session_summaries_project ON session_summaries(project);
CREATE INDEX IF NOT EXISTS idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.prepare("INSERT INTO schema_versions (version, applied_at) VALUES (?, ?)").run(4,new Date().toISOString()),console.error("[SessionStore] Migration004 applied successfully"))}catch(e){throw console.error("[SessionStore] Schema initialization error:",e.message),e}}ensureWorkerPortColumn(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(5))return;this.db.pragma("table_info(sdk_sessions)").some(r=>r.name==="worker_port")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER"),console.error("[SessionStore] Added worker_port column to sdk_sessions table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(5,new Date().toISOString())}catch(e){console.error("[SessionStore] Migration error:",e.message)}}ensurePromptTrackingColumns(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(6))return;this.db.pragma("table_info(sdk_sessions)").some(d=>d.name==="prompt_counter")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN prompt_counter INTEGER DEFAULT 0"),console.error("[SessionStore] Added prompt_counter column to sdk_sessions table")),this.db.pragma("table_info(observations)").some(d=>d.name==="prompt_number")||(this.db.exec("ALTER TABLE observations ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to observations table")),this.db.pragma("table_info(session_summaries)").some(d=>d.name==="prompt_number")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to session_summaries table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(6,new Date().toISOString())}catch(e){console.error("[SessionStore] Prompt tracking migration error:",e.message)}}removeSessionSummariesUniqueConstraint(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = 
?").get(7))return;if(!this.db.pragma("index_list(session_summaries)").some(r=>r.unique===1)){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString());return}console.error("[SessionStore] Removing UNIQUE constraint from session_summaries.sdk_session_id..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE session_summaries_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
project TEXT NOT NULL,
request TEXT,
investigated TEXT,
learned TEXT,
completed TEXT,
next_steps TEXT,
files_read TEXT,
files_edited TEXT,
notes TEXT,
prompt_number INTEGER,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
)
`),this.db.exec(`
INSERT INTO session_summaries_new
SELECT id, sdk_session_id, project, request, investigated, learned,
completed, next_steps, files_read, files_edited, notes,
prompt_number, created_at, created_at_epoch
FROM session_summaries
`),this.db.exec("DROP TABLE session_summaries"),this.db.exec("ALTER TABLE session_summaries_new RENAME TO session_summaries"),this.db.exec(`
CREATE INDEX idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX idx_session_summaries_project ON session_summaries(project);
CREATE INDEX idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString()),console.error("[SessionStore] Successfully removed UNIQUE constraint from session_summaries.sdk_session_id")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (remove UNIQUE constraint):",e.message)}}addObservationHierarchicalFields(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(8))return;if(this.db.pragma("table_info(observations)").some(r=>r.name==="title")){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString());return}console.error("[SessionStore] Adding hierarchical fields to observations table..."),this.db.exec(`
ALTER TABLE observations ADD COLUMN title TEXT;
ALTER TABLE observations ADD COLUMN subtitle TEXT;
ALTER TABLE observations ADD COLUMN facts TEXT;
ALTER TABLE observations ADD COLUMN narrative TEXT;
ALTER TABLE observations ADD COLUMN concepts TEXT;
ALTER TABLE observations ADD COLUMN files_read TEXT;
ALTER TABLE observations ADD COLUMN files_modified TEXT;
`),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString()),console.error("[SessionStore] Successfully added hierarchical fields to observations table")}catch(e){console.error("[SessionStore] Migration error (add hierarchical fields):",e.message)}}makeObservationsTextNullable(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(9))return;let t=this.db.pragma("table_info(observations)").find(r=>r.name==="text");if(!t||t.notnull===0){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString());return}console.error("[SessionStore] Making observations.text nullable..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE observations_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery', 'change')),
title TEXT,
subtitle TEXT,
facts TEXT,
narrative TEXT,
concepts TEXT,
files_read TEXT,
files_modified TEXT,
prompt_number INTEGER,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
)
`),this.db.exec(`
INSERT INTO observations_new
SELECT id, sdk_session_id, project, text, type, title, subtitle, facts,
narrative, concepts, files_read, files_modified, prompt_number,
created_at, created_at_epoch
FROM observations
`),this.db.exec("DROP TABLE observations"),this.db.exec("ALTER TABLE observations_new RENAME TO observations"),this.db.exec(`
CREATE INDEX idx_observations_sdk_session ON observations(sdk_session_id);
CREATE INDEX idx_observations_project ON observations(project);
CREATE INDEX idx_observations_type ON observations(type);
CREATE INDEX idx_observations_created ON observations(created_at_epoch DESC);
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString()),console.error("[SessionStore] Successfully made observations.text nullable")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (make text nullable):",e.message)}}createUserPromptsTable(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(10))return;if(this.db.pragma("table_info(user_prompts)").length>0){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(10,new Date().toISOString());return}console.error("[SessionStore] Creating user_prompts table with FTS5 support..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE user_prompts (
id INTEGER PRIMARY KEY AUTOINCREMENT,
claude_session_id TEXT NOT NULL,
prompt_number INTEGER NOT NULL,
prompt_text TEXT NOT NULL,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(claude_session_id) REFERENCES sdk_sessions(claude_session_id) ON DELETE CASCADE
);
CREATE INDEX idx_user_prompts_claude_session ON user_prompts(claude_session_id);
CREATE INDEX idx_user_prompts_created ON user_prompts(created_at_epoch DESC);
CREATE INDEX idx_user_prompts_prompt_number ON user_prompts(prompt_number);
CREATE INDEX idx_user_prompts_lookup ON user_prompts(claude_session_id, prompt_number);
`),this.db.exec(`
CREATE VIRTUAL TABLE user_prompts_fts USING fts5(
prompt_text,
content='user_prompts',
content_rowid='id'
);
`),this.db.exec(`
CREATE TRIGGER user_prompts_ai AFTER INSERT ON user_prompts BEGIN
INSERT INTO user_prompts_fts(rowid, prompt_text)
VALUES (new.id, new.prompt_text);
END;
CREATE TRIGGER user_prompts_ad AFTER DELETE ON user_prompts BEGIN
INSERT INTO user_prompts_fts(user_prompts_fts, rowid, prompt_text)
VALUES('delete', old.id, old.prompt_text);
END;
CREATE TRIGGER user_prompts_au AFTER UPDATE ON user_prompts BEGIN
INSERT INTO user_prompts_fts(user_prompts_fts, rowid, prompt_text)
VALUES('delete', old.id, old.prompt_text);
INSERT INTO user_prompts_fts(rowid, prompt_text)
VALUES (new.id, new.prompt_text);
END;
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(10,new Date().toISOString()),console.error("[SessionStore] Successfully created user_prompts table with FTS5 support")}catch(t){throw this.db.exec("ROLLBACK"),t}}catch(e){console.error("[SessionStore] Migration error (create user_prompts table):",e.message)}}ensureDiscoveryTokensColumn(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(11))return;this.db.pragma("table_info(observations)").some(o=>o.name==="discovery_tokens")||(this.db.exec("ALTER TABLE observations ADD COLUMN discovery_tokens INTEGER DEFAULT 0"),console.error("[SessionStore] Added discovery_tokens column to observations table")),this.db.pragma("table_info(session_summaries)").some(o=>o.name==="discovery_tokens")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN discovery_tokens INTEGER DEFAULT 0"),console.error("[SessionStore] Added discovery_tokens column to session_summaries table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(11,new Date().toISOString())}catch(e){throw console.error("[SessionStore] Discovery tokens migration error:",e.message),e}}getRecentSummaries(e,s=10){return this.db.prepare(`
SELECT
request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, prompt_number, created_at
FROM session_summaries
WHERE project = ?
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e,s)}getRecentSummariesWithSessionInfo(e,s=3){return this.db.prepare(`
SELECT
sdk_session_id, request, learned, completed, next_steps,
prompt_number, created_at
FROM session_summaries
WHERE project = ?
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e,s)}getRecentObservations(e,s=20){return this.db.prepare(`
SELECT type, text, prompt_number, created_at
FROM observations
WHERE project = ?
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e,s)}getAllRecentObservations(e=100){return this.db.prepare(`
SELECT id, type, title, subtitle, text, project, prompt_number, created_at, created_at_epoch
FROM observations
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e)}getAllRecentSummaries(e=50){return this.db.prepare(`
SELECT id, request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, project, prompt_number,
created_at, created_at_epoch
FROM session_summaries
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e)}getAllRecentUserPrompts(e=100){return this.db.prepare(`
SELECT
up.id,
up.claude_session_id,
s.project,
up.prompt_number,
up.prompt_text,
up.created_at,
up.created_at_epoch
FROM user_prompts up
LEFT JOIN sdk_sessions s ON up.claude_session_id = s.claude_session_id
ORDER BY up.created_at_epoch DESC
LIMIT ?
`).all(e)}getAllProjects(){return this.db.prepare(`
SELECT DISTINCT project
FROM sdk_sessions
WHERE project IS NOT NULL AND project != ''
ORDER BY project ASC
`).all().map(t=>t.project)}getRecentSessionsWithStatus(e,s=3){return this.db.prepare(`
SELECT * FROM (
SELECT
s.sdk_session_id,
s.status,
s.started_at,
s.started_at_epoch,
s.user_prompt,
CASE WHEN sum.sdk_session_id IS NOT NULL THEN 1 ELSE 0 END as has_summary
FROM sdk_sessions s
LEFT JOIN session_summaries sum ON s.sdk_session_id = sum.sdk_session_id
WHERE s.project = ? AND s.sdk_session_id IS NOT NULL
GROUP BY s.sdk_session_id
ORDER BY s.started_at_epoch DESC
LIMIT ?
)
ORDER BY started_at_epoch ASC
`).all(e,s)}getObservationsForSession(e){return this.db.prepare(`
SELECT title, subtitle, type, prompt_number
FROM observations
WHERE sdk_session_id = ?
ORDER BY created_at_epoch ASC
`).all(e)}getObservationById(e){return this.db.prepare(`
SELECT *
FROM observations
WHERE id = ?
`).get(e)||null}getObservationsByIds(e,s={}){if(e.length===0)return[];let{orderBy:t="date_desc",limit:r}=s,n=t==="date_asc"?"ASC":"DESC",o=r?`LIMIT ${r}`:"",i=e.map(()=>"?").join(",");return this.db.prepare(`
SELECT *
FROM observations
WHERE id IN (${i})
ORDER BY created_at_epoch ${n}
${o}
`).all(...e)}getSummaryForSession(e){return this.db.prepare(`
SELECT
request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, prompt_number, created_at
FROM session_summaries
WHERE sdk_session_id = ?
ORDER BY created_at_epoch DESC
LIMIT 1
`).get(e)||null}getFilesForSession(e){let t=this.db.prepare(`
SELECT files_read, files_modified
FROM observations
WHERE sdk_session_id = ?
`).all(e),r=new Set,n=new Set;for(let o of t){if(o.files_read)try{let i=JSON.parse(o.files_read);Array.isArray(i)&&i.forEach(d=>r.add(d))}catch{}if(o.files_modified)try{let i=JSON.parse(o.files_modified);Array.isArray(i)&&i.forEach(d=>n.add(d))}catch{}}return{filesRead:Array.from(r),filesModified:Array.from(n)}}getSessionById(e){return this.db.prepare(`
SELECT id, claude_session_id, sdk_session_id, project, user_prompt
FROM sdk_sessions
WHERE id = ?
LIMIT 1
`).get(e)||null}findActiveSDKSession(e){return this.db.prepare(`
SELECT id, sdk_session_id, project, worker_port
FROM sdk_sessions
WHERE claude_session_id = ? AND status = 'active'
LIMIT 1
`).get(e)||null}findAnySDKSession(e){return this.db.prepare(`
SELECT id
FROM sdk_sessions
WHERE claude_session_id = ?
LIMIT 1
`).get(e)||null}reactivateSession(e,s){this.db.prepare(`
UPDATE sdk_sessions
SET status = 'active', user_prompt = ?, worker_port = NULL
WHERE id = ?
`).run(s,e)}incrementPromptCounter(e){return this.db.prepare(`
UPDATE sdk_sessions
SET prompt_counter = COALESCE(prompt_counter, 0) + 1
WHERE id = ?
`).run(e),this.db.prepare(`
SELECT prompt_counter FROM sdk_sessions WHERE id = ?
`).get(e)?.prompt_counter||1}getPromptCounter(e){return this.db.prepare(`
SELECT prompt_counter FROM sdk_sessions WHERE id = ?
`).get(e)?.prompt_counter||0}createSDKSession(e,s,t){let r=new Date,n=r.getTime(),i=this.db.prepare(`
INSERT OR IGNORE INTO sdk_sessions
(claude_session_id, sdk_session_id, project, user_prompt, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, ?, 'active')
`).run(e,e,s,t,r.toISOString(),n);return i.lastInsertRowid===0||i.changes===0?(s&&s.trim()!==""&&this.db.prepare(`
UPDATE sdk_sessions
SET project = ?, user_prompt = ?
WHERE claude_session_id = ?
`).run(s,t,e),this.db.prepare(`
SELECT id FROM sdk_sessions WHERE claude_session_id = ? LIMIT 1
`).get(e).id):i.lastInsertRowid}updateSDKSessionId(e,s){return this.db.prepare(`
UPDATE sdk_sessions
SET sdk_session_id = ?
WHERE id = ? AND sdk_session_id IS NULL
`).run(s,e).changes===0?(L.debug("DB","sdk_session_id already set, skipping update",{sessionId:e,sdkSessionId:s}),!1):!0}setWorkerPort(e,s){this.db.prepare(`
UPDATE sdk_sessions
SET worker_port = ?
WHERE id = ?
`).run(s,e)}getWorkerPort(e){return this.db.prepare(`
SELECT worker_port
FROM sdk_sessions
WHERE id = ?
LIMIT 1
`).get(e)?.worker_port||null}saveUserPrompt(e,s,t){let r=new Date,n=r.getTime();return this.db.prepare(`
INSERT INTO user_prompts
(claude_session_id, prompt_number, prompt_text, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?)
`).run(e,s,t,r.toISOString(),n).lastInsertRowid}getUserPrompt(e,s){return this.db.prepare(`
SELECT prompt_text
FROM user_prompts
WHERE claude_session_id = ? AND prompt_number = ?
LIMIT 1
`).get(e,s)?.prompt_text??null}storeObservation(e,s,t,r,n=0){let o=new Date,i=o.getTime();this.db.prepare(`
SELECT id FROM sdk_sessions WHERE sdk_session_id = ?
`).get(e)||(this.db.prepare(`
INSERT INTO sdk_sessions
(claude_session_id, sdk_session_id, project, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, 'active')
`).run(e,e,s,o.toISOString(),i),console.error(`[SessionStore] Auto-created session record for session_id: ${e}`));let E=this.db.prepare(`
INSERT INTO observations
(sdk_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(e,s,t.type,t.title,t.subtitle,JSON.stringify(t.facts),t.narrative,JSON.stringify(t.concepts),JSON.stringify(t.files_read),JSON.stringify(t.files_modified),r||null,n,o.toISOString(),i);return{id:Number(E.lastInsertRowid),createdAtEpoch:i}}storeSummary(e,s,t,r,n=0){let o=new Date,i=o.getTime();this.db.prepare(`
SELECT id FROM sdk_sessions WHERE sdk_session_id = ?
`).get(e)||(this.db.prepare(`
INSERT INTO sdk_sessions
(claude_session_id, sdk_session_id, project, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, 'active')
`).run(e,e,s,o.toISOString(),i),console.error(`[SessionStore] Auto-created session record for session_id: ${e}`));let E=this.db.prepare(`
INSERT INTO session_summaries
(sdk_session_id, project, request, investigated, learned, completed,
next_steps, notes, prompt_number, discovery_tokens, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(e,s,t.request,t.investigated,t.learned,t.completed,t.next_steps,t.notes,r||null,n,o.toISOString(),i);return{id:Number(E.lastInsertRowid),createdAtEpoch:i}}markSessionCompleted(e){let s=new Date,t=s.getTime();this.db.prepare(`
UPDATE sdk_sessions
SET status = 'completed', completed_at = ?, completed_at_epoch = ?
WHERE id = ?
`).run(s.toISOString(),t,e)}markSessionFailed(e){let s=new Date,t=s.getTime();this.db.prepare(`
UPDATE sdk_sessions
SET status = 'failed', completed_at = ?, completed_at_epoch = ?
WHERE id = ?
`).run(s.toISOString(),t,e)}getSessionSummariesByIds(e,s={}){if(e.length===0)return[];let{orderBy:t="date_desc",limit:r}=s,n=t==="date_asc"?"ASC":"DESC",o=r?`LIMIT ${r}`:"",i=e.map(()=>"?").join(",");return this.db.prepare(`
SELECT * FROM session_summaries
WHERE id IN (${i})
ORDER BY created_at_epoch ${n}
${o}
`).all(...e)}getUserPromptsByIds(e,s={}){if(e.length===0)return[];let{orderBy:t="date_desc",limit:r}=s,n=t==="date_asc"?"ASC":"DESC",o=r?`LIMIT ${r}`:"",i=e.map(()=>"?").join(",");return this.db.prepare(`
SELECT
up.*,
s.project,
s.sdk_session_id
FROM user_prompts up
JOIN sdk_sessions s ON up.claude_session_id = s.claude_session_id
WHERE up.id IN (${i})
ORDER BY up.created_at_epoch ${n}
${o}
`).all(...e)}getTimelineAroundTimestamp(e,s=10,t=10,r){return this.getTimelineAroundObservation(null,e,s,t,r)}getTimelineAroundObservation(e,s,t=10,r=10,n){let o=n?"AND project = ?":"",i=n?[n]:[],d,_;if(e!==null){let l=`
SELECT id, created_at_epoch
FROM observations
WHERE id <= ? ${o}
ORDER BY id DESC
LIMIT ?
`,S=`
SELECT id, created_at_epoch
FROM observations
WHERE id >= ? ${o}
ORDER BY id ASC
LIMIT ?
`;try{let p=this.db.prepare(l).all(e,...i,t+1),a=this.db.prepare(S).all(e,...i,r+1);if(p.length===0&&a.length===0)return{observations:[],sessions:[],prompts:[]};d=p.length>0?p[p.length-1].created_at_epoch:s,_=a.length>0?a[a.length-1].created_at_epoch:s}catch(p){return console.error("[SessionStore] Error getting boundary observations:",p.message),{observations:[],sessions:[],prompts:[]}}}else{let l=`
SELECT created_at_epoch
FROM observations
WHERE created_at_epoch <= ? ${o}
ORDER BY created_at_epoch DESC
LIMIT ?
`,S=`
SELECT created_at_epoch
FROM observations
WHERE created_at_epoch >= ? ${o}
ORDER BY created_at_epoch ASC
LIMIT ?
`;try{let p=this.db.prepare(l).all(s,...i,t),a=this.db.prepare(S).all(s,...i,r+1);if(p.length===0&&a.length===0)return{observations:[],sessions:[],prompts:[]};d=p.length>0?p[p.length-1].created_at_epoch:s,_=a.length>0?a[a.length-1].created_at_epoch:s}catch(p){return console.error("[SessionStore] Error getting boundary timestamps:",p.message),{observations:[],sessions:[],prompts:[]}}}let T=`
SELECT *
FROM observations
WHERE created_at_epoch >= ? AND created_at_epoch <= ? ${o}
ORDER BY created_at_epoch ASC
`,E=`
SELECT *
FROM session_summaries
WHERE created_at_epoch >= ? AND created_at_epoch <= ? ${o}
ORDER BY created_at_epoch ASC
`,b=`
SELECT up.*, s.project, s.sdk_session_id
FROM user_prompts up
JOIN sdk_sessions s ON up.claude_session_id = s.claude_session_id
WHERE up.created_at_epoch >= ? AND up.created_at_epoch <= ? ${o.replace("project","s.project")}
ORDER BY up.created_at_epoch ASC
`;try{let l=this.db.prepare(T).all(d,_,...i),S=this.db.prepare(E).all(d,_,...i),p=this.db.prepare(b).all(d,_,...i);return{observations:l,sessions:S.map(a=>({id:a.id,sdk_session_id:a.sdk_session_id,project:a.project,request:a.request,completed:a.completed,next_steps:a.next_steps,created_at:a.created_at,created_at_epoch:a.created_at_epoch})),prompts:p.map(a=>({id:a.id,claude_session_id:a.claude_session_id,project:a.project,prompt:a.prompt_text,created_at:a.created_at,created_at_epoch:a.created_at_epoch}))}}catch(l){return console.error("[SessionStore] Error querying timeline records:",l.message),{observations:[],sessions:[],prompts:[]}}}close(){this.db.close()}};import F from"path";import{homedir as X}from"os";import{existsSync as B,readFileSync as P}from"fs";function v(){try{let c=F.join(X(),".claude-mem","settings.json");if(B(c)){let e=JSON.parse(P(c,"utf-8")),s=parseInt(e.env?.CLAUDE_MEM_WORKER_PORT,10);if(!isNaN(s))return s}}catch{}return parseInt(process.env.CLAUDE_MEM_WORKER_PORT||"37777",10)}async function C(c){console.error("[claude-mem cleanup] Hook fired",{input:c?{session_id:c.session_id,cwd:c.cwd,reason:c.reason}:null}),c||(console.log("No input provided - this script is designed to run as a Claude Code SessionEnd hook"),console.log(`
Expected input format:`),console.log(JSON.stringify({session_id:"string",cwd:"string",transcript_path:"string",hook_event_name:"SessionEnd",reason:"exit"},null,2)),process.exit(0));let{session_id:e,reason:s}=c;console.error("[claude-mem cleanup] Searching for active SDK session",{session_id:e,reason:s});let t=new g,r=t.findActiveSDKSession(e);r||(console.error("[claude-mem cleanup] No active SDK session found",{session_id:e}),t.close(),console.log('{"continue": true, "suppressOutput": true}'),process.exit(0)),console.error("[claude-mem cleanup] Active SDK session found",{session_id:r.id,sdk_session_id:r.sdk_session_id,project:r.project,worker_port:r.worker_port}),t.markSessionCompleted(r.id),console.error("[claude-mem cleanup] Session marked as completed in database"),t.close();try{let n=r.worker_port||v();await fetch(`http://127.0.0.1:${n}/sessions/${r.id}/complete`,{method:"POST",signal:AbortSignal.timeout(1e3)}),console.error("[claude-mem cleanup] Worker notified to stop processing indicator")}catch(n){console.error("[claude-mem cleanup] Failed to notify worker (non-critical):",n)}console.error("[claude-mem cleanup] Cleanup completed successfully"),console.log('{"continue": true, "suppressOutput": true}'),process.exit(0)}if(f.isTTY)C(void 0);else{let c="";f.on("data",e=>c+=e),f.on("end",async()=>{let e=c?JSON.parse(c):void 0;await C(e)})}
import{stdin as S}from"process";import h from"path";import{homedir as y}from"os";import{join as o,dirname as C,basename as v}from"path";import{homedir as g}from"os";import{fileURLToPath as D}from"url";function M(){return typeof __dirname<"u"?__dirname:C(D(import.meta.url))}var H=M(),s=process.env.CLAUDE_MEM_DATA_DIR||o(g(),".claude-mem"),l=process.env.CLAUDE_CONFIG_DIR||o(g(),".claude"),X=o(s,"archives"),B=o(s,"logs"),V=o(s,"trash"),$=o(s,"backups"),j=o(s,"settings.json"),G=o(s,"claude-mem.db"),K=o(s,"vector-db"),Y=o(l,"settings.json"),J=o(l,"commands"),q=o(l,"CLAUDE.md");import{readFileSync as R,existsSync as U}from"fs";var N=["bugfix","feature","refactor","discovery","decision","change"],L=["how-it-works","why-it-exists","what-changed","problem-solution","gotcha","pattern","trade-off"];var m=N.join(","),d=L.join(",");var p=class{static DEFAULTS={CLAUDE_MEM_MODEL:"claude-haiku-4-5",CLAUDE_MEM_CONTEXT_OBSERVATIONS:"50",CLAUDE_MEM_WORKER_PORT:"37777",CLAUDE_MEM_CONTEXT_SHOW_READ_TOKENS:"true",CLAUDE_MEM_CONTEXT_SHOW_WORK_TOKENS:"true",CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_AMOUNT:"true",CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_PERCENT:"true",CLAUDE_MEM_CONTEXT_OBSERVATION_TYPES:m,CLAUDE_MEM_CONTEXT_OBSERVATION_CONCEPTS:d,CLAUDE_MEM_CONTEXT_FULL_COUNT:"5",CLAUDE_MEM_CONTEXT_FULL_FIELD:"narrative",CLAUDE_MEM_CONTEXT_SESSION_COUNT:"10",CLAUDE_MEM_CONTEXT_SHOW_LAST_SUMMARY:"true",CLAUDE_MEM_CONTEXT_SHOW_LAST_MESSAGE:"false"};static getAllDefaults(){return{...this.DEFAULTS}}static get(t){return process.env[t]||this.DEFAULTS[t]}static getInt(t){let r=this.get(t);return parseInt(r,10)}static getBool(t){return this.get(t)==="true"}static loadFromFile(t){if(!U(t))return this.getAllDefaults();let r=R(t,"utf-8"),n=JSON.parse(r).env||{},i={...this.DEFAULTS};for(let _ of Object.keys(this.DEFAULTS))n[_]!==void 0&&(i[_]=n[_]);return i}};function O(){let e=h.join(y(),".claude-mem","settings.json"),t=p.loadFromFile(e);return parseInt(t.CLAUDE_MEM_WORKER_PORT,10)}import{appendFileSync as 
I}from"fs";import{homedir as x}from"os";import{join as P}from"path";var k=P(x(),".claude-mem","silent.log");function c(e,t,r=""){let a=new Date().toISOString(),u=((new Error().stack||"").split(`
`)[2]||"").match(/at\s+(?:.*\s+)?\(?([^:]+):(\d+):(\d+)\)?/),A=u?`${u[1].split("/").pop()}:${u[2]}`:"unknown",E=`[${a}] [${A}] ${e}`;if(t!==void 0)try{E+=` ${JSON.stringify(t)}`}catch(T){E+=` [stringify error: ${T}]`}E+=`
`;try{I(k,E)}catch(T){console.error("[silent-debug] Failed to write to log:",T)}return r}async function f(e){c("[cleanup-hook] Hook fired",{session_id:e?.session_id,cwd:e?.cwd,reason:e?.reason}),e||(console.log("No input provided - this script is designed to run as a Claude Code SessionEnd hook"),console.log(`
Expected input format:`),console.log(JSON.stringify({session_id:"string",cwd:"string",transcript_path:"string",hook_event_name:"SessionEnd",reason:"exit"},null,2)),process.exit(0));let{session_id:t,reason:r}=e,a=O();try{let n=await fetch(`http://127.0.0.1:${a}/api/sessions/complete`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({claudeSessionId:t,reason:r}),signal:AbortSignal.timeout(2e3)});if(n.ok){let i=await n.json();c("[cleanup-hook] Session cleanup completed",i)}else c("[cleanup-hook] Session not found or already cleaned up")}catch(n){c("[cleanup-hook] Worker not reachable (non-critical)",{error:n.message})}console.log('{"continue": true, "suppressOutput": true}'),process.exit(0)}if(S.isTTY)f(void 0);else{let e="";S.on("data",t=>e+=t),S.on("end",async()=>{let t=e?JSON.parse(e):void 0;await f(t)})}
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
+15
View File
File diff suppressed because one or more lines are too long
+55 -45
View File
@@ -1,7 +1,7 @@
#!/usr/bin/env node
import ie from"path";import{stdin as X}from"process";import Y from"better-sqlite3";import{join as l,dirname as B,basename as ce}from"path";import{homedir as C}from"os";import{existsSync as le,mkdirSync as $}from"fs";import{fileURLToPath as j}from"url";function W(){return typeof __dirname<"u"?__dirname:B(j(import.meta.url))}var G=W(),E=process.env.CLAUDE_MEM_DATA_DIR||l(C(),".claude-mem"),f=process.env.CLAUDE_CONFIG_DIR||l(C(),".claude"),Te=l(E,"archives"),ge=l(E,"logs"),be=l(E,"trash"),Se=l(E,"backups"),Re=l(E,"settings.json"),D=l(E,"claude-mem.db"),he=l(E,"vector-db"),fe=l(f,"settings.json"),Ne=l(f,"commands"),Oe=l(f,"CLAUDE.md");function k(a){$(a,{recursive:!0})}function N(){return l(G,"..","..")}var O=(n=>(n[n.DEBUG=0]="DEBUG",n[n.INFO=1]="INFO",n[n.WARN=2]="WARN",n[n.ERROR=3]="ERROR",n[n.SILENT=4]="SILENT",n))(O||{}),I=class{level;useColor;constructor(){let e=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=O[e]??1,this.useColor=process.stdout.isTTY??!1}correlationId(e,s){return`obs-${e}-${s}`}sessionId(e){return`session-${e}`}formatData(e){if(e==null)return"";if(typeof e=="string")return e;if(typeof e=="number"||typeof e=="boolean")return e.toString();if(typeof e=="object"){if(e instanceof Error)return this.level===0?`${e.message}
${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Object.keys(e);return s.length===0?"{}":s.length<=3?JSON.stringify(e):`{${s.length} keys: ${s.slice(0,3).join(", ")}...}`}return String(e)}formatTool(e,s){if(!s)return e;try{let t=typeof s=="string"?JSON.parse(s):s;if(e==="Bash"&&t.command){let r=t.command.length>50?t.command.substring(0,50)+"...":t.command;return`${e}(${r})`}if(e==="Read"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Edit"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Write"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}return e}catch{return e}}log(e,s,t,r,n){if(e<this.level)return;let i=new Date().toISOString().replace("T"," ").substring(0,23),o=O[e].padEnd(5),p=s.padEnd(6),c="";r?.correlationId?c=`[${r.correlationId}] `:r?.sessionId&&(c=`[session-${r.sessionId}] `);let _="";n!=null&&(this.level===0&&typeof n=="object"?_=`
`+JSON.stringify(n,null,2):_=" "+this.formatData(n));let m="";if(r){let{sessionId:g,sdkSessionId:S,correlationId:u,...d}=r;Object.keys(d).length>0&&(m=` {${Object.entries(d).map(([P,H])=>`${P}=${H}`).join(", ")}}`)}let T=`[${i}] [${o}] [${p}] ${c}${t}${m}${_}`;e===3?console.error(T):console.log(T)}debug(e,s,t,r){this.log(0,e,s,t,r)}info(e,s,t,r){this.log(1,e,s,t,r)}warn(e,s,t,r){this.log(2,e,s,t,r)}error(e,s,t,r){this.log(3,e,s,t,r)}dataIn(e,s,t,r){this.info(e,`\u2192 ${s}`,t,r)}dataOut(e,s,t,r){this.info(e,`\u2190 ${s}`,t,r)}success(e,s,t,r){this.info(e,`\u2713 ${s}`,t,r)}failure(e,s,t,r){this.error(e,`\u2717 ${s}`,t,r)}timing(e,s,t,r){this.info(e,`\u23F1 ${s}`,r,{duration:`${t}ms`})}},x=new I;var R=class{db;constructor(){k(E),this.db=new Y(D),this.db.pragma("journal_mode = WAL"),this.db.pragma("synchronous = NORMAL"),this.db.pragma("foreign_keys = ON"),this.initializeSchema(),this.ensureWorkerPortColumn(),this.ensurePromptTrackingColumns(),this.removeSessionSummariesUniqueConstraint(),this.addObservationHierarchicalFields(),this.makeObservationsTextNullable(),this.createUserPromptsTable(),this.ensureDiscoveryTokensColumn()}initializeSchema(){try{this.db.exec(`
import _e from"path";import{stdin as H}from"process";import K from"better-sqlite3";import{join as m,dirname as j,basename as le}from"path";import{homedir as v}from"os";import{existsSync as be,mkdirSync as $}from"fs";import{fileURLToPath as G}from"url";function Y(){return typeof __dirname<"u"?__dirname:j(G(import.meta.url))}var V=Y(),l=process.env.CLAUDE_MEM_DATA_DIR||m(v(),".claude-mem"),N=process.env.CLAUDE_CONFIG_DIR||m(v(),".claude"),Oe=m(l,"archives"),he=m(l,"logs"),Ne=m(l,"trash"),fe=m(l,"backups"),Ie=m(l,"settings.json"),y=m(l,"claude-mem.db"),Ae=m(l,"vector-db"),Le=m(N,"settings.json"),Ce=m(N,"commands"),De=m(N,"CLAUDE.md");function k(a){$(a,{recursive:!0})}function f(){return m(V,"..","..")}var I=(o=>(o[o.DEBUG=0]="DEBUG",o[o.INFO=1]="INFO",o[o.WARN=2]="WARN",o[o.ERROR=3]="ERROR",o[o.SILENT=4]="SILENT",o))(I||{}),A=class{level;useColor;constructor(){let e=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=I[e]??1,this.useColor=process.stdout.isTTY??!1}correlationId(e,s){return`obs-${e}-${s}`}sessionId(e){return`session-${e}`}formatData(e){if(e==null)return"";if(typeof e=="string")return e;if(typeof e=="number"||typeof e=="boolean")return e.toString();if(typeof e=="object"){if(e instanceof Error)return this.level===0?`${e.message}
${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Object.keys(e);return s.length===0?"{}":s.length<=3?JSON.stringify(e):`{${s.length} keys: ${s.slice(0,3).join(", ")}...}`}return String(e)}formatTool(e,s){if(!s)return e;try{let t=typeof s=="string"?JSON.parse(s):s;if(e==="Bash"&&t.command){let r=t.command.length>50?t.command.substring(0,50)+"...":t.command;return`${e}(${r})`}if(e==="Read"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Edit"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Write"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}return e}catch{return e}}log(e,s,t,r,o){if(e<this.level)return;let n=new Date().toISOString().replace("T"," ").substring(0,23),i=I[e].padEnd(5),p=s.padEnd(6),d="";r?.correlationId?d=`[${r.correlationId}] `:r?.sessionId&&(d=`[session-${r.sessionId}] `);let E="";o!=null&&(this.level===0&&typeof o=="object"?E=`
`+JSON.stringify(o,null,2):E=" "+this.formatData(o));let _="";if(r){let{sessionId:S,sdkSessionId:b,correlationId:u,...c}=r;Object.keys(c).length>0&&(_=` {${Object.entries(c).map(([B,W])=>`${B}=${W}`).join(", ")}}`)}let T=`[${n}] [${i}] [${p}] ${d}${t}${_}${E}`;e===3?console.error(T):console.log(T)}debug(e,s,t,r){this.log(0,e,s,t,r)}info(e,s,t,r){this.log(1,e,s,t,r)}warn(e,s,t,r){this.log(2,e,s,t,r)}error(e,s,t,r){this.log(3,e,s,t,r)}dataIn(e,s,t,r){this.info(e,`\u2192 ${s}`,t,r)}dataOut(e,s,t,r){this.info(e,`\u2190 ${s}`,t,r)}success(e,s,t,r){this.info(e,`\u2713 ${s}`,t,r)}failure(e,s,t,r){this.error(e,`\u2717 ${s}`,t,r)}timing(e,s,t,r){this.info(e,`\u23F1 ${s}`,r,{duration:`${t}ms`})}},x=new A;var R=class{db;constructor(){k(l),this.db=new K(y),this.db.pragma("journal_mode = WAL"),this.db.pragma("synchronous = NORMAL"),this.db.pragma("foreign_keys = ON"),this.initializeSchema(),this.ensureWorkerPortColumn(),this.ensurePromptTrackingColumns(),this.removeSessionSummariesUniqueConstraint(),this.addObservationHierarchicalFields(),this.makeObservationsTextNullable(),this.createUserPromptsTable(),this.ensureDiscoveryTokensColumn()}initializeSchema(){try{this.db.exec(`
CREATE TABLE IF NOT EXISTS schema_versions (
id INTEGER PRIMARY KEY,
version INTEGER UNIQUE NOT NULL,
@@ -167,7 +167,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
INSERT INTO user_prompts_fts(rowid, prompt_text)
VALUES (new.id, new.prompt_text);
END;
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(10,new Date().toISOString()),console.error("[SessionStore] Successfully created user_prompts table with FTS5 support")}catch(t){throw this.db.exec("ROLLBACK"),t}}catch(e){console.error("[SessionStore] Migration error (create user_prompts table):",e.message)}}ensureDiscoveryTokensColumn(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(11))return;this.db.pragma("table_info(observations)").some(i=>i.name==="discovery_tokens")||(this.db.exec("ALTER TABLE observations ADD COLUMN discovery_tokens INTEGER DEFAULT 0"),console.error("[SessionStore] Added discovery_tokens column to observations table")),this.db.pragma("table_info(session_summaries)").some(i=>i.name==="discovery_tokens")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN discovery_tokens INTEGER DEFAULT 0"),console.error("[SessionStore] Added discovery_tokens column to session_summaries table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(11,new Date().toISOString())}catch(e){throw console.error("[SessionStore] Discovery tokens migration error:",e.message),e}}getRecentSummaries(e,s=10){return this.db.prepare(`
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(10,new Date().toISOString()),console.error("[SessionStore] Successfully created user_prompts table with FTS5 support")}catch(t){throw this.db.exec("ROLLBACK"),t}}catch(e){console.error("[SessionStore] Migration error (create user_prompts table):",e.message)}}ensureDiscoveryTokensColumn(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(11))return;this.db.pragma("table_info(observations)").some(n=>n.name==="discovery_tokens")||(this.db.exec("ALTER TABLE observations ADD COLUMN discovery_tokens INTEGER DEFAULT 0"),console.error("[SessionStore] Added discovery_tokens column to observations table")),this.db.pragma("table_info(session_summaries)").some(n=>n.name==="discovery_tokens")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN discovery_tokens INTEGER DEFAULT 0"),console.error("[SessionStore] Added discovery_tokens column to session_summaries table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(11,new Date().toISOString())}catch(e){throw console.error("[SessionStore] Discovery tokens migration error:",e.message),e}}getRecentSummaries(e,s=10){return this.db.prepare(`
SELECT
request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, prompt_number, created_at
@@ -219,7 +219,17 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
FROM sdk_sessions
WHERE project IS NOT NULL AND project != ''
ORDER BY project ASC
`).all().map(t=>t.project)}getRecentSessionsWithStatus(e,s=3){return this.db.prepare(`
`).all().map(t=>t.project)}getLatestUserPrompt(e){return this.db.prepare(`
SELECT
up.*,
s.sdk_session_id,
s.project
FROM user_prompts up
JOIN sdk_sessions s ON up.claude_session_id = s.claude_session_id
WHERE up.claude_session_id = ?
ORDER BY up.created_at_epoch DESC
LIMIT 1
`).get(e)}getRecentSessionsWithStatus(e,s=3){return this.db.prepare(`
SELECT * FROM (
SELECT
s.sdk_session_id,
@@ -245,12 +255,12 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
SELECT *
FROM observations
WHERE id = ?
`).get(e)||null}getObservationsByIds(e,s={}){if(e.length===0)return[];let{orderBy:t="date_desc",limit:r}=s,n=t==="date_asc"?"ASC":"DESC",i=r?`LIMIT ${r}`:"",o=e.map(()=>"?").join(",");return this.db.prepare(`
`).get(e)||null}getObservationsByIds(e,s={}){if(e.length===0)return[];let{orderBy:t="date_desc",limit:r}=s,o=t==="date_asc"?"ASC":"DESC",n=r?`LIMIT ${r}`:"",i=e.map(()=>"?").join(",");return this.db.prepare(`
SELECT *
FROM observations
WHERE id IN (${o})
ORDER BY created_at_epoch ${n}
${i}
WHERE id IN (${i})
ORDER BY created_at_epoch ${o}
${n}
`).all(...e)}getSummaryForSession(e){return this.db.prepare(`
SELECT
request, investigated, learned, completed, next_steps,
@@ -263,7 +273,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
SELECT files_read, files_modified
FROM observations
WHERE sdk_session_id = ?
`).all(e),r=new Set,n=new Set;for(let i of t){if(i.files_read)try{let o=JSON.parse(i.files_read);Array.isArray(o)&&o.forEach(p=>r.add(p))}catch{}if(i.files_modified)try{let o=JSON.parse(i.files_modified);Array.isArray(o)&&o.forEach(p=>n.add(p))}catch{}}return{filesRead:Array.from(r),filesModified:Array.from(n)}}getSessionById(e){return this.db.prepare(`
`).all(e),r=new Set,o=new Set;for(let n of t){if(n.files_read)try{let i=JSON.parse(n.files_read);Array.isArray(i)&&i.forEach(p=>r.add(p))}catch{}if(n.files_modified)try{let i=JSON.parse(n.files_modified);Array.isArray(i)&&i.forEach(p=>o.add(p))}catch{}}return{filesRead:Array.from(r),filesModified:Array.from(o)}}getSessionById(e){return this.db.prepare(`
SELECT id, claude_session_id, sdk_session_id, project, user_prompt
FROM sdk_sessions
WHERE id = ?
@@ -290,17 +300,17 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
SELECT prompt_counter FROM sdk_sessions WHERE id = ?
`).get(e)?.prompt_counter||1}getPromptCounter(e){return this.db.prepare(`
SELECT prompt_counter FROM sdk_sessions WHERE id = ?
`).get(e)?.prompt_counter||0}createSDKSession(e,s,t){let r=new Date,n=r.getTime(),o=this.db.prepare(`
`).get(e)?.prompt_counter||0}createSDKSession(e,s,t){let r=new Date,o=r.getTime(),i=this.db.prepare(`
INSERT OR IGNORE INTO sdk_sessions
(claude_session_id, sdk_session_id, project, user_prompt, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, ?, 'active')
`).run(e,e,s,t,r.toISOString(),n);return o.lastInsertRowid===0||o.changes===0?(s&&s.trim()!==""&&this.db.prepare(`
`).run(e,e,s,t,r.toISOString(),o);return i.lastInsertRowid===0||i.changes===0?(s&&s.trim()!==""&&this.db.prepare(`
UPDATE sdk_sessions
SET project = ?, user_prompt = ?
WHERE claude_session_id = ?
`).run(s,t,e),this.db.prepare(`
SELECT id FROM sdk_sessions WHERE claude_session_id = ? LIMIT 1
`).get(e).id):o.lastInsertRowid}updateSDKSessionId(e,s){return this.db.prepare(`
`).get(e).id):i.lastInsertRowid}updateSDKSessionId(e,s){return this.db.prepare(`
UPDATE sdk_sessions
SET sdk_session_id = ?
WHERE id = ? AND sdk_session_id IS NULL
@@ -313,38 +323,38 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
FROM sdk_sessions
WHERE id = ?
LIMIT 1
`).get(e)?.worker_port||null}saveUserPrompt(e,s,t){let r=new Date,n=r.getTime();return this.db.prepare(`
`).get(e)?.worker_port||null}saveUserPrompt(e,s,t){let r=new Date,o=r.getTime();return this.db.prepare(`
INSERT INTO user_prompts
(claude_session_id, prompt_number, prompt_text, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?)
`).run(e,s,t,r.toISOString(),n).lastInsertRowid}getUserPrompt(e,s){return this.db.prepare(`
`).run(e,s,t,r.toISOString(),o).lastInsertRowid}getUserPrompt(e,s){return this.db.prepare(`
SELECT prompt_text
FROM user_prompts
WHERE claude_session_id = ? AND prompt_number = ?
LIMIT 1
`).get(e,s)?.prompt_text??null}storeObservation(e,s,t,r,n=0){let i=new Date,o=i.getTime();this.db.prepare(`
`).get(e,s)?.prompt_text??null}storeObservation(e,s,t,r,o=0){let n=new Date,i=n.getTime();this.db.prepare(`
SELECT id FROM sdk_sessions WHERE sdk_session_id = ?
`).get(e)||(this.db.prepare(`
INSERT INTO sdk_sessions
(claude_session_id, sdk_session_id, project, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, 'active')
`).run(e,e,s,i.toISOString(),o),console.error(`[SessionStore] Auto-created session record for session_id: ${e}`));let m=this.db.prepare(`
`).run(e,e,s,n.toISOString(),i),console.error(`[SessionStore] Auto-created session record for session_id: ${e}`));let _=this.db.prepare(`
INSERT INTO observations
(sdk_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(e,s,t.type,t.title,t.subtitle,JSON.stringify(t.facts),t.narrative,JSON.stringify(t.concepts),JSON.stringify(t.files_read),JSON.stringify(t.files_modified),r||null,n,i.toISOString(),o);return{id:Number(m.lastInsertRowid),createdAtEpoch:o}}storeSummary(e,s,t,r,n=0){let i=new Date,o=i.getTime();this.db.prepare(`
`).run(e,s,t.type,t.title,t.subtitle,JSON.stringify(t.facts),t.narrative,JSON.stringify(t.concepts),JSON.stringify(t.files_read),JSON.stringify(t.files_modified),r||null,o,n.toISOString(),i);return{id:Number(_.lastInsertRowid),createdAtEpoch:i}}storeSummary(e,s,t,r,o=0){let n=new Date,i=n.getTime();this.db.prepare(`
SELECT id FROM sdk_sessions WHERE sdk_session_id = ?
`).get(e)||(this.db.prepare(`
INSERT INTO sdk_sessions
(claude_session_id, sdk_session_id, project, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, 'active')
`).run(e,e,s,i.toISOString(),o),console.error(`[SessionStore] Auto-created session record for session_id: ${e}`));let m=this.db.prepare(`
`).run(e,e,s,n.toISOString(),i),console.error(`[SessionStore] Auto-created session record for session_id: ${e}`));let _=this.db.prepare(`
INSERT INTO session_summaries
(sdk_session_id, project, request, investigated, learned, completed,
next_steps, notes, prompt_number, discovery_tokens, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(e,s,t.request,t.investigated,t.learned,t.completed,t.next_steps,t.notes,r||null,n,i.toISOString(),o);return{id:Number(m.lastInsertRowid),createdAtEpoch:o}}markSessionCompleted(e){let s=new Date,t=s.getTime();this.db.prepare(`
`).run(e,s,t.request,t.investigated,t.learned,t.completed,t.next_steps,t.notes,r||null,o,n.toISOString(),i);return{id:Number(_.lastInsertRowid),createdAtEpoch:i}}markSessionCompleted(e){let s=new Date,t=s.getTime();this.db.prepare(`
UPDATE sdk_sessions
SET status = 'completed', completed_at = ?, completed_at_epoch = ?
WHERE id = ?
@@ -352,67 +362,67 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
UPDATE sdk_sessions
SET status = 'failed', completed_at = ?, completed_at_epoch = ?
WHERE id = ?
`).run(s.toISOString(),t,e)}getSessionSummariesByIds(e,s={}){if(e.length===0)return[];let{orderBy:t="date_desc",limit:r}=s,n=t==="date_asc"?"ASC":"DESC",i=r?`LIMIT ${r}`:"",o=e.map(()=>"?").join(",");return this.db.prepare(`
`).run(s.toISOString(),t,e)}getSessionSummariesByIds(e,s={}){if(e.length===0)return[];let{orderBy:t="date_desc",limit:r}=s,o=t==="date_asc"?"ASC":"DESC",n=r?`LIMIT ${r}`:"",i=e.map(()=>"?").join(",");return this.db.prepare(`
SELECT * FROM session_summaries
WHERE id IN (${o})
ORDER BY created_at_epoch ${n}
${i}
`).all(...e)}getUserPromptsByIds(e,s={}){if(e.length===0)return[];let{orderBy:t="date_desc",limit:r}=s,n=t==="date_asc"?"ASC":"DESC",i=r?`LIMIT ${r}`:"",o=e.map(()=>"?").join(",");return this.db.prepare(`
WHERE id IN (${i})
ORDER BY created_at_epoch ${o}
${n}
`).all(...e)}getUserPromptsByIds(e,s={}){if(e.length===0)return[];let{orderBy:t="date_desc",limit:r}=s,o=t==="date_asc"?"ASC":"DESC",n=r?`LIMIT ${r}`:"",i=e.map(()=>"?").join(",");return this.db.prepare(`
SELECT
up.*,
s.project,
s.sdk_session_id
FROM user_prompts up
JOIN sdk_sessions s ON up.claude_session_id = s.claude_session_id
WHERE up.id IN (${o})
ORDER BY up.created_at_epoch ${n}
${i}
`).all(...e)}getTimelineAroundTimestamp(e,s=10,t=10,r){return this.getTimelineAroundObservation(null,e,s,t,r)}getTimelineAroundObservation(e,s,t=10,r=10,n){let i=n?"AND project = ?":"",o=n?[n]:[],p,c;if(e!==null){let g=`
WHERE up.id IN (${i})
ORDER BY up.created_at_epoch ${o}
${n}
`).all(...e)}getTimelineAroundTimestamp(e,s=10,t=10,r){return this.getTimelineAroundObservation(null,e,s,t,r)}getTimelineAroundObservation(e,s,t=10,r=10,o){let n=o?"AND project = ?":"",i=o?[o]:[],p,d;if(e!==null){let S=`
SELECT id, created_at_epoch
FROM observations
WHERE id <= ? ${i}
WHERE id <= ? ${n}
ORDER BY id DESC
LIMIT ?
`,S=`
`,b=`
SELECT id, created_at_epoch
FROM observations
WHERE id >= ? ${i}
WHERE id >= ? ${n}
ORDER BY id ASC
LIMIT ?
`;try{let u=this.db.prepare(g).all(e,...o,t+1),d=this.db.prepare(S).all(e,...o,r+1);if(u.length===0&&d.length===0)return{observations:[],sessions:[],prompts:[]};p=u.length>0?u[u.length-1].created_at_epoch:s,c=d.length>0?d[d.length-1].created_at_epoch:s}catch(u){return console.error("[SessionStore] Error getting boundary observations:",u.message),{observations:[],sessions:[],prompts:[]}}}else{let g=`
`;try{let u=this.db.prepare(S).all(e,...i,t+1),c=this.db.prepare(b).all(e,...i,r+1);if(u.length===0&&c.length===0)return{observations:[],sessions:[],prompts:[]};p=u.length>0?u[u.length-1].created_at_epoch:s,d=c.length>0?c[c.length-1].created_at_epoch:s}catch(u){return console.error("[SessionStore] Error getting boundary observations:",u.message),{observations:[],sessions:[],prompts:[]}}}else{let S=`
SELECT created_at_epoch
FROM observations
WHERE created_at_epoch <= ? ${i}
WHERE created_at_epoch <= ? ${n}
ORDER BY created_at_epoch DESC
LIMIT ?
`,S=`
`,b=`
SELECT created_at_epoch
FROM observations
WHERE created_at_epoch >= ? ${i}
WHERE created_at_epoch >= ? ${n}
ORDER BY created_at_epoch ASC
LIMIT ?
`;try{let u=this.db.prepare(g).all(s,...o,t),d=this.db.prepare(S).all(s,...o,r+1);if(u.length===0&&d.length===0)return{observations:[],sessions:[],prompts:[]};p=u.length>0?u[u.length-1].created_at_epoch:s,c=d.length>0?d[d.length-1].created_at_epoch:s}catch(u){return console.error("[SessionStore] Error getting boundary timestamps:",u.message),{observations:[],sessions:[],prompts:[]}}}let _=`
`;try{let u=this.db.prepare(S).all(s,...i,t),c=this.db.prepare(b).all(s,...i,r+1);if(u.length===0&&c.length===0)return{observations:[],sessions:[],prompts:[]};p=u.length>0?u[u.length-1].created_at_epoch:s,d=c.length>0?c[c.length-1].created_at_epoch:s}catch(u){return console.error("[SessionStore] Error getting boundary timestamps:",u.message),{observations:[],sessions:[],prompts:[]}}}let E=`
SELECT *
FROM observations
WHERE created_at_epoch >= ? AND created_at_epoch <= ? ${i}
WHERE created_at_epoch >= ? AND created_at_epoch <= ? ${n}
ORDER BY created_at_epoch ASC
`,m=`
`,_=`
SELECT *
FROM session_summaries
WHERE created_at_epoch >= ? AND created_at_epoch <= ? ${i}
WHERE created_at_epoch >= ? AND created_at_epoch <= ? ${n}
ORDER BY created_at_epoch ASC
`,T=`
SELECT up.*, s.project, s.sdk_session_id
FROM user_prompts up
JOIN sdk_sessions s ON up.claude_session_id = s.claude_session_id
WHERE up.created_at_epoch >= ? AND up.created_at_epoch <= ? ${i.replace("project","s.project")}
WHERE up.created_at_epoch >= ? AND up.created_at_epoch <= ? ${n.replace("project","s.project")}
ORDER BY up.created_at_epoch ASC
`;try{let g=this.db.prepare(_).all(p,c,...o),S=this.db.prepare(m).all(p,c,...o),u=this.db.prepare(T).all(p,c,...o);return{observations:g,sessions:S.map(d=>({id:d.id,sdk_session_id:d.sdk_session_id,project:d.project,request:d.request,completed:d.completed,next_steps:d.next_steps,created_at:d.created_at,created_at_epoch:d.created_at_epoch})),prompts:u.map(d=>({id:d.id,claude_session_id:d.claude_session_id,project:d.project,prompt:d.prompt_text,created_at:d.created_at,created_at_epoch:d.created_at_epoch}))}}catch(g){return console.error("[SessionStore] Error querying timeline records:",g.message),{observations:[],sessions:[],prompts:[]}}}close(){this.db.close()}};function K(a,e,s){return a==="PreCompact"?e?{continue:!0,suppressOutput:!0}:{continue:!1,stopReason:s.reason||"Pre-compact operation failed",suppressOutput:!0}:a==="SessionStart"?e&&s.context?{continue:!0,suppressOutput:!0,hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:s.context}}:{continue:!0,suppressOutput:!0}:a==="UserPromptSubmit"||a==="PostToolUse"?{continue:!0,suppressOutput:!0}:a==="Stop"?{continue:!0,suppressOutput:!0}:{continue:e,suppressOutput:!0,...s.reason&&!e?{stopReason:s.reason}:{}}}function L(a,e,s={}){let t=K(a,e,s);return JSON.stringify(t)}import A from"path";import{homedir as V}from"os";import{existsSync as y,readFileSync as q}from"fs";import{spawnSync as J}from"child_process";var Q=100,z=500,Z=10;function h(){try{let a=A.join(V(),".claude-mem","settings.json");if(y(a)){let e=JSON.parse(q(a,"utf-8")),s=parseInt(e.env?.CLAUDE_MEM_WORKER_PORT,10);if(!isNaN(s))return s}}catch{}return parseInt(process.env.CLAUDE_MEM_WORKER_PORT||"37777",10)}async function U(){try{let a=h();return(await fetch(`http://127.0.0.1:${a}/health`,{signal:AbortSignal.timeout(Q)})).ok}catch{return!1}}async function ee(){try{let a=N(),e=A.join(a,"ecosystem.config.cjs");if(!y(e))throw new Error(`Ecosystem config not found at ${e}`);let 
s=A.join(a,"node_modules",".bin","pm2"),t=process.platform==="win32"?s+".cmd":s,r=y(t)?t:"pm2",n=J(r,["start",e],{cwd:a,stdio:"pipe",encoding:"utf-8",windowsHide:!0});if(n.status!==0)throw new Error(n.stderr||"PM2 start failed");for(let i=0;i<Z;i++)if(await new Promise(o=>setTimeout(o,z)),await U())return!0;return!1}catch{return!1}}async function w(){if(await U())return;if(!await ee()){let e=h(),s=N();throw new Error(`Worker service failed to start on port ${e}.
`;try{let S=this.db.prepare(E).all(p,d,...i),b=this.db.prepare(_).all(p,d,...i),u=this.db.prepare(T).all(p,d,...i);return{observations:S,sessions:b.map(c=>({id:c.id,sdk_session_id:c.sdk_session_id,project:c.project,request:c.request,completed:c.completed,next_steps:c.next_steps,created_at:c.created_at,created_at_epoch:c.created_at_epoch})),prompts:u.map(c=>({id:c.id,claude_session_id:c.claude_session_id,project:c.project,prompt:c.prompt_text,created_at:c.created_at,created_at_epoch:c.created_at_epoch}))}}catch(S){return console.error("[SessionStore] Error querying timeline records:",S.message),{observations:[],sessions:[],prompts:[]}}}close(){this.db.close()}};function q(a,e,s){return a==="PreCompact"?e?{continue:!0,suppressOutput:!0}:{continue:!1,stopReason:s.reason||"Pre-compact operation failed",suppressOutput:!0}:a==="SessionStart"?e&&s.context?{continue:!0,suppressOutput:!0,hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:s.context}}:{continue:!0,suppressOutput:!0}:a==="UserPromptSubmit"||a==="PostToolUse"?{continue:!0,suppressOutput:!0}:a==="Stop"?{continue:!0,suppressOutput:!0}:{continue:e,suppressOutput:!0,...s.reason&&!e?{stopReason:s.reason}:{}}}function L(a,e,s={}){let t=q(a,e,s);return JSON.stringify(t)}import C from"path";import{homedir as ee}from"os";import{spawnSync as se}from"child_process";import{readFileSync as z,existsSync as Z}from"fs";var J=["bugfix","feature","refactor","discovery","decision","change"],Q=["how-it-works","why-it-exists","what-changed","problem-solution","gotcha","pattern","trade-off"];var U=J.join(","),M=Q.join(",");var O=class{static 
DEFAULTS={CLAUDE_MEM_MODEL:"claude-haiku-4-5",CLAUDE_MEM_CONTEXT_OBSERVATIONS:"50",CLAUDE_MEM_WORKER_PORT:"37777",CLAUDE_MEM_CONTEXT_SHOW_READ_TOKENS:"true",CLAUDE_MEM_CONTEXT_SHOW_WORK_TOKENS:"true",CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_AMOUNT:"true",CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_PERCENT:"true",CLAUDE_MEM_CONTEXT_OBSERVATION_TYPES:U,CLAUDE_MEM_CONTEXT_OBSERVATION_CONCEPTS:M,CLAUDE_MEM_CONTEXT_FULL_COUNT:"5",CLAUDE_MEM_CONTEXT_FULL_FIELD:"narrative",CLAUDE_MEM_CONTEXT_SESSION_COUNT:"10",CLAUDE_MEM_CONTEXT_SHOW_LAST_SUMMARY:"true",CLAUDE_MEM_CONTEXT_SHOW_LAST_MESSAGE:"false"};static getAllDefaults(){return{...this.DEFAULTS}}static get(e){return process.env[e]||this.DEFAULTS[e]}static getInt(e){let s=this.get(e);return parseInt(s,10)}static getBool(e){return this.get(e)==="true"}static loadFromFile(e){if(!Z(e))return this.getAllDefaults();let s=z(e,"utf-8"),r=JSON.parse(s).env||{},o={...this.DEFAULTS};for(let n of Object.keys(this.DEFAULTS))r[n]!==void 0&&(o[n]=r[n]);return o}};var te=100,re=500,oe=10;function h(){let a=C.join(ee(),".claude-mem","settings.json"),e=O.loadFromFile(a);return parseInt(e.CLAUDE_MEM_WORKER_PORT,10)}async function w(){try{let a=h();return(await fetch(`http://127.0.0.1:${a}/health`,{signal:AbortSignal.timeout(te)})).ok}catch{return!1}}async function ne(){try{let a=f(),e=C.join(a,"ecosystem.config.cjs");if(!existsSync(e))throw new Error(`Ecosystem config not found at ${e}`);let s=C.join(a,"node_modules",".bin","pm2"),t=process.platform==="win32"?s+".cmd":s,r=existsSync(t)?t:"pm2",o=se(r,["start",e],{cwd:a,stdio:"pipe",encoding:"utf-8",windowsHide:!0});if(o.status!==0)throw new Error(o.stderr||"PM2 start failed");for(let n=0;n<oe;n++)if(await new Promise(i=>setTimeout(i,re)),await w())return!0;return!1}catch{return!1}}async function F(){if(await w())return;if(!await ne()){let e=h(),s=f();throw new Error(`Worker service failed to start on port ${e}.
To start manually, run:
cd ${s}
npx pm2 start ecosystem.config.cjs
If already running, try: npx pm2 restart claude-mem-worker`)}}import{appendFileSync as se}from"fs";import{homedir as te}from"os";import{join as re}from"path";var ne=re(te(),".claude-mem","silent.log");function b(a,e,s=""){let t=new Date().toISOString(),o=((new Error().stack||"").split(`
`)[2]||"").match(/at\s+(?:.*\s+)?\(?([^:]+):(\d+):(\d+)\)?/),p=o?`${o[1].split("/").pop()}:${o[2]}`:"unknown",c=`[${t}] [${p}] ${a}`;if(e!==void 0)try{c+=` ${JSON.stringify(e)}`}catch(_){c+=` [stringify error: ${_}]`}c+=`
`;try{se(ne,c)}catch(_){console.error("[silent-debug] Failed to write to log:",_)}return s}var M=100;function oe(a){let e=(a.match(/<private>/g)||[]).length,s=(a.match(/<claude-mem-context>/g)||[]).length;return e+s}function F(a){if(typeof a!="string")return b("[tag-stripping] received non-string for prompt context:",{type:typeof a}),"";let e=oe(a);return e>M&&b("[tag-stripping] tag count exceeds limit, truncating:",{tagCount:e,maxAllowed:M,contentLength:a.length}),a.replace(/<claude-mem-context>[\s\S]*?<\/claude-mem-context>/g,"").replace(/<private>[\s\S]*?<\/private>/g,"").trim()}async function ae(a){if(!a)throw new Error("newHook requires input");let{session_id:e,cwd:s,prompt:t}=a;b("[new-hook] Input received",{session_id:e,cwd:s,cwd_type:typeof s,cwd_length:s?.length,has_cwd:!!s,prompt_length:t?.length});let r=ie.basename(s);b("[new-hook] Project extracted",{project:r,project_type:typeof r,project_length:r?.length,is_empty:r==="",cwd_was:s}),await w();let n=new R,i=n.createSDKSession(e,r,t),o=n.incrementPromptCounter(i),p=F(t);if(!p||p.trim()===""){b("[new-hook] Prompt entirely private, skipping memory operations",{session_id:e,promptNumber:o,originalLength:t.length}),n.close(),console.error(`[new-hook] Session ${i}, prompt #${o} (fully private - skipped)`),console.log(L("UserPromptSubmit",!0));return}n.saveUserPrompt(e,o,p),console.error(`[new-hook] Session ${i}, prompt #${o}`),n.close();let c=h(),_=t.startsWith("/")?t.substring(1):t;try{let m=await fetch(`http://127.0.0.1:${c}/sessions/${i}/init`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({project:r,userPrompt:_,promptNumber:o}),signal:AbortSignal.timeout(5e3)});if(!m.ok){let T=await m.text();throw new Error(`Failed to initialize session: ${m.status} ${T}`)}}catch(m){throw m.cause?.code==="ECONNREFUSED"||m.name==="TimeoutError"||m.message.includes("fetch failed")?new Error("There's a problem with the worker. 
If you just updated, type `pm2 restart claude-mem-worker` in your terminal to continue"):m}console.log(L("UserPromptSubmit",!0))}var v="";X.on("data",a=>v+=a);X.on("end",async()=>{let a=v?JSON.parse(v):void 0;await ae(a)});
If already running, try: npx pm2 restart claude-mem-worker`)}}import{appendFileSync as ie}from"fs";import{homedir as ae}from"os";import{join as pe}from"path";var ce=pe(ae(),".claude-mem","silent.log");function g(a,e,s=""){let t=new Date().toISOString(),i=((new Error().stack||"").split(`
`)[2]||"").match(/at\s+(?:.*\s+)?\(?([^:]+):(\d+):(\d+)\)?/),p=i?`${i[1].split("/").pop()}:${i[2]}`:"unknown",d=`[${t}] [${p}] ${a}`;if(e!==void 0)try{d+=` ${JSON.stringify(e)}`}catch(E){d+=` [stringify error: ${E}]`}d+=`
`;try{ie(ce,d)}catch(E){console.error("[silent-debug] Failed to write to log:",E)}return s}var X=100;function de(a){let e=(a.match(/<private>/g)||[]).length,s=(a.match(/<claude-mem-context>/g)||[]).length;return e+s}function P(a){if(typeof a!="string")return g("[tag-stripping] received non-string for prompt context:",{type:typeof a}),"";let e=de(a);return e>X&&g("[tag-stripping] tag count exceeds limit, truncating:",{tagCount:e,maxAllowed:X,contentLength:a.length}),a.replace(/<claude-mem-context>[\s\S]*?<\/claude-mem-context>/g,"").replace(/<private>[\s\S]*?<\/private>/g,"").trim()}async function ue(a){if(!a)throw new Error("newHook requires input");let{session_id:e,cwd:s,prompt:t}=a;g("[new-hook] Input received",{session_id:e,cwd:s,cwd_type:typeof s,cwd_length:s?.length,has_cwd:!!s,prompt_length:t?.length});let r=_e.basename(s);g("[new-hook] Project extracted",{project:r,project_type:typeof r,project_length:r?.length,is_empty:r==="",cwd_was:s}),await F();let o=new R,n=o.createSDKSession(e,r,t),i=o.incrementPromptCounter(n),p=P(t);if(!p||p.trim()===""){g("[new-hook] Prompt entirely private, skipping memory operations",{session_id:e,promptNumber:i,originalLength:t.length}),o.close(),console.error(`[new-hook] Session ${n}, prompt #${i} (fully private - skipped)`),console.log(L("UserPromptSubmit",!0));return}o.saveUserPrompt(e,i,p),console.error(`[new-hook] Session ${n}, prompt #${i}`),o.close();let d=h(),E=t.startsWith("/")?t.substring(1):t;try{let _=await fetch(`http://127.0.0.1:${d}/sessions/${n}/init`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({project:r,userPrompt:E,promptNumber:i}),signal:AbortSignal.timeout(5e3)});if(!_.ok){let T=await _.text();throw new Error(`Failed to initialize session: ${_.status} ${T}`)}}catch(_){throw _.cause?.code==="ECONNREFUSED"||_.name==="TimeoutError"||_.message.includes("fetch failed")?new Error("There's a problem with the worker. 
If you just updated, type `pm2 restart claude-mem-worker` in your terminal to continue"):_}console.log(L("UserPromptSubmit",!0))}var D="";H.on("data",a=>D+=a);H.on("end",async()=>{let a=D?JSON.parse(D):void 0;await ue(a)});
+5 -413
View File
@@ -1,418 +1,10 @@
#!/usr/bin/env node
import{stdin as X}from"process";import Y from"better-sqlite3";import{join as l,dirname as B,basename as ce}from"path";import{homedir as k}from"os";import{existsSync as le,mkdirSync as $}from"fs";import{fileURLToPath as j}from"url";function W(){return typeof __dirname<"u"?__dirname:B(j(import.meta.url))}var G=W(),b=process.env.CLAUDE_MEM_DATA_DIR||l(k(),".claude-mem"),O=process.env.CLAUDE_CONFIG_DIR||l(k(),".claude"),Te=l(b,"archives"),ge=l(b,"logs"),be=l(b,"trash"),Se=l(b,"backups"),Re=l(b,"settings.json"),x=l(b,"claude-mem.db"),he=l(b,"vector-db"),fe=l(O,"settings.json"),Ne=l(O,"commands"),Oe=l(O,"CLAUDE.md");function U(a){$(a,{recursive:!0})}function I(){return l(G,"..","..")}var L=(n=>(n[n.DEBUG=0]="DEBUG",n[n.INFO=1]="INFO",n[n.WARN=2]="WARN",n[n.ERROR=3]="ERROR",n[n.SILENT=4]="SILENT",n))(L||{}),A=class{level;useColor;constructor(){let e=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=L[e]??1,this.useColor=process.stdout.isTTY??!1}correlationId(e,s){return`obs-${e}-${s}`}sessionId(e){return`session-${e}`}formatData(e){if(e==null)return"";if(typeof e=="string")return e;if(typeof e=="number"||typeof e=="boolean")return e.toString();if(typeof e=="object"){if(e instanceof Error)return this.level===0?`${e.message}
${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Object.keys(e);return s.length===0?"{}":s.length<=3?JSON.stringify(e):`{${s.length} keys: ${s.slice(0,3).join(", ")}...}`}return String(e)}formatTool(e,s){if(!s)return e;try{let t=typeof s=="string"?JSON.parse(s):s;if(e==="Bash"&&t.command){let r=t.command.length>50?t.command.substring(0,50)+"...":t.command;return`${e}(${r})`}if(e==="Read"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Edit"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Write"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}return e}catch{return e}}log(e,s,t,r,n){if(e<this.level)return;let i=new Date().toISOString().replace("T"," ").substring(0,23),o=L[e].padEnd(5),p=s.padEnd(6),u="";r?.correlationId?u=`[${r.correlationId}] `:r?.sessionId&&(u=`[session-${r.sessionId}] `);let m="";n!=null&&(this.level===0&&typeof n=="object"?m=`
`+JSON.stringify(n,null,2):m=" "+this.formatData(n));let T="";if(r){let{sessionId:E,sdkSessionId:g,correlationId:c,...d}=r;Object.keys(d).length>0&&(T=` {${Object.entries(d).map(([P,H])=>`${P}=${H}`).join(", ")}}`)}let _=`[${i}] [${o}] [${p}] ${u}${t}${T}${m}`;e===3?console.error(_):console.log(_)}debug(e,s,t,r){this.log(0,e,s,t,r)}info(e,s,t,r){this.log(1,e,s,t,r)}warn(e,s,t,r){this.log(2,e,s,t,r)}error(e,s,t,r){this.log(3,e,s,t,r)}dataIn(e,s,t,r){this.info(e,`\u2192 ${s}`,t,r)}dataOut(e,s,t,r){this.info(e,`\u2190 ${s}`,t,r)}success(e,s,t,r){this.info(e,`\u2713 ${s}`,t,r)}failure(e,s,t,r){this.error(e,`\u2717 ${s}`,t,r)}timing(e,s,t,r){this.info(e,`\u23F1 ${s}`,r,{duration:`${t}ms`})}},S=new A;var h=class{db;constructor(){U(b),this.db=new Y(x),this.db.pragma("journal_mode = WAL"),this.db.pragma("synchronous = NORMAL"),this.db.pragma("foreign_keys = ON"),this.initializeSchema(),this.ensureWorkerPortColumn(),this.ensurePromptTrackingColumns(),this.removeSessionSummariesUniqueConstraint(),this.addObservationHierarchicalFields(),this.makeObservationsTextNullable(),this.createUserPromptsTable(),this.ensureDiscoveryTokensColumn()}initializeSchema(){try{this.db.exec(`
CREATE TABLE IF NOT EXISTS schema_versions (
id INTEGER PRIMARY KEY,
version INTEGER UNIQUE NOT NULL,
applied_at TEXT NOT NULL
)
`);let e=this.db.prepare("SELECT version FROM schema_versions ORDER BY version").all();(e.length>0?Math.max(...e.map(t=>t.version)):0)===0&&(console.error("[SessionStore] Initializing fresh database with migration004..."),this.db.exec(`
CREATE TABLE IF NOT EXISTS sdk_sessions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
claude_session_id TEXT UNIQUE NOT NULL,
sdk_session_id TEXT UNIQUE,
project TEXT NOT NULL,
user_prompt TEXT,
started_at TEXT NOT NULL,
started_at_epoch INTEGER NOT NULL,
completed_at TEXT,
completed_at_epoch INTEGER,
status TEXT CHECK(status IN ('active', 'completed', 'failed')) NOT NULL DEFAULT 'active'
);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_claude_id ON sdk_sessions(claude_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_sdk_id ON sdk_sessions(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_project ON sdk_sessions(project);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_status ON sdk_sessions(status);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_started ON sdk_sessions(started_at_epoch DESC);
CREATE TABLE IF NOT EXISTS observations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT NOT NULL,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery')),
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_observations_sdk_session ON observations(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_observations_project ON observations(project);
CREATE INDEX IF NOT EXISTS idx_observations_type ON observations(type);
CREATE INDEX IF NOT EXISTS idx_observations_created ON observations(created_at_epoch DESC);
CREATE TABLE IF NOT EXISTS session_summaries (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT UNIQUE NOT NULL,
project TEXT NOT NULL,
request TEXT,
investigated TEXT,
learned TEXT,
completed TEXT,
next_steps TEXT,
files_read TEXT,
files_edited TEXT,
notes TEXT,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_session_summaries_project ON session_summaries(project);
CREATE INDEX IF NOT EXISTS idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.prepare("INSERT INTO schema_versions (version, applied_at) VALUES (?, ?)").run(4,new Date().toISOString()),console.error("[SessionStore] Migration004 applied successfully"))}catch(e){throw console.error("[SessionStore] Schema initialization error:",e.message),e}}ensureWorkerPortColumn(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(5))return;this.db.pragma("table_info(sdk_sessions)").some(r=>r.name==="worker_port")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER"),console.error("[SessionStore] Added worker_port column to sdk_sessions table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(5,new Date().toISOString())}catch(e){console.error("[SessionStore] Migration error:",e.message)}}ensurePromptTrackingColumns(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(6))return;this.db.pragma("table_info(sdk_sessions)").some(p=>p.name==="prompt_counter")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN prompt_counter INTEGER DEFAULT 0"),console.error("[SessionStore] Added prompt_counter column to sdk_sessions table")),this.db.pragma("table_info(observations)").some(p=>p.name==="prompt_number")||(this.db.exec("ALTER TABLE observations ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to observations table")),this.db.pragma("table_info(session_summaries)").some(p=>p.name==="prompt_number")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to session_summaries table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(6,new Date().toISOString())}catch(e){console.error("[SessionStore] Prompt tracking migration error:",e.message)}}removeSessionSummariesUniqueConstraint(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = 
?").get(7))return;if(!this.db.pragma("index_list(session_summaries)").some(r=>r.unique===1)){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString());return}console.error("[SessionStore] Removing UNIQUE constraint from session_summaries.sdk_session_id..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE session_summaries_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
project TEXT NOT NULL,
request TEXT,
investigated TEXT,
learned TEXT,
completed TEXT,
next_steps TEXT,
files_read TEXT,
files_edited TEXT,
notes TEXT,
prompt_number INTEGER,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
)
`),this.db.exec(`
INSERT INTO session_summaries_new
SELECT id, sdk_session_id, project, request, investigated, learned,
completed, next_steps, files_read, files_edited, notes,
prompt_number, created_at, created_at_epoch
FROM session_summaries
`),this.db.exec("DROP TABLE session_summaries"),this.db.exec("ALTER TABLE session_summaries_new RENAME TO session_summaries"),this.db.exec(`
CREATE INDEX idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX idx_session_summaries_project ON session_summaries(project);
CREATE INDEX idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString()),console.error("[SessionStore] Successfully removed UNIQUE constraint from session_summaries.sdk_session_id")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (remove UNIQUE constraint):",e.message)}}addObservationHierarchicalFields(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(8))return;if(this.db.pragma("table_info(observations)").some(r=>r.name==="title")){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString());return}console.error("[SessionStore] Adding hierarchical fields to observations table..."),this.db.exec(`
ALTER TABLE observations ADD COLUMN title TEXT;
ALTER TABLE observations ADD COLUMN subtitle TEXT;
ALTER TABLE observations ADD COLUMN facts TEXT;
ALTER TABLE observations ADD COLUMN narrative TEXT;
ALTER TABLE observations ADD COLUMN concepts TEXT;
ALTER TABLE observations ADD COLUMN files_read TEXT;
ALTER TABLE observations ADD COLUMN files_modified TEXT;
`),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString()),console.error("[SessionStore] Successfully added hierarchical fields to observations table")}catch(e){console.error("[SessionStore] Migration error (add hierarchical fields):",e.message)}}makeObservationsTextNullable(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(9))return;let t=this.db.pragma("table_info(observations)").find(r=>r.name==="text");if(!t||t.notnull===0){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString());return}console.error("[SessionStore] Making observations.text nullable..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE observations_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery', 'change')),
title TEXT,
subtitle TEXT,
facts TEXT,
narrative TEXT,
concepts TEXT,
files_read TEXT,
files_modified TEXT,
prompt_number INTEGER,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
)
`),this.db.exec(`
INSERT INTO observations_new
SELECT id, sdk_session_id, project, text, type, title, subtitle, facts,
narrative, concepts, files_read, files_modified, prompt_number,
created_at, created_at_epoch
FROM observations
`),this.db.exec("DROP TABLE observations"),this.db.exec("ALTER TABLE observations_new RENAME TO observations"),this.db.exec(`
CREATE INDEX idx_observations_sdk_session ON observations(sdk_session_id);
CREATE INDEX idx_observations_project ON observations(project);
CREATE INDEX idx_observations_type ON observations(type);
CREATE INDEX idx_observations_created ON observations(created_at_epoch DESC);
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString()),console.error("[SessionStore] Successfully made observations.text nullable")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (make text nullable):",e.message)}}createUserPromptsTable(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(10))return;if(this.db.pragma("table_info(user_prompts)").length>0){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(10,new Date().toISOString());return}console.error("[SessionStore] Creating user_prompts table with FTS5 support..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE user_prompts (
id INTEGER PRIMARY KEY AUTOINCREMENT,
claude_session_id TEXT NOT NULL,
prompt_number INTEGER NOT NULL,
prompt_text TEXT NOT NULL,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(claude_session_id) REFERENCES sdk_sessions(claude_session_id) ON DELETE CASCADE
);
CREATE INDEX idx_user_prompts_claude_session ON user_prompts(claude_session_id);
CREATE INDEX idx_user_prompts_created ON user_prompts(created_at_epoch DESC);
CREATE INDEX idx_user_prompts_prompt_number ON user_prompts(prompt_number);
CREATE INDEX idx_user_prompts_lookup ON user_prompts(claude_session_id, prompt_number);
`),this.db.exec(`
CREATE VIRTUAL TABLE user_prompts_fts USING fts5(
prompt_text,
content='user_prompts',
content_rowid='id'
);
`),this.db.exec(`
CREATE TRIGGER user_prompts_ai AFTER INSERT ON user_prompts BEGIN
INSERT INTO user_prompts_fts(rowid, prompt_text)
VALUES (new.id, new.prompt_text);
END;
CREATE TRIGGER user_prompts_ad AFTER DELETE ON user_prompts BEGIN
INSERT INTO user_prompts_fts(user_prompts_fts, rowid, prompt_text)
VALUES('delete', old.id, old.prompt_text);
END;
CREATE TRIGGER user_prompts_au AFTER UPDATE ON user_prompts BEGIN
INSERT INTO user_prompts_fts(user_prompts_fts, rowid, prompt_text)
VALUES('delete', old.id, old.prompt_text);
INSERT INTO user_prompts_fts(rowid, prompt_text)
VALUES (new.id, new.prompt_text);
END;
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(10,new Date().toISOString()),console.error("[SessionStore] Successfully created user_prompts table with FTS5 support")}catch(t){throw this.db.exec("ROLLBACK"),t}}catch(e){console.error("[SessionStore] Migration error (create user_prompts table):",e.message)}}ensureDiscoveryTokensColumn(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(11))return;this.db.pragma("table_info(observations)").some(i=>i.name==="discovery_tokens")||(this.db.exec("ALTER TABLE observations ADD COLUMN discovery_tokens INTEGER DEFAULT 0"),console.error("[SessionStore] Added discovery_tokens column to observations table")),this.db.pragma("table_info(session_summaries)").some(i=>i.name==="discovery_tokens")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN discovery_tokens INTEGER DEFAULT 0"),console.error("[SessionStore] Added discovery_tokens column to session_summaries table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(11,new Date().toISOString())}catch(e){throw console.error("[SessionStore] Discovery tokens migration error:",e.message),e}}getRecentSummaries(e,s=10){return this.db.prepare(`
SELECT
request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, prompt_number, created_at
FROM session_summaries
WHERE project = ?
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e,s)}getRecentSummariesWithSessionInfo(e,s=3){return this.db.prepare(`
SELECT
sdk_session_id, request, learned, completed, next_steps,
prompt_number, created_at
FROM session_summaries
WHERE project = ?
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e,s)}getRecentObservations(e,s=20){return this.db.prepare(`
SELECT type, text, prompt_number, created_at
FROM observations
WHERE project = ?
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e,s)}getAllRecentObservations(e=100){return this.db.prepare(`
SELECT id, type, title, subtitle, text, project, prompt_number, created_at, created_at_epoch
FROM observations
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e)}getAllRecentSummaries(e=50){return this.db.prepare(`
SELECT id, request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, project, prompt_number,
created_at, created_at_epoch
FROM session_summaries
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e)}getAllRecentUserPrompts(e=100){return this.db.prepare(`
SELECT
up.id,
up.claude_session_id,
s.project,
up.prompt_number,
up.prompt_text,
up.created_at,
up.created_at_epoch
FROM user_prompts up
LEFT JOIN sdk_sessions s ON up.claude_session_id = s.claude_session_id
ORDER BY up.created_at_epoch DESC
LIMIT ?
`).all(e)}getAllProjects(){return this.db.prepare(`
SELECT DISTINCT project
FROM sdk_sessions
WHERE project IS NOT NULL AND project != ''
ORDER BY project ASC
`).all().map(t=>t.project)}getRecentSessionsWithStatus(e,s=3){return this.db.prepare(`
SELECT * FROM (
SELECT
s.sdk_session_id,
s.status,
s.started_at,
s.started_at_epoch,
s.user_prompt,
CASE WHEN sum.sdk_session_id IS NOT NULL THEN 1 ELSE 0 END as has_summary
FROM sdk_sessions s
LEFT JOIN session_summaries sum ON s.sdk_session_id = sum.sdk_session_id
WHERE s.project = ? AND s.sdk_session_id IS NOT NULL
GROUP BY s.sdk_session_id
ORDER BY s.started_at_epoch DESC
LIMIT ?
)
ORDER BY started_at_epoch ASC
`).all(e,s)}getObservationsForSession(e){return this.db.prepare(`
SELECT title, subtitle, type, prompt_number
FROM observations
WHERE sdk_session_id = ?
ORDER BY created_at_epoch ASC
`).all(e)}getObservationById(e){return this.db.prepare(`
SELECT *
FROM observations
WHERE id = ?
`).get(e)||null}getObservationsByIds(e,s={}){if(e.length===0)return[];let{orderBy:t="date_desc",limit:r}=s,n=t==="date_asc"?"ASC":"DESC",i=r?`LIMIT ${r}`:"",o=e.map(()=>"?").join(",");return this.db.prepare(`
SELECT *
FROM observations
WHERE id IN (${o})
ORDER BY created_at_epoch ${n}
${i}
`).all(...e)}getSummaryForSession(e){return this.db.prepare(`
SELECT
request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, prompt_number, created_at
FROM session_summaries
WHERE sdk_session_id = ?
ORDER BY created_at_epoch DESC
LIMIT 1
`).get(e)||null}getFilesForSession(e){let t=this.db.prepare(`
SELECT files_read, files_modified
FROM observations
WHERE sdk_session_id = ?
`).all(e),r=new Set,n=new Set;for(let i of t){if(i.files_read)try{let o=JSON.parse(i.files_read);Array.isArray(o)&&o.forEach(p=>r.add(p))}catch{}if(i.files_modified)try{let o=JSON.parse(i.files_modified);Array.isArray(o)&&o.forEach(p=>n.add(p))}catch{}}return{filesRead:Array.from(r),filesModified:Array.from(n)}}getSessionById(e){return this.db.prepare(`
SELECT id, claude_session_id, sdk_session_id, project, user_prompt
FROM sdk_sessions
WHERE id = ?
LIMIT 1
`).get(e)||null}findActiveSDKSession(e){return this.db.prepare(`
SELECT id, sdk_session_id, project, worker_port
FROM sdk_sessions
WHERE claude_session_id = ? AND status = 'active'
LIMIT 1
`).get(e)||null}findAnySDKSession(e){return this.db.prepare(`
SELECT id
FROM sdk_sessions
WHERE claude_session_id = ?
LIMIT 1
`).get(e)||null}reactivateSession(e,s){this.db.prepare(`
UPDATE sdk_sessions
SET status = 'active', user_prompt = ?, worker_port = NULL
WHERE id = ?
`).run(s,e)}incrementPromptCounter(e){return this.db.prepare(`
UPDATE sdk_sessions
SET prompt_counter = COALESCE(prompt_counter, 0) + 1
WHERE id = ?
`).run(e),this.db.prepare(`
SELECT prompt_counter FROM sdk_sessions WHERE id = ?
`).get(e)?.prompt_counter||1}getPromptCounter(e){return this.db.prepare(`
SELECT prompt_counter FROM sdk_sessions WHERE id = ?
`).get(e)?.prompt_counter||0}createSDKSession(e,s,t){let r=new Date,n=r.getTime(),o=this.db.prepare(`
INSERT OR IGNORE INTO sdk_sessions
(claude_session_id, sdk_session_id, project, user_prompt, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, ?, 'active')
`).run(e,e,s,t,r.toISOString(),n);return o.lastInsertRowid===0||o.changes===0?(s&&s.trim()!==""&&this.db.prepare(`
UPDATE sdk_sessions
SET project = ?, user_prompt = ?
WHERE claude_session_id = ?
`).run(s,t,e),this.db.prepare(`
SELECT id FROM sdk_sessions WHERE claude_session_id = ? LIMIT 1
`).get(e).id):o.lastInsertRowid}updateSDKSessionId(e,s){return this.db.prepare(`
UPDATE sdk_sessions
SET sdk_session_id = ?
WHERE id = ? AND sdk_session_id IS NULL
`).run(s,e).changes===0?(S.debug("DB","sdk_session_id already set, skipping update",{sessionId:e,sdkSessionId:s}),!1):!0}setWorkerPort(e,s){this.db.prepare(`
UPDATE sdk_sessions
SET worker_port = ?
WHERE id = ?
`).run(s,e)}getWorkerPort(e){return this.db.prepare(`
SELECT worker_port
FROM sdk_sessions
WHERE id = ?
LIMIT 1
`).get(e)?.worker_port||null}saveUserPrompt(e,s,t){let r=new Date,n=r.getTime();return this.db.prepare(`
INSERT INTO user_prompts
(claude_session_id, prompt_number, prompt_text, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?)
`).run(e,s,t,r.toISOString(),n).lastInsertRowid}getUserPrompt(e,s){return this.db.prepare(`
SELECT prompt_text
FROM user_prompts
WHERE claude_session_id = ? AND prompt_number = ?
LIMIT 1
`).get(e,s)?.prompt_text??null}storeObservation(e,s,t,r,n=0){let i=new Date,o=i.getTime();this.db.prepare(`
SELECT id FROM sdk_sessions WHERE sdk_session_id = ?
`).get(e)||(this.db.prepare(`
INSERT INTO sdk_sessions
(claude_session_id, sdk_session_id, project, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, 'active')
`).run(e,e,s,i.toISOString(),o),console.error(`[SessionStore] Auto-created session record for session_id: ${e}`));let T=this.db.prepare(`
INSERT INTO observations
(sdk_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(e,s,t.type,t.title,t.subtitle,JSON.stringify(t.facts),t.narrative,JSON.stringify(t.concepts),JSON.stringify(t.files_read),JSON.stringify(t.files_modified),r||null,n,i.toISOString(),o);return{id:Number(T.lastInsertRowid),createdAtEpoch:o}}storeSummary(e,s,t,r,n=0){let i=new Date,o=i.getTime();this.db.prepare(`
SELECT id FROM sdk_sessions WHERE sdk_session_id = ?
`).get(e)||(this.db.prepare(`
INSERT INTO sdk_sessions
(claude_session_id, sdk_session_id, project, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, 'active')
`).run(e,e,s,i.toISOString(),o),console.error(`[SessionStore] Auto-created session record for session_id: ${e}`));let T=this.db.prepare(`
INSERT INTO session_summaries
(sdk_session_id, project, request, investigated, learned, completed,
next_steps, notes, prompt_number, discovery_tokens, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(e,s,t.request,t.investigated,t.learned,t.completed,t.next_steps,t.notes,r||null,n,i.toISOString(),o);return{id:Number(T.lastInsertRowid),createdAtEpoch:o}}markSessionCompleted(e){let s=new Date,t=s.getTime();this.db.prepare(`
UPDATE sdk_sessions
SET status = 'completed', completed_at = ?, completed_at_epoch = ?
WHERE id = ?
`).run(s.toISOString(),t,e)}markSessionFailed(e){let s=new Date,t=s.getTime();this.db.prepare(`
UPDATE sdk_sessions
SET status = 'failed', completed_at = ?, completed_at_epoch = ?
WHERE id = ?
`).run(s.toISOString(),t,e)}getSessionSummariesByIds(e,s={}){if(e.length===0)return[];let{orderBy:t="date_desc",limit:r}=s,n=t==="date_asc"?"ASC":"DESC",i=r?`LIMIT ${r}`:"",o=e.map(()=>"?").join(",");return this.db.prepare(`
SELECT * FROM session_summaries
WHERE id IN (${o})
ORDER BY created_at_epoch ${n}
${i}
`).all(...e)}getUserPromptsByIds(e,s={}){if(e.length===0)return[];let{orderBy:t="date_desc",limit:r}=s,n=t==="date_asc"?"ASC":"DESC",i=r?`LIMIT ${r}`:"",o=e.map(()=>"?").join(",");return this.db.prepare(`
SELECT
up.*,
s.project,
s.sdk_session_id
FROM user_prompts up
JOIN sdk_sessions s ON up.claude_session_id = s.claude_session_id
WHERE up.id IN (${o})
ORDER BY up.created_at_epoch ${n}
${i}
`).all(...e)}getTimelineAroundTimestamp(e,s=10,t=10,r){return this.getTimelineAroundObservation(null,e,s,t,r)}getTimelineAroundObservation(e,s,t=10,r=10,n){let i=n?"AND project = ?":"",o=n?[n]:[],p,u;if(e!==null){let E=`
SELECT id, created_at_epoch
FROM observations
WHERE id <= ? ${i}
ORDER BY id DESC
LIMIT ?
`,g=`
SELECT id, created_at_epoch
FROM observations
WHERE id >= ? ${i}
ORDER BY id ASC
LIMIT ?
`;try{let c=this.db.prepare(E).all(e,...o,t+1),d=this.db.prepare(g).all(e,...o,r+1);if(c.length===0&&d.length===0)return{observations:[],sessions:[],prompts:[]};p=c.length>0?c[c.length-1].created_at_epoch:s,u=d.length>0?d[d.length-1].created_at_epoch:s}catch(c){return console.error("[SessionStore] Error getting boundary observations:",c.message),{observations:[],sessions:[],prompts:[]}}}else{let E=`
SELECT created_at_epoch
FROM observations
WHERE created_at_epoch <= ? ${i}
ORDER BY created_at_epoch DESC
LIMIT ?
`,g=`
SELECT created_at_epoch
FROM observations
WHERE created_at_epoch >= ? ${i}
ORDER BY created_at_epoch ASC
LIMIT ?
`;try{let c=this.db.prepare(E).all(s,...o,t),d=this.db.prepare(g).all(s,...o,r+1);if(c.length===0&&d.length===0)return{observations:[],sessions:[],prompts:[]};p=c.length>0?c[c.length-1].created_at_epoch:s,u=d.length>0?d[d.length-1].created_at_epoch:s}catch(c){return console.error("[SessionStore] Error getting boundary timestamps:",c.message),{observations:[],sessions:[],prompts:[]}}}let m=`
SELECT *
FROM observations
WHERE created_at_epoch >= ? AND created_at_epoch <= ? ${i}
ORDER BY created_at_epoch ASC
`,T=`
SELECT *
FROM session_summaries
WHERE created_at_epoch >= ? AND created_at_epoch <= ? ${i}
ORDER BY created_at_epoch ASC
`,_=`
SELECT up.*, s.project, s.sdk_session_id
FROM user_prompts up
JOIN sdk_sessions s ON up.claude_session_id = s.claude_session_id
WHERE up.created_at_epoch >= ? AND up.created_at_epoch <= ? ${i.replace("project","s.project")}
ORDER BY up.created_at_epoch ASC
`;try{let E=this.db.prepare(m).all(p,u,...o),g=this.db.prepare(T).all(p,u,...o),c=this.db.prepare(_).all(p,u,...o);return{observations:E,sessions:g.map(d=>({id:d.id,sdk_session_id:d.sdk_session_id,project:d.project,request:d.request,completed:d.completed,next_steps:d.next_steps,created_at:d.created_at,created_at_epoch:d.created_at_epoch})),prompts:c.map(d=>({id:d.id,claude_session_id:d.claude_session_id,project:d.project,prompt:d.prompt_text,created_at:d.created_at,created_at_epoch:d.created_at_epoch}))}}catch(E){return console.error("[SessionStore] Error querying timeline records:",E.message),{observations:[],sessions:[],prompts:[]}}}close(){this.db.close()}};function K(a,e,s){return a==="PreCompact"?e?{continue:!0,suppressOutput:!0}:{continue:!1,stopReason:s.reason||"Pre-compact operation failed",suppressOutput:!0}:a==="SessionStart"?e&&s.context?{continue:!0,suppressOutput:!0,hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:s.context}}:{continue:!0,suppressOutput:!0}:a==="UserPromptSubmit"||a==="PostToolUse"?{continue:!0,suppressOutput:!0}:a==="Stop"?{continue:!0,suppressOutput:!0}:{continue:e,suppressOutput:!0,...s.reason&&!e?{stopReason:s.reason}:{}}}function f(a,e,s={}){let t=K(a,e,s);return JSON.stringify(t)}import v from"path";import{homedir as V}from"os";import{existsSync as y,readFileSync as q}from"fs";import{spawnSync as J}from"child_process";var Q=100,z=500,Z=10;function N(){try{let a=v.join(V(),".claude-mem","settings.json");if(y(a)){let e=JSON.parse(q(a,"utf-8")),s=parseInt(e.env?.CLAUDE_MEM_WORKER_PORT,10);if(!isNaN(s))return s}}catch{}return parseInt(process.env.CLAUDE_MEM_WORKER_PORT||"37777",10)}async function w(){try{let a=N();return(await fetch(`http://127.0.0.1:${a}/health`,{signal:AbortSignal.timeout(Q)})).ok}catch{return!1}}async function ee(){try{let a=I(),e=v.join(a,"ecosystem.config.cjs");if(!y(e))throw new Error(`Ecosystem config not found at ${e}`);let 
s=v.join(a,"node_modules",".bin","pm2"),t=process.platform==="win32"?s+".cmd":s,r=y(t)?t:"pm2",n=J(r,["start",e],{cwd:a,stdio:"pipe",encoding:"utf-8",windowsHide:!0});if(n.status!==0)throw new Error(n.stderr||"PM2 start failed");for(let i=0;i<Z;i++)if(await new Promise(o=>setTimeout(o,z)),await w())return!0;return!1}catch{return!1}}async function M(){if(await w())return;if(!await ee()){let e=N(),s=I();throw new Error(`Worker service failed to start on port ${e}.
import{stdin as I}from"process";function v(n,t,e){return n==="PreCompact"?t?{continue:!0,suppressOutput:!0}:{continue:!1,stopReason:e.reason||"Pre-compact operation failed",suppressOutput:!0}:n==="SessionStart"?t&&e.context?{continue:!0,suppressOutput:!0,hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:e.context}}:{continue:!0,suppressOutput:!0}:n==="UserPromptSubmit"||n==="PostToolUse"?{continue:!0,suppressOutput:!0}:n==="Stop"?{continue:!0,suppressOutput:!0}:{continue:t,suppressOutput:!0,...e.reason&&!t?{stopReason:e.reason}:{}}}function S(n,t,e={}){let o=v(n,t,e);return JSON.stringify(o)}var T=(s=>(s[s.DEBUG=0]="DEBUG",s[s.INFO=1]="INFO",s[s.WARN=2]="WARN",s[s.ERROR=3]="ERROR",s[s.SILENT=4]="SILENT",s))(T||{}),O=class{level;useColor;constructor(){let t=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=T[t]??1,this.useColor=process.stdout.isTTY??!1}correlationId(t,e){return`obs-${t}-${e}`}sessionId(t){return`session-${t}`}formatData(t){if(t==null)return"";if(typeof t=="string")return t;if(typeof t=="number"||typeof t=="boolean")return t.toString();if(typeof t=="object"){if(t instanceof Error)return this.level===0?`${t.message}
${t.stack}`:t.message;if(Array.isArray(t))return`[${t.length} items]`;let e=Object.keys(t);return e.length===0?"{}":e.length<=3?JSON.stringify(t):`{${e.length} keys: ${e.slice(0,3).join(", ")}...}`}return String(t)}formatTool(t,e){if(!e)return t;try{let o=typeof e=="string"?JSON.parse(e):e;if(t==="Bash"&&o.command){let r=o.command.length>50?o.command.substring(0,50)+"...":o.command;return`${t}(${r})`}if(t==="Read"&&o.file_path){let r=o.file_path.split("/").pop()||o.file_path;return`${t}(${r})`}if(t==="Edit"&&o.file_path){let r=o.file_path.split("/").pop()||o.file_path;return`${t}(${r})`}if(t==="Write"&&o.file_path){let r=o.file_path.split("/").pop()||o.file_path;return`${t}(${r})`}return t}catch{return t}}log(t,e,o,r,s){if(t<this.level)return;let a=new Date().toISOString().replace("T"," ").substring(0,23),_=T[t].padEnd(5),c=e.padEnd(6),p="";r?.correlationId?p=`[${r.correlationId}] `:r?.sessionId&&(p=`[session-${r.sessionId}] `);let g="";s!=null&&(this.level===0&&typeof s=="object"?g=`
`+JSON.stringify(s,null,2):g=" "+this.formatData(s));let D="";if(r){let{sessionId:Q,sdkSessionId:z,correlationId:Z,...y}=r;Object.keys(y).length>0&&(D=` {${Object.entries(y).map(([x,P])=>`${x}=${P}`).join(", ")}}`)}let R=`[${a}] [${_}] [${c}] ${p}${o}${D}${g}`;t===3?console.error(R):console.log(R)}debug(t,e,o,r){this.log(0,t,e,o,r)}info(t,e,o,r){this.log(1,t,e,o,r)}warn(t,e,o,r){this.log(2,t,e,o,r)}error(t,e,o,r){this.log(3,t,e,o,r)}dataIn(t,e,o,r){this.info(t,`\u2192 ${e}`,o,r)}dataOut(t,e,o,r){this.info(t,`\u2190 ${e}`,o,r)}success(t,e,o,r){this.info(t,`\u2713 ${e}`,o,r)}failure(t,e,o,r){this.error(t,`\u2717 ${e}`,o,r)}timing(t,e,o,r){this.info(t,`\u23F1 ${e}`,r,{duration:`${o}ms`})}},E=new O;import C from"path";import{homedir as X}from"os";import{spawnSync as j}from"child_process";import{join as i,dirname as k,basename as nt}from"path";import{homedir as h}from"os";import{fileURLToPath as b}from"url";function w(){return typeof __dirname<"u"?__dirname:k(b(import.meta.url))}var $=w(),u=process.env.CLAUDE_MEM_DATA_DIR||i(h(),".claude-mem"),m=process.env.CLAUDE_CONFIG_DIR||i(h(),".claude"),ct=i(u,"archives"),ut=i(u,"logs"),pt=i(u,"trash"),_t=i(u,"backups"),Et=i(u,"settings.json"),lt=i(u,"claude-mem.db"),ft=i(u,"vector-db"),gt=i(m,"settings.json"),St=i(m,"commands"),Tt=i(m,"CLAUDE.md");function d(){return i($,"..","..")}import{readFileSync as F,existsSync as B}from"fs";var H=["bugfix","feature","refactor","discovery","decision","change"],W=["how-it-works","why-it-exists","what-changed","problem-solution","gotcha","pattern","trade-off"];var L=H.join(","),M=W.join(",");var l=class{static 
DEFAULTS={CLAUDE_MEM_MODEL:"claude-haiku-4-5",CLAUDE_MEM_CONTEXT_OBSERVATIONS:"50",CLAUDE_MEM_WORKER_PORT:"37777",CLAUDE_MEM_CONTEXT_SHOW_READ_TOKENS:"true",CLAUDE_MEM_CONTEXT_SHOW_WORK_TOKENS:"true",CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_AMOUNT:"true",CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_PERCENT:"true",CLAUDE_MEM_CONTEXT_OBSERVATION_TYPES:L,CLAUDE_MEM_CONTEXT_OBSERVATION_CONCEPTS:M,CLAUDE_MEM_CONTEXT_FULL_COUNT:"5",CLAUDE_MEM_CONTEXT_FULL_FIELD:"narrative",CLAUDE_MEM_CONTEXT_SESSION_COUNT:"10",CLAUDE_MEM_CONTEXT_SHOW_LAST_SUMMARY:"true",CLAUDE_MEM_CONTEXT_SHOW_LAST_MESSAGE:"false"};static getAllDefaults(){return{...this.DEFAULTS}}static get(t){return process.env[t]||this.DEFAULTS[t]}static getInt(t){let e=this.get(t);return parseInt(e,10)}static getBool(t){return this.get(t)==="true"}static loadFromFile(t){if(!B(t))return this.getAllDefaults();let e=F(t,"utf-8"),r=JSON.parse(e).env||{},s={...this.DEFAULTS};for(let a of Object.keys(this.DEFAULTS))r[a]!==void 0&&(s[a]=r[a]);return s}};var K=100,V=500,G=10;function f(){let n=C.join(X(),".claude-mem","settings.json"),t=l.loadFromFile(n);return parseInt(t.CLAUDE_MEM_WORKER_PORT,10)}async function N(){try{let n=f();return(await fetch(`http://127.0.0.1:${n}/health`,{signal:AbortSignal.timeout(K)})).ok}catch{return!1}}async function Y(){try{let n=d(),t=C.join(n,"ecosystem.config.cjs");if(!existsSync(t))throw new Error(`Ecosystem config not found at ${t}`);let e=C.join(n,"node_modules",".bin","pm2"),o=process.platform==="win32"?e+".cmd":e,r=existsSync(o)?o:"pm2",s=j(r,["start",t],{cwd:n,stdio:"pipe",encoding:"utf-8",windowsHide:!0});if(s.status!==0)throw new Error(s.stderr||"PM2 start failed");for(let a=0;a<G;a++)if(await new Promise(_=>setTimeout(_,V)),await N())return!0;return!1}catch{return!1}}async function U(){if(await N())return;if(!await Y()){let t=f(),e=d();throw new Error(`Worker service failed to start on port ${t}.
To start manually, run:
cd ${s}
cd ${e}
npx pm2 start ecosystem.config.cjs
If already running, try: npx pm2 restart claude-mem-worker`)}}import{appendFileSync as se}from"fs";import{homedir as te}from"os";import{join as re}from"path";var oe=re(te(),".claude-mem","silent.log");function R(a,e,s=""){let t=new Date().toISOString(),o=((new Error().stack||"").split(`
`)[2]||"").match(/at\s+(?:.*\s+)?\(?([^:]+):(\d+):(\d+)\)?/),p=o?`${o[1].split("/").pop()}:${o[2]}`:"unknown",u=`[${t}] [${p}] ${a}`;if(e!==void 0)try{u+=` ${JSON.stringify(e)}`}catch(m){u+=` [stringify error: ${m}]`}u+=`
`;try{se(oe,u)}catch(m){console.error("[silent-debug] Failed to write to log:",m)}return s}var F=100;function ne(a){let e=(a.match(/<private>/g)||[]).length,s=(a.match(/<claude-mem-context>/g)||[]).length;return e+s}function C(a){if(typeof a!="string")return R("[tag-stripping] received non-string for JSON context:",{type:typeof a}),"{}";let e=ne(a);return e>F&&R("[tag-stripping] tag count exceeds limit, truncating:",{tagCount:e,maxAllowed:F,contentLength:a.length}),a.replace(/<claude-mem-context>[\s\S]*?<\/claude-mem-context>/g,"").replace(/<private>[\s\S]*?<\/private>/g,"").trim()}var ie=new Set(["ListMcpResourcesTool","SlashCommand","Skill","TodoWrite","AskUserQuestion"]);async function ae(a){if(!a)throw new Error("saveHook requires input");let{session_id:e,cwd:s,tool_name:t,tool_input:r,tool_response:n}=a;if(ie.has(t)){console.log(f("PostToolUse",!0));return}await M();let i=new h,o=i.createSDKSession(e,"",""),p=i.getPromptCounter(o),u=i.getUserPrompt(e,p);if(!u||u.trim()===""){R("[save-hook] Skipping observation - user prompt was entirely private",{session_id:e,promptNumber:p,tool_name:t}),i.close(),console.log(f("PostToolUse",!0));return}i.close();let m=S.formatTool(t,r),T=N();S.dataIn("HOOK",`PostToolUse: ${m}`,{sessionId:o,workerPort:T});try{let _="{}",E="{}";try{_=r!==void 0?C(JSON.stringify(r)):"{}"}catch(c){R("[save-hook] Failed to stringify tool_input:",{error:c,tool_name:t}),_='{"error": "Failed to serialize tool_input"}'}try{E=n!==void 0?C(JSON.stringify(n)):"{}"}catch(c){R("[save-hook] Failed to stringify tool_response:",{error:c,tool_name:t}),E='{"error": "Failed to serialize tool_response"}'}let g=await fetch(`http://127.0.0.1:${T}/sessions/${o}/observations`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({tool_name:t,tool_input:_,tool_response:E,prompt_number:p,cwd:s||""}),signal:AbortSignal.timeout(2e3)});if(!g.ok){let c=await g.text();throw S.failure("HOOK","Failed to send 
observation",{sessionId:o,status:g.status},c),new Error(`Failed to send observation to worker: ${g.status} ${c}`)}S.debug("HOOK","Observation sent successfully",{sessionId:o,toolName:t})}catch(_){throw _.cause?.code==="ECONNREFUSED"||_.name==="TimeoutError"||_.message.includes("fetch failed")?new Error("There's a problem with the worker. If you just updated, type `pm2 restart claude-mem-worker` in your terminal to continue"):_}console.log(f("PostToolUse",!0))}var D="";X.on("data",a=>D+=a);X.on("end",async()=>{let a=D?JSON.parse(D):void 0;await ae(a)});
If already running, try: npx pm2 restart claude-mem-worker`)}}var J=new Set(["ListMcpResourcesTool","SlashCommand","Skill","TodoWrite","AskUserQuestion"]);async function q(n){if(!n)throw new Error("saveHook requires input");let{session_id:t,cwd:e,tool_name:o,tool_input:r,tool_response:s}=n;if(J.has(o)){console.log(S("PostToolUse",!0));return}await U();let a=f(),_=E.formatTool(o,r);E.dataIn("HOOK",`PostToolUse: ${_}`,{workerPort:a});try{let c=await fetch(`http://127.0.0.1:${a}/api/sessions/observations`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({claudeSessionId:t,tool_name:o,tool_input:r,tool_response:s,cwd:e||""}),signal:AbortSignal.timeout(2e3)});if(!c.ok){let p=await c.text();throw E.failure("HOOK","Failed to send observation",{status:c.status},p),new Error(`Failed to send observation to worker: ${c.status} ${p}`)}E.debug("HOOK","Observation sent successfully",{toolName:o})}catch(c){throw c.cause?.code==="ECONNREFUSED"||c.name==="TimeoutError"||c.message.includes("fetch failed")?new Error("There's a problem with the worker. If you just updated, type `pm2 restart claude-mem-worker` in your terminal to continue"):c}console.log(S("PostToolUse",!0))}var A="";I.on("data",n=>A+=n);I.on("end",async()=>{let n=A?JSON.parse(A):void 0;await q(n)});
File diff suppressed because one or more lines are too long
+10 -425
View File
@@ -1,431 +1,16 @@
#!/usr/bin/env node
import{stdin as w}from"process";import{readFileSync as F,existsSync as X}from"fs";import Y from"better-sqlite3";import{join as l,dirname as B,basename as ce}from"path";import{homedir as D}from"os";import{existsSync as le,mkdirSync as j}from"fs";import{fileURLToPath as $}from"url";function W(){return typeof __dirname<"u"?__dirname:B($(import.meta.url))}var G=W(),T=process.env.CLAUDE_MEM_DATA_DIR||l(D(),".claude-mem"),O=process.env.CLAUDE_CONFIG_DIR||l(D(),".claude"),Te=l(T,"archives"),ge=l(T,"logs"),Se=l(T,"trash"),be=l(T,"backups"),Re=l(T,"settings.json"),k=l(T,"claude-mem.db"),he=l(T,"vector-db"),fe=l(O,"settings.json"),Oe=l(O,"commands"),Ne=l(O,"CLAUDE.md");function x(a){j(a,{recursive:!0})}function N(){return l(G,"..","..")}var I=(n=>(n[n.DEBUG=0]="DEBUG",n[n.INFO=1]="INFO",n[n.WARN=2]="WARN",n[n.ERROR=3]="ERROR",n[n.SILENT=4]="SILENT",n))(I||{}),L=class{level;useColor;constructor(){let e=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=I[e]??1,this.useColor=process.stdout.isTTY??!1}correlationId(e,s){return`obs-${e}-${s}`}sessionId(e){return`session-${e}`}formatData(e){if(e==null)return"";if(typeof e=="string")return e;if(typeof e=="number"||typeof e=="boolean")return e.toString();if(typeof e=="object"){if(e instanceof Error)return this.level===0?`${e.message}
${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Object.keys(e);return s.length===0?"{}":s.length<=3?JSON.stringify(e):`{${s.length} keys: ${s.slice(0,3).join(", ")}...}`}return String(e)}formatTool(e,s){if(!s)return e;try{let t=typeof s=="string"?JSON.parse(s):s;if(e==="Bash"&&t.command){let r=t.command.length>50?t.command.substring(0,50)+"...":t.command;return`${e}(${r})`}if(e==="Read"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Edit"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Write"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}return e}catch{return e}}log(e,s,t,r,n){if(e<this.level)return;let o=new Date().toISOString().replace("T"," ").substring(0,23),i=I[e].padEnd(5),p=s.padEnd(6),c="";r?.correlationId?c=`[${r.correlationId}] `:r?.sessionId&&(c=`[session-${r.sessionId}] `);let _="";n!=null&&(this.level===0&&typeof n=="object"?_=`
`+JSON.stringify(n,null,2):_=" "+this.formatData(n));let u="";if(r){let{sessionId:g,sdkSessionId:b,correlationId:m,...d}=r;Object.keys(d).length>0&&(u=` {${Object.entries(d).map(([H,P])=>`${H}=${P}`).join(", ")}}`)}let E=`[${o}] [${i}] [${p}] ${c}${t}${u}${_}`;e===3?console.error(E):console.log(E)}debug(e,s,t,r){this.log(0,e,s,t,r)}info(e,s,t,r){this.log(1,e,s,t,r)}warn(e,s,t,r){this.log(2,e,s,t,r)}error(e,s,t,r){this.log(3,e,s,t,r)}dataIn(e,s,t,r){this.info(e,`\u2192 ${s}`,t,r)}dataOut(e,s,t,r){this.info(e,`\u2190 ${s}`,t,r)}success(e,s,t,r){this.info(e,`\u2713 ${s}`,t,r)}failure(e,s,t,r){this.error(e,`\u2717 ${s}`,t,r)}timing(e,s,t,r){this.info(e,`\u23F1 ${s}`,r,{duration:`${t}ms`})}},S=new L;var R=class{db;constructor(){x(T),this.db=new Y(k),this.db.pragma("journal_mode = WAL"),this.db.pragma("synchronous = NORMAL"),this.db.pragma("foreign_keys = ON"),this.initializeSchema(),this.ensureWorkerPortColumn(),this.ensurePromptTrackingColumns(),this.removeSessionSummariesUniqueConstraint(),this.addObservationHierarchicalFields(),this.makeObservationsTextNullable(),this.createUserPromptsTable(),this.ensureDiscoveryTokensColumn()}initializeSchema(){try{this.db.exec(`
CREATE TABLE IF NOT EXISTS schema_versions (
id INTEGER PRIMARY KEY,
version INTEGER UNIQUE NOT NULL,
applied_at TEXT NOT NULL
)
`);let e=this.db.prepare("SELECT version FROM schema_versions ORDER BY version").all();(e.length>0?Math.max(...e.map(t=>t.version)):0)===0&&(console.error("[SessionStore] Initializing fresh database with migration004..."),this.db.exec(`
CREATE TABLE IF NOT EXISTS sdk_sessions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
claude_session_id TEXT UNIQUE NOT NULL,
sdk_session_id TEXT UNIQUE,
project TEXT NOT NULL,
user_prompt TEXT,
started_at TEXT NOT NULL,
started_at_epoch INTEGER NOT NULL,
completed_at TEXT,
completed_at_epoch INTEGER,
status TEXT CHECK(status IN ('active', 'completed', 'failed')) NOT NULL DEFAULT 'active'
);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_claude_id ON sdk_sessions(claude_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_sdk_id ON sdk_sessions(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_project ON sdk_sessions(project);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_status ON sdk_sessions(status);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_started ON sdk_sessions(started_at_epoch DESC);
CREATE TABLE IF NOT EXISTS observations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT NOT NULL,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery')),
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_observations_sdk_session ON observations(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_observations_project ON observations(project);
CREATE INDEX IF NOT EXISTS idx_observations_type ON observations(type);
CREATE INDEX IF NOT EXISTS idx_observations_created ON observations(created_at_epoch DESC);
CREATE TABLE IF NOT EXISTS session_summaries (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT UNIQUE NOT NULL,
project TEXT NOT NULL,
request TEXT,
investigated TEXT,
learned TEXT,
completed TEXT,
next_steps TEXT,
files_read TEXT,
files_edited TEXT,
notes TEXT,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_session_summaries_project ON session_summaries(project);
CREATE INDEX IF NOT EXISTS idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.prepare("INSERT INTO schema_versions (version, applied_at) VALUES (?, ?)").run(4,new Date().toISOString()),console.error("[SessionStore] Migration004 applied successfully"))}catch(e){throw console.error("[SessionStore] Schema initialization error:",e.message),e}}ensureWorkerPortColumn(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(5))return;this.db.pragma("table_info(sdk_sessions)").some(r=>r.name==="worker_port")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER"),console.error("[SessionStore] Added worker_port column to sdk_sessions table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(5,new Date().toISOString())}catch(e){console.error("[SessionStore] Migration error:",e.message)}}ensurePromptTrackingColumns(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(6))return;this.db.pragma("table_info(sdk_sessions)").some(p=>p.name==="prompt_counter")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN prompt_counter INTEGER DEFAULT 0"),console.error("[SessionStore] Added prompt_counter column to sdk_sessions table")),this.db.pragma("table_info(observations)").some(p=>p.name==="prompt_number")||(this.db.exec("ALTER TABLE observations ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to observations table")),this.db.pragma("table_info(session_summaries)").some(p=>p.name==="prompt_number")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to session_summaries table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(6,new Date().toISOString())}catch(e){console.error("[SessionStore] Prompt tracking migration error:",e.message)}}removeSessionSummariesUniqueConstraint(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = 
?").get(7))return;if(!this.db.pragma("index_list(session_summaries)").some(r=>r.unique===1)){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString());return}console.error("[SessionStore] Removing UNIQUE constraint from session_summaries.sdk_session_id..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE session_summaries_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
project TEXT NOT NULL,
request TEXT,
investigated TEXT,
learned TEXT,
completed TEXT,
next_steps TEXT,
files_read TEXT,
files_edited TEXT,
notes TEXT,
prompt_number INTEGER,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
)
`),this.db.exec(`
INSERT INTO session_summaries_new
SELECT id, sdk_session_id, project, request, investigated, learned,
completed, next_steps, files_read, files_edited, notes,
prompt_number, created_at, created_at_epoch
FROM session_summaries
`),this.db.exec("DROP TABLE session_summaries"),this.db.exec("ALTER TABLE session_summaries_new RENAME TO session_summaries"),this.db.exec(`
CREATE INDEX idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX idx_session_summaries_project ON session_summaries(project);
CREATE INDEX idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString()),console.error("[SessionStore] Successfully removed UNIQUE constraint from session_summaries.sdk_session_id")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (remove UNIQUE constraint):",e.message)}}addObservationHierarchicalFields(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(8))return;if(this.db.pragma("table_info(observations)").some(r=>r.name==="title")){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString());return}console.error("[SessionStore] Adding hierarchical fields to observations table..."),this.db.exec(`
ALTER TABLE observations ADD COLUMN title TEXT;
ALTER TABLE observations ADD COLUMN subtitle TEXT;
ALTER TABLE observations ADD COLUMN facts TEXT;
ALTER TABLE observations ADD COLUMN narrative TEXT;
ALTER TABLE observations ADD COLUMN concepts TEXT;
ALTER TABLE observations ADD COLUMN files_read TEXT;
ALTER TABLE observations ADD COLUMN files_modified TEXT;
`),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString()),console.error("[SessionStore] Successfully added hierarchical fields to observations table")}catch(e){console.error("[SessionStore] Migration error (add hierarchical fields):",e.message)}}makeObservationsTextNullable(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(9))return;let t=this.db.pragma("table_info(observations)").find(r=>r.name==="text");if(!t||t.notnull===0){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString());return}console.error("[SessionStore] Making observations.text nullable..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE observations_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery', 'change')),
title TEXT,
subtitle TEXT,
facts TEXT,
narrative TEXT,
concepts TEXT,
files_read TEXT,
files_modified TEXT,
prompt_number INTEGER,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
)
`),this.db.exec(`
INSERT INTO observations_new
SELECT id, sdk_session_id, project, text, type, title, subtitle, facts,
narrative, concepts, files_read, files_modified, prompt_number,
created_at, created_at_epoch
FROM observations
`),this.db.exec("DROP TABLE observations"),this.db.exec("ALTER TABLE observations_new RENAME TO observations"),this.db.exec(`
CREATE INDEX idx_observations_sdk_session ON observations(sdk_session_id);
CREATE INDEX idx_observations_project ON observations(project);
CREATE INDEX idx_observations_type ON observations(type);
CREATE INDEX idx_observations_created ON observations(created_at_epoch DESC);
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString()),console.error("[SessionStore] Successfully made observations.text nullable")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (make text nullable):",e.message)}}createUserPromptsTable(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(10))return;if(this.db.pragma("table_info(user_prompts)").length>0){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(10,new Date().toISOString());return}console.error("[SessionStore] Creating user_prompts table with FTS5 support..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE user_prompts (
id INTEGER PRIMARY KEY AUTOINCREMENT,
claude_session_id TEXT NOT NULL,
prompt_number INTEGER NOT NULL,
prompt_text TEXT NOT NULL,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(claude_session_id) REFERENCES sdk_sessions(claude_session_id) ON DELETE CASCADE
);
CREATE INDEX idx_user_prompts_claude_session ON user_prompts(claude_session_id);
CREATE INDEX idx_user_prompts_created ON user_prompts(created_at_epoch DESC);
CREATE INDEX idx_user_prompts_prompt_number ON user_prompts(prompt_number);
CREATE INDEX idx_user_prompts_lookup ON user_prompts(claude_session_id, prompt_number);
`),this.db.exec(`
CREATE VIRTUAL TABLE user_prompts_fts USING fts5(
prompt_text,
content='user_prompts',
content_rowid='id'
);
`),this.db.exec(`
CREATE TRIGGER user_prompts_ai AFTER INSERT ON user_prompts BEGIN
INSERT INTO user_prompts_fts(rowid, prompt_text)
VALUES (new.id, new.prompt_text);
END;
CREATE TRIGGER user_prompts_ad AFTER DELETE ON user_prompts BEGIN
INSERT INTO user_prompts_fts(user_prompts_fts, rowid, prompt_text)
VALUES('delete', old.id, old.prompt_text);
END;
CREATE TRIGGER user_prompts_au AFTER UPDATE ON user_prompts BEGIN
INSERT INTO user_prompts_fts(user_prompts_fts, rowid, prompt_text)
VALUES('delete', old.id, old.prompt_text);
INSERT INTO user_prompts_fts(rowid, prompt_text)
VALUES (new.id, new.prompt_text);
END;
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(10,new Date().toISOString()),console.error("[SessionStore] Successfully created user_prompts table with FTS5 support")}catch(t){throw this.db.exec("ROLLBACK"),t}}catch(e){console.error("[SessionStore] Migration error (create user_prompts table):",e.message)}}ensureDiscoveryTokensColumn(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(11))return;this.db.pragma("table_info(observations)").some(o=>o.name==="discovery_tokens")||(this.db.exec("ALTER TABLE observations ADD COLUMN discovery_tokens INTEGER DEFAULT 0"),console.error("[SessionStore] Added discovery_tokens column to observations table")),this.db.pragma("table_info(session_summaries)").some(o=>o.name==="discovery_tokens")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN discovery_tokens INTEGER DEFAULT 0"),console.error("[SessionStore] Added discovery_tokens column to session_summaries table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(11,new Date().toISOString())}catch(e){throw console.error("[SessionStore] Discovery tokens migration error:",e.message),e}}getRecentSummaries(e,s=10){return this.db.prepare(`
SELECT
request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, prompt_number, created_at
FROM session_summaries
WHERE project = ?
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e,s)}getRecentSummariesWithSessionInfo(e,s=3){return this.db.prepare(`
SELECT
sdk_session_id, request, learned, completed, next_steps,
prompt_number, created_at
FROM session_summaries
WHERE project = ?
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e,s)}getRecentObservations(e,s=20){return this.db.prepare(`
SELECT type, text, prompt_number, created_at
FROM observations
WHERE project = ?
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e,s)}getAllRecentObservations(e=100){return this.db.prepare(`
SELECT id, type, title, subtitle, text, project, prompt_number, created_at, created_at_epoch
FROM observations
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e)}getAllRecentSummaries(e=50){return this.db.prepare(`
SELECT id, request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, project, prompt_number,
created_at, created_at_epoch
FROM session_summaries
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e)}getAllRecentUserPrompts(e=100){return this.db.prepare(`
SELECT
up.id,
up.claude_session_id,
s.project,
up.prompt_number,
up.prompt_text,
up.created_at,
up.created_at_epoch
FROM user_prompts up
LEFT JOIN sdk_sessions s ON up.claude_session_id = s.claude_session_id
ORDER BY up.created_at_epoch DESC
LIMIT ?
`).all(e)}getAllProjects(){return this.db.prepare(`
SELECT DISTINCT project
FROM sdk_sessions
WHERE project IS NOT NULL AND project != ''
ORDER BY project ASC
`).all().map(t=>t.project)}getRecentSessionsWithStatus(e,s=3){return this.db.prepare(`
SELECT * FROM (
SELECT
s.sdk_session_id,
s.status,
s.started_at,
s.started_at_epoch,
s.user_prompt,
CASE WHEN sum.sdk_session_id IS NOT NULL THEN 1 ELSE 0 END as has_summary
FROM sdk_sessions s
LEFT JOIN session_summaries sum ON s.sdk_session_id = sum.sdk_session_id
WHERE s.project = ? AND s.sdk_session_id IS NOT NULL
GROUP BY s.sdk_session_id
ORDER BY s.started_at_epoch DESC
LIMIT ?
)
ORDER BY started_at_epoch ASC
`).all(e,s)}getObservationsForSession(e){return this.db.prepare(`
SELECT title, subtitle, type, prompt_number
FROM observations
WHERE sdk_session_id = ?
ORDER BY created_at_epoch ASC
`).all(e)}getObservationById(e){return this.db.prepare(`
SELECT *
FROM observations
WHERE id = ?
`).get(e)||null}getObservationsByIds(e,s={}){if(e.length===0)return[];let{orderBy:t="date_desc",limit:r}=s,n=t==="date_asc"?"ASC":"DESC",o=r?`LIMIT ${r}`:"",i=e.map(()=>"?").join(",");return this.db.prepare(`
SELECT *
FROM observations
WHERE id IN (${i})
ORDER BY created_at_epoch ${n}
${o}
`).all(...e)}getSummaryForSession(e){return this.db.prepare(`
SELECT
request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, prompt_number, created_at
FROM session_summaries
WHERE sdk_session_id = ?
ORDER BY created_at_epoch DESC
LIMIT 1
`).get(e)||null}getFilesForSession(e){let t=this.db.prepare(`
SELECT files_read, files_modified
FROM observations
WHERE sdk_session_id = ?
`).all(e),r=new Set,n=new Set;for(let o of t){if(o.files_read)try{let i=JSON.parse(o.files_read);Array.isArray(i)&&i.forEach(p=>r.add(p))}catch{}if(o.files_modified)try{let i=JSON.parse(o.files_modified);Array.isArray(i)&&i.forEach(p=>n.add(p))}catch{}}return{filesRead:Array.from(r),filesModified:Array.from(n)}}getSessionById(e){return this.db.prepare(`
SELECT id, claude_session_id, sdk_session_id, project, user_prompt
FROM sdk_sessions
WHERE id = ?
LIMIT 1
`).get(e)||null}findActiveSDKSession(e){return this.db.prepare(`
SELECT id, sdk_session_id, project, worker_port
FROM sdk_sessions
WHERE claude_session_id = ? AND status = 'active'
LIMIT 1
`).get(e)||null}findAnySDKSession(e){return this.db.prepare(`
SELECT id
FROM sdk_sessions
WHERE claude_session_id = ?
LIMIT 1
`).get(e)||null}reactivateSession(e,s){this.db.prepare(`
UPDATE sdk_sessions
SET status = 'active', user_prompt = ?, worker_port = NULL
WHERE id = ?
`).run(s,e)}incrementPromptCounter(e){return this.db.prepare(`
UPDATE sdk_sessions
SET prompt_counter = COALESCE(prompt_counter, 0) + 1
WHERE id = ?
`).run(e),this.db.prepare(`
SELECT prompt_counter FROM sdk_sessions WHERE id = ?
`).get(e)?.prompt_counter||1}getPromptCounter(e){return this.db.prepare(`
SELECT prompt_counter FROM sdk_sessions WHERE id = ?
`).get(e)?.prompt_counter||0}createSDKSession(e,s,t){let r=new Date,n=r.getTime(),i=this.db.prepare(`
INSERT OR IGNORE INTO sdk_sessions
(claude_session_id, sdk_session_id, project, user_prompt, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, ?, 'active')
`).run(e,e,s,t,r.toISOString(),n);return i.lastInsertRowid===0||i.changes===0?(s&&s.trim()!==""&&this.db.prepare(`
UPDATE sdk_sessions
SET project = ?, user_prompt = ?
WHERE claude_session_id = ?
`).run(s,t,e),this.db.prepare(`
SELECT id FROM sdk_sessions WHERE claude_session_id = ? LIMIT 1
`).get(e).id):i.lastInsertRowid}updateSDKSessionId(e,s){return this.db.prepare(`
UPDATE sdk_sessions
SET sdk_session_id = ?
WHERE id = ? AND sdk_session_id IS NULL
`).run(s,e).changes===0?(S.debug("DB","sdk_session_id already set, skipping update",{sessionId:e,sdkSessionId:s}),!1):!0}setWorkerPort(e,s){this.db.prepare(`
UPDATE sdk_sessions
SET worker_port = ?
WHERE id = ?
`).run(s,e)}getWorkerPort(e){return this.db.prepare(`
SELECT worker_port
FROM sdk_sessions
WHERE id = ?
LIMIT 1
`).get(e)?.worker_port||null}saveUserPrompt(e,s,t){let r=new Date,n=r.getTime();return this.db.prepare(`
INSERT INTO user_prompts
(claude_session_id, prompt_number, prompt_text, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?)
`).run(e,s,t,r.toISOString(),n).lastInsertRowid}getUserPrompt(e,s){return this.db.prepare(`
SELECT prompt_text
FROM user_prompts
WHERE claude_session_id = ? AND prompt_number = ?
LIMIT 1
`).get(e,s)?.prompt_text??null}storeObservation(e,s,t,r,n=0){let o=new Date,i=o.getTime();this.db.prepare(`
SELECT id FROM sdk_sessions WHERE sdk_session_id = ?
`).get(e)||(this.db.prepare(`
INSERT INTO sdk_sessions
(claude_session_id, sdk_session_id, project, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, 'active')
`).run(e,e,s,o.toISOString(),i),console.error(`[SessionStore] Auto-created session record for session_id: ${e}`));let u=this.db.prepare(`
INSERT INTO observations
(sdk_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(e,s,t.type,t.title,t.subtitle,JSON.stringify(t.facts),t.narrative,JSON.stringify(t.concepts),JSON.stringify(t.files_read),JSON.stringify(t.files_modified),r||null,n,o.toISOString(),i);return{id:Number(u.lastInsertRowid),createdAtEpoch:i}}storeSummary(e,s,t,r,n=0){let o=new Date,i=o.getTime();this.db.prepare(`
SELECT id FROM sdk_sessions WHERE sdk_session_id = ?
`).get(e)||(this.db.prepare(`
INSERT INTO sdk_sessions
(claude_session_id, sdk_session_id, project, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, 'active')
`).run(e,e,s,o.toISOString(),i),console.error(`[SessionStore] Auto-created session record for session_id: ${e}`));let u=this.db.prepare(`
INSERT INTO session_summaries
(sdk_session_id, project, request, investigated, learned, completed,
next_steps, notes, prompt_number, discovery_tokens, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(e,s,t.request,t.investigated,t.learned,t.completed,t.next_steps,t.notes,r||null,n,o.toISOString(),i);return{id:Number(u.lastInsertRowid),createdAtEpoch:i}}markSessionCompleted(e){let s=new Date,t=s.getTime();this.db.prepare(`
UPDATE sdk_sessions
SET status = 'completed', completed_at = ?, completed_at_epoch = ?
WHERE id = ?
`).run(s.toISOString(),t,e)}markSessionFailed(e){let s=new Date,t=s.getTime();this.db.prepare(`
UPDATE sdk_sessions
SET status = 'failed', completed_at = ?, completed_at_epoch = ?
WHERE id = ?
`).run(s.toISOString(),t,e)}getSessionSummariesByIds(e,s={}){if(e.length===0)return[];let{orderBy:t="date_desc",limit:r}=s,n=t==="date_asc"?"ASC":"DESC",o=r?`LIMIT ${r}`:"",i=e.map(()=>"?").join(",");return this.db.prepare(`
SELECT * FROM session_summaries
WHERE id IN (${i})
ORDER BY created_at_epoch ${n}
${o}
`).all(...e)}getUserPromptsByIds(e,s={}){if(e.length===0)return[];let{orderBy:t="date_desc",limit:r}=s,n=t==="date_asc"?"ASC":"DESC",o=r?`LIMIT ${r}`:"",i=e.map(()=>"?").join(",");return this.db.prepare(`
SELECT
up.*,
s.project,
s.sdk_session_id
FROM user_prompts up
JOIN sdk_sessions s ON up.claude_session_id = s.claude_session_id
WHERE up.id IN (${i})
ORDER BY up.created_at_epoch ${n}
${o}
`).all(...e)}getTimelineAroundTimestamp(e,s=10,t=10,r){return this.getTimelineAroundObservation(null,e,s,t,r)}getTimelineAroundObservation(e,s,t=10,r=10,n){let o=n?"AND project = ?":"",i=n?[n]:[],p,c;if(e!==null){let g=`
SELECT id, created_at_epoch
FROM observations
WHERE id <= ? ${o}
ORDER BY id DESC
LIMIT ?
`,b=`
SELECT id, created_at_epoch
FROM observations
WHERE id >= ? ${o}
ORDER BY id ASC
LIMIT ?
`;try{let m=this.db.prepare(g).all(e,...i,t+1),d=this.db.prepare(b).all(e,...i,r+1);if(m.length===0&&d.length===0)return{observations:[],sessions:[],prompts:[]};p=m.length>0?m[m.length-1].created_at_epoch:s,c=d.length>0?d[d.length-1].created_at_epoch:s}catch(m){return console.error("[SessionStore] Error getting boundary observations:",m.message),{observations:[],sessions:[],prompts:[]}}}else{let g=`
SELECT created_at_epoch
FROM observations
WHERE created_at_epoch <= ? ${o}
ORDER BY created_at_epoch DESC
LIMIT ?
`,b=`
SELECT created_at_epoch
FROM observations
WHERE created_at_epoch >= ? ${o}
ORDER BY created_at_epoch ASC
LIMIT ?
`;try{let m=this.db.prepare(g).all(s,...i,t),d=this.db.prepare(b).all(s,...i,r+1);if(m.length===0&&d.length===0)return{observations:[],sessions:[],prompts:[]};p=m.length>0?m[m.length-1].created_at_epoch:s,c=d.length>0?d[d.length-1].created_at_epoch:s}catch(m){return console.error("[SessionStore] Error getting boundary timestamps:",m.message),{observations:[],sessions:[],prompts:[]}}}let _=`
SELECT *
FROM observations
WHERE created_at_epoch >= ? AND created_at_epoch <= ? ${o}
ORDER BY created_at_epoch ASC
`,u=`
SELECT *
FROM session_summaries
WHERE created_at_epoch >= ? AND created_at_epoch <= ? ${o}
ORDER BY created_at_epoch ASC
`,E=`
SELECT up.*, s.project, s.sdk_session_id
FROM user_prompts up
JOIN sdk_sessions s ON up.claude_session_id = s.claude_session_id
WHERE up.created_at_epoch >= ? AND up.created_at_epoch <= ? ${o.replace("project","s.project")}
ORDER BY up.created_at_epoch ASC
`;try{let g=this.db.prepare(_).all(p,c,...i),b=this.db.prepare(u).all(p,c,...i),m=this.db.prepare(E).all(p,c,...i);return{observations:g,sessions:b.map(d=>({id:d.id,sdk_session_id:d.sdk_session_id,project:d.project,request:d.request,completed:d.completed,next_steps:d.next_steps,created_at:d.created_at,created_at_epoch:d.created_at_epoch})),prompts:m.map(d=>({id:d.id,claude_session_id:d.claude_session_id,project:d.project,prompt:d.prompt_text,created_at:d.created_at,created_at_epoch:d.created_at_epoch}))}}catch(g){return console.error("[SessionStore] Error querying timeline records:",g.message),{observations:[],sessions:[],prompts:[]}}}close(){this.db.close()}};function K(a,e,s){return a==="PreCompact"?e?{continue:!0,suppressOutput:!0}:{continue:!1,stopReason:s.reason||"Pre-compact operation failed",suppressOutput:!0}:a==="SessionStart"?e&&s.context?{continue:!0,suppressOutput:!0,hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:s.context}}:{continue:!0,suppressOutput:!0}:a==="UserPromptSubmit"||a==="PostToolUse"?{continue:!0,suppressOutput:!0}:a==="Stop"?{continue:!0,suppressOutput:!0}:{continue:e,suppressOutput:!0,...s.reason&&!e?{stopReason:s.reason}:{}}}function y(a,e,s={}){let t=K(a,e,s);return JSON.stringify(t)}import A from"path";import{homedir as q}from"os";import{existsSync as v,readFileSync as V}from"fs";import{spawnSync as J}from"child_process";var Q=100,z=500,Z=10;function h(){try{let a=A.join(q(),".claude-mem","settings.json");if(v(a)){let e=JSON.parse(V(a,"utf-8")),s=parseInt(e.env?.CLAUDE_MEM_WORKER_PORT,10);if(!isNaN(s))return s}}catch{}return parseInt(process.env.CLAUDE_MEM_WORKER_PORT||"37777",10)}async function U(){try{let a=h();return(await fetch(`http://127.0.0.1:${a}/health`,{signal:AbortSignal.timeout(Q)})).ok}catch{return!1}}async function ee(){try{let a=N(),e=A.join(a,"ecosystem.config.cjs");if(!v(e))throw new Error(`Ecosystem config not found at ${e}`);let 
s=A.join(a,"node_modules",".bin","pm2"),t=process.platform==="win32"?s+".cmd":s,r=v(t)?t:"pm2",n=J(r,["start",e],{cwd:a,stdio:"pipe",encoding:"utf-8",windowsHide:!0});if(n.status!==0)throw new Error(n.stderr||"PM2 start failed");for(let o=0;o<Z;o++)if(await new Promise(i=>setTimeout(i,z)),await U())return!0;return!1}catch{return!1}}async function M(){if(await U())return;if(!await ee()){let e=h(),s=N();throw new Error(`Worker service failed to start on port ${e}.
import{stdin as U}from"process";import{readFileSync as I,existsSync as P}from"fs";function b(o,t,e){return o==="PreCompact"?t?{continue:!0,suppressOutput:!0}:{continue:!1,stopReason:e.reason||"Pre-compact operation failed",suppressOutput:!0}:o==="SessionStart"?t&&e.context?{continue:!0,suppressOutput:!0,hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:e.context}}:{continue:!0,suppressOutput:!0}:o==="UserPromptSubmit"||o==="PostToolUse"?{continue:!0,suppressOutput:!0}:o==="Stop"?{continue:!0,suppressOutput:!0}:{continue:t,suppressOutput:!0,...e.reason&&!t?{stopReason:e.reason}:{}}}function R(o,t,e={}){let r=b(o,t,e);return JSON.stringify(r)}var m=(s=>(s[s.DEBUG=0]="DEBUG",s[s.INFO=1]="INFO",s[s.WARN=2]="WARN",s[s.ERROR=3]="ERROR",s[s.SILENT=4]="SILENT",s))(m||{}),S=class{level;useColor;constructor(){let t=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=m[t]??1,this.useColor=process.stdout.isTTY??!1}correlationId(t,e){return`obs-${t}-${e}`}sessionId(t){return`session-${t}`}formatData(t){if(t==null)return"";if(typeof t=="string")return t;if(typeof t=="number"||typeof t=="boolean")return t.toString();if(typeof t=="object"){if(t instanceof Error)return this.level===0?`${t.message}
${t.stack}`:t.message;if(Array.isArray(t))return`[${t.length} items]`;let e=Object.keys(t);return e.length===0?"{}":e.length<=3?JSON.stringify(t):`{${e.length} keys: ${e.slice(0,3).join(", ")}...}`}return String(t)}formatTool(t,e){if(!e)return t;try{let r=typeof e=="string"?JSON.parse(e):e;if(t==="Bash"&&r.command){let n=r.command.length>50?r.command.substring(0,50)+"...":r.command;return`${t}(${n})`}if(t==="Read"&&r.file_path){let n=r.file_path.split("/").pop()||r.file_path;return`${t}(${n})`}if(t==="Edit"&&r.file_path){let n=r.file_path.split("/").pop()||r.file_path;return`${t}(${n})`}if(t==="Write"&&r.file_path){let n=r.file_path.split("/").pop()||r.file_path;return`${t}(${n})`}return t}catch{return t}}log(t,e,r,n,s){if(t<this.level)return;let i=new Date().toISOString().replace("T"," ").substring(0,23),c=m[t].padEnd(5),f=e.padEnd(6),g="";n?.correlationId?g=`[${n.correlationId}] `:n?.sessionId&&(g=`[session-${n.sessionId}] `);let l="";s!=null&&(this.level===0&&typeof s=="object"?l=`
`+JSON.stringify(s,null,2):l=" "+this.formatData(s));let y="";if(n){let{sessionId:tt,sdkSessionId:et,correlationId:rt,...D}=n;Object.keys(D).length>0&&(y=` {${Object.entries(D).map(([k,v])=>`${k}=${v}`).join(", ")}}`)}let A=`[${i}] [${c}] [${f}] ${g}${r}${y}${l}`;t===3?console.error(A):console.log(A)}debug(t,e,r,n){this.log(0,t,e,r,n)}info(t,e,r,n){this.log(1,t,e,r,n)}warn(t,e,r,n){this.log(2,t,e,r,n)}error(t,e,r,n){this.log(3,t,e,r,n)}dataIn(t,e,r,n){this.info(t,`\u2192 ${e}`,r,n)}dataOut(t,e,r,n){this.info(t,`\u2190 ${e}`,r,n)}success(t,e,r,n){this.info(t,`\u2713 ${e}`,r,n)}failure(t,e,r,n){this.error(t,`\u2717 ${e}`,r,n)}timing(t,e,r,n){this.info(t,`\u23F1 ${e}`,n,{duration:`${r}ms`})}},p=new S;import d from"path";import{homedir as K}from"os";import{spawnSync as V}from"child_process";import{join as a,dirname as w,basename as at}from"path";import{homedir as h}from"os";import{fileURLToPath as $}from"url";function H(){return typeof __dirname<"u"?__dirname:w($(import.meta.url))}var F=H(),u=process.env.CLAUDE_MEM_DATA_DIR||a(h(),".claude-mem"),O=process.env.CLAUDE_CONFIG_DIR||a(h(),".claude"),ft=a(u,"archives"),Et=a(u,"logs"),_t=a(u,"trash"),gt=a(u,"backups"),lt=a(u,"settings.json"),mt=a(u,"claude-mem.db"),St=a(u,"vector-db"),Ot=a(O,"settings.json"),Tt=a(O,"commands"),dt=a(O,"CLAUDE.md");function T(){return a(F,"..","..")}import{readFileSync as B,existsSync as X}from"fs";var W=["bugfix","feature","refactor","discovery","decision","change"],j=["how-it-works","why-it-exists","what-changed","problem-solution","gotcha","pattern","trade-off"];var M=W.join(","),L=j.join(",");var E=class{static 
DEFAULTS={CLAUDE_MEM_MODEL:"claude-haiku-4-5",CLAUDE_MEM_CONTEXT_OBSERVATIONS:"50",CLAUDE_MEM_WORKER_PORT:"37777",CLAUDE_MEM_CONTEXT_SHOW_READ_TOKENS:"true",CLAUDE_MEM_CONTEXT_SHOW_WORK_TOKENS:"true",CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_AMOUNT:"true",CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_PERCENT:"true",CLAUDE_MEM_CONTEXT_OBSERVATION_TYPES:M,CLAUDE_MEM_CONTEXT_OBSERVATION_CONCEPTS:L,CLAUDE_MEM_CONTEXT_FULL_COUNT:"5",CLAUDE_MEM_CONTEXT_FULL_FIELD:"narrative",CLAUDE_MEM_CONTEXT_SESSION_COUNT:"10",CLAUDE_MEM_CONTEXT_SHOW_LAST_SUMMARY:"true",CLAUDE_MEM_CONTEXT_SHOW_LAST_MESSAGE:"false"};static getAllDefaults(){return{...this.DEFAULTS}}static get(t){return process.env[t]||this.DEFAULTS[t]}static getInt(t){let e=this.get(t);return parseInt(e,10)}static getBool(t){return this.get(t)==="true"}static loadFromFile(t){if(!X(t))return this.getAllDefaults();let e=B(t,"utf-8"),n=JSON.parse(e).env||{},s={...this.DEFAULTS};for(let i of Object.keys(this.DEFAULTS))n[i]!==void 0&&(s[i]=n[i]);return s}};var G=100,Y=500,J=10;function _(){let o=d.join(K(),".claude-mem","settings.json"),t=E.loadFromFile(o);return parseInt(t.CLAUDE_MEM_WORKER_PORT,10)}async function N(){try{let o=_();return(await fetch(`http://127.0.0.1:${o}/health`,{signal:AbortSignal.timeout(G)})).ok}catch{return!1}}async function q(){try{let o=T(),t=d.join(o,"ecosystem.config.cjs");if(!existsSync(t))throw new Error(`Ecosystem config not found at ${t}`);let e=d.join(o,"node_modules",".bin","pm2"),r=process.platform==="win32"?e+".cmd":e,n=existsSync(r)?r:"pm2",s=V(n,["start",t],{cwd:o,stdio:"pipe",encoding:"utf-8",windowsHide:!0});if(s.status!==0)throw new Error(s.stderr||"PM2 start failed");for(let i=0;i<J;i++)if(await new Promise(c=>setTimeout(c,Y)),await N())return!0;return!1}catch{return!1}}async function x(){if(await N())return;if(!await q()){let t=_(),e=T();throw new Error(`Worker service failed to start on port ${t}.
To start manually, run:
cd ${s}
cd ${e}
npx pm2 start ecosystem.config.cjs
If already running, try: npx pm2 restart claude-mem-worker`)}}import{appendFileSync as se}from"fs";import{homedir as te}from"os";import{join as re}from"path";var ne=re(te(),".claude-mem","silent.log");function f(a,e,s=""){let t=new Date().toISOString(),i=((new Error().stack||"").split(`
`)[2]||"").match(/at\s+(?:.*\s+)?\(?([^:]+):(\d+):(\d+)\)?/),p=i?`${i[1].split("/").pop()}:${i[2]}`:"unknown",c=`[${t}] [${p}] ${a}`;if(e!==void 0)try{c+=` ${JSON.stringify(e)}`}catch(_){c+=` [stringify error: ${_}]`}c+=`
`;try{se(ne,c)}catch(_){console.error("[silent-debug] Failed to write to log:",_)}return s}function oe(a){if(!a||!X(a))return"";try{let e=F(a,"utf-8").trim();if(!e)return"";let s=e.split(`
`);for(let t=s.length-1;t>=0;t--)try{let r=JSON.parse(s[t]);if(r.type==="user"&&r.message?.content){let n=r.message.content;if(typeof n=="string")return n;if(Array.isArray(n))return n.filter(i=>i.type==="text").map(i=>i.text).join(`
`)}}catch{continue}}catch(e){S.error("HOOK","Failed to read transcript",{transcriptPath:a},e)}return""}function ie(a){if(!a||!X(a))return"";try{let e=F(a,"utf-8").trim();if(!e)return"";let s=e.split(`
`);for(let t=s.length-1;t>=0;t--)try{let r=JSON.parse(s[t]);if(r.type==="assistant"&&r.message?.content){let n="",o=r.message.content;return typeof o=="string"?n=o:Array.isArray(o)&&(n=o.filter(p=>p.type==="text").map(p=>p.text).join(`
`)),n=n.replace(/<system-reminder>[\s\S]*?<\/system-reminder>/g,""),n=n.replace(/\n{3,}/g,`
If already running, try: npx pm2 restart claude-mem-worker`)}}function z(o){if(!o||!P(o))return"";try{let t=I(o,"utf-8").trim();if(!t)return"";let e=t.split(`
`);for(let r=e.length-1;r>=0;r--)try{let n=JSON.parse(e[r]);if(n.type==="user"&&n.message?.content){let s=n.message.content;if(typeof s=="string")return s;if(Array.isArray(s))return s.filter(c=>c.type==="text").map(c=>c.text).join(`
`)}}catch{continue}}catch(t){p.error("HOOK","Failed to read transcript",{transcriptPath:o},t)}return""}function Q(o){if(!o||!P(o))return"";try{let t=I(o,"utf-8").trim();if(!t)return"";let e=t.split(`
`);for(let r=e.length-1;r>=0;r--)try{let n=JSON.parse(e[r]);if(n.type==="assistant"&&n.message?.content){let s="",i=n.message.content;return typeof i=="string"?s=i:Array.isArray(i)&&(s=i.filter(f=>f.type==="text").map(f=>f.text).join(`
`)),s=s.replace(/<system-reminder>[\s\S]*?<\/system-reminder>/g,""),s=s.replace(/\n{3,}/g,`
`).trim(),n}}catch{continue}}catch(e){S.error("HOOK","Failed to read transcript",{transcriptPath:a},e)}return""}async function ae(a){if(!a)throw new Error("summaryHook requires input");let{session_id:e}=a;await M();let s=new R,t=s.createSDKSession(e,"",""),r=s.getPromptCounter(t),n=s.getUserPrompt(e,r);if(!n||n.trim()===""){f("[summary-hook] Skipping summary - user prompt was entirely private",{session_id:e,promptNumber:r}),s.close(),console.log(y("Stop",!0));return}let o=s.db.prepare(`
SELECT id, claude_session_id, sdk_session_id, project
FROM sdk_sessions WHERE id = ?
`).get(t),i=s.db.prepare(`
SELECT COUNT(*) as count
FROM observations
WHERE sdk_session_id = ?
`).get(o?.sdk_session_id);f("[summary-hook] Session diagnostics",{claudeSessionId:e,sessionDbId:t,sdkSessionId:o?.sdk_session_id,project:o?.project,promptNumber:r,observationCount:i?.count||0,transcriptPath:a.transcript_path}),s.close();let p=h(),c=oe(a.transcript_path||""),_=ie(a.transcript_path||"");f("[summary-hook] Extracted messages",{hasLastUserMessage:!!c,hasLastAssistantMessage:!!_,lastAssistantPreview:_.substring(0,200),lastAssistantLength:_.length}),S.dataIn("HOOK","Stop: Requesting summary",{sessionId:t,workerPort:p,promptNumber:r,hasLastUserMessage:!!c,hasLastAssistantMessage:!!_});try{let u=await fetch(`http://127.0.0.1:${p}/sessions/${t}/summarize`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({prompt_number:r,last_user_message:c,last_assistant_message:_}),signal:AbortSignal.timeout(2e3)});if(!u.ok){let E=await u.text();throw S.failure("HOOK","Failed to generate summary",{sessionId:t,status:u.status},E),new Error(`Failed to request summary from worker: ${u.status} ${E}`)}S.debug("HOOK","Summary request sent successfully",{sessionId:t})}catch(u){throw u.cause?.code==="ECONNREFUSED"||u.name==="TimeoutError"||u.message.includes("fetch failed")?new Error("There's a problem with the worker. If you just updated, type `pm2 restart claude-mem-worker` in your terminal to continue"):u}finally{await fetch(`http://127.0.0.1:${p}/api/processing`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({isProcessing:!1})})}console.log(y("Stop",!0))}var C="";w.on("data",a=>C+=a);w.on("end",async()=>{let a=C?JSON.parse(C):void 0;await ae(a)});
`).trim(),s}}catch{continue}}catch(t){p.error("HOOK","Failed to read transcript",{transcriptPath:o},t)}return""}async function Z(o){if(!o)throw new Error("summaryHook requires input");let{session_id:t}=o;await x();let e=_(),r=z(o.transcript_path||""),n=Q(o.transcript_path||"");p.dataIn("HOOK","Stop: Requesting summary",{workerPort:e,hasLastUserMessage:!!r,hasLastAssistantMessage:!!n});try{let s=await fetch(`http://127.0.0.1:${e}/api/sessions/summarize`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({claudeSessionId:t,last_user_message:r,last_assistant_message:n}),signal:AbortSignal.timeout(2e3)});if(!s.ok){let i=await s.text();throw p.failure("HOOK","Failed to generate summary",{status:s.status},i),new Error(`Failed to request summary from worker: ${s.status} ${i}`)}p.debug("HOOK","Summary request sent successfully")}catch(s){throw s.cause?.code==="ECONNREFUSED"||s.name==="TimeoutError"||s.message.includes("fetch failed")?new Error("There's a problem with the worker. If you just updated, type `pm2 restart claude-mem-worker` in your terminal to continue"):s}finally{fetch(`http://127.0.0.1:${e}/api/processing`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({isProcessing:!1})}).catch(()=>{})}console.log(R("Stop",!0))}var C="";U.on("data",o=>C+=o);U.on("end",async()=>{let o=C?JSON.parse(C):void 0;await Z(o)});
+8 -8
View File
@@ -1,7 +1,7 @@
#!/usr/bin/env node
import{execSync as P}from"child_process";import{join as a}from"path";import{homedir as f}from"os";import{existsSync as S}from"fs";import k from"path";import{homedir as A}from"os";import{existsSync as R,readFileSync as C}from"fs";import{join as t,dirname as _,basename as H}from"path";import{homedir as d}from"os";import{fileURLToPath as x}from"url";function E(){return typeof __dirname<"u"?__dirname:_(x(import.meta.url))}var j=E(),o=process.env.CLAUDE_MEM_DATA_DIR||t(d(),".claude-mem"),c=process.env.CLAUDE_CONFIG_DIR||t(d(),".claude"),N=t(o,"archives"),$=t(o,"logs"),F=t(o,"trash"),K=t(o,"backups"),B=t(o,"settings.json"),G=t(o,"claude-mem.db"),V=t(o,"vector-db"),J=t(c,"settings.json"),Y=t(c,"commands"),Z=t(c,"CLAUDE.md");function l(){try{let r=k.join(A(),".claude-mem","settings.json");if(R(r)){let s=JSON.parse(C(r,"utf-8")),n=parseInt(s.env?.CLAUDE_MEM_WORKER_PORT,10);if(!isNaN(n))return n}}catch{}return parseInt(process.env.CLAUDE_MEM_WORKER_PORT||"37777",10)}var v=a(f(),".claude","plugins","marketplaces","thedotmack"),I=a(v,"node_modules");S(I)||(console.error(`
import{join as O,basename as x}from"path";import{homedir as P}from"os";import{existsSync as k}from"fs";import I from"path";import{homedir as w}from"os";import{join as e,dirname as M,basename as X}from"path";import{homedir as l}from"os";import{fileURLToPath as h}from"url";function N(){return typeof __dirname<"u"?__dirname:M(h(import.meta.url))}var G=N(),s=process.env.CLAUDE_MEM_DATA_DIR||e(l(),".claude-mem"),E=process.env.CLAUDE_CONFIG_DIR||e(l(),".claude"),K=e(s,"archives"),Y=e(s,"logs"),$=e(s,"trash"),q=e(s,"backups"),J=e(s,"settings.json"),Z=e(s,"claude-mem.db"),z=e(s,"vector-db"),Q=e(E,"settings.json"),tt=e(E,"commands"),et=e(E,"CLAUDE.md");import{readFileSync as R,existsSync as y}from"fs";var U=["bugfix","feature","refactor","discovery","decision","change"],L=["how-it-works","why-it-exists","what-changed","problem-solution","gotcha","pattern","trade-off"];var S=U.join(","),d=L.join(",");var c=class{static DEFAULTS={CLAUDE_MEM_MODEL:"claude-haiku-4-5",CLAUDE_MEM_CONTEXT_OBSERVATIONS:"50",CLAUDE_MEM_WORKER_PORT:"37777",CLAUDE_MEM_CONTEXT_SHOW_READ_TOKENS:"true",CLAUDE_MEM_CONTEXT_SHOW_WORK_TOKENS:"true",CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_AMOUNT:"true",CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_PERCENT:"true",CLAUDE_MEM_CONTEXT_OBSERVATION_TYPES:S,CLAUDE_MEM_CONTEXT_OBSERVATION_CONCEPTS:d,CLAUDE_MEM_CONTEXT_FULL_COUNT:"5",CLAUDE_MEM_CONTEXT_FULL_FIELD:"narrative",CLAUDE_MEM_CONTEXT_SESSION_COUNT:"10",CLAUDE_MEM_CONTEXT_SHOW_LAST_SUMMARY:"true",CLAUDE_MEM_CONTEXT_SHOW_LAST_MESSAGE:"false"};static getAllDefaults(){return{...this.DEFAULTS}}static get(t){return process.env[t]||this.DEFAULTS[t]}static getInt(t){let r=this.get(t);return parseInt(r,10)}static getBool(t){return this.get(t)==="true"}static loadFromFile(t){if(!y(t))return this.getAllDefaults();let r=R(t,"utf-8"),o=JSON.parse(r).env||{},a={...this.DEFAULTS};for(let i of Object.keys(this.DEFAULTS))o[i]!==void 0&&(a[i]=o[i]);return a}};function g(){let n=I.join(w(),".claude-mem","settings.json"),t=c.loadFromFile(n);return 
parseInt(t.CLAUDE_MEM_WORKER_PORT,10)}var v=O(P(),".claude","plugins","marketplaces","thedotmack"),b=O(v,"node_modules");k(b)||(console.error(`
---
\u{1F389} Note: This appears under Plugin Hook Error, but it's not an error. That's the only option for
\u{1F389} Note: This appears under Plugin Hook Error, but it's not an error. That's the only option for
user messages in Claude Code UI until a better method is provided.
---
@@ -17,7 +17,7 @@ Dependencies have been installed in the background. This only happens once.
Thank you for installing Claude-Mem!
This message was not added to your startup context, so you can continue working as normal.
`),process.exit(3));try{let r=a(f(),".claude","plugins","marketplaces","thedotmack","plugin","scripts","context-hook.js"),s=P(`node "${r}" --colors`,{encoding:"utf8",windowsHide:!0}),n=l(),e=new Date,g=new Date("2025-12-06T00:00:00Z"),h=new Date("2025-12-05T05:00:00Z"),m="";e<h&&(m=`
`),process.exit(3));try{let n=g(),t=x(process.cwd()),r=await fetch(`http://127.0.0.1:${n}/api/context/inject?project=${encodeURIComponent(t)}&colors=true`,{method:"GET",signal:AbortSignal.timeout(5e3)});if(!r.ok)throw new Error(`Worker error ${r.status}`);let u=await r.text(),o=new Date,a=new Date("2025-12-06T00:00:00Z"),i=new Date("2025-12-05T05:00:00Z"),T="";o<i&&(T=`
\u{1F680} \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 \u{1F680}
@@ -27,19 +27,19 @@ This message was not added to your startup context, so you can continue working
\u2B50 Your upvote means the world - thank you!
\u{1F680} \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 \u{1F680}
`);let i="";if(e<g){let D=e.getUTCHours()*60+e.getUTCMinutes(),p=Math.floor((D-300+1440)%1440/60),u=e.getUTCDate(),w=e.getUTCMonth(),y=e.getUTCFullYear()===2025&&w===11&&u>=1&&u<=5,T=p>=17&&p<19;y&&T?i=`
`);let _="";if(o<a){let f=o.getUTCHours()*60+o.getUTCMinutes(),p=Math.floor((f-300+1440)%1440/60),m=o.getUTCDate(),A=o.getUTCMonth(),C=o.getUTCFullYear()===2025&&A===11&&m>=1&&m<=5,D=p>=17&&p<19;C&&D?_=`
\u{1F534} LIVE NOW: AMA w/ Dev (@thedotmack) until 7pm EST
`:i=`
`:_=`
\u2013 LIVE AMA w/ Dev (@thedotmack) Dec 1st\u20135th, 5pm to 7pm EST
`}console.error(`
\u{1F4DD} Claude-Mem Context Loaded
\u2139\uFE0F Note: This appears as stderr but is informational only
`+s+`
`+u+`
\u{1F4A1} New! Wrap all or part of any message with <private> ... </private> to prevent storing sensitive information in your observation history.
\u{1F4AC} Community https://discord.gg/J4wttp9vDu`+m+i+`
\u{1F4AC} Community https://discord.gg/J4wttp9vDu`+T+_+`
\u{1F4FA} Watch live in browser http://localhost:${n}/
`)}catch(r){console.error(`\u274C Failed to load context display: ${r}`)}process.exit(3);
`)}catch(n){console.error(`\u274C Failed to load context display: ${n}`)}process.exit(3);
File diff suppressed because one or more lines are too long
+38 -13
View File
@@ -26,9 +26,14 @@ const WORKER_SERVICE = {
source: 'src/services/worker-service.ts'
};
const SEARCH_SERVER = {
name: 'search-server',
source: 'src/servers/search-server.ts'
const MCP_SERVER = {
name: 'mcp-server',
source: 'src/servers/mcp-server.ts'
};
const CONTEXT_GENERATOR = {
name: 'context-generator',
source: 'src/services/context-generator.ts'
};
async function buildHooks() {
@@ -92,15 +97,15 @@ async function buildHooks() {
const workerStats = fs.statSync(`${hooksDir}/${WORKER_SERVICE.name}.cjs`);
console.log(`✓ worker-service built (${(workerStats.size / 1024).toFixed(2)} KB)`);
// Build search server
console.log(`\n🔧 Building search server...`);
// Build MCP server
console.log(`\n🔧 Building MCP server...`);
await build({
entryPoints: [SEARCH_SERVER.source],
entryPoints: [MCP_SERVER.source],
bundle: true,
platform: 'node',
target: 'node18',
format: 'cjs',
outfile: `${hooksDir}/${SEARCH_SERVER.name}.cjs`,
outfile: `${hooksDir}/${MCP_SERVER.name}.cjs`,
minify: true,
logLevel: 'error',
external: ['better-sqlite3'],
@@ -112,10 +117,30 @@ async function buildHooks() {
}
});
// Make search server executable
fs.chmodSync(`${hooksDir}/${SEARCH_SERVER.name}.cjs`, 0o755);
const searchServerStats = fs.statSync(`${hooksDir}/${SEARCH_SERVER.name}.cjs`);
console.log(`search-server built (${(searchServerStats.size / 1024).toFixed(2)} KB)`);
// Make MCP server executable
fs.chmodSync(`${hooksDir}/${MCP_SERVER.name}.cjs`, 0o755);
const mcpServerStats = fs.statSync(`${hooksDir}/${MCP_SERVER.name}.cjs`);
console.log(`mcp-server built (${(mcpServerStats.size / 1024).toFixed(2)} KB)`);
// Build context generator
console.log(`\n🔧 Building context generator...`);
await build({
entryPoints: [CONTEXT_GENERATOR.source],
bundle: true,
platform: 'node',
target: 'node18',
format: 'cjs',
outfile: `${hooksDir}/${CONTEXT_GENERATOR.name}.cjs`,
minify: true,
logLevel: 'error',
external: ['better-sqlite3'],
define: {
'__DEFAULT_PACKAGE_VERSION__': `"${version}"`
}
});
const contextGenStats = fs.statSync(`${hooksDir}/${CONTEXT_GENERATOR.name}.cjs`);
console.log(`✓ context-generator built (${(contextGenStats.size / 1024).toFixed(2)} KB)`);
// Build each hook
for (const hook of HOOKS) {
@@ -149,11 +174,11 @@ async function buildHooks() {
console.log(`${hook.name} built (${sizeInKB} KB)`);
}
console.log('\n✅ All hooks, worker service, and search server built successfully!');
console.log('\n✅ All hooks, worker service, and MCP server built successfully!');
console.log(` Output: ${hooksDir}/`);
console.log(` - Hooks: *-hook.js`);
console.log(` - Worker: worker-service.cjs`);
console.log(` - Search Server: search-server.cjs`);
console.log(` - MCP Server: mcp-server.cjs`);
console.log(` - Skills: plugin/skills/`);
console.log('\n💡 Note: Dependencies will be auto-installed on first hook execution');
+229
View File
@@ -0,0 +1,229 @@
#!/usr/bin/env node
/**
* One-time script to extract tool handlers from mcp-server.ts into SearchManager.ts
*/
import { readFileSync, writeFileSync } from 'fs';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';
// Resolve paths relative to this script's location (scripts/ -> repo root).
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const projectRoot = join(__dirname, '..');
// Input: the MCP server source whose tool handlers are being extracted.
const mcpServerPath = join(projectRoot, 'src/servers/mcp-server.ts');
// Output: the generated SearchManager class for the worker service.
const outputPath = join(projectRoot, 'src/services/worker/SearchManager.ts');
console.log('Reading mcp-server.ts...');
const content = readFileSync(mcpServerPath, 'utf-8');
// Extract just the sections we need by finding line numbers
// This is more reliable than parsing
// Extract tool handler bodies by finding each "handler: async (args: any) => {"
// and extracting until the matching closing brace
/**
 * Extract the body of one tool handler from the MCP server source.
 *
 * Finds the line containing `startPattern` (e.g. "name: 'search'"), then
 * scans the next 30 lines for the literal `handler: async (args: any) => {`
 * marker and captures everything up to the matching closing brace by
 * counting braces character-by-character.
 *
 * NOTE(review): brace counting is purely lexical — a `{` or `}` inside a
 * string or template literal in the handler body would skew the count.
 * Acceptable for this one-time extraction script; verify output by eye.
 *
 * @param {string} content - Full source text of mcp-server.ts
 * @param {string} startPattern - Substring uniquely identifying the tool entry
 * @returns {string|null} Handler body with the wrapper (signature line and
 *   closing brace) removed; '' for an empty two-line handler; null when the
 *   pattern or the handler marker is not found.
 */
const extractHandlerBody = (content, startPattern) => {
  const lines = content.split('\n');
  const startIdx = lines.findIndex(line => line.includes(startPattern));
  if (startIdx === -1) return null;

  // Find the "handler: async (args: any) => {" line near the tool entry.
  let handlerIdx = -1;
  for (let i = startIdx; i < Math.min(startIdx + 30, lines.length); i++) {
    if (lines[i].includes('handler: async (args: any) => {')) {
      handlerIdx = i;
      break;
    }
  }
  if (handlerIdx === -1) return null;

  // Collect lines from the handler's opening brace to its matching close.
  let braceCount = 0;
  let bodyLines = [];
  let started = false;
  for (let i = handlerIdx; i < lines.length; i++) {
    const line = lines[i];
    for (const char of line) {
      if (char === '{') {
        braceCount++;
        started = true;
      } else if (char === '}') {
        braceCount--;
      }
    }
    if (started) {
      bodyLines.push(line);
    }
    if (started && braceCount === 0) {
      break;
    }
  }

  // Strip the wrapper: first line (handler signature) and last line (closing
  // brace). Fix: the original `> 2` check left the wrapper lines in place for
  // a two-line handler with an empty body; `>= 2` strips them, yielding ''
  // (which callers' truthiness check correctly treats as "not found").
  if (bodyLines.length >= 2) {
    bodyLines = bodyLines.slice(1, -1);
  }
  return bodyLines.join('\n');
};
// Tool name to search pattern mapping
// Each tool's registration in mcp-server.ts is located by the literal
// marker "name: '<tool>'", so the lookup table is derived from the list
// of tool names rather than spelled out pair by pair.
const TOOL_NAMES = [
  'search',
  'timeline',
  'decisions',
  'changes',
  'how_it_works',
  'search_observations',
  'search_sessions',
  'search_user_prompts',
  'find_by_concept',
  'find_by_file',
  'find_by_type',
  'get_recent_context',
  'get_context_timeline',
  'get_timeline_by_query',
];
const tools = Object.fromEntries(TOOL_NAMES.map((name) => [name, `name: '${name}'`]));
// Walk the tool table and pull each handler body out of the server source.
// Tools whose handler cannot be located are reported and skipped; the tally
// at the end shows how many of the expected handlers were recovered.
console.log('Extracting tool handlers...');
const handlers = {};
Object.entries(tools).forEach(([toolName, pattern]) => {
  console.log(`  Extracting ${toolName}...`);
  const extracted = extractHandlerBody(content, pattern);
  if (!extracted) {
    console.log(`  ✗ Not found`);
    return;
  }
  handlers[toolName] = extracted;
  console.log(`${extracted.split('\n').length} lines`);
});
console.log(`\nExtracted ${Object.keys(handlers).length}/${Object.keys(tools).length} handlers`);
// Now generate SearchManager.ts
console.log('\nGenerating SearchManager.ts...');
// Turn each extracted handler into a SearchManager method: snake_case tool
// names become camelCase method names, and free-function / module-level
// calls in the body are rewritten to go through the injected services.
const methodBodies = Object.entries(handlers).map(([toolName, body]) => {
// Convert tool name to camelCase method name
const methodName = toolName.replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
// Replace standalone function calls with class methods
// NOTE(review): these are plain-text regex rewrites — /\bsearch\./,
// /\bstore\./ and /chromaClient/ will also match occurrences inside string
// literals or comments in a handler body. Acceptable for this one-time
// script, but verify the generated file compiles.
let processedBody = body
.replace(/formatSearchTips\(\)/g, 'this.formatter.formatSearchTips()')
.replace(/formatObservationIndex\(/g, 'this.formatter.formatObservationIndex(')
.replace(/formatSessionIndex\(/g, 'this.formatter.formatSessionIndex(')
.replace(/formatUserPromptIndex\(/g, 'this.formatter.formatUserPromptIndex(')
.replace(/formatObservationResult\(/g, 'this.formatter.formatObservationResult(')
.replace(/formatSessionResult\(/g, 'this.formatter.formatSessionResult(')
.replace(/formatUserPromptResult\(/g, 'this.formatter.formatUserPromptResult(')
.replace(/filterTimelineByDepth\(/g, 'this.timeline.filterByDepth(')
.replace(/\bsearch\./g, 'this.sessionSearch.')
.replace(/\bstore\./g, 'this.sessionStore.')
.replace(/queryChroma\(/g, 'this.queryChroma(')
.replace(/normalizeParams\(/g, 'this.normalizeParams(')
.replace(/chromaClient/g, 'this.chromaSync');
// Emit the method with a doc header; the rewritten body is spliced verbatim.
return ` /**
* Tool handler: ${toolName}
*/
async ${methodName}(args: any): Promise<any> {
${processedBody}
}`;
}).join('\n\n');
// Full text of the generated SearchManager.ts: fixed imports, constructor
// with injected services, the queryChroma/normalizeParams helpers, and the
// extracted tool-handler methods spliced in via ${methodBodies}.
const searchManagerContent = `/**
* SearchManager - Core search orchestration for claude-mem
* Extracted from mcp-server.ts to centralize business logic in Worker services
*
* This class contains all tool handler logic that was previously in the MCP server.
* The MCP server now acts as a thin HTTP wrapper that calls these methods via HTTP.
*/
import { SessionSearch } from '../sqlite/SessionSearch.js';
import { SessionStore } from '../sqlite/SessionStore.js';
import { ChromaSync } from '../sync/ChromaSync.js';
import { FormattingService } from './FormattingService.js';
import { TimelineService, TimelineItem } from './TimelineService.js';
import { ObservationSearchResult, SessionSummarySearchResult, UserPromptSearchResult } from '../sqlite/types.js';
import { silentDebug } from '../../utils/silent-debug.js';
const COLLECTION_NAME = 'cm__claude-mem';
export class SearchManager {
constructor(
private sessionSearch: SessionSearch,
private sessionStore: SessionStore,
private chromaSync: ChromaSync,
private formatter: FormattingService,
private timeline: TimelineService
) {}
/**
* Query Chroma vector database via ChromaSync
*/
private async queryChroma(
query: string,
limit: number,
whereFilter?: Record<string, any>
): Promise<{ ids: number[]; distances: number[]; metadatas: any[] }> {
return await this.chromaSync.queryChroma(query, limit, whereFilter);
}
/**
* Helper to normalize query parameters from URL-friendly format
* Converts comma-separated strings to arrays and flattens date params
*/
private normalizeParams(args: any): any {
const normalized: any = { ...args };
// Parse comma-separated concepts into array
if (normalized.concepts && typeof normalized.concepts === 'string') {
normalized.concepts = normalized.concepts.split(',').map((s: string) => s.trim()).filter(Boolean);
}
// Parse comma-separated files into array
if (normalized.files && typeof normalized.files === 'string') {
normalized.files = normalized.files.split(',').map((s: string) => s.trim()).filter(Boolean);
}
// Parse comma-separated obs_type into array
if (normalized.obs_type && typeof normalized.obs_type === 'string') {
normalized.obs_type = normalized.obs_type.split(',').map((s: string) => s.trim()).filter(Boolean);
}
// Parse comma-separated type (for filterSchema) into array
if (normalized.type && typeof normalized.type === 'string' && normalized.type.includes(',')) {
normalized.type = normalized.type.split(',').map((s: string) => s.trim()).filter(Boolean);
}
// Flatten dateStart/dateEnd into dateRange object
if (normalized.dateStart || normalized.dateEnd) {
normalized.dateRange = {
start: normalized.dateStart,
end: normalized.dateEnd
};
delete normalized.dateStart;
delete normalized.dateEnd;
}
return normalized;
}
${methodBodies}
}
`;
// Persist the generated class and report a short summary of what was built.
writeFileSync(outputPath, searchManagerContent, 'utf-8');
console.log(`\n✅ SearchManager.ts generated at ${outputPath}`);
console.log(` Total methods: ${Object.keys(handlers).length + 2} (${Object.keys(handlers).length} tools + queryChroma + normalizeParams)`);
console.log(` File size: ${(searchManagerContent.length / 1024).toFixed(1)} KB`);
+1 -1
View File
@@ -387,7 +387,7 @@ async function main() {
} catch (error) {
// Worker might already be running or PM2 not available - that's okay
// The ensureWorkerRunning() function will handle auto-start when needed
log(' Worker will start automatically when needed', colors.dim);
log('️ Worker startup error', colors.dim);
}
// Success - dependencies installed (if needed)
+32 -44
View File
@@ -1,11 +1,14 @@
/**
* Cleanup Hook - SessionEnd
* Consolidated entry point + logic
*
* Pure HTTP client - sends data to worker, worker handles all database operations.
* This allows the hook to run under any runtime (Node.js or Bun) since it has no
* native module dependencies.
*/
import { stdin } from 'process';
import { SessionStore } from '../services/sqlite/SessionStore.js';
import { getWorkerPort } from '../shared/worker-utils.js';
import { silentDebug } from '../utils/silent-debug.js';
export interface SessionEndInput {
session_id: string;
@@ -16,16 +19,13 @@ export interface SessionEndInput {
}
/**
* Cleanup Hook Main Logic
* Cleanup Hook Main Logic - Fire-and-forget HTTP client
*/
async function cleanupHook(input?: SessionEndInput): Promise<void> {
// Log hook entry point
console.error('[claude-mem cleanup] Hook fired', {
input: input ? {
session_id: input.session_id,
cwd: input.cwd,
reason: input.reason
} : null
silentDebug('[cleanup-hook] Hook fired', {
session_id: input?.session_id,
cwd: input?.cwd,
reason: input?.reason
});
// Handle standalone execution (no input provided)
@@ -43,47 +43,35 @@ async function cleanupHook(input?: SessionEndInput): Promise<void> {
}
const { session_id, reason } = input;
console.error('[claude-mem cleanup] Searching for active SDK session', { session_id, reason });
// Find active SDK session
const db = new SessionStore();
const session = db.findActiveSDKSession(session_id);
const port = getWorkerPort();
if (!session) {
// No active session - nothing to clean up
console.error('[claude-mem cleanup] No active SDK session found', { session_id });
db.close();
console.log('{"continue": true, "suppressOutput": true}');
process.exit(0);
}
console.error('[claude-mem cleanup] Active SDK session found', {
session_id: session.id,
sdk_session_id: session.sdk_session_id,
project: session.project,
worker_port: session.worker_port
});
// Mark session as completed in DB
db.markSessionCompleted(session.id);
console.error('[claude-mem cleanup] Session marked as completed in database');
db.close();
// Tell worker to stop spinner
try {
const workerPort = session.worker_port || getWorkerPort();
await fetch(`http://127.0.0.1:${workerPort}/sessions/${session.id}/complete`, {
// Send to worker - worker handles finding session, marking complete, and stopping spinner
const response = await fetch(`http://127.0.0.1:${port}/api/sessions/complete`, {
method: 'POST',
signal: AbortSignal.timeout(1000)
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
claudeSessionId: session_id,
reason
}),
signal: AbortSignal.timeout(2000)
});
if (response.ok) {
const result = await response.json();
silentDebug('[cleanup-hook] Session cleanup completed', result);
} else {
// Non-fatal - session might not exist
silentDebug('[cleanup-hook] Session not found or already cleaned up');
}
} catch (error: any) {
// Worker might not be running - that's okay
silentDebug('[cleanup-hook] Worker not reachable (non-critical)', {
error: error.message
});
console.error('[claude-mem cleanup] Worker notified to stop processing indicator');
} catch (err) {
// Non-critical - worker might be down
console.error('[claude-mem cleanup] Failed to notify worker (non-critical):', err);
}
console.error('[claude-mem cleanup] Cleanup completed successfully');
console.log('{"continue": true, "suppressOutput": true}');
process.exit(0);
}
+53 -807
View File
@@ -1,111 +1,15 @@
/**
* Context Hook - SessionStart
* Consolidated entry point + logic
*
* Pure HTTP client - calls worker to generate context.
* This allows the hook to run under any runtime (Node.js or Bun) since it has no
* native module dependencies.
*/
import path from 'path';
import { homedir } from 'os';
import { existsSync, readFileSync, unlinkSync } from 'fs';
import { stdin } from 'process';
import { fileURLToPath } from 'url';
import { dirname } from 'path';
import { SessionStore } from '../services/sqlite/SessionStore.js';
import {
OBSERVATION_TYPES,
OBSERVATION_CONCEPTS,
TYPE_ICON_MAP,
TYPE_WORK_EMOJI_MAP,
DEFAULT_OBSERVATION_TYPES_STRING,
DEFAULT_OBSERVATION_CONCEPTS_STRING
} from '../constants/observation-metadata.js';
import { logger } from '../utils/logger.js';
// Get __dirname equivalent in ESM
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// Version marker path (same as smart-install.js)
// From src/hooks/ we need to go up to plugin root: ../../
const VERSION_MARKER_PATH = path.join(__dirname, '../../.install-version');
interface ContextConfig {
// Display counts
totalObservationCount: number;
fullObservationCount: number;
sessionCount: number;
// Token display toggles
showReadTokens: boolean;
showWorkTokens: boolean;
showSavingsAmount: boolean;
showSavingsPercent: boolean;
// Filters
observationTypes: Set<string>;
observationConcepts: Set<string>;
// Display options
fullObservationField: 'narrative' | 'facts';
showLastSummary: boolean;
showLastMessage: boolean;
}
/**
* Load all context configuration settings
* Priority: ~/.claude-mem/settings.json > env var > defaults
*/
function loadContextConfig(): ContextConfig {
const defaults = {
totalObservationCount: parseInt(process.env.CLAUDE_MEM_CONTEXT_OBSERVATIONS || '50', 10),
fullObservationCount: 5,
sessionCount: 10,
showReadTokens: true,
showWorkTokens: true,
showSavingsAmount: true,
showSavingsPercent: true,
observationTypes: new Set(OBSERVATION_TYPES),
observationConcepts: new Set(OBSERVATION_CONCEPTS),
fullObservationField: 'narrative' as const,
showLastSummary: true,
showLastMessage: false,
};
try {
const settingsPath = path.join(homedir(), '.claude-mem', 'settings.json');
if (!existsSync(settingsPath)) return defaults;
const settings = JSON.parse(readFileSync(settingsPath, 'utf-8'));
const env = settings.env || {};
return {
totalObservationCount: parseInt(env.CLAUDE_MEM_CONTEXT_OBSERVATIONS || '50', 10),
fullObservationCount: parseInt(env.CLAUDE_MEM_CONTEXT_FULL_COUNT || '5', 10),
sessionCount: parseInt(env.CLAUDE_MEM_CONTEXT_SESSION_COUNT || '10', 10),
showReadTokens: env.CLAUDE_MEM_CONTEXT_SHOW_READ_TOKENS !== 'false',
showWorkTokens: env.CLAUDE_MEM_CONTEXT_SHOW_WORK_TOKENS !== 'false',
showSavingsAmount: env.CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_AMOUNT !== 'false',
showSavingsPercent: env.CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_PERCENT !== 'false',
observationTypes: new Set(
(env.CLAUDE_MEM_CONTEXT_OBSERVATION_TYPES || DEFAULT_OBSERVATION_TYPES_STRING)
.split(',').map((t: string) => t.trim()).filter(Boolean)
),
observationConcepts: new Set(
(env.CLAUDE_MEM_CONTEXT_OBSERVATION_CONCEPTS || DEFAULT_OBSERVATION_CONCEPTS_STRING)
.split(',').map((c: string) => c.trim()).filter(Boolean)
),
fullObservationField: (env.CLAUDE_MEM_CONTEXT_FULL_FIELD || 'narrative') as 'narrative' | 'facts',
showLastSummary: env.CLAUDE_MEM_CONTEXT_SHOW_LAST_SUMMARY !== 'false',
showLastMessage: env.CLAUDE_MEM_CONTEXT_SHOW_LAST_MESSAGE === 'true',
};
} catch (error) {
logger.warn('HOOK', 'Failed to load context settings, using defaults', {}, error as Error);
return defaults;
}
}
// Configuration constants
const CHARS_PER_TOKEN_ESTIMATE = 4; // Rough estimate for token counting
const SUMMARY_LOOKAHEAD = 1; // Fetch one extra summary for offset calculation
import path from "path";
import { stdin } from "process";
import { execSync } from "child_process";
import { getWorkerPort } from "../shared/worker-utils.js";
export interface SessionStartInput {
session_id?: string;
@@ -116,722 +20,64 @@ export interface SessionStartInput {
[key: string]: any;
}
// ANSI color codes for terminal output
const colors = {
reset: '\x1b[0m',
bright: '\x1b[1m',
dim: '\x1b[2m',
cyan: '\x1b[36m',
green: '\x1b[32m',
yellow: '\x1b[33m',
blue: '\x1b[34m',
magenta: '\x1b[35m',
gray: '\x1b[90m',
red: '\x1b[31m',
};
async function waitForPort(port: number, maxWaitMs: number = 10000): Promise<boolean> {
const startTime = Date.now();
const pollInterval = 100;
interface Observation {
id: number;
sdk_session_id: string;
type: string;
title: string | null;
subtitle: string | null;
narrative: string | null;
facts: string | null;
concepts: string | null;
files_read: string | null;
files_modified: string | null;
discovery_tokens: number | null;
created_at: string;
created_at_epoch: number;
}
interface SessionSummary {
id: number;
sdk_session_id: string;
request: string | null;
investigated: string | null;
learned: string | null;
completed: string | null;
next_steps: string | null;
created_at: string;
created_at_epoch: number;
}
// Helper: Parse JSON array safely
function parseJsonArray(json: string | null): string[] {
if (!json) return [];
try {
const parsed = JSON.parse(json);
return Array.isArray(parsed) ? parsed : [];
} catch (err) {
return [];
}
}
// Helper: Format date with time
function formatDateTime(dateStr: string): string {
const date = new Date(dateStr);
return date.toLocaleString('en-US', {
month: 'short',
day: 'numeric',
hour: 'numeric',
minute: '2-digit',
hour12: true
});
}
// Helper: Format just time (no date)
function formatTime(dateStr: string): string {
const date = new Date(dateStr);
return date.toLocaleString('en-US', {
hour: 'numeric',
minute: '2-digit',
hour12: true
});
}
// Helper: Format just date
function formatDate(dateStr: string): string {
const date = new Date(dateStr);
return date.toLocaleString('en-US', {
month: 'short',
day: 'numeric',
year: 'numeric'
});
}
// Helper: Convert absolute paths to relative paths
function toRelativePath(filePath: string, cwd: string): string {
if (path.isAbsolute(filePath)) {
return path.relative(cwd, filePath);
}
return filePath;
}
// Helper: Render a summary field (investigated, learned, etc.)
function renderSummaryField(label: string, value: string | null, color: string, useColors: boolean): string[] {
if (!value) return [];
if (useColors) {
return [`${color}${label}:${colors.reset} ${value}`, ''];
}
return [`**${label}**: ${value}`, ''];
}
// Helper: Convert cwd path to dashed format for transcript directory name
function cwdToDashed(cwd: string): string {
// Convert all slashes to dashes (including leading slash)
return cwd.replace(/\//g, '-');
}
// Helper: Extract last assistant message from transcript file
function extractPriorMessages(transcriptPath: string): { userMessage: string; assistantMessage: string } {
try {
if (!existsSync(transcriptPath)) {
return { userMessage: '', assistantMessage: '' };
}
const content = readFileSync(transcriptPath, 'utf-8').trim();
if (!content) {
return { userMessage: '', assistantMessage: '' };
}
const lines = content.split('\n').filter(line => line.trim());
// Find the last assistant message by filtering for assistant type and taking the last one
let lastAssistantMessage = '';
// Iterate backwards to find the most recent assistant message with text content
for (let i = lines.length - 1; i >= 0; i--) {
try {
const line = lines[i];
// Quick check if this line is an assistant message
if (!line.includes('"type":"assistant"')) {
continue;
}
const entry = JSON.parse(line);
if (entry.type === 'assistant' && entry.message?.content && Array.isArray(entry.message.content)) {
let text = '';
for (const block of entry.message.content) {
if (block.type === 'text') {
text += block.text;
}
}
// Remove system-reminder tags
text = text.replace(/<system-reminder>[\s\S]*?<\/system-reminder>/g, '').trim();
if (text) {
lastAssistantMessage = text;
break; // Found it, stop searching
}
}
} catch (parseError) {
// Skip malformed lines
continue;
}
}
return { userMessage: '', assistantMessage: lastAssistantMessage };
} catch (error) {
logger.failure('HOOK', `Failed to extract prior messages from transcript`, { transcriptPath }, error as Error);
return { userMessage: '', assistantMessage: '' };
}
}
/**
* Context Hook Main Logic
*/
async function contextHook(input?: SessionStartInput, useColors: boolean = false): Promise<string> {
const config = loadContextConfig();
const cwd = input?.cwd ?? process.cwd();
const project = cwd ? path.basename(cwd) : 'unknown-project';
let db: SessionStore | null = null;
try {
db = new SessionStore();
} catch (error: any) {
if (error.code === 'ERR_DLOPEN_FAILED') {
// Native module ABI mismatch - delete version marker to trigger reinstall
try {
unlinkSync(VERSION_MARKER_PATH);
} catch (unlinkError) {
// Marker might not exist, that's okay
}
// Log once (not error spam) and exit cleanly
console.error('⚠️ Native module rebuild needed - restart Claude Code to auto-fix');
console.error(' (This happens after Node.js version upgrades)');
process.exit(0); // Exit cleanly to avoid error spam
}
// Other errors should still throw
throw error;
}
// Build SQL WHERE clause for observation types
const typeArray = Array.from(config.observationTypes);
const typePlaceholders = typeArray.map(() => '?').join(',');
// Build SQL WHERE clause for concepts
const conceptArray = Array.from(config.observationConcepts);
const conceptPlaceholders = conceptArray.map(() => '?').join(',');
// Get recent observations filtered by type and concepts at SQL level
// This ensures we show observations even when summaries haven't been generated
// Configurable via settings (default: 50)
const observations = db.db.prepare(`
SELECT
id, sdk_session_id, type, title, subtitle, narrative,
facts, concepts, files_read, files_modified, discovery_tokens,
created_at, created_at_epoch
FROM observations
WHERE project = ?
AND type IN (${typePlaceholders})
AND EXISTS (
SELECT 1 FROM json_each(concepts)
WHERE value IN (${conceptPlaceholders})
)
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(project, ...typeArray, ...conceptArray, config.totalObservationCount) as Observation[];
// Get recent summaries (optional - may not exist for recent sessions)
// Fetch one extra for offset calculation
const recentSummaries = db.db.prepare(`
SELECT id, sdk_session_id, request, investigated, learned, completed, next_steps, created_at, created_at_epoch
FROM session_summaries
WHERE project = ?
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(project, config.sessionCount + SUMMARY_LOOKAHEAD) as SessionSummary[];
// Retrieve prior session messages if enabled
let priorUserMessage = '';
let priorAssistantMessage = '';
// let debugInfo: string[] = [];
if (config.showLastMessage && observations.length > 0) {
while (Date.now() - startTime < maxWaitMs) {
try {
const currentSessionId = input?.session_id;
// Find the first observation from a different session (the prior session)
const priorSessionObs = observations.find(obs => obs.sdk_session_id !== currentSessionId);
if (priorSessionObs) {
const priorSessionId = priorSessionObs.sdk_session_id;
// Construct transcript path: ~/.claude/projects/{dashed-cwd}/{session_id}.jsonl
const dashedCwd = cwdToDashed(cwd);
const transcriptPath = path.join(homedir(), '.claude', 'projects', dashedCwd, `${priorSessionId}.jsonl`);
// debugInfo.push(`📋 Prior Message Retrieval:`);
// debugInfo.push(` Session ID: ${priorSessionId}`);
// debugInfo.push(` Transcript: ${transcriptPath}`);
// debugInfo.push(` Exists: ${existsSync(transcriptPath)}`);
// Extract messages from transcript
const messages = extractPriorMessages(transcriptPath);
priorUserMessage = messages.userMessage;
priorAssistantMessage = messages.assistantMessage;
// if (!priorUserMessage && !priorAssistantMessage) {
// debugInfo.push(` ⚠️ No messages extracted from transcript`);
// } else {
// debugInfo.push(` ✅ Found user message: ${!!priorUserMessage}`);
// debugInfo.push(` ✅ Found assistant message: ${!!priorAssistantMessage}`);
// }
} // else {
// debugInfo.push(`📋 Prior Message Retrieval: No prior session found (all observations from current session)`);
// }
} catch (error) {
// debugInfo.push(`📋 Prior Message Retrieval Error: ${(error as Error).message}`);
execSync(`curl -s -f -m 1 "http://127.0.0.1:${port}/api/health" > /dev/null 2>&1`, {
timeout: 1000,
});
return true;
} catch {
await new Promise((resolve) => setTimeout(resolve, pollInterval));
}
}
// If we have neither observations nor summaries, show empty state
if (observations.length === 0 && recentSummaries.length === 0) {
db?.close();
if (useColors) {
return `\n${colors.bright}${colors.cyan}📝 [${project}] recent context${colors.reset}\n${colors.gray}${'─'.repeat(60)}${colors.reset}\n\n${colors.dim}No previous sessions found for this project yet.${colors.reset}\n`;
}
return `# [${project}] recent context\n\nNo previous sessions found for this project yet.`;
}
const displaySummaries = recentSummaries.slice(0, config.sessionCount);
// All filtered observations are shown in timeline
const timelineObs = observations;
// Build output
const output: string[] = [];
// Header
if (useColors) {
output.push('');
output.push(`${colors.bright}${colors.cyan}📝 [${project}] recent context${colors.reset}`);
output.push(`${colors.gray}${'─'.repeat(60)}${colors.reset}`);
output.push('');
} else {
output.push(`# [${project}] recent context`);
output.push('');
}
// Chronological Timeline
if (timelineObs.length > 0) {
// Legend/Key
if (useColors) {
output.push(`${colors.dim}Legend: 🎯 session-request | 🔴 bugfix | 🟣 feature | 🔄 refactor | ✅ change | 🔵 discovery | ⚖️ decision${colors.reset}`);
} else {
output.push(`**Legend:** 🎯 session-request | 🔴 bugfix | 🟣 feature | 🔄 refactor | ✅ change | 🔵 discovery | ⚖️ decision`);
}
output.push('');
// Column Key
if (useColors) {
output.push(`${colors.bright}💡 Column Key${colors.reset}`);
output.push(`${colors.dim} Read: Tokens to read this observation (cost to learn it now)${colors.reset}`);
output.push(`${colors.dim} Work: Tokens spent on work that produced this record (🔍 research, 🛠️ building, ⚖️ deciding)${colors.reset}`);
} else {
output.push(`💡 **Column Key**:`);
output.push(`- **Read**: Tokens to read this observation (cost to learn it now)`);
output.push(`- **Work**: Tokens spent on work that produced this record (🔍 research, 🛠️ building, ⚖️ deciding)`);
}
output.push('');
// Context Index Usage Instructions
if (useColors) {
output.push(`${colors.dim}💡 Context Index: This semantic index (titles, types, files, tokens) is usually sufficient to understand past work.${colors.reset}`);
output.push('');
output.push(`${colors.dim}When you need implementation details, rationale, or debugging context:${colors.reset}`);
output.push(`${colors.dim} - Use the mem-search skill to fetch full observations on-demand${colors.reset}`);
output.push(`${colors.dim} - Critical types (🔴 bugfix, ⚖️ decision) often need detailed fetching${colors.reset}`);
output.push(`${colors.dim} - Trust this index over re-reading code for past decisions and learnings${colors.reset}`);
} else {
output.push(`💡 **Context Index:** This semantic index (titles, types, files, tokens) is usually sufficient to understand past work.`);
output.push('');
output.push(`When you need implementation details, rationale, or debugging context:`);
output.push(`- Use the mem-search skill to fetch full observations on-demand`);
output.push(`- Critical types (🔴 bugfix, ⚖️ decision) often need detailed fetching`);
output.push(`- Trust this index over re-reading code for past decisions and learnings`);
}
output.push('');
// Section 1: Aggregate ROI Metrics
const totalObservations = observations.length;
const totalReadTokens = observations.reduce((sum, obs) => {
// Estimate read tokens from observation size
const obsSize = (obs.title?.length || 0) +
(obs.subtitle?.length || 0) +
(obs.narrative?.length || 0) +
JSON.stringify(obs.facts || []).length;
return sum + Math.ceil(obsSize / CHARS_PER_TOKEN_ESTIMATE);
}, 0);
const totalDiscoveryTokens = observations.reduce((sum, obs) => sum + (obs.discovery_tokens || 0), 0);
const savings = totalDiscoveryTokens - totalReadTokens;
const savingsPercent = totalDiscoveryTokens > 0
? Math.round((savings / totalDiscoveryTokens) * 100)
: 0;
// Display Context Economics section only if at least one token setting is enabled
const showContextEconomics = config.showReadTokens || config.showWorkTokens ||
config.showSavingsAmount || config.showSavingsPercent;
if (showContextEconomics) {
if (useColors) {
output.push(`${colors.bright}${colors.cyan}📊 Context Economics${colors.reset}`);
output.push(`${colors.dim} Loading: ${totalObservations} observations (${totalReadTokens.toLocaleString()} tokens to read)${colors.reset}`);
output.push(`${colors.dim} Work investment: ${totalDiscoveryTokens.toLocaleString()} tokens spent on research, building, and decisions${colors.reset}`);
if (totalDiscoveryTokens > 0 && (config.showSavingsAmount || config.showSavingsPercent)) {
let savingsLine = ' Your savings: ';
if (config.showSavingsAmount && config.showSavingsPercent) {
savingsLine += `${savings.toLocaleString()} tokens (${savingsPercent}% reduction from reuse)`;
} else if (config.showSavingsAmount) {
savingsLine += `${savings.toLocaleString()} tokens`;
} else {
savingsLine += `${savingsPercent}% reduction from reuse`;
}
output.push(`${colors.green}${savingsLine}${colors.reset}`);
}
output.push('');
} else {
output.push(`📊 **Context Economics**:`);
output.push(`- Loading: ${totalObservations} observations (${totalReadTokens.toLocaleString()} tokens to read)`);
output.push(`- Work investment: ${totalDiscoveryTokens.toLocaleString()} tokens spent on research, building, and decisions`);
if (totalDiscoveryTokens > 0 && (config.showSavingsAmount || config.showSavingsPercent)) {
let savingsLine = '- Your savings: ';
if (config.showSavingsAmount && config.showSavingsPercent) {
savingsLine += `${savings.toLocaleString()} tokens (${savingsPercent}% reduction from reuse)`;
} else if (config.showSavingsAmount) {
savingsLine += `${savings.toLocaleString()} tokens`;
} else {
savingsLine += `${savingsPercent}% reduction from reuse`;
}
output.push(savingsLine);
}
output.push('');
}
}
// Prepare summaries for timeline display
// The most recent summary shows full details (investigated, learned, etc.)
// Older summaries only show as timeline markers (no link needed)
const mostRecentSummaryId = recentSummaries[0]?.id;
interface SummaryTimelineItem extends SessionSummary {
displayEpoch: number;
displayTime: string;
shouldShowLink: boolean;
}
const summariesForTimeline: SummaryTimelineItem[] = displaySummaries.map((summary, i) => {
// For visual grouping, display each summary at the time range it covers
// Most recent: shows at its own time (current session)
// Older: shows at the previous (older) summary's time to mark the session range
const olderSummary = i === 0 ? null : recentSummaries[i + 1];
return {
...summary,
displayEpoch: olderSummary ? olderSummary.created_at_epoch : summary.created_at_epoch,
displayTime: olderSummary ? olderSummary.created_at : summary.created_at,
shouldShowLink: summary.id !== mostRecentSummaryId
};
});
// Identify which observations should show full details (most recent N)
const fullObservationIds = new Set(
observations
.slice(0, config.fullObservationCount)
.map(obs => obs.id)
);
type TimelineItem =
| { type: 'observation'; data: Observation }
| { type: 'summary'; data: SummaryTimelineItem };
const timeline: TimelineItem[] = [
...timelineObs.map(obs => ({ type: 'observation' as const, data: obs })),
...summariesForTimeline.map(summary => ({ type: 'summary' as const, data: summary }))
];
// Sort chronologically
timeline.sort((a, b) => {
const aEpoch = a.type === 'observation' ? a.data.created_at_epoch : a.data.displayEpoch;
const bEpoch = b.type === 'observation' ? b.data.created_at_epoch : b.data.displayEpoch;
return aEpoch - bEpoch;
});
// Group by day for rendering
const itemsByDay = new Map<string, TimelineItem[]>();
for (const item of timeline) {
const itemDate = item.type === 'observation' ? item.data.created_at : item.data.displayTime;
const day = formatDate(itemDate);
if (!itemsByDay.has(day)) {
itemsByDay.set(day, []);
}
itemsByDay.get(day)!.push(item);
}
// Sort days chronologically
const sortedDays = Array.from(itemsByDay.entries()).sort((a, b) => {
const aDate = new Date(a[0]).getTime();
const bDate = new Date(b[0]).getTime();
return aDate - bDate;
});
// Render each day's timeline
for (const [day, dayItems] of sortedDays) {
// Day header
if (useColors) {
output.push(`${colors.bright}${colors.cyan}${day}${colors.reset}`);
output.push('');
} else {
output.push(`### ${day}`);
output.push('');
}
// Render items chronologically with visual file grouping
let currentFile: string | null = null;
let lastTime = '';
let tableOpen = false;
for (const item of dayItems) {
if (item.type === 'summary') {
// Close any open table
if (tableOpen) {
output.push('');
tableOpen = false;
currentFile = null;
lastTime = '';
}
// Render summary
const summary = item.data;
const summaryTitle = `${summary.request || 'Session started'} (${formatDateTime(summary.displayTime)})`;
const link = summary.shouldShowLink ? `claude-mem://session-summary/${summary.id}` : '';
if (useColors) {
const linkPart = link ? `${colors.dim}[${link}]${colors.reset}` : '';
output.push(`🎯 ${colors.yellow}#S${summary.id}${colors.reset} ${summaryTitle} ${linkPart}`);
} else {
const linkPart = link ? ` [→](${link})` : '';
output.push(`**🎯 #S${summary.id}** ${summaryTitle}${linkPart}`);
}
output.push('');
} else {
// Render observation
const obs = item.data;
const files = parseJsonArray(obs.files_modified);
const file = (files.length > 0 && files[0]) ? toRelativePath(files[0], cwd) : 'General';
// Check if we need a new file section
if (file !== currentFile) {
// Close previous table
if (tableOpen) {
output.push('');
}
// File header
if (useColors) {
output.push(`${colors.dim}${file}${colors.reset}`);
} else {
output.push(`**${file}**`);
}
// Table header (markdown only)
if (!useColors) {
output.push(`| ID | Time | T | Title | Read | Work |`);
output.push(`|----|------|---|-------|------|------|`);
}
currentFile = file;
tableOpen = true;
lastTime = '';
}
const time = formatTime(obs.created_at);
const title = obs.title || 'Untitled';
// Map observation type to emoji icon
const icon = TYPE_ICON_MAP[obs.type as keyof typeof TYPE_ICON_MAP] || '•';
// Section 2: Calculate read tokens (estimate from observation size)
const obsSize = (obs.title?.length || 0) +
(obs.subtitle?.length || 0) +
(obs.narrative?.length || 0) +
JSON.stringify(obs.facts || []).length;
const readTokens = Math.ceil(obsSize / CHARS_PER_TOKEN_ESTIMATE);
// Get discovery tokens (handle old observations without this field)
const discoveryTokens = obs.discovery_tokens || 0;
// Map observation type to work emoji
const workEmoji = TYPE_WORK_EMOJI_MAP[obs.type as keyof typeof TYPE_WORK_EMOJI_MAP] || '🔍';
const discoveryDisplay = discoveryTokens > 0 ? `${workEmoji} ${discoveryTokens.toLocaleString()}` : '-';
const showTime = time !== lastTime;
const timeDisplay = showTime ? time : '';
lastTime = time;
// Check if this observation should show full details
const shouldShowFull = fullObservationIds.has(obs.id);
if (shouldShowFull) {
// Render with full details (narrative or facts)
const detailField = config.fullObservationField === 'narrative'
? obs.narrative
: (obs.facts ? parseJsonArray(obs.facts).join('\n') : null);
if (useColors) {
const timePart = showTime ? `${colors.dim}${time}${colors.reset}` : ' '.repeat(time.length);
const readPart = (config.showReadTokens && readTokens > 0) ? `${colors.dim}(~${readTokens}t)${colors.reset}` : '';
const discoveryPart = (config.showWorkTokens && discoveryTokens > 0) ? `${colors.dim}(${workEmoji} ${discoveryTokens.toLocaleString()}t)${colors.reset}` : '';
output.push(` ${colors.dim}#${obs.id}${colors.reset} ${timePart} ${icon} ${colors.bright}${title}${colors.reset}`);
if (detailField) {
output.push(` ${colors.dim}${detailField}${colors.reset}`);
}
if (readPart || discoveryPart) {
output.push(` ${readPart} ${discoveryPart}`);
}
output.push('');
} else {
// Close table for full observation
if (tableOpen) {
output.push('');
tableOpen = false;
}
output.push(`**#${obs.id}** ${timeDisplay || '″'} ${icon} **${title}**`);
if (detailField) {
output.push('');
output.push(detailField);
output.push('');
}
const tokenParts: string[] = [];
if (config.showReadTokens) {
tokenParts.push(`Read: ~${readTokens}`);
}
if (config.showWorkTokens) {
tokenParts.push(`Work: ${discoveryDisplay}`);
}
if (tokenParts.length > 0) {
output.push(tokenParts.join(', '));
}
output.push('');
// Reopen table for next items if in same file
currentFile = null;
}
} else {
// Compact index rendering (existing code)
if (useColors) {
const timePart = showTime ? `${colors.dim}${time}${colors.reset}` : ' '.repeat(time.length);
const readPart = (config.showReadTokens && readTokens > 0) ? `${colors.dim}(~${readTokens}t)${colors.reset}` : '';
const discoveryPart = (config.showWorkTokens && discoveryTokens > 0) ? `${colors.dim}(${workEmoji} ${discoveryTokens.toLocaleString()}t)${colors.reset}` : '';
output.push(` ${colors.dim}#${obs.id}${colors.reset} ${timePart} ${icon} ${title} ${readPart} ${discoveryPart}`);
} else {
const readCol = config.showReadTokens ? `~${readTokens}` : '';
const workCol = config.showWorkTokens ? discoveryDisplay : '';
output.push(`| #${obs.id} | ${timeDisplay || '″'} | ${icon} | ${title} | ${readCol} | ${workCol} |`);
}
}
}
}
// Close final table if open
if (tableOpen) {
output.push('');
}
}
// Add full summary details for most recent session
// Only show if summary was generated AFTER the last observation
const mostRecentSummary = recentSummaries[0];
const mostRecentObservation = observations[0]; // observations are DESC by created_at_epoch
const shouldShowSummary = config.showLastSummary &&
mostRecentSummary &&
(mostRecentSummary.investigated || mostRecentSummary.learned || mostRecentSummary.completed || mostRecentSummary.next_steps) &&
(!mostRecentObservation || mostRecentSummary.created_at_epoch > mostRecentObservation.created_at_epoch);
if (shouldShowSummary) {
output.push(...renderSummaryField('Investigated', mostRecentSummary.investigated, colors.blue, useColors));
output.push(...renderSummaryField('Learned', mostRecentSummary.learned, colors.yellow, useColors));
output.push(...renderSummaryField('Completed', mostRecentSummary.completed, colors.green, useColors));
output.push(...renderSummaryField('Next Steps', mostRecentSummary.next_steps, colors.magenta, useColors));
}
// Previously section (last assistant message from prior session) - positioned at bottom for chronological sense
if (priorAssistantMessage) {
output.push('');
output.push('---');
output.push('');
if (useColors) {
output.push(`${colors.bright}${colors.magenta}📋 Previously${colors.reset}`);
output.push('');
output.push(`${colors.dim}A: ${priorAssistantMessage}${colors.reset}`);
} else {
output.push(`**📋 Previously**`);
output.push('');
output.push(`A: ${priorAssistantMessage}`);
}
output.push('');
}
// Footer with token savings message (only show if token economics is visible)
if (showContextEconomics && totalDiscoveryTokens > 0 && savings > 0) {
const workTokensK = Math.round(totalDiscoveryTokens / 1000);
output.push('');
if (useColors) {
output.push(`${colors.dim}💰 Access ${workTokensK}k tokens of past research & decisions for just ${totalReadTokens.toLocaleString()}t. Use the mem-search skill to access memories by ID instead of re-reading files.${colors.reset}`);
} else {
output.push(`💰 Access ${workTokensK}k tokens of past research & decisions for just ${totalReadTokens.toLocaleString()}t. Use the mem-search skill to access memories by ID instead of re-reading files.`);
}
}
}
db?.close();
// Add debug info directly to output
// if (debugInfo.length > 0) {
// output.push('');
// output.push('---');
// output.push('');
// output.push(...debugInfo);
// }
return output.join('\n').trimEnd();
return false;
}
// Export for use by worker service
export { contextHook };
/**
 * Fetch the formatted context for the current project from the worker service.
 *
 * Pure HTTP client: waits for the worker port to open, then GETs the rendered
 * context from /api/context/inject. Uses the global fetch API instead of
 * shelling out to curl, so no child process is spawned and the hook works on
 * platforms where curl is not installed.
 *
 * @param input - Optional SessionStart hook payload; its cwd determines the project name.
 * @returns The trimmed context text produced by the worker.
 * @throws If the worker is not reachable within 10s, or the request exceeds 5s.
 */
async function contextHook(input?: SessionStartInput): Promise<string> {
  const cwd = input?.cwd ?? process.cwd();
  const project = cwd ? path.basename(cwd) : "unknown-project";
  const port = getWorkerPort();
  // Wait for worker to be available
  const isAvailable = await waitForPort(port);
  if (!isAvailable) {
    throw new Error(
      `Worker service not available on port ${port} after 10s. Try: npm run worker:restart`
    );
  }
  const url = `http://127.0.0.1:${port}/api/context/inject?project=${encodeURIComponent(project)}`;
  // Same 5s budget the previous curl invocation used; AbortSignal enforces it.
  // Like `curl -s`, the body is returned regardless of HTTP status.
  const response = await fetch(url, { signal: AbortSignal.timeout(5000) });
  const body = await response.text();
  return body.trim();
}
// Entry Point - handle stdin/stdout
const forceColors = process.argv.includes('--colors');
const forceColors = process.argv.includes("--colors");
if (stdin.isTTY || forceColors) {
// Running manually from terminal - print formatted output with colors
contextHook(undefined, true).then(contextOutput => {
console.log(contextOutput);
contextHook(undefined).then((text) => {
console.log(text);
process.exit(0);
});
} else {
// Running from hook - wrap in hookSpecificOutput JSON format
let input = '';
stdin.on('data', (chunk) => input += chunk);
stdin.on('end', async () => {
let input = "";
stdin.on("data", (chunk) => (input += chunk));
stdin.on("end", async () => {
const parsed = input.trim() ? JSON.parse(input) : undefined;
const contextOutput = await contextHook(parsed, false);
const result = {
hookSpecificOutput: {
hookEventName: "SessionStart",
additionalContext: contextOutput
}
};
console.log(JSON.stringify(result));
const text = await contextHook(parsed);
console.log(
JSON.stringify({
hookSpecificOutput: {
hookEventName: "SessionStart",
additionalContext: text,
},
})
);
process.exit(0);
});
}
}
+12 -63
View File
@@ -1,15 +1,15 @@
/**
* Save Hook - PostToolUse
* Consolidated entry point + logic
*
* Pure HTTP client - sends data to worker, worker handles all database operations
* including privacy checks. This allows the hook to run under any runtime
* (Node.js or Bun) since it has no native module dependencies.
*/
import { stdin } from 'process';
import { SessionStore } from '../services/sqlite/SessionStore.js';
import { createHookResponse } from './hook-response.js';
import { logger } from '../utils/logger.js';
import { ensureWorkerRunning, getWorkerPort } from '../shared/worker-utils.js';
import { silentDebug } from '../utils/silent-debug.js';
import { stripMemoryTagsFromJson } from '../utils/tag-stripping.js';
export interface PostToolUseInput {
session_id: string;
@@ -29,9 +29,8 @@ const SKIP_TOOLS = new Set([
'AskUserQuestion' // User interaction, not substantive work
]);
/**
* Save Hook Main Logic
* Save Hook Main Logic - Fire-and-forget HTTP client
*/
async function saveHook(input?: PostToolUseInput): Promise<void> {
if (!input) {
@@ -48,72 +47,24 @@ async function saveHook(input?: PostToolUseInput): Promise<void> {
// Ensure worker is running
await ensureWorkerRunning();
const db = new SessionStore();
// Get or create session
const sessionDbId = db.createSDKSession(session_id, '', '');
const promptNumber = db.getPromptCounter(sessionDbId);
// Skip observation if user prompt was entirely private
// This respects the user's intent: if they marked the entire prompt as <private>,
// they don't want ANY observations from that interaction
const userPrompt = db.getUserPrompt(session_id, promptNumber);
if (!userPrompt || userPrompt.trim() === '') {
silentDebug('[save-hook] Skipping observation - user prompt was entirely private', {
session_id,
promptNumber,
tool_name
});
db.close();
console.log(createHookResponse('PostToolUse', true));
return;
}
db.close();
const port = getWorkerPort();
const toolStr = logger.formatTool(tool_name, tool_input);
const port = getWorkerPort();
logger.dataIn('HOOK', `PostToolUse: ${toolStr}`, {
sessionId: sessionDbId,
workerPort: port
});
try {
// Serialize and strip memory tags from tool_input and tool_response
// This prevents recursive storage of context and respects <private> tags
let cleanedToolInput = '{}';
let cleanedToolResponse = '{}';
try {
cleanedToolInput = tool_input !== undefined
? stripMemoryTagsFromJson(JSON.stringify(tool_input))
: '{}';
} catch (error) {
// Handle circular references or other JSON.stringify errors
silentDebug('[save-hook] Failed to stringify tool_input:', { error, tool_name });
cleanedToolInput = '{"error": "Failed to serialize tool_input"}';
}
try {
cleanedToolResponse = tool_response !== undefined
? stripMemoryTagsFromJson(JSON.stringify(tool_response))
: '{}';
} catch (error) {
// Handle circular references or other JSON.stringify errors
silentDebug('[save-hook] Failed to stringify tool_response:', { error, tool_name });
cleanedToolResponse = '{"error": "Failed to serialize tool_response"}';
}
const response = await fetch(`http://127.0.0.1:${port}/sessions/${sessionDbId}/observations`, {
// Send to worker - worker handles privacy check and database operations
const response = await fetch(`http://127.0.0.1:${port}/api/sessions/observations`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
claudeSessionId: session_id,
tool_name,
tool_input: cleanedToolInput,
tool_response: cleanedToolResponse,
prompt_number: promptNumber,
tool_input,
tool_response,
cwd: cwd || ''
}),
signal: AbortSignal.timeout(2000)
@@ -122,19 +73,17 @@ async function saveHook(input?: PostToolUseInput): Promise<void> {
if (!response.ok) {
const errorText = await response.text();
logger.failure('HOOK', 'Failed to send observation', {
sessionId: sessionDbId,
status: response.status
}, errorText);
throw new Error(`Failed to send observation to worker: ${response.status} ${errorText}`);
}
logger.debug('HOOK', 'Observation sent successfully', { sessionId: sessionDbId, toolName: tool_name });
logger.debug('HOOK', 'Observation sent successfully', { toolName: tool_name });
} catch (error: any) {
// Only show restart message for connection errors, not HTTP errors
if (error.cause?.code === 'ECONNREFUSED' || error.name === 'TimeoutError' || error.message.includes('fetch failed')) {
throw new Error("There's a problem with the worker. If you just updated, type `pm2 restart claude-mem-worker` in your terminal to continue");
}
// Re-throw HTTP errors and other errors as-is
throw error;
}
+15 -64
View File
@@ -1,15 +1,19 @@
/**
* Summary Hook - Stop
* Consolidated entry point + logic
*
* Pure HTTP client - sends data to worker, worker handles all database operations
* including privacy checks. This allows the hook to run under any runtime
* (Node.js or Bun) since it has no native module dependencies.
*
* Transcript parsing stays in the hook because only the hook has access to
* the transcript file path.
*/
import { stdin } from 'process';
import { readFileSync, existsSync } from 'fs';
import { SessionStore } from '../services/sqlite/SessionStore.js';
import { createHookResponse } from './hook-response.js';
import { logger } from '../utils/logger.js';
import { ensureWorkerRunning, getWorkerPort } from '../shared/worker-utils.js';
import { silentDebug } from '../utils/silent-debug.js';
export interface StopInput {
session_id: string;
@@ -123,7 +127,7 @@ function extractLastAssistantMessage(transcriptPath: string): string {
}
/**
* Summary Hook Main Logic
* Summary Hook Main Logic - Fire-and-forget HTTP client
*/
async function summaryHook(input?: StopInput): Promise<void> {
if (!input) {
@@ -135,77 +139,25 @@ async function summaryHook(input?: StopInput): Promise<void> {
// Ensure worker is running
await ensureWorkerRunning();
const db = new SessionStore();
// Get or create session
const sessionDbId = db.createSDKSession(session_id, '', '');
const promptNumber = db.getPromptCounter(sessionDbId);
// Skip summary if user prompt was entirely private
// This respects the user's intent: if they marked the entire prompt as <private>,
// they don't want ANY memory operations including summaries
const userPrompt = db.getUserPrompt(session_id, promptNumber);
if (!userPrompt || userPrompt.trim() === '') {
silentDebug('[summary-hook] Skipping summary - user prompt was entirely private', {
session_id,
promptNumber
});
db.close();
console.log(createHookResponse('Stop', true));
return;
}
// DIAGNOSTIC: Check session and observations
const sessionInfo = db.db.prepare(`
SELECT id, claude_session_id, sdk_session_id, project
FROM sdk_sessions WHERE id = ?
`).get(sessionDbId) as any;
const obsCount = db.db.prepare(`
SELECT COUNT(*) as count
FROM observations
WHERE sdk_session_id = ?
`).get(sessionInfo?.sdk_session_id) as { count: number };
silentDebug('[summary-hook] Session diagnostics', {
claudeSessionId: session_id,
sessionDbId,
sdkSessionId: sessionInfo?.sdk_session_id,
project: sessionInfo?.project,
promptNumber,
observationCount: obsCount?.count || 0,
transcriptPath: input.transcript_path
});
db.close();
const port = getWorkerPort();
// Extract last user AND assistant messages from transcript
const lastUserMessage = extractLastUserMessage(input.transcript_path || '');
const lastAssistantMessage = extractLastAssistantMessage(input.transcript_path || '');
silentDebug('[summary-hook] Extracted messages', {
hasLastUserMessage: !!lastUserMessage,
hasLastAssistantMessage: !!lastAssistantMessage,
lastAssistantPreview: lastAssistantMessage.substring(0, 200),
lastAssistantLength: lastAssistantMessage.length
});
logger.dataIn('HOOK', 'Stop: Requesting summary', {
sessionId: sessionDbId,
workerPort: port,
promptNumber,
hasLastUserMessage: !!lastUserMessage,
hasLastAssistantMessage: !!lastAssistantMessage
});
try {
const response = await fetch(`http://127.0.0.1:${port}/sessions/${sessionDbId}/summarize`, {
// Send to worker - worker handles privacy check and database operations
const response = await fetch(`http://127.0.0.1:${port}/api/sessions/summarize`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
prompt_number: promptNumber,
claudeSessionId: session_id,
last_user_message: lastUserMessage,
last_assistant_message: lastAssistantMessage
}),
@@ -215,26 +167,25 @@ async function summaryHook(input?: StopInput): Promise<void> {
if (!response.ok) {
const errorText = await response.text();
logger.failure('HOOK', 'Failed to generate summary', {
sessionId: sessionDbId,
status: response.status
}, errorText);
throw new Error(`Failed to request summary from worker: ${response.status} ${errorText}`);
}
logger.debug('HOOK', 'Summary request sent successfully', { sessionId: sessionDbId });
logger.debug('HOOK', 'Summary request sent successfully');
} catch (error: any) {
// Only show restart message for connection errors, not HTTP errors
if (error.cause?.code === 'ECONNREFUSED' || error.name === 'TimeoutError' || error.message.includes('fetch failed')) {
throw new Error("There's a problem with the worker. If you just updated, type `pm2 restart claude-mem-worker` in your terminal to continue");
}
// Re-throw HTTP errors and other errors as-is
throw error;
} finally {
await fetch(`http://127.0.0.1:${port}/api/processing`, {
// Notify worker to stop spinner (fire-and-forget)
fetch(`http://127.0.0.1:${port}/api/processing`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ isProcessing: false })
});
}).catch(() => {});
}
console.log(createHookResponse('Stop', true));
+15 -10
View File
@@ -6,8 +6,7 @@
* has been loaded into their session. Uses stderr as the communication channel
* since it's currently the only way to display messages in Claude Code UI.
*/
import { execSync } from "child_process";
import { join } from "path";
import { join, basename } from "path";
import { homedir } from "os";
import { existsSync } from "fs";
import { getWorkerPort } from "../shared/worker-utils.js";
@@ -20,7 +19,7 @@ if (!existsSync(nodeModulesPath)) {
// First-time installation - dependencies not yet installed
console.error(`
---
🎉 Note: This appears under Plugin Hook Error, but it's not an error. That's the only option for
🎉 Note: This appears under Plugin Hook Error, but it's not an error. That's the only option for
user messages in Claude Code UI until a better method is provided.
---
@@ -41,14 +40,20 @@ This message was not added to your startup context, so you can continue working
}
try {
// Cross-platform path to context-hook.js in the installed plugin
const contextHookPath = join(homedir(), '.claude', 'plugins', 'marketplaces', 'thedotmack', 'plugin', 'scripts', 'context-hook.js');
const output = execSync(`node "${contextHookPath}" --colors`, {
encoding: 'utf8',
windowsHide: true
});
const port = getWorkerPort();
const project = basename(process.cwd());
// Fetch formatted context directly from worker API
const response = await fetch(
`http://127.0.0.1:${port}/api/context/inject?project=${encodeURIComponent(project)}&colors=true`,
{ method: 'GET', signal: AbortSignal.timeout(5000) }
);
if (!response.ok) {
throw new Error(`Worker error ${response.status}`);
}
const output = await response.text();
// If it's after Dec 5, 2025 7pm EST, patch this out
const now = new Date();
+445
View File
@@ -0,0 +1,445 @@
/**
* Claude-mem MCP Search Server - Thin HTTP Wrapper
*
* Refactored from 2,718 lines to ~600-800 lines
* Delegates all business logic to Worker HTTP API at localhost:37777
* Maintains MCP protocol handling and tool schemas
*/
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import {
CallToolRequestSchema,
ListToolsRequestSchema,
} from '@modelcontextprotocol/sdk/types.js';
import { z } from 'zod';
import { zodToJsonSchema } from 'zod-to-json-schema';
import { silentDebug } from '../utils/silent-debug.js';
/**
 * Worker HTTP API configuration.
 * Port is taken from CLAUDE_MEM_WORKER_PORT when set, defaulting to 37777.
 */
const WORKER_PORT = parseInt(process.env.CLAUDE_MEM_WORKER_PORT || '37777', 10);
const WORKER_BASE_URL = `http://localhost:${WORKER_PORT}`;
/**
 * Map MCP tool names to Worker HTTP endpoints.
 * Every tool handler looks up its endpoint here and forwards its arguments
 * to the worker via callWorkerAPI; adding a tool requires adding a row.
 */
const TOOL_ENDPOINT_MAP: Record<string, string> = {
  'search': '/api/search',
  'timeline': '/api/timeline',
  'decisions': '/api/decisions',
  'changes': '/api/changes',
  'how_it_works': '/api/how-it-works',
  'search_observations': '/api/search/observations',
  'search_sessions': '/api/search/sessions',
  'search_user_prompts': '/api/search/prompts',
  'find_by_concept': '/api/search/by-concept',
  'find_by_file': '/api/search/by-file',
  'find_by_type': '/api/search/by-type',
  'get_recent_context': '/api/context/recent',
  'get_context_timeline': '/api/context/timeline',
  'get_timeline_by_query': '/api/timeline/by-query'
};
/**
 * Forward a tool invocation to a Worker HTTP API endpoint.
 *
 * Serializes params into a query string (null/undefined entries are dropped),
 * performs the GET, and passes the worker's MCP-shaped response through
 * unchanged. Any failure is converted into an isError content payload rather
 * than thrown, so the MCP server never crashes on a worker outage.
 */
async function callWorkerAPI(
  endpoint: string,
  params: Record<string, any>
): Promise<{ content: Array<{ type: 'text'; text: string }>; isError?: boolean }> {
  silentDebug('[search-server] → Worker API', { endpoint, params });
  try {
    // Drop nullish entries, stringify the rest into the query string.
    const query = new URLSearchParams();
    Object.entries(params)
      .filter(([, value]) => value !== undefined && value !== null)
      .forEach(([key, value]) => query.append(key, String(value)));
    const response = await fetch(`${WORKER_BASE_URL}${endpoint}?${query}`);
    if (!response.ok) {
      const errorText = await response.text();
      throw new Error(`Worker API error (${response.status}): ${errorText}`);
    }
    // Worker returns { content: [...] } format directly
    const data = await response.json() as { content: Array<{ type: 'text'; text: string }>; isError?: boolean };
    silentDebug('[search-server] ← Worker API success', { endpoint });
    return data;
  } catch (error: any) {
    silentDebug('[search-server] ← Worker API error', { endpoint, error: error.message });
    return {
      content: [{
        type: 'text' as const,
        text: `Error calling Worker API: ${error.message}`
      }],
      isError: true
    };
  }
}
/**
 * Check whether the Worker's health endpoint responds.
 * Resolves true only on an OK response; any network error resolves false.
 */
async function verifyWorkerConnection(): Promise<boolean> {
  return fetch(`${WORKER_BASE_URL}/api/health`)
    .then((response) => response.ok)
    .catch(() => false);
}
/**
* Tool definitions with HTTP-based handlers
*/
const tools = [
{
name: 'search',
description: 'Unified search across all memory types (observations, sessions, and user prompts) using vector-first semantic search (ChromaDB). Returns combined results from all document types. IMPORTANT: Always use index format first (default) to get an overview with minimal token usage, then use format: "full" only for specific items of interest.',
inputSchema: z.object({
query: z.string().optional().describe('Natural language search query for semantic ranking via ChromaDB vector search. Optional - omit for date-filtered queries only (Chroma cannot filter by date, requires direct SQLite).'),
format: z.enum(['index', 'full']).default('index').describe('Output format: "index" for titles/dates only (default, RECOMMENDED for initial search), "full" for complete details (use only after reviewing index results)'),
type: z.enum(['observations', 'sessions', 'prompts']).optional().describe('Filter by document type (observations, sessions, or prompts). Omit to search all types.'),
obs_type: z.string().optional().describe('Filter observations by type (single value or comma-separated list: decision,bugfix,feature,refactor,discovery,change). Only applies when type="observations"'),
concepts: z.string().optional().describe('Filter by concept tags (single value or comma-separated list). Only applies when type="observations"'),
files: z.string().optional().describe('Filter by file paths (single value or comma-separated list for partial match). Only applies when type="observations"'),
project: z.string().optional().describe('Filter by project name'),
dateStart: z.union([z.string(), z.number()]).optional().describe('Start date for filtering (ISO string or epoch timestamp)'),
dateEnd: z.union([z.string(), z.number()]).optional().describe('End date for filtering (ISO string or epoch timestamp)'),
limit: z.number().min(1).max(100).default(20).describe('Maximum number of results'),
offset: z.number().min(0).default(0).describe('Number of results to skip'),
orderBy: z.enum(['relevance', 'date_desc', 'date_asc']).default('date_desc').describe('Sort order')
}),
handler: async (args: any) => {
const endpoint = TOOL_ENDPOINT_MAP['search'];
return await callWorkerAPI(endpoint, args);
}
},
{
name: 'timeline',
description: 'Fetch timeline of observations around a specific point in time. Supports two modes: anchor-based (fetch observations before/after a specific observation ID) and query-based (semantic search for anchor point). IMPORTANT: Use anchor_id when you know the specific observation, or query to find an anchor point first.',
inputSchema: z.object({
query: z.string().optional().describe('Natural language query to find anchor observation (query-based mode). Mutually exclusive with anchor_id.'),
anchor_id: z.number().optional().describe('Observation ID to use as anchor (anchor-based mode). Mutually exclusive with query.'),
before: z.number().min(0).max(100).default(10).describe('Number of observations to fetch before anchor'),
after: z.number().min(0).max(100).default(10).describe('Number of observations to fetch after anchor'),
format: z.enum(['index', 'full']).default('index').describe('Output format: "index" for titles/dates only (default, RECOMMENDED), "full" for complete details'),
obs_type: z.string().optional().describe('Filter observations by type (single value or comma-separated list: decision,bugfix,feature,refactor,discovery,change)'),
concepts: z.string().optional().describe('Filter by concept tags (single value or comma-separated list)'),
files: z.string().optional().describe('Filter by file paths (single value or comma-separated list for partial match)'),
project: z.string().optional().describe('Filter by project name')
}),
handler: async (args: any) => {
const endpoint = TOOL_ENDPOINT_MAP['timeline'];
return await callWorkerAPI(endpoint, args);
}
},
{
name: 'decisions',
description: 'Semantic shortcut for finding architectural, design, and implementation decisions. Optimized for decision-type observations with relevant keyword boosting.',
inputSchema: z.object({
query: z.string().describe('Natural language query for finding decisions'),
format: z.enum(['index', 'full']).default('index').describe('Output format: "index" for titles/dates only (default, RECOMMENDED), "full" for complete details'),
limit: z.number().min(1).max(100).default(20).describe('Maximum number of results'),
dateStart: z.union([z.string(), z.number()]).optional().describe('Start date for filtering (ISO string or epoch timestamp)'),
dateEnd: z.union([z.string(), z.number()]).optional().describe('End date for filtering (ISO string or epoch timestamp)')
}),
handler: async (args: any) => {
const endpoint = TOOL_ENDPOINT_MAP['decisions'];
return await callWorkerAPI(endpoint, args);
}
},
{
name: 'changes',
description: 'Semantic shortcut for finding code changes, refactorings, and modifications. Optimized for change-type observations with relevant keyword boosting.',
inputSchema: z.object({
query: z.string().describe('Natural language query for finding changes'),
format: z.enum(['index', 'full']).default('index').describe('Output format: "index" for titles/dates only (default, RECOMMENDED), "full" for complete details'),
limit: z.number().min(1).max(100).default(20).describe('Maximum number of results'),
dateStart: z.union([z.string(), z.number()]).optional().describe('Start date for filtering (ISO string or epoch timestamp)'),
dateEnd: z.union([z.string(), z.number()]).optional().describe('End date for filtering (ISO string or epoch timestamp)')
}),
handler: async (args: any) => {
const endpoint = TOOL_ENDPOINT_MAP['changes'];
return await callWorkerAPI(endpoint, args);
}
},
{
name: 'how_it_works',
description: 'Semantic shortcut for understanding system architecture, design patterns, and implementation details. Optimized for discovery-type observations with architecture/design keyword boosting.',
inputSchema: z.object({
query: z.string().describe('Natural language query for understanding how something works'),
format: z.enum(['index', 'full']).default('index').describe('Output format: "index" for titles/dates only (default, RECOMMENDED), "full" for complete details'),
limit: z.number().min(1).max(100).default(20).describe('Maximum number of results'),
dateStart: z.union([z.string(), z.number()]).optional().describe('Start date for filtering (ISO string or epoch timestamp)'),
dateEnd: z.union([z.string(), z.number()]).optional().describe('End date for filtering (ISO string or epoch timestamp)')
}),
handler: async (args: any) => {
const endpoint = TOOL_ENDPOINT_MAP['how_it_works'];
return await callWorkerAPI(endpoint, args);
}
},
{
name: 'search_observations',
description: '[DEPRECATED - Use "search" with type="observations" instead] Search observations (facts/narratives) using FTS5 full-text search. Supports filtering by type, concepts, files, and date range.',
inputSchema: z.object({
query: z.string().optional().describe('Full-text search query (FTS5)'),
format: z.enum(['index', 'full']).default('index').describe('Output format: "index" for titles/dates only (default, RECOMMENDED), "full" for complete details'),
type: z.string().optional().describe('Filter by observation type (single value or comma-separated list: decision,bugfix,feature,refactor,discovery,change)'),
concepts: z.string().optional().describe('Filter by concept tags (single value or comma-separated list)'),
files: z.string().optional().describe('Filter by file paths (single value or comma-separated list for partial match)'),
project: z.string().optional().describe('Filter by project name'),
dateStart: z.union([z.string(), z.number()]).optional().describe('Start date for filtering (ISO string or epoch timestamp)'),
dateEnd: z.union([z.string(), z.number()]).optional().describe('End date for filtering (ISO string or epoch timestamp)'),
limit: z.number().min(1).max(100).default(20).describe('Maximum number of results'),
offset: z.number().min(0).default(0).describe('Number of results to skip'),
orderBy: z.enum(['relevance', 'date_desc', 'date_asc']).default('relevance').describe('Sort order (relevance only when query provided)')
}),
handler: async (args: any) => {
const endpoint = TOOL_ENDPOINT_MAP['search_observations'];
return await callWorkerAPI(endpoint, args);
}
},
{
name: 'search_sessions',
description: '[DEPRECATED - Use "search" with type="sessions" instead] Search session summaries using FTS5 full-text search. Returns both request_summary and learned_summary fields.',
inputSchema: z.object({
query: z.string().optional().describe('Full-text search query (FTS5)'),
format: z.enum(['index', 'full']).default('index').describe('Output format: "index" for titles/dates only (default, RECOMMENDED), "full" for complete details'),
project: z.string().optional().describe('Filter by project name'),
dateStart: z.union([z.string(), z.number()]).optional().describe('Start date for filtering (ISO string or epoch timestamp)'),
dateEnd: z.union([z.string(), z.number()]).optional().describe('End date for filtering (ISO string or epoch timestamp)'),
limit: z.number().min(1).max(100).default(20).describe('Maximum number of results'),
offset: z.number().min(0).default(0).describe('Number of results to skip'),
orderBy: z.enum(['relevance', 'date_desc', 'date_asc']).default('relevance').describe('Sort order (relevance only when query provided)')
}),
handler: async (args: any) => {
const endpoint = TOOL_ENDPOINT_MAP['search_sessions'];
return await callWorkerAPI(endpoint, args);
}
},
{
name: 'search_user_prompts',
description: '[DEPRECATED - Use "search" with type="prompts" instead] Search user prompts using FTS5 full-text search. Searches prompt text only.',
inputSchema: z.object({
query: z.string().optional().describe('Full-text search query (FTS5)'),
format: z.enum(['index', 'full']).default('index').describe('Output format: "index" for titles/dates only (default, RECOMMENDED), "full" for complete details'),
project: z.string().optional().describe('Filter by project name'),
dateStart: z.union([z.string(), z.number()]).optional().describe('Start date for filtering (ISO string or epoch timestamp)'),
dateEnd: z.union([z.string(), z.number()]).optional().describe('End date for filtering (ISO string or epoch timestamp)'),
limit: z.number().min(1).max(100).default(20).describe('Maximum number of results'),
offset: z.number().min(0).default(0).describe('Number of results to skip'),
orderBy: z.enum(['relevance', 'date_desc', 'date_asc']).default('relevance').describe('Sort order (relevance only when query provided)')
}),
handler: async (args: any) => {
const endpoint = TOOL_ENDPOINT_MAP['search_user_prompts'];
return await callWorkerAPI(endpoint, args);
}
},
{
name: 'find_by_concept',
description: 'Find observations tagged with specific concepts. Returns observations that match any of the provided concept tags.',
inputSchema: z.object({
concepts: z.string().describe('Concept tag(s) to filter by (single value or comma-separated list)'),
format: z.enum(['index', 'full']).default('index').describe('Output format: "index" for titles/dates only (default, RECOMMENDED), "full" for complete details'),
type: z.string().optional().describe('Filter by observation type (single value or comma-separated list: decision,bugfix,feature,refactor,discovery,change)'),
files: z.string().optional().describe('Filter by file paths (single value or comma-separated list for partial match)'),
project: z.string().optional().describe('Filter by project name'),
dateStart: z.union([z.string(), z.number()]).optional().describe('Start date for filtering (ISO string or epoch timestamp)'),
dateEnd: z.union([z.string(), z.number()]).optional().describe('End date for filtering (ISO string or epoch timestamp)'),
limit: z.number().min(1).max(100).default(20).describe('Maximum number of results'),
offset: z.number().min(0).default(0).describe('Number of results to skip'),
orderBy: z.enum(['date_desc', 'date_asc']).default('date_desc').describe('Sort order')
}),
handler: async (args: any) => {
const endpoint = TOOL_ENDPOINT_MAP['find_by_concept'];
return await callWorkerAPI(endpoint, args);
}
},
{
name: 'find_by_file',
description: 'Find observations related to specific file paths. Uses partial matching - searches for file paths containing the provided string.',
inputSchema: z.object({
files: z.string().describe('File path(s) to filter by (single value or comma-separated list for partial match)'),
format: z.enum(['index', 'full']).default('index').describe('Output format: "index" for titles/dates only (default, RECOMMENDED), "full" for complete details'),
type: z.string().optional().describe('Filter by observation type (single value or comma-separated list: decision,bugfix,feature,refactor,discovery,change)'),
concepts: z.string().optional().describe('Filter by concept tags (single value or comma-separated list)'),
project: z.string().optional().describe('Filter by project name'),
dateStart: z.union([z.string(), z.number()]).optional().describe('Start date for filtering (ISO string or epoch timestamp)'),
dateEnd: z.union([z.string(), z.number()]).optional().describe('End date for filtering (ISO string or epoch timestamp)'),
limit: z.number().min(1).max(100).default(20).describe('Maximum number of results'),
offset: z.number().min(0).default(0).describe('Number of results to skip'),
orderBy: z.enum(['date_desc', 'date_asc']).default('date_desc').describe('Sort order')
}),
handler: async (args: any) => {
const endpoint = TOOL_ENDPOINT_MAP['find_by_file'];
return await callWorkerAPI(endpoint, args);
}
},
{
name: 'find_by_type',
description: 'Find observations of specific types. Returns observations matching any of the provided observation types.',
inputSchema: z.object({
type: z.string().describe('Observation type(s) to filter by (single value or comma-separated list: decision,bugfix,feature,refactor,discovery,change)'),
format: z.enum(['index', 'full']).default('index').describe('Output format: "index" for titles/dates only (default, RECOMMENDED), "full" for complete details'),
concepts: z.string().optional().describe('Filter by concept tags (single value or comma-separated list)'),
files: z.string().optional().describe('Filter by file paths (single value or comma-separated list for partial match)'),
project: z.string().optional().describe('Filter by project name'),
dateStart: z.union([z.string(), z.number()]).optional().describe('Start date for filtering (ISO string or epoch timestamp)'),
dateEnd: z.union([z.string(), z.number()]).optional().describe('End date for filtering (ISO string or epoch timestamp)'),
limit: z.number().min(1).max(100).default(20).describe('Maximum number of results'),
offset: z.number().min(0).default(0).describe('Number of results to skip'),
orderBy: z.enum(['date_desc', 'date_asc']).default('date_desc').describe('Sort order')
}),
handler: async (args: any) => {
const endpoint = TOOL_ENDPOINT_MAP['find_by_type'];
return await callWorkerAPI(endpoint, args);
}
},
  {
    // Timeline feed of recent observations, sessions, and prompts (Worker-backed).
    name: 'get_recent_context',
    description: 'Get recent session context for timeline display. Returns recent observations, sessions, and user prompts with metadata for building timeline UI.',
    inputSchema: z.object({
      limit: z.number().min(1).max(100).default(30).describe('Maximum number of timeline items to return'),
      format: z.enum(['index', 'full']).default('index').describe('Output format: "index" for titles/dates only (default, RECOMMENDED), "full" for complete details'),
      type: z.string().optional().describe('Filter by observation type (single value or comma-separated list: decision,bugfix,feature,refactor,discovery,change)'),
      concepts: z.string().optional().describe('Filter by concept tags (single value or comma-separated list)'),
      files: z.string().optional().describe('Filter by file paths (single value or comma-separated list for partial match)'),
      project: z.string().optional().describe('Filter by project name'),
      dateStart: z.union([z.string(), z.number()]).optional().describe('Start date for filtering (ISO string or epoch timestamp)'),
      dateEnd: z.union([z.string(), z.number()]).optional().describe('End date for filtering (ISO string or epoch timestamp)')
    }),
    handler: async (args: any) => {
      // Proxy straight to the Worker HTTP API; endpoint comes from the shared map.
      const endpoint = TOOL_ENDPOINT_MAP['get_recent_context'];
      return await callWorkerAPI(endpoint, args);
    }
  },
  {
    // Timeline window around a specific observation (before/after the anchor id).
    name: 'get_context_timeline',
    description: 'Get timeline of observations around a specific observation ID. Returns observations before and after the anchor point with metadata for timeline display.',
    inputSchema: z.object({
      anchor_id: z.number().describe('Observation ID to use as anchor point'),
      before: z.number().min(0).max(100).default(10).describe('Number of observations to fetch before anchor'),
      after: z.number().min(0).max(100).default(10).describe('Number of observations to fetch after anchor'),
      format: z.enum(['index', 'full']).default('index').describe('Output format: "index" for titles/dates only (default, RECOMMENDED), "full" for complete details'),
      type: z.string().optional().describe('Filter by observation type (single value or comma-separated list: decision,bugfix,feature,refactor,discovery,change)'),
      concepts: z.string().optional().describe('Filter by concept tags (single value or comma-separated list)'),
      files: z.string().optional().describe('Filter by file paths (single value or comma-separated list for partial match)'),
      project: z.string().optional().describe('Filter by project name')
    }),
    handler: async (args: any) => {
      // Proxy straight to the Worker HTTP API; endpoint comes from the shared map.
      const endpoint = TOOL_ENDPOINT_MAP['get_context_timeline'];
      return await callWorkerAPI(endpoint, args);
    }
  },
  {
    // Search-then-timeline: the best match for the query becomes the anchor.
    name: 'get_timeline_by_query',
    description: 'Combined search + timeline tool. First searches for observations matching the query, then returns timeline around the best match. Useful for finding specific observations and viewing their context.',
    inputSchema: z.object({
      query: z.string().describe('Natural language query to find anchor observation'),
      before: z.number().min(0).max(100).default(10).describe('Number of observations to fetch before anchor'),
      after: z.number().min(0).max(100).default(10).describe('Number of observations to fetch after anchor'),
      format: z.enum(['index', 'full']).default('index').describe('Output format: "index" for titles/dates only (default, RECOMMENDED), "full" for complete details'),
      type: z.string().optional().describe('Filter by observation type (single value or comma-separated list: decision,bugfix,feature,refactor,discovery,change)'),
      concepts: z.string().optional().describe('Filter by concept tags (single value or comma-separated list)'),
      files: z.string().optional().describe('Filter by file paths (single value or comma-separated list for partial match)'),
      project: z.string().optional().describe('Filter by project name'),
      dateStart: z.union([z.string(), z.number()]).optional().describe('Start date for filtering (ISO string or epoch timestamp)'),
      dateEnd: z.union([z.string(), z.number()]).optional().describe('End date for filtering (ISO string or epoch timestamp)')
    }),
    handler: async (args: any) => {
      // Proxy straight to the Worker HTTP API; endpoint comes from the shared map.
      const endpoint = TOOL_ENDPOINT_MAP['get_timeline_by_query'];
      return await callWorkerAPI(endpoint, args);
    }
  }
];
// Instantiate the MCP server with its identity and the capabilities it advertises.
const serverInfo = {
  name: 'claude-mem-search-server',
  version: '1.0.0',
};
const serverOptions = {
  capabilities: {
    tools: {},
  },
};
const server = new Server(serverInfo, serverOptions);
// Advertise the tool catalog: each Zod schema is converted to JSON Schema
// so MCP clients can validate arguments on their side.
server.setRequestHandler(ListToolsRequestSchema, async () => {
  const toolDescriptors = [];
  for (const { name, description, inputSchema } of tools) {
    toolDescriptors.push({
      name,
      description,
      inputSchema: zodToJsonSchema(inputSchema) as Record<string, unknown>
    });
  }
  return { tools: toolDescriptors };
});
// Dispatch a tool invocation by name. Unknown tools throw (protocol-level
// error); handler failures are returned as MCP error results instead.
server.setRequestHandler(CallToolRequestSchema, async (request) => {
  const { name, arguments: toolArgs } = request.params;
  const tool = tools.find((candidate) => candidate.name === name);
  if (!tool) {
    throw new Error(`Unknown tool: ${name}`);
  }
  try {
    return await tool.handler(toolArgs || {});
  } catch (error: any) {
    const failureText = `Tool execution failed: ${error.message}`;
    return {
      content: [{
        type: 'text' as const,
        text: failureText
      }],
      isError: true
    };
  }
});
// Cleanup function: announce shutdown on the debug channel, then exit successfully.
async function cleanup() {
  silentDebug('[search-server] Shutting down...');
  const successCode = 0;
  process.exit(successCode);
}
// Register cleanup handlers for graceful shutdown
// SIGTERM (service managers) and SIGINT (Ctrl+C) both route through cleanup().
process.on('SIGTERM', cleanup);
process.on('SIGINT', cleanup);
// Start the server: connect the MCP transport first, then probe the Worker
// off the critical path so startup is never blocked by a dead Worker.
async function main() {
  const transport = new StdioServerTransport();
  await server.connect(transport);
  silentDebug('[search-server] Claude-mem search server started');

  // Deferred availability check — tools degrade gracefully until it passes.
  setTimeout(async () => {
    const workerAvailable = await verifyWorkerConnection();
    if (workerAvailable) {
      silentDebug('[search-server] Worker available at', WORKER_BASE_URL);
      return;
    }
    silentDebug('[search-server] WARNING: Worker not available at', WORKER_BASE_URL);
    silentDebug('[search-server] Tools will fail until Worker is started');
    silentDebug('[search-server] Start Worker with: npm run worker:restart');
  }, 0);
}
// Entry point: a fatal startup error is logged to the debug channel and
// converted into a non-zero exit code.
main().catch((error) => {
  silentDebug('[search-server] Fatal error:', error);
  process.exit(1);
});
+712
View File
@@ -0,0 +1,712 @@
/**
* Context Generator - generates context injection for SessionStart
*
* This module contains all the logic for building the context injection string.
* It's used by the worker service and called via HTTP from the context-hook.
*/
import path from 'path';
import { homedir } from 'os';
import { existsSync, readFileSync, unlinkSync } from 'fs';
import { SessionStore } from './sqlite/SessionStore.js';
import {
OBSERVATION_TYPES,
OBSERVATION_CONCEPTS,
TYPE_ICON_MAP,
TYPE_WORK_EMOJI_MAP
} from '../constants/observation-metadata.js';
import { logger } from '../utils/logger.js';
import { SettingsDefaultsManager } from './worker/settings/SettingsDefaultsManager.js';
// Version marker path - use homedir-based path that works in both CJS and ESM contexts
// NOTE(review): generateContext deletes this file when better-sqlite3 fails to
// load (ERR_DLOPEN_FAILED) — presumably so the installer rebuilds the native
// module on the next startup; confirm against the smart-install script.
const VERSION_MARKER_PATH = path.join(homedir(), '.claude', 'plugins', 'marketplaces', 'thedotmack', 'plugin', '.install-version');
/**
 * Resolved context-generation settings. Populated by loadContextConfig from
 * ~/.claude-mem/settings.json, with hard-coded fallbacks on load failure.
 */
interface ContextConfig {
  // Display counts
  totalObservationCount: number;
  fullObservationCount: number;
  sessionCount: number;
  // Token display toggles
  showReadTokens: boolean;
  showWorkTokens: boolean;
  showSavingsAmount: boolean;
  showSavingsPercent: boolean;
  // Filters (allow-lists applied in the observations SQL query)
  observationTypes: Set<string>;
  observationConcepts: Set<string>;
  // Display options
  fullObservationField: 'narrative' | 'facts';
  showLastSummary: boolean;
  showLastMessage: boolean;
}
/**
 * Load all context configuration settings
 * Priority: ~/.claude-mem/settings.json > env var > defaults
 *
 * Note: parseInt never throws — it yields NaN for missing or malformed
 * values — so numeric fields are validated per-field below. The catch only
 * covers genuinely throwing failures (e.g. split() on a missing string).
 */
function loadContextConfig(): ContextConfig {
  const settingsPath = path.join(homedir(), '.claude-mem', 'settings.json');
  const settings = SettingsDefaultsManager.loadFromFile(settingsPath);

  // Parse an integer setting, substituting the built-in default when the
  // stored value is absent or not a number (parseInt -> NaN).
  const toCount = (value: string, fallback: number): number => {
    const parsed = parseInt(value, 10);
    return Number.isNaN(parsed) ? fallback : parsed;
  };

  try {
    return {
      totalObservationCount: toCount(settings.CLAUDE_MEM_CONTEXT_OBSERVATIONS, 50),
      fullObservationCount: toCount(settings.CLAUDE_MEM_CONTEXT_FULL_COUNT, 5),
      sessionCount: toCount(settings.CLAUDE_MEM_CONTEXT_SESSION_COUNT, 10),
      showReadTokens: settings.CLAUDE_MEM_CONTEXT_SHOW_READ_TOKENS === 'true',
      showWorkTokens: settings.CLAUDE_MEM_CONTEXT_SHOW_WORK_TOKENS === 'true',
      showSavingsAmount: settings.CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_AMOUNT === 'true',
      showSavingsPercent: settings.CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_PERCENT === 'true',
      observationTypes: new Set(
        settings.CLAUDE_MEM_CONTEXT_OBSERVATION_TYPES.split(',').map((t: string) => t.trim()).filter(Boolean)
      ),
      observationConcepts: new Set(
        settings.CLAUDE_MEM_CONTEXT_OBSERVATION_CONCEPTS.split(',').map((c: string) => c.trim()).filter(Boolean)
      ),
      fullObservationField: settings.CLAUDE_MEM_CONTEXT_FULL_FIELD as 'narrative' | 'facts',
      showLastSummary: settings.CLAUDE_MEM_CONTEXT_SHOW_LAST_SUMMARY === 'true',
      showLastMessage: settings.CLAUDE_MEM_CONTEXT_SHOW_LAST_MESSAGE === 'true',
    };
  } catch (error) {
    logger.warn('WORKER', 'Failed to load context settings, using defaults', {}, error as Error);
    // Return defaults on error
    return {
      totalObservationCount: 50,
      fullObservationCount: 5,
      sessionCount: 10,
      showReadTokens: true,
      showWorkTokens: true,
      showSavingsAmount: true,
      showSavingsPercent: true,
      observationTypes: new Set(OBSERVATION_TYPES),
      observationConcepts: new Set(OBSERVATION_CONCEPTS),
      fullObservationField: 'narrative' as const,
      showLastSummary: true,
      showLastMessage: false,
    };
  }
}
// Configuration constants
// Rough chars-per-token heuristic used for the "Read" token estimates.
const CHARS_PER_TOKEN_ESTIMATE = 4;
// One extra summary is fetched beyond sessionCount so each displayed summary
// can borrow the next-older summary's timestamp (see generateContext).
const SUMMARY_LOOKAHEAD = 1;
/**
 * Payload forwarded from the SessionStart hook. All fields are optional and
 * extra hook fields pass through via the index signature; generateContext
 * itself only reads cwd and session_id.
 */
export interface ContextInput {
  session_id?: string;
  transcript_path?: string;
  cwd?: string;
  hook_event_name?: string;
  source?: "startup" | "resume" | "clear" | "compact";
  [key: string]: any;
}
// ANSI color codes for terminal output
// Raw escape sequences used by the useColors rendering paths; the markdown
// paths format with **bold** / headings instead and never interpolate these.
const colors = {
  reset: '\x1b[0m',
  bright: '\x1b[1m',
  dim: '\x1b[2m',
  cyan: '\x1b[36m',
  green: '\x1b[32m',
  yellow: '\x1b[33m',
  blue: '\x1b[34m',
  magenta: '\x1b[35m',
  gray: '\x1b[90m',
  red: '\x1b[31m',
};
/**
 * Row shape returned by the observations query in generateContext.
 * facts and files_modified hold JSON-encoded arrays serialized as strings
 * (decoded with parseJsonArray); concepts is likewise queried via SQLite's
 * json_each. files_read is selected but not decoded in this module.
 */
interface Observation {
  id: number;
  sdk_session_id: string;
  type: string;
  title: string | null;
  subtitle: string | null;
  narrative: string | null;
  facts: string | null;
  concepts: string | null;
  files_read: string | null;
  files_modified: string | null;
  discovery_tokens: number | null;
  created_at: string;
  created_at_epoch: number;
}
/**
 * Row shape returned by the session_summaries query in generateContext.
 * All free-text fields are nullable; created_at_epoch drives ordering.
 */
interface SessionSummary {
  id: number;
  sdk_session_id: string;
  request: string | null;
  investigated: string | null;
  learned: string | null;
  completed: string | null;
  next_steps: string | null;
  created_at: string;
  created_at_epoch: number;
}
// Helper: Parse JSON array safely
// Returns [] for null/empty input, malformed JSON, or JSON that is not an
// array — callers never have to defend against a throw.
function parseJsonArray(json: string | null): string[] {
  if (!json) return [];
  try {
    const parsed = JSON.parse(json);
    return Array.isArray(parsed) ? parsed : [];
  } catch {
    // Malformed JSON is treated the same as an absent value (unused catch
    // binding dropped — ES2019 optional catch binding).
    return [];
  }
}
// Helper: Format date with time, pinned to the en-US locale
// (e.g. "Dec 8, 3:15 PM"); uses the host timezone.
function formatDateTime(dateStr: string): string {
  const opts: Intl.DateTimeFormatOptions = {
    month: 'short',
    day: 'numeric',
    hour: 'numeric',
    minute: '2-digit',
    hour12: true
  };
  return new Date(dateStr).toLocaleString('en-US', opts);
}
// Helper: Format just time (no date), pinned to the en-US locale
// (e.g. "3:15 PM"); uses the host timezone.
function formatTime(dateStr: string): string {
  const opts: Intl.DateTimeFormatOptions = {
    hour: 'numeric',
    minute: '2-digit',
    hour12: true
  };
  return new Date(dateStr).toLocaleString('en-US', opts);
}
// Helper: Format just date, pinned to the en-US locale
// (e.g. "Dec 8, 2025"); uses the host timezone.
function formatDate(dateStr: string): string {
  const opts: Intl.DateTimeFormatOptions = {
    month: 'short',
    day: 'numeric',
    year: 'numeric'
  };
  return new Date(dateStr).toLocaleString('en-US', opts);
}
// Helper: Convert absolute paths to relative paths
// Relative inputs pass through untouched; absolute ones are rebased on cwd.
function toRelativePath(filePath: string, cwd: string): string {
  return path.isAbsolute(filePath) ? path.relative(cwd, filePath) : filePath;
}
// Helper: Render a summary field
// Produces [line, ''] (trailing blank for spacing) or [] when the field is empty.
function renderSummaryField(label: string, value: string | null, color: string, useColors: boolean): string[] {
  if (!value) {
    return [];
  }
  const line = useColors
    ? `${color}${label}:${colors.reset} ${value}`
    : `**${label}**: ${value}`;
  return [line, ''];
}
// Helper: Convert cwd path to dashed format (every slash becomes a dash,
// matching Claude's ~/.claude/projects directory naming).
function cwdToDashed(cwd: string): string {
  return cwd.split('/').join('-');
}
// Helper: Extract last assistant message from transcript file
// Scans the JSONL transcript backwards and returns the newest non-empty
// assistant message with <system-reminder> blocks stripped. userMessage is
// always empty; the field exists for interface stability.
function extractPriorMessages(transcriptPath: string): { userMessage: string; assistantMessage: string } {
  try {
    if (!existsSync(transcriptPath)) {
      return { userMessage: '', assistantMessage: '' };
    }

    const content = readFileSync(transcriptPath, 'utf-8').trim();
    if (!content) {
      return { userMessage: '', assistantMessage: '' };
    }

    const lines = content.split('\n').filter((line) => line.trim());
    for (let i = lines.length - 1; i >= 0; i--) {
      try {
        const rawLine = lines[i];
        // Cheap substring check avoids JSON.parse on non-assistant entries.
        if (!rawLine.includes('"type":"assistant"')) continue;

        const entry = JSON.parse(rawLine);
        if (entry.type !== 'assistant') continue;
        if (!entry.message?.content || !Array.isArray(entry.message.content)) continue;

        let text = '';
        for (const block of entry.message.content) {
          if (block.type === 'text') {
            text += block.text;
          }
        }
        text = text.replace(/<system-reminder>[\s\S]*?<\/system-reminder>/g, '').trim();
        if (text) {
          return { userMessage: '', assistantMessage: text };
        }
      } catch (parseError) {
        // Skip unparseable lines; older entries may still yield a message.
        continue;
      }
    }
    return { userMessage: '', assistantMessage: '' };
  } catch (error) {
    logger.failure('WORKER', `Failed to extract prior messages from transcript`, { transcriptPath }, error as Error);
    return { userMessage: '', assistantMessage: '' };
  }
}
/**
 * Generate context for a project.
 *
 * Builds the SessionStart context-injection string from recent observations
 * and session summaries in SQLite, rendered as a per-day timeline with token
 * economics, optional full details for the newest items, and a footer.
 *
 * @param input - SessionStart hook payload; only cwd and session_id are read here.
 * @param useColors - true renders ANSI terminal output; false renders markdown.
 * @returns The rendered context string ('' when the native DB module fails to load).
 */
export async function generateContext(input?: ContextInput, useColors: boolean = false): Promise<string> {
  const config = loadContextConfig();
  const cwd = input?.cwd ?? process.cwd();
  const project = cwd ? path.basename(cwd) : 'unknown-project';
  let db: SessionStore | null = null;
  try {
    db = new SessionStore();
  } catch (error: any) {
    // Native binding failed to load (ERR_DLOPEN_FAILED). Remove the install
    // marker — NOTE(review): presumably forces smart-install to rebuild the
    // native module on next startup; confirm — and return empty context.
    if (error.code === 'ERR_DLOPEN_FAILED') {
      try {
        unlinkSync(VERSION_MARKER_PATH);
      } catch (unlinkError) {
        // Marker might not exist
      }
      console.error('Native module rebuild needed - restart Claude Code to auto-fix');
      return '';
    }
    throw error;
  }
  // Build SQL WHERE clause for observation types
  const typeArray = Array.from(config.observationTypes);
  const typePlaceholders = typeArray.map(() => '?').join(',');
  // Build SQL WHERE clause for concepts
  const conceptArray = Array.from(config.observationConcepts);
  const conceptPlaceholders = conceptArray.map(() => '?').join(',');
  // NOTE(review): empty type/concept settings would render "IN ()", which
  // SQLite rejects — confirm the settings loader guarantees non-empty sets.
  // Get recent observations
  // Newest first; a row must match a configured type AND share at least one
  // configured concept (EXISTS over json_each of the JSON-encoded concepts).
  const observations = db.db.prepare(`
    SELECT
      id, sdk_session_id, type, title, subtitle, narrative,
      facts, concepts, files_read, files_modified, discovery_tokens,
      created_at, created_at_epoch
    FROM observations
    WHERE project = ?
      AND type IN (${typePlaceholders})
      AND EXISTS (
        SELECT 1 FROM json_each(concepts)
        WHERE value IN (${conceptPlaceholders})
      )
    ORDER BY created_at_epoch DESC
    LIMIT ?
  `).all(project, ...typeArray, ...conceptArray, config.totalObservationCount) as Observation[];
  // Get recent summaries
  const recentSummaries = db.db.prepare(`
    SELECT id, sdk_session_id, request, investigated, learned, completed, next_steps, created_at, created_at_epoch
    FROM session_summaries
    WHERE project = ?
    ORDER BY created_at_epoch DESC
    LIMIT ?
  `).all(project, config.sessionCount + SUMMARY_LOOKAHEAD) as SessionSummary[];
  // Retrieve prior session messages if enabled
  let priorUserMessage = '';
  let priorAssistantMessage = '';
  if (config.showLastMessage && observations.length > 0) {
    try {
      const currentSessionId = input?.session_id;
      // Newest observation that belongs to a different (i.e. prior) session.
      const priorSessionObs = observations.find(obs => obs.sdk_session_id !== currentSessionId);
      if (priorSessionObs) {
        const priorSessionId = priorSessionObs.sdk_session_id;
        const dashedCwd = cwdToDashed(cwd);
        const transcriptPath = path.join(homedir(), '.claude', 'projects', dashedCwd, `${priorSessionId}.jsonl`);
        const messages = extractPriorMessages(transcriptPath);
        priorUserMessage = messages.userMessage;
        priorAssistantMessage = messages.assistantMessage;
      }
    } catch (error) {
      // Ignore
    }
  }
  // If we have neither observations nor summaries, show empty state
  if (observations.length === 0 && recentSummaries.length === 0) {
    db?.close();
    if (useColors) {
      return `\n${colors.bright}${colors.cyan}[${project}] recent context${colors.reset}\n${colors.gray}${'─'.repeat(60)}${colors.reset}\n\n${colors.dim}No previous sessions found for this project yet.${colors.reset}\n`;
    }
    return `# [${project}] recent context\n\nNo previous sessions found for this project yet.`;
  }
  const displaySummaries = recentSummaries.slice(0, config.sessionCount);
  const timelineObs = observations;
  // Build output
  const output: string[] = [];
  // Header
  if (useColors) {
    output.push('');
    output.push(`${colors.bright}${colors.cyan}[${project}] recent context${colors.reset}`);
    output.push(`${colors.gray}${'─'.repeat(60)}${colors.reset}`);
    output.push('');
  } else {
    output.push(`# [${project}] recent context`);
    output.push('');
  }
  // Chronological Timeline
  if (timelineObs.length > 0) {
    // Legend
    if (useColors) {
      output.push(`${colors.dim}Legend: 🎯 session-request | 🔴 bugfix | 🟣 feature | 🔄 refactor | ✅ change | 🔵 discovery | ⚖️ decision${colors.reset}`);
    } else {
      output.push(`**Legend:** 🎯 session-request | 🔴 bugfix | 🟣 feature | 🔄 refactor | ✅ change | 🔵 discovery | ⚖️ decision`);
    }
    output.push('');
    // Column Key
    if (useColors) {
      output.push(`${colors.bright}💡 Column Key${colors.reset}`);
      output.push(`${colors.dim}  Read: Tokens to read this observation (cost to learn it now)${colors.reset}`);
      output.push(`${colors.dim}  Work: Tokens spent on work that produced this record (🔍 research, 🛠️ building, ⚖️ deciding)${colors.reset}`);
    } else {
      output.push(`💡 **Column Key**:`);
      output.push(`- **Read**: Tokens to read this observation (cost to learn it now)`);
      output.push(`- **Work**: Tokens spent on work that produced this record (🔍 research, 🛠️ building, ⚖️ deciding)`);
    }
    output.push('');
    // Context Index Instructions
    if (useColors) {
      output.push(`${colors.dim}💡 Context Index: This semantic index (titles, types, files, tokens) is usually sufficient to understand past work.${colors.reset}`);
      output.push('');
      output.push(`${colors.dim}When you need implementation details, rationale, or debugging context:${colors.reset}`);
      output.push(`${colors.dim}  - Use the mem-search skill to fetch full observations on-demand${colors.reset}`);
      output.push(`${colors.dim}  - Critical types (🔴 bugfix, ⚖️ decision) often need detailed fetching${colors.reset}`);
      output.push(`${colors.dim}  - Trust this index over re-reading code for past decisions and learnings${colors.reset}`);
    } else {
      output.push(`💡 **Context Index:** This semantic index (titles, types, files, tokens) is usually sufficient to understand past work.`);
      output.push('');
      output.push(`When you need implementation details, rationale, or debugging context:`);
      output.push(`- Use the mem-search skill to fetch full observations on-demand`);
      output.push(`- Critical types (🔴 bugfix, ⚖️ decision) often need detailed fetching`);
      output.push(`- Trust this index over re-reading code for past decisions and learnings`);
    }
    output.push('');
    // Context Economics
    // Read cost is estimated from character counts; Work cost is the recorded
    // discovery_tokens. Savings = work the reader avoids by reusing the index.
    const totalObservations = observations.length;
    const totalReadTokens = observations.reduce((sum, obs) => {
      const obsSize = (obs.title?.length || 0) +
        (obs.subtitle?.length || 0) +
        (obs.narrative?.length || 0) +
        JSON.stringify(obs.facts || []).length;
      return sum + Math.ceil(obsSize / CHARS_PER_TOKEN_ESTIMATE);
    }, 0);
    const totalDiscoveryTokens = observations.reduce((sum, obs) => sum + (obs.discovery_tokens || 0), 0);
    const savings = totalDiscoveryTokens - totalReadTokens;
    const savingsPercent = totalDiscoveryTokens > 0
      ? Math.round((savings / totalDiscoveryTokens) * 100)
      : 0;
    const showContextEconomics = config.showReadTokens || config.showWorkTokens ||
      config.showSavingsAmount || config.showSavingsPercent;
    if (showContextEconomics) {
      if (useColors) {
        output.push(`${colors.bright}${colors.cyan}📊 Context Economics${colors.reset}`);
        output.push(`${colors.dim}  Loading: ${totalObservations} observations (${totalReadTokens.toLocaleString()} tokens to read)${colors.reset}`);
        output.push(`${colors.dim}  Work investment: ${totalDiscoveryTokens.toLocaleString()} tokens spent on research, building, and decisions${colors.reset}`);
        if (totalDiscoveryTokens > 0 && (config.showSavingsAmount || config.showSavingsPercent)) {
          let savingsLine = '  Your savings: ';
          if (config.showSavingsAmount && config.showSavingsPercent) {
            savingsLine += `${savings.toLocaleString()} tokens (${savingsPercent}% reduction from reuse)`;
          } else if (config.showSavingsAmount) {
            savingsLine += `${savings.toLocaleString()} tokens`;
          } else {
            savingsLine += `${savingsPercent}% reduction from reuse`;
          }
          output.push(`${colors.green}${savingsLine}${colors.reset}`);
        }
        output.push('');
      } else {
        output.push(`📊 **Context Economics**:`);
        output.push(`- Loading: ${totalObservations} observations (${totalReadTokens.toLocaleString()} tokens to read)`);
        output.push(`- Work investment: ${totalDiscoveryTokens.toLocaleString()} tokens spent on research, building, and decisions`);
        if (totalDiscoveryTokens > 0 && (config.showSavingsAmount || config.showSavingsPercent)) {
          let savingsLine = '- Your savings: ';
          if (config.showSavingsAmount && config.showSavingsPercent) {
            savingsLine += `${savings.toLocaleString()} tokens (${savingsPercent}% reduction from reuse)`;
          } else if (config.showSavingsAmount) {
            savingsLine += `${savings.toLocaleString()} tokens`;
          } else {
            savingsLine += `${savingsPercent}% reduction from reuse`;
          }
          output.push(savingsLine);
        }
        output.push('');
      }
    }
    // Prepare summaries for timeline display
    const mostRecentSummaryId = recentSummaries[0]?.id;
    interface SummaryTimelineItem extends SessionSummary {
      displayEpoch: number;
      displayTime: string;
      shouldShowLink: boolean;
    }
    // Each older summary is displayed at the timestamp of the next-older entry
    // in the DESC list (the extra SUMMARY_LOOKAHEAD row makes that possible);
    // the most recent summary (i === 0) keeps its own timestamp.
    const summariesForTimeline: SummaryTimelineItem[] = displaySummaries.map((summary, i) => {
      const olderSummary = i === 0 ? null : recentSummaries[i + 1];
      return {
        ...summary,
        displayEpoch: olderSummary ? olderSummary.created_at_epoch : summary.created_at_epoch,
        displayTime: olderSummary ? olderSummary.created_at : summary.created_at,
        shouldShowLink: summary.id !== mostRecentSummaryId
      };
    });
    // Identify which observations should show full details
    // (observations is sorted DESC, so slice(0, N) selects the N newest).
    const fullObservationIds = new Set(
      observations
        .slice(0, config.fullObservationCount)
        .map(obs => obs.id)
    );
    type TimelineItem =
      | { type: 'observation'; data: Observation }
      | { type: 'summary'; data: SummaryTimelineItem };
    const timeline: TimelineItem[] = [
      ...timelineObs.map(obs => ({ type: 'observation' as const, data: obs })),
      ...summariesForTimeline.map(summary => ({ type: 'summary' as const, data: summary }))
    ];
    // Sort chronologically
    timeline.sort((a, b) => {
      const aEpoch = a.type === 'observation' ? a.data.created_at_epoch : a.data.displayEpoch;
      const bEpoch = b.type === 'observation' ? b.data.created_at_epoch : b.data.displayEpoch;
      return aEpoch - bEpoch;
    });
    // Group by day
    const itemsByDay = new Map<string, TimelineItem[]>();
    for (const item of timeline) {
      const itemDate = item.type === 'observation' ? item.data.created_at : item.data.displayTime;
      const day = formatDate(itemDate);
      if (!itemsByDay.has(day)) {
        itemsByDay.set(day, []);
      }
      itemsByDay.get(day)!.push(item);
    }
    // Sort days chronologically
    const sortedDays = Array.from(itemsByDay.entries()).sort((a, b) => {
      const aDate = new Date(a[0]).getTime();
      const bDate = new Date(b[0]).getTime();
      return aDate - bDate;
    });
    // Render each day's timeline
    for (const [day, dayItems] of sortedDays) {
      if (useColors) {
        output.push(`${colors.bright}${colors.cyan}${day}${colors.reset}`);
        output.push('');
      } else {
        output.push(`### ${day}`);
        output.push('');
      }
      // Streaming table state: currentFile groups rows per file, tableOpen
      // tracks an unterminated markdown table, lastTime suppresses repeats.
      let currentFile: string | null = null;
      let lastTime = '';
      let tableOpen = false;
      for (const item of dayItems) {
        if (item.type === 'summary') {
          if (tableOpen) {
            output.push('');
            tableOpen = false;
            currentFile = null;
            lastTime = '';
          }
          const summary = item.data;
          const summaryTitle = `${summary.request || 'Session started'} (${formatDateTime(summary.displayTime)})`;
          const link = summary.shouldShowLink ? `claude-mem://session-summary/${summary.id}` : '';
          if (useColors) {
            const linkPart = link ? `${colors.dim}[${link}]${colors.reset}` : '';
            output.push(`🎯 ${colors.yellow}#S${summary.id}${colors.reset} ${summaryTitle} ${linkPart}`);
          } else {
            const linkPart = link ? ` [→](${link})` : '';
            output.push(`**🎯 #S${summary.id}** ${summaryTitle}${linkPart}`);
          }
          output.push('');
        } else {
          const obs = item.data;
          const files = parseJsonArray(obs.files_modified);
          const file = (files.length > 0 && files[0]) ? toRelativePath(files[0], cwd) : 'General';
          if (file !== currentFile) {
            if (tableOpen) {
              output.push('');
            }
            if (useColors) {
              output.push(`${colors.dim}${file}${colors.reset}`);
            } else {
              output.push(`**${file}**`);
            }
            if (!useColors) {
              output.push(`| ID | Time | T | Title | Read | Work |`);
              output.push(`|----|------|---|-------|------|------|`);
            }
            currentFile = file;
            tableOpen = true;
            lastTime = '';
          }
          const time = formatTime(obs.created_at);
          const title = obs.title || 'Untitled';
          const icon = TYPE_ICON_MAP[obs.type as keyof typeof TYPE_ICON_MAP] || '•';
          const obsSize = (obs.title?.length || 0) +
            (obs.subtitle?.length || 0) +
            (obs.narrative?.length || 0) +
            JSON.stringify(obs.facts || []).length;
          const readTokens = Math.ceil(obsSize / CHARS_PER_TOKEN_ESTIMATE);
          const discoveryTokens = obs.discovery_tokens || 0;
          const workEmoji = TYPE_WORK_EMOJI_MAP[obs.type as keyof typeof TYPE_WORK_EMOJI_MAP] || '🔍';
          const discoveryDisplay = discoveryTokens > 0 ? `${workEmoji} ${discoveryTokens.toLocaleString()}` : '-';
          const showTime = time !== lastTime;
          const timeDisplay = showTime ? time : '';
          lastTime = time;
          const shouldShowFull = fullObservationIds.has(obs.id);
          if (shouldShowFull) {
            const detailField = config.fullObservationField === 'narrative'
              ? obs.narrative
              : (obs.facts ? parseJsonArray(obs.facts).join('\n') : null);
            if (useColors) {
              const timePart = showTime ? `${colors.dim}${time}${colors.reset}` : ' '.repeat(time.length);
              const readPart = (config.showReadTokens && readTokens > 0) ? `${colors.dim}(~${readTokens}t)${colors.reset}` : '';
              const discoveryPart = (config.showWorkTokens && discoveryTokens > 0) ? `${colors.dim}(${workEmoji} ${discoveryTokens.toLocaleString()}t)${colors.reset}` : '';
              output.push(`  ${colors.dim}#${obs.id}${colors.reset} ${timePart} ${icon} ${colors.bright}${title}${colors.reset}`);
              if (detailField) {
                output.push(`     ${colors.dim}${detailField}${colors.reset}`);
              }
              if (readPart || discoveryPart) {
                output.push(`     ${readPart} ${discoveryPart}`);
              }
              output.push('');
            } else {
              // Full details break out of the table; reset currentFile so a
              // following compact row re-opens a fresh table header.
              if (tableOpen) {
                output.push('');
                tableOpen = false;
              }
              output.push(`**#${obs.id}** ${timeDisplay || '″'} ${icon} **${title}**`);
              if (detailField) {
                output.push('');
                output.push(detailField);
                output.push('');
              }
              const tokenParts: string[] = [];
              if (config.showReadTokens) {
                tokenParts.push(`Read: ~${readTokens}`);
              }
              if (config.showWorkTokens) {
                tokenParts.push(`Work: ${discoveryDisplay}`);
              }
              if (tokenParts.length > 0) {
                output.push(tokenParts.join(', '));
              }
              output.push('');
              currentFile = null;
            }
          } else {
            if (useColors) {
              const timePart = showTime ? `${colors.dim}${time}${colors.reset}` : ' '.repeat(time.length);
              const readPart = (config.showReadTokens && readTokens > 0) ? `${colors.dim}(~${readTokens}t)${colors.reset}` : '';
              const discoveryPart = (config.showWorkTokens && discoveryTokens > 0) ? `${colors.dim}(${workEmoji} ${discoveryTokens.toLocaleString()}t)${colors.reset}` : '';
              output.push(`  ${colors.dim}#${obs.id}${colors.reset} ${timePart} ${icon} ${title} ${readPart} ${discoveryPart}`);
            } else {
              const readCol = config.showReadTokens ? `~${readTokens}` : '';
              const workCol = config.showWorkTokens ? discoveryDisplay : '';
              output.push(`| #${obs.id} | ${timeDisplay || '″'} | ${icon} | ${title} | ${readCol} | ${workCol} |`);
            }
          }
        }
      }
      if (tableOpen) {
        output.push('');
      }
    }
    // Add full summary details for most recent session
    // Only shown when the summary postdates the newest observation (otherwise
    // the timeline already covers it) and has at least one populated field.
    const mostRecentSummary = recentSummaries[0];
    const mostRecentObservation = observations[0];
    const shouldShowSummary = config.showLastSummary &&
      mostRecentSummary &&
      (mostRecentSummary.investigated || mostRecentSummary.learned || mostRecentSummary.completed || mostRecentSummary.next_steps) &&
      (!mostRecentObservation || mostRecentSummary.created_at_epoch > mostRecentObservation.created_at_epoch);
    if (shouldShowSummary) {
      output.push(...renderSummaryField('Investigated', mostRecentSummary.investigated, colors.blue, useColors));
      output.push(...renderSummaryField('Learned', mostRecentSummary.learned, colors.yellow, useColors));
      output.push(...renderSummaryField('Completed', mostRecentSummary.completed, colors.green, useColors));
      output.push(...renderSummaryField('Next Steps', mostRecentSummary.next_steps, colors.magenta, useColors));
    }
    // Previously section
    if (priorAssistantMessage) {
      output.push('');
      output.push('---');
      output.push('');
      if (useColors) {
        output.push(`${colors.bright}${colors.magenta}📋 Previously${colors.reset}`);
        output.push('');
        output.push(`${colors.dim}A: ${priorAssistantMessage}${colors.reset}`);
      } else {
        output.push(`**📋 Previously**`);
        output.push('');
        output.push(`A: ${priorAssistantMessage}`);
      }
      output.push('');
    }
    // Footer
    if (showContextEconomics && totalDiscoveryTokens > 0 && savings > 0) {
      const workTokensK = Math.round(totalDiscoveryTokens / 1000);
      output.push('');
      if (useColors) {
        output.push(`${colors.dim}💰 Access ${workTokensK}k tokens of past research & decisions for just ${totalReadTokens.toLocaleString()}t. Use the mem-search skill to access memories by ID instead of re-reading files.${colors.reset}`);
      } else {
        output.push(`💰 Access ${workTokensK}k tokens of past research & decisions for just ${totalReadTokens.toLocaleString()}t. Use the mem-search skill to access memories by ID instead of re-reading files.`);
      }
    }
  }
  db?.close();
  return output.join('\n').trimEnd();
}
+3 -6
View File
@@ -1,9 +1,6 @@
import { Database as BunDatabase } from 'bun:sqlite';
import { Database } from 'better-sqlite3';
import { DATA_DIR, DB_PATH, ensureDir } from '../../shared/paths.js';
// Type alias for better-sqlite3 compatibility
type Database = BunDatabase;
export interface Migration {
version: number;
up: (db: Database) => void;
@@ -47,7 +44,7 @@ export class DatabaseManager {
// Ensure the data directory exists
ensureDir(DATA_DIR);
this.db = new BunDatabase(DB_PATH, { create: true, readwrite: true });
this.db = new Database(DB_PATH, { create: true, readwrite: true });
// Apply optimized SQLite settings
this.db.run('PRAGMA journal_mode = WAL');
@@ -171,4 +168,4 @@ export async function initializeDatabase(): Promise<Database> {
return await manager.initialize();
}
export { BunDatabase as Database };
export { Database };
+3 -2
View File
@@ -1,4 +1,5 @@
import Database from 'better-sqlite3';
import { TableNameRow } from '../../types/database.js';
import { DATA_DIR, DB_PATH, ensureDir } from '../../shared/paths.js';
import {
ObservationSearchResult,
@@ -48,8 +49,8 @@ export class SessionSearch {
private ensureFTSTables(): void {
try {
// Check if FTS tables already exist
const tables = this.db.prepare("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE '%_fts'").all() as any[];
const hasFTS = tables.some((t: any) => t.name === 'observations_fts' || t.name === 'session_summaries_fts');
const tables = this.db.prepare("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE '%_fts'").all() as TableNameRow[];
const hasFTS = tables.some(t => t.name === 'observations_fts' || t.name === 'session_summaries_fts');
if (hasFTS) {
// Already migrated
+94 -55
View File
@@ -1,6 +1,17 @@
import Database from 'better-sqlite3';
import { DATA_DIR, DB_PATH, ensureDir } from '../../shared/paths.js';
import { logger } from '../../utils/logger.js';
import {
TableColumnInfo,
IndexInfo,
TableNameRow,
SchemaVersion,
SdkSessionRecord,
ObservationRecord,
SessionSummaryRecord,
UserPromptRecord,
LatestPromptResult
} from '../../types/database.js';
/**
* Session data store for SDK sessions, observations, and summaries
@@ -47,7 +58,7 @@ export class SessionStore {
`);
// Get applied migrations
const appliedVersions = this.db.prepare('SELECT version FROM schema_versions ORDER BY version').all() as Array<{version: number}>;
const appliedVersions = this.db.prepare('SELECT version FROM schema_versions ORDER BY version').all() as SchemaVersion[];
const maxApplied = appliedVersions.length > 0 ? Math.max(...appliedVersions.map(v => v.version)) : 0;
// Only run migration004 if no migrations have been applied
@@ -131,12 +142,12 @@ export class SessionStore {
private ensureWorkerPortColumn(): void {
try {
// Check if migration already applied
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(5) as {version: number} | undefined;
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(5) as SchemaVersion | undefined;
if (applied) return;
// Check if column exists
const tableInfo = this.db.pragma('table_info(sdk_sessions)');
const hasWorkerPort = (tableInfo as any[]).some((col: any) => col.name === 'worker_port');
const tableInfo = this.db.pragma('table_info(sdk_sessions)') as TableColumnInfo[];
const hasWorkerPort = tableInfo.some(col => col.name === 'worker_port');
if (!hasWorkerPort) {
this.db.exec('ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER');
@@ -156,12 +167,12 @@ export class SessionStore {
private ensurePromptTrackingColumns(): void {
try {
// Check if migration already applied
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(6) as {version: number} | undefined;
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(6) as SchemaVersion | undefined;
if (applied) return;
// Check sdk_sessions for prompt_counter
const sessionsInfo = this.db.pragma('table_info(sdk_sessions)');
const hasPromptCounter = (sessionsInfo as any[]).some((col: any) => col.name === 'prompt_counter');
const sessionsInfo = this.db.pragma('table_info(sdk_sessions)') as TableColumnInfo[];
const hasPromptCounter = sessionsInfo.some(col => col.name === 'prompt_counter');
if (!hasPromptCounter) {
this.db.exec('ALTER TABLE sdk_sessions ADD COLUMN prompt_counter INTEGER DEFAULT 0');
@@ -169,8 +180,8 @@ export class SessionStore {
}
// Check observations for prompt_number
const observationsInfo = this.db.pragma('table_info(observations)');
const obsHasPromptNumber = (observationsInfo as any[]).some((col: any) => col.name === 'prompt_number');
const observationsInfo = this.db.pragma('table_info(observations)') as TableColumnInfo[];
const obsHasPromptNumber = observationsInfo.some(col => col.name === 'prompt_number');
if (!obsHasPromptNumber) {
this.db.exec('ALTER TABLE observations ADD COLUMN prompt_number INTEGER');
@@ -178,8 +189,8 @@ export class SessionStore {
}
// Check session_summaries for prompt_number
const summariesInfo = this.db.pragma('table_info(session_summaries)');
const sumHasPromptNumber = (summariesInfo as any[]).some((col: any) => col.name === 'prompt_number');
const summariesInfo = this.db.pragma('table_info(session_summaries)') as TableColumnInfo[];
const sumHasPromptNumber = summariesInfo.some(col => col.name === 'prompt_number');
if (!sumHasPromptNumber) {
this.db.exec('ALTER TABLE session_summaries ADD COLUMN prompt_number INTEGER');
@@ -199,12 +210,12 @@ export class SessionStore {
private removeSessionSummariesUniqueConstraint(): void {
try {
// Check if migration already applied
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(7) as {version: number} | undefined;
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(7) as SchemaVersion | undefined;
if (applied) return;
// Check if UNIQUE constraint exists
const summariesIndexes = this.db.pragma('index_list(session_summaries)');
const hasUniqueConstraint = (summariesIndexes as any[]).some((idx: any) => idx.unique === 1);
const summariesIndexes = this.db.pragma('index_list(session_summaries)') as IndexInfo[];
const hasUniqueConstraint = summariesIndexes.some(idx => idx.unique === 1);
if (!hasUniqueConstraint) {
// Already migrated (no constraint exists)
@@ -284,12 +295,12 @@ export class SessionStore {
private addObservationHierarchicalFields(): void {
try {
// Check if migration already applied
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(8) as {version: number} | undefined;
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(8) as SchemaVersion | undefined;
if (applied) return;
// Check if new fields already exist
const tableInfo = this.db.pragma('table_info(observations)');
const hasTitle = (tableInfo as any[]).some((col: any) => col.name === 'title');
const tableInfo = this.db.pragma('table_info(observations)') as TableColumnInfo[];
const hasTitle = tableInfo.some(col => col.name === 'title');
if (hasTitle) {
// Already migrated
@@ -326,12 +337,12 @@ export class SessionStore {
private makeObservationsTextNullable(): void {
try {
// Check if migration already applied
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(9) as {version: number} | undefined;
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(9) as SchemaVersion | undefined;
if (applied) return;
// Check if text column is already nullable
const tableInfo = this.db.pragma('table_info(observations)');
const textColumn = (tableInfo as any[]).find((col: any) => col.name === 'text');
const tableInfo = this.db.pragma('table_info(observations)') as TableColumnInfo[];
const textColumn = tableInfo.find(col => col.name === 'text');
if (!textColumn || textColumn.notnull === 0) {
// Already migrated or text column doesn't exist
@@ -413,12 +424,12 @@ export class SessionStore {
private createUserPromptsTable(): void {
try {
// Check if migration already applied
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(10) as {version: number} | undefined;
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(10) as SchemaVersion | undefined;
if (applied) return;
// Check if table already exists
const tableInfo = this.db.pragma('table_info(user_prompts)');
if ((tableInfo as any[]).length > 0) {
const tableInfo = this.db.pragma('table_info(user_prompts)') as TableColumnInfo[];
if (tableInfo.length > 0) {
// Already migrated
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(10, new Date().toISOString());
return;
@@ -502,12 +513,12 @@ export class SessionStore {
private ensureDiscoveryTokensColumn(): void {
try {
// Check if migration already applied to avoid unnecessary re-runs
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(11) as {version: number} | undefined;
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(11) as SchemaVersion | undefined;
if (applied) return;
// Check if discovery_tokens column exists in observations table
const observationsInfo = this.db.pragma('table_info(observations)');
const obsHasDiscoveryTokens = (observationsInfo as any[]).some((col: any) => col.name === 'discovery_tokens');
const observationsInfo = this.db.pragma('table_info(observations)') as TableColumnInfo[];
const obsHasDiscoveryTokens = observationsInfo.some(col => col.name === 'discovery_tokens');
if (!obsHasDiscoveryTokens) {
this.db.exec('ALTER TABLE observations ADD COLUMN discovery_tokens INTEGER DEFAULT 0');
@@ -515,8 +526,8 @@ export class SessionStore {
}
// Check if discovery_tokens column exists in session_summaries table
const summariesInfo = this.db.pragma('table_info(session_summaries)');
const sumHasDiscoveryTokens = (summariesInfo as any[]).some((col: any) => col.name === 'discovery_tokens');
const summariesInfo = this.db.pragma('table_info(session_summaries)') as TableColumnInfo[];
const sumHasDiscoveryTokens = summariesInfo.some(col => col.name === 'discovery_tokens');
if (!sumHasDiscoveryTokens) {
this.db.exec('ALTER TABLE session_summaries ADD COLUMN discovery_tokens INTEGER DEFAULT 0');
@@ -556,7 +567,7 @@ export class SessionStore {
LIMIT ?
`);
return stmt.all(project, limit) as any[];
return stmt.all(project, limit);
}
/**
@@ -581,7 +592,7 @@ export class SessionStore {
LIMIT ?
`);
return stmt.all(project, limit) as any[];
return stmt.all(project, limit);
}
/**
@@ -601,7 +612,7 @@ export class SessionStore {
LIMIT ?
`);
return stmt.all(project, limit) as any[];
return stmt.all(project, limit);
}
/**
@@ -625,7 +636,7 @@ export class SessionStore {
LIMIT ?
`);
return stmt.all(limit) as any[];
return stmt.all(limit);
}
/**
@@ -655,7 +666,7 @@ export class SessionStore {
LIMIT ?
`);
return stmt.all(limit) as any[];
return stmt.all(limit);
}
/**
@@ -685,7 +696,7 @@ export class SessionStore {
LIMIT ?
`);
return stmt.all(limit) as any[];
return stmt.all(limit);
}
/**
@@ -703,6 +714,34 @@ export class SessionStore {
return rows.map(row => row.project);
}
/**
 * Get latest user prompt with session info for a Claude session
 * Used for syncing prompts to Chroma during session initialization
 *
 * Joins user_prompts to sdk_sessions on claude_session_id so the caller also
 * receives the sdk_session_id and project of the owning session.
 *
 * @param claudeSessionId - Claude session identifier (user_prompts.claude_session_id)
 * @returns The most recent prompt row (ordered by created_at_epoch DESC) with
 *          sdk_session_id and project attached, or undefined when the session
 *          has no recorded prompts
 */
getLatestUserPrompt(claudeSessionId: string): {
  id: number;
  claude_session_id: string;
  sdk_session_id: string;
  project: string;
  prompt_number: number;
  prompt_text: string;
  created_at_epoch: number;
} | undefined {
  const stmt = this.db.prepare(`
    SELECT
      up.*,
      s.sdk_session_id,
      s.project
    FROM user_prompts up
    JOIN sdk_sessions s ON up.claude_session_id = s.claude_session_id
    WHERE up.claude_session_id = ?
    ORDER BY up.created_at_epoch DESC
    LIMIT 1
  `);
  // stmt.get returns undefined (not null) when no row matches — callers
  // are expected to handle the undefined case.
  return stmt.get(claudeSessionId) as LatestPromptResult | undefined;
}
/**
* Get recent sessions with their status and summary info
*/
@@ -732,7 +771,7 @@ export class SessionStore {
ORDER BY started_at_epoch ASC
`);
return stmt.all(project, limit) as any[];
return stmt.all(project, limit);
}
/**
@@ -751,20 +790,20 @@ export class SessionStore {
ORDER BY created_at_epoch ASC
`);
return stmt.all(sdkSessionId) as any[];
return stmt.all(sdkSessionId);
}
/**
* Get a single observation by ID
*/
getObservationById(id: number): any | null {
getObservationById(id: number): ObservationRecord | null {
const stmt = this.db.prepare(`
SELECT *
FROM observations
WHERE id = ?
`);
return stmt.get(id) as any || null;
return stmt.get(id) as ObservationRecord | undefined || null;
}
/**
@@ -773,7 +812,7 @@ export class SessionStore {
getObservationsByIds(
ids: number[],
options: { orderBy?: 'date_desc' | 'date_asc'; limit?: number } = {}
): any[] {
): ObservationRecord[] {
if (ids.length === 0) return [];
const { orderBy = 'date_desc', limit } = options;
@@ -791,7 +830,7 @@ export class SessionStore {
${limitClause}
`);
return stmt.all(...ids) as any[];
return stmt.all(...ids) as ObservationRecord[];
}
/**
@@ -819,7 +858,7 @@ export class SessionStore {
LIMIT 1
`);
return stmt.get(sdkSessionId) as any || null;
return stmt.get(sdkSessionId) || null;
}
/**
@@ -892,7 +931,7 @@ export class SessionStore {
LIMIT 1
`);
return stmt.get(id) as any || null;
return stmt.get(id) || null;
}
/**
@@ -911,7 +950,7 @@ export class SessionStore {
LIMIT 1
`);
return stmt.get(claudeSessionId) as any || null;
return stmt.get(claudeSessionId) || null;
}
/**
@@ -925,7 +964,7 @@ export class SessionStore {
LIMIT 1
`);
return stmt.get(claudeSessionId) as any || null;
return stmt.get(claudeSessionId) || null;
}
/**
@@ -1315,7 +1354,7 @@ export class SessionStore {
getSessionSummariesByIds(
ids: number[],
options: { orderBy?: 'date_desc' | 'date_asc'; limit?: number } = {}
): any[] {
): SessionSummaryRecord[] {
if (ids.length === 0) return [];
const { orderBy = 'date_desc', limit } = options;
@@ -1330,7 +1369,7 @@ export class SessionStore {
${limitClause}
`);
return stmt.all(...ids) as any[];
return stmt.all(...ids) as SessionSummaryRecord[];
}
/**
@@ -1340,7 +1379,7 @@ export class SessionStore {
getUserPromptsByIds(
ids: number[],
options: { orderBy?: 'date_desc' | 'date_asc'; limit?: number } = {}
): any[] {
): UserPromptRecord[] {
if (ids.length === 0) return [];
const { orderBy = 'date_desc', limit } = options;
@@ -1360,7 +1399,7 @@ export class SessionStore {
${limitClause}
`);
return stmt.all(...ids) as any[];
return stmt.all(...ids) as UserPromptRecord[];
}
/**
@@ -1423,8 +1462,8 @@ export class SessionStore {
`;
try {
const beforeRecords = this.db.prepare(beforeQuery).all(anchorObservationId, ...projectParams, depthBefore + 1) as any[];
const afterRecords = this.db.prepare(afterQuery).all(anchorObservationId, ...projectParams, depthAfter + 1) as any[];
const beforeRecords = this.db.prepare(beforeQuery).all(anchorObservationId, ...projectParams, depthBefore + 1) as Array<{id: number; created_at_epoch: number}>;
const afterRecords = this.db.prepare(afterQuery).all(anchorObservationId, ...projectParams, depthAfter + 1) as Array<{id: number; created_at_epoch: number}>;
// Get the earliest and latest timestamps from boundary observations
if (beforeRecords.length === 0 && afterRecords.length === 0) {
@@ -1456,8 +1495,8 @@ export class SessionStore {
`;
try {
const beforeRecords = this.db.prepare(beforeQuery).all(anchorEpoch, ...projectParams, depthBefore) as any[];
const afterRecords = this.db.prepare(afterQuery).all(anchorEpoch, ...projectParams, depthAfter + 1) as any[];
const beforeRecords = this.db.prepare(beforeQuery).all(anchorEpoch, ...projectParams, depthBefore) as Array<{created_at_epoch: number}>;
const afterRecords = this.db.prepare(afterQuery).all(anchorEpoch, ...projectParams, depthAfter + 1) as Array<{created_at_epoch: number}>;
if (beforeRecords.length === 0 && afterRecords.length === 0) {
return { observations: [], sessions: [], prompts: [] };
@@ -1495,9 +1534,9 @@ export class SessionStore {
`;
try {
const observations = this.db.prepare(obsQuery).all(startEpoch, endEpoch, ...projectParams) as any[];
const sessions = this.db.prepare(sessQuery).all(startEpoch, endEpoch, ...projectParams) as any[];
const prompts = this.db.prepare(promptQuery).all(startEpoch, endEpoch, ...projectParams) as any[];
const observations = this.db.prepare(obsQuery).all(startEpoch, endEpoch, ...projectParams) as ObservationRecord[];
const sessions = this.db.prepare(sessQuery).all(startEpoch, endEpoch, ...projectParams) as SessionSummaryRecord[];
const prompts = this.db.prepare(promptQuery).all(startEpoch, endEpoch, ...projectParams) as UserPromptRecord[];
return {
observations,
+1 -1
View File
@@ -1,4 +1,4 @@
import { Database } from 'bun:sqlite';
import { Database } from 'better-sqlite3';
import { Migration } from './Database.js';
/**
+73
View File
@@ -733,6 +733,79 @@ export class ChromaSync {
}
}
/**
 * Query Chroma collection for semantic search
 * Used by SearchManager for vector-based search
 *
 * @param query - Natural-language query text
 * @param limit - Maximum number of Chroma documents to retrieve (n_results)
 * @param whereFilter - Optional Chroma metadata filter
 * @returns Deduplicated SQLite row ids extracted from document ids, plus the
 *          raw per-document distances and metadatas from Chroma
 * @throws Error when the Chroma client failed to initialize
 */
async queryChroma(
  query: string,
  limit: number,
  whereFilter?: Record<string, any>
): Promise<{ ids: number[]; distances: number[]; metadatas: any[] }> {
  await this.ensureConnection();
  if (!this.client) {
    throw new Error('Chroma client not initialized');
  }
  // The Chroma MCP tool expects the where filter serialized as a JSON string.
  const whereStringified = whereFilter ? JSON.stringify(whereFilter) : undefined;
  const result = await this.client.callTool({
    name: 'chroma_query_documents',
    arguments: {
      collection_name: this.collectionName,
      query_texts: [query],
      n_results: limit,
      include: ['documents', 'metadatas', 'distances'],
      where: whereStringified
    }
  });
  const resultText = result.content[0]?.text || '';
  // Parse JSON response; a malformed payload logs and yields an empty result
  // rather than throwing, so search degrades gracefully.
  let parsed: any;
  try {
    parsed = JSON.parse(resultText);
  } catch (error) {
    logger.error('CHROMA_SYNC', 'Failed to parse Chroma response', { project: this.project }, error as Error);
    return { ids: [], distances: [], metadatas: [] };
  }
  // Extract unique sqlite_ids from document IDs (supports three formats):
  // - obs_{id}_narrative, obs_{id}_fact_0, etc (observations)
  // - summary_{id}_request, summary_{id}_learned, etc (session summaries)
  // - prompt_{id} (user prompts)
  // A Set gives O(1) dedup instead of the previous O(n^2) Array.includes scan;
  // the || chain preserves the original obs > summary > prompt match priority.
  const seen = new Set<number>();
  const docIds: string[] = parsed.ids?.[0] || [];
  for (const docId of docIds) {
    const match =
      docId.match(/obs_(\d+)_/) ||
      docId.match(/summary_(\d+)_/) ||
      docId.match(/prompt_(\d+)/);
    if (match) {
      seen.add(parseInt(match[1], 10));
    }
  }
  // NOTE(review): distances/metadatas stay per-document while ids are
  // deduplicated, so the arrays can differ in length — confirm callers do
  // not zip them together by index.
  const distances = parsed.distances?.[0] || [];
  const metadatas = parsed.metadatas?.[0] || [];
  return { ids: [...seen], distances, metadatas };
}
/**
* Close the Chroma client connection
*/
File diff suppressed because it is too large Load Diff
+233
View File
@@ -0,0 +1,233 @@
/**
* FormattingService - Handles all formatting logic for search results
* Extracted from mcp-server.ts to follow worker service organization pattern
*/
import { ObservationSearchResult, SessionSummarySearchResult, UserPromptSearchResult } from '../sqlite/types.js';
export type FormatType = 'index' | 'full';
export class FormattingService {
  /**
   * Best-effort parse of a JSON-encoded string array.
   * Returns [] for missing, malformed, or non-array input — formatters must
   * never throw on bad stored metadata, only omit it.
   */
  private parseJsonList(json: string | null | undefined): string[] {
    if (!json) return [];
    try {
      const parsed = JSON.parse(json);
      return Array.isArray(parsed) ? parsed : [];
    } catch {
      return [];
    }
  }

  /**
   * Merge file paths from several JSON-encoded arrays, deduplicated,
   * preserving first-seen order. Centralizes the files_read/files_modified/
   * files_edited merging previously duplicated across formatters.
   */
  private collectFiles(...jsonLists: Array<string | null | undefined>): string[] {
    const files = jsonLists.flatMap(json => this.parseJsonList(json));
    return [...new Set(files)];
  }

  /**
   * Format search tips footer
   */
  formatSearchTips(): string {
    return `\n---
💡 Search Strategy:
ALWAYS search with index format FIRST to get an overview and identify relevant results.
This is critical for token efficiency - index format uses ~10x fewer tokens than full format.
Search workflow:
1. Initial search: Use default (index) format to see titles, dates, and sources
2. Review results: Identify which items are most relevant to your needs
3. Deep dive: Only then use format: "full" on specific items of interest
4. Narrow down: Use filters (type, dateStart/dateEnd, concepts, files) to refine results
Other tips:
To search by concept: Use find_by_concept tool
To browse by type: Use find_by_type with ["decision", "feature", etc.]
To sort by date: Use orderBy: "date_desc" or "date_asc"`;
  }

  /**
   * Format observation as index entry (title, date, ID only)
   *
   * @param obs - Observation search result row
   * @param index - Zero-based position in the result list (rendered 1-based)
   */
  formatObservationIndex(obs: ObservationSearchResult, index: number): string {
    const title = obs.title || `Observation #${obs.id}`;
    const date = new Date(obs.created_at_epoch).toLocaleString();
    const type = obs.type ? `[${obs.type}]` : '';
    return `${index + 1}. ${type} ${title}
Date: ${date}
Source: claude-mem://observation/${obs.id}`;
  }

  /**
   * Format session summary as index entry (title, date, ID only)
   *
   * Falls back to a truncated sdk_session_id when the session has no request title.
   */
  formatSessionIndex(session: SessionSummarySearchResult, index: number): string {
    const title = session.request || `Session ${session.sdk_session_id?.substring(0, 8) || 'unknown'}`;
    const date = new Date(session.created_at_epoch).toLocaleString();
    return `${index + 1}. ${title}
Date: ${date}
Source: claude-mem://session/${session.sdk_session_id}`;
  }

  /**
   * Format user prompt as index entry (full text - don't truncate context!)
   */
  formatUserPromptIndex(prompt: UserPromptSearchResult, index: number): string {
    const date = new Date(prompt.created_at_epoch).toLocaleString();
    return `${index + 1}. "${prompt.prompt_text}"
Date: ${date} | Prompt #${prompt.prompt_number}
Source: claude-mem://user-prompt/${prompt.id}`;
  }

  /**
   * Format observation as text content with metadata
   *
   * Emits a markdown document: title, source URI, optional subtitle/narrative/text,
   * then a metadata line (type, facts, concepts, files) and a date footer.
   */
  formatObservationResult(obs: ObservationSearchResult): string {
    const title = obs.title || `Observation #${obs.id}`;
    // Build content from available fields
    const contentParts: string[] = [];
    contentParts.push(`## ${title}`);
    contentParts.push(`*Source: claude-mem://observation/${obs.id}*`);
    contentParts.push('');
    if (obs.subtitle) {
      contentParts.push(`**${obs.subtitle}**`);
      contentParts.push('');
    }
    if (obs.narrative) {
      contentParts.push(obs.narrative);
      contentParts.push('');
    }
    if (obs.text) {
      contentParts.push(obs.text);
      contentParts.push('');
    }
    // Add metadata — each piece is best-effort: invalid JSON is simply omitted
    const metadata: string[] = [];
    metadata.push(`Type: ${obs.type}`);
    const facts = this.parseJsonList(obs.facts);
    if (facts.length > 0) {
      metadata.push(`Facts: ${facts.join('; ')}`);
    }
    const concepts = this.parseJsonList(obs.concepts);
    if (concepts.length > 0) {
      metadata.push(`Concepts: ${concepts.join(', ')}`);
    }
    const files = this.collectFiles(obs.files_read, obs.files_modified);
    if (files.length > 0) {
      metadata.push(`Files: ${files.join(', ')}`);
    }
    if (metadata.length > 0) {
      contentParts.push('---');
      contentParts.push(metadata.join(' | '));
    }
    // Add date
    const date = new Date(obs.created_at_epoch).toLocaleString();
    contentParts.push('');
    contentParts.push(`---`);
    contentParts.push(`Date: ${date}`);
    return contentParts.join('\n');
  }

  /**
   * Format session summary as text content with metadata
   *
   * Emits a markdown document: title, source URI, the summary sections that are
   * present (completed/learned/investigated/next steps/notes), then a metadata
   * line with merged files and the session date.
   */
  formatSessionResult(session: SessionSummarySearchResult): string {
    const title = session.request || `Session ${session.sdk_session_id?.substring(0, 8) || 'unknown'}`;
    // Build content from available fields
    const contentParts: string[] = [];
    contentParts.push(`## ${title}`);
    contentParts.push(`*Source: claude-mem://session/${session.sdk_session_id}*`);
    contentParts.push('');
    if (session.completed) {
      contentParts.push(`**Completed:** ${session.completed}`);
      contentParts.push('');
    }
    if (session.learned) {
      contentParts.push(`**Learned:** ${session.learned}`);
      contentParts.push('');
    }
    if (session.investigated) {
      contentParts.push(`**Investigated:** ${session.investigated}`);
      contentParts.push('');
    }
    if (session.next_steps) {
      contentParts.push(`**Next Steps:** ${session.next_steps}`);
      contentParts.push('');
    }
    if (session.notes) {
      contentParts.push(`**Notes:** ${session.notes}`);
      contentParts.push('');
    }
    // Add metadata; the Date entry is always present, so the metadata
    // separator below is always emitted for sessions
    const metadata: string[] = [];
    const files = this.collectFiles(session.files_read, session.files_edited);
    if (files.length > 0) {
      metadata.push(`Files: ${files.join(', ')}`);
    }
    const date = new Date(session.created_at_epoch).toLocaleDateString();
    metadata.push(`Date: ${date}`);
    if (metadata.length > 0) {
      contentParts.push('---');
      contentParts.push(metadata.join(' | '));
    }
    return contentParts.join('\n');
  }

  /**
   * Format user prompt as text content with metadata
   */
  formatUserPromptResult(prompt: UserPromptSearchResult): string {
    const contentParts: string[] = [];
    contentParts.push(`## User Prompt #${prompt.prompt_number}`);
    contentParts.push(`*Source: claude-mem://user-prompt/${prompt.id}*`);
    contentParts.push('');
    contentParts.push(prompt.prompt_text);
    contentParts.push('');
    contentParts.push('---');
    const date = new Date(prompt.created_at_epoch).toLocaleString();
    contentParts.push(`Date: ${date}`);
    return contentParts.join('\n');
  }
}
+155
View File
@@ -0,0 +1,155 @@
# Worker Service Architecture
## Overview
The Worker Service is an Express HTTP server that handles all claude-mem operations. It runs on port 37777 (configurable via `CLAUDE_MEM_WORKER_PORT`) and is managed by PM2.
## Request Flow
```
Hook (plugin/scripts/*-hook.js)
→ HTTP Request to Worker (localhost:37777)
→ Route Handler (http/routes/*.ts)
→ MCP Server Tool (for search) OR Domain Service (for session/data)
→ Database (SQLite3 + Chroma vector DB)
```
## Directory Structure
```
src/services/worker/
├── README.md # This file
├── WorkerService.ts # Slim orchestrator (~150 lines)
├── http/ # HTTP layer
│ ├── middleware.ts # Shared middleware (logging, CORS, etc.)
│ └── routes/ # Route handlers organized by domain
│ ├── SessionRoutes.ts # Session lifecycle (init, observations, summarize, complete)
│ ├── DataRoutes.ts # Data retrieval (get observations, summaries, prompts, stats)
│ ├── SearchRoutes.ts # Search/MCP proxy (all search endpoints)
│ ├── SettingsRoutes.ts # Settings, MCP toggle, branch switching
│ └── ViewerRoutes.ts # Health check, viewer UI, SSE stream
└── domain/ # Business logic (existing services, NO CHANGES in Phase 1)
├── DatabaseManager.ts # SQLite connection management
├── SessionManager.ts # Session state tracking
├── SDKAgent.ts # Claude Agent SDK for observations/summaries
├── SSEBroadcaster.ts # Server-Sent Events for real-time updates
├── PaginationHelper.ts # Query pagination utilities
├── SettingsManager.ts # User settings CRUD
└── BranchManager.ts # Git branch operations
```
## Route Organization
### ViewerRoutes.ts
- `GET /health` - Health check endpoint
- `GET /` - Serve viewer UI (React app)
- `GET /stream` - SSE stream for real-time updates
### SessionRoutes.ts
Session lifecycle operations (use domain services directly):
- `POST /sessions/init` - Initialize new session
- `POST /sessions/:sessionId/observations` - Add tool usage observations
- `POST /sessions/:sessionId/summarize` - Trigger session summary
- `GET /sessions/:sessionId/status` - Get session status
- `DELETE /sessions/:sessionId` - Delete session
- `POST /sessions/:sessionId/complete` - Mark session complete
- `POST /sessions/claude-id/:claudeId/observations` - Add observations by claude_id
- `POST /sessions/claude-id/:claudeId/summarize` - Summarize by claude_id
- `POST /sessions/claude-id/:claudeId/complete` - Complete by claude_id
### DataRoutes.ts
Data retrieval operations (use domain services directly):
- `GET /observations` - List observations (paginated)
- `GET /summaries` - List session summaries (paginated)
- `GET /prompts` - List user prompts (paginated)
- `GET /observations/:id` - Get observation by ID
- `GET /sessions/:sessionId` - Get session by ID
- `GET /prompts/:id` - Get prompt by ID
- `GET /stats` - Get database statistics
- `GET /projects` - List all projects
- `GET /processing` - Get processing status
- `POST /processing` - Set processing status
### SearchRoutes.ts
All search operations (proxy to MCP server):
- `GET /search` - Unified search (observations + sessions + prompts)
- `GET /timeline` - Unified timeline context
- `GET /decisions` - Decision-type observations
- `GET /changes` - Change-related observations
- `GET /how-it-works` - How-it-works explanations
- `GET /search/observations` - Search observations
- `GET /search/sessions` - Search sessions
- `GET /search/prompts` - Search prompts
- `GET /search/by-concept` - Find by concept tag
- `GET /search/by-file` - Find by file path
- `GET /search/by-type` - Find by observation type
- `GET /search/recent-context` - Get recent context
- `GET /search/context-timeline` - Get context timeline
- `GET /context/preview` - Preview context
- `GET /context/inject` - Inject context
- `GET /search/timeline-by-query` - Timeline by search query
- `GET /search/help` - Search help
### SettingsRoutes.ts
Settings and configuration (use domain services directly):
- `GET /settings` - Get user settings
- `POST /settings` - Update user settings
- `GET /mcp/status` - Get MCP server status
- `POST /mcp/toggle` - Toggle MCP server on/off
- `GET /branch/status` - Get git branch info
- `POST /branch/switch` - Switch git branch
- `POST /branch/update` - Pull branch updates
## Current State (Phase 1)
**Phase 1** is a pure code reorganization with ZERO functional changes:
- Extract route handlers from WorkerService.ts monolith
- Organize into logical route classes
- Keep all existing behavior identical
**MCP vs Direct DB Split** (inherited, not changed in Phase 1):
- Search operations → MCP server (claude-mem-search)
- Session/data operations → Direct DB access via domain services
## Future Phase 2
Phase 2 will unify the architecture:
1. Expand MCP server to handle ALL operations (not just search)
2. Convert all route handlers to proxy through MCP
3. Move database logic from domain services into MCP tools
4. Result: Worker becomes pure HTTP → MCP proxy for maximum portability
This separation allows the worker to be deployed anywhere (as a CLI tool, cloud service, etc.) without carrying database dependencies.
## Adding New Endpoints
1. Choose the appropriate route file based on the endpoint's purpose
2. Add the route handler method to the class
3. Register the route in the `setupRoutes()` method
4. Import any needed domain services in the constructor
5. Follow the existing patterns for error handling and logging
Example:
```typescript
// In DataRoutes.ts
private async handleGetFoo(req: Request, res: Response): Promise<void> {
try {
const result = await this.dbManager.getFoo();
res.json(result);
} catch (error) {
logger.failure('WORKER', 'Get foo failed', {}, error as Error);
res.status(500).json({ error: (error as Error).message });
}
}
// Register in setupRoutes()
app.get('/foo', this.handleGetFoo.bind(this));
```
## Key Design Principles
1. **Progressive Disclosure**: Navigate from high-level (WorkerService.ts) to specific routes to implementation details
2. **Single Responsibility**: Each route class handles one domain area
3. **Dependency Injection**: Route classes receive only the services they need
4. **Consistent Error Handling**: All handlers use try/catch with logger.failure()
5. **Bound Methods**: All route handlers use `.bind(this)` to preserve context
+4 -13
View File
@@ -11,13 +11,13 @@
import { execSync } from 'child_process';
import { homedir } from 'os';
import path from 'path';
import { existsSync, readFileSync } from 'fs';
import { DatabaseManager } from './DatabaseManager.js';
import { SessionManager } from './SessionManager.js';
import { logger } from '../../utils/logger.js';
import { silentDebug } from '../../utils/silent-debug.js';
import { parseObservations, parseSummary } from '../../sdk/parser.js';
import { buildInitPrompt, buildObservationPrompt, buildSummaryPrompt, buildContinuationPrompt } from '../../sdk/prompts.js';
import { SettingsDefaultsManager } from './settings/SettingsDefaultsManager.js';
import type { ActiveSession, SDKUserMessage, PendingMessage } from '../worker-types.js';
// Import Agent SDK (assumes it's installed)
@@ -425,17 +425,8 @@ export class SDKAgent {
* Get model ID from settings or environment
*/
private getModelId(): string {
try {
const settingsPath = path.join(homedir(), '.claude-mem', 'settings.json');
if (existsSync(settingsPath)) {
const settings = JSON.parse(readFileSync(settingsPath, 'utf-8'));
const modelId = settings.env?.CLAUDE_MEM_MODEL;
if (modelId) return modelId;
}
} catch {
// Fall through to env var or default
}
return process.env.CLAUDE_MEM_MODEL || 'claude-haiku-4-5';
const settingsPath = path.join(homedir(), '.claude-mem', 'settings.json');
const settings = SettingsDefaultsManager.loadFromFile(settingsPath);
return settings.CLAUDE_MEM_MODEL;
}
}
File diff suppressed because it is too large Load Diff
+1 -1
View File
@@ -37,7 +37,7 @@ export class SettingsManager {
for (const row of rows) {
const key = row.key as keyof ViewerSettings;
if (key in settings) {
(settings as any)[key] = JSON.parse(row.value);
settings[key] = JSON.parse(row.value) as ViewerSettings[typeof key];
}
}
+270
View File
@@ -0,0 +1,270 @@
/**
* TimelineService - Handles timeline building, filtering, and formatting
* Extracted from mcp-server.ts to follow worker service organization pattern
*/
import { ObservationSearchResult, SessionSummarySearchResult, UserPromptSearchResult } from '../sqlite/types.js';
/**
 * Timeline item for unified chronological display
 */
export interface TimelineItem {
  type: 'observation' | 'session' | 'prompt'; // discriminator for data's concrete shape
  data: ObservationSearchResult | SessionSummarySearchResult | UserPromptSearchResult;
  epoch: number; // created_at_epoch of the underlying record; used for sorting
}
/**
 * Raw query results grouped by record kind — the input to TimelineService.buildTimeline
 */
export interface TimelineData {
  observations: ObservationSearchResult[];
  sessions: SessionSummarySearchResult[];
  prompts: UserPromptSearchResult[];
}
export class TimelineService {
/**
 * Merge observations, session summaries, and user prompts into a single
 * list of timeline items sorted by ascending epoch.
 */
buildTimeline(data: TimelineData): TimelineItem[] {
  const observationItems: TimelineItem[] = data.observations.map(
    (obs) => ({ type: 'observation' as const, data: obs, epoch: obs.created_at_epoch })
  );
  const sessionItems: TimelineItem[] = data.sessions.map(
    (sess) => ({ type: 'session' as const, data: sess, epoch: sess.created_at_epoch })
  );
  const promptItems: TimelineItem[] = data.prompts.map(
    (prompt) => ({ type: 'prompt' as const, data: prompt, epoch: prompt.created_at_epoch })
  );
  // Stable sort keeps observation < session < prompt ordering for equal epochs,
  // matching the original spread-then-sort construction.
  const merged = observationItems.concat(sessionItems, promptItems);
  merged.sort((first, second) => first.epoch - second.epoch);
  return merged;
}
/**
 * Filter timeline items to respect depth_before/depth_after window around anchor
 *
 * @param items - Timeline items sorted by ascending epoch (from buildTimeline)
 * @param anchorId - Observation id (number), session anchor ("S<id>" string),
 *                   or any other string meaning "anchor by timestamp"
 * @param anchorEpoch - Epoch used only when anchoring by timestamp
 * @param depth_before - Number of items to keep before the anchor
 * @param depth_after - Number of items to keep after the anchor (anchor itself
 *                      is included in the window)
 * @returns The windowed slice; returns items unchanged when no anchor matches
 */
filterByDepth(
  items: TimelineItem[],
  anchorId: number | string,
  anchorEpoch: number,
  depth_before: number,
  depth_after: number
): TimelineItem[] {
  if (items.length === 0) return items;
  let anchorIndex = -1;
  if (typeof anchorId === 'number') {
    // Numeric anchor: match an observation by its SQLite id
    anchorIndex = items.findIndex(item => item.type === 'observation' && (item.data as ObservationSearchResult).id === anchorId);
  } else if (typeof anchorId === 'string' && anchorId.startsWith('S')) {
    // "S<id>" anchor: match a session summary by its numeric id
    const sessionNum = parseInt(anchorId.slice(1), 10);
    anchorIndex = items.findIndex(item => item.type === 'session' && (item.data as SessionSummarySearchResult).id === sessionNum);
  } else {
    // Timestamp anchor - find closest item at or after anchorEpoch,
    // falling back to the last item when every item is earlier
    anchorIndex = items.findIndex(item => item.epoch >= anchorEpoch);
    if (anchorIndex === -1) anchorIndex = items.length - 1;
  }
  // Id-based anchor not found: return the full timeline rather than nothing
  if (anchorIndex === -1) return items;
  const startIndex = Math.max(0, anchorIndex - depth_before);
  const endIndex = Math.min(items.length, anchorIndex + depth_after + 1);
  return items.slice(startIndex, endIndex);
}
/**
* Format timeline items as markdown with grouped days and tables
*/
formatTimeline(
items: TimelineItem[],
anchorId: number | string | null,
query?: string,
depth_before?: number,
depth_after?: number
): string {
if (items.length === 0) {
return query
? `Found observation matching "${query}", but no timeline context available.`
: 'No timeline items found';
}
const lines: string[] = [];
// Header
if (query && anchorId) {
const anchorObs = items.find(item => item.type === 'observation' && (item.data as ObservationSearchResult).id === anchorId);
const anchorTitle = anchorObs ? ((anchorObs.data as ObservationSearchResult).title || 'Untitled') : 'Unknown';
lines.push(`# Timeline for query: "${query}"`);
lines.push(`**Anchor:** Observation #${anchorId} - ${anchorTitle}`);
} else if (anchorId) {
lines.push(`# Timeline around anchor: ${anchorId}`);
} else {
lines.push(`# Timeline`);
}
if (depth_before !== undefined && depth_after !== undefined) {
lines.push(`**Window:** ${depth_before} records before → ${depth_after} records after | **Items:** ${items.length}`);
} else {
lines.push(`**Items:** ${items.length}`);
}
lines.push('');
// Legend
lines.push(`**Legend:** 🎯 session-request | 🔴 bugfix | 🟣 feature | 🔄 refactor | ✅ change | 🔵 discovery | 🧠 decision`);
lines.push('');
// Group by day
const dayMap = new Map<string, TimelineItem[]>();
for (const item of items) {
const day = this.formatDate(item.epoch);
if (!dayMap.has(day)) {
dayMap.set(day, []);
}
dayMap.get(day)!.push(item);
}
// Sort days chronologically
const sortedDays = Array.from(dayMap.entries()).sort((a, b) => {
const aDate = new Date(a[0]).getTime();
const bDate = new Date(b[0]).getTime();
return aDate - bDate;
});
// Render each day
for (const [day, dayItems] of sortedDays) {
lines.push(`### ${day}`);
lines.push('');
let currentFile: string | null = null;
let lastTime = '';
let tableOpen = false;
for (const item of dayItems) {
const isAnchor = (
(typeof anchorId === 'number' && item.type === 'observation' && (item.data as ObservationSearchResult).id === anchorId) ||
(typeof anchorId === 'string' && anchorId.startsWith('S') && item.type === 'session' && `S${(item.data as SessionSummarySearchResult).id}` === anchorId)
);
if (item.type === 'session') {
if (tableOpen) {
lines.push('');
tableOpen = false;
currentFile = null;
lastTime = '';
}
const sess = item.data as SessionSummarySearchResult;
const title = sess.request || 'Session summary';
const link = `claude-mem://session-summary/${sess.id}`;
const marker = isAnchor ? ' ← **ANCHOR**' : '';
lines.push(`**🎯 #S${sess.id}** ${title} (${this.formatDateTime(item.epoch)}) [→](${link})${marker}`);
lines.push('');
} else if (item.type === 'prompt') {
if (tableOpen) {
lines.push('');
tableOpen = false;
currentFile = null;
lastTime = '';
}
const prompt = item.data as UserPromptSearchResult;
const truncated = prompt.prompt_text.length > 100 ? prompt.prompt_text.substring(0, 100) + '...' : prompt.prompt_text;
lines.push(`**💬 User Prompt #${prompt.prompt_number}** (${this.formatDateTime(item.epoch)})`);
lines.push(`> ${truncated}`);
lines.push('');
} else if (item.type === 'observation') {
const obs = item.data as ObservationSearchResult;
const file = 'General';
if (file !== currentFile) {
if (tableOpen) {
lines.push('');
}
lines.push(`**${file}**`);
lines.push(`| ID | Time | T | Title | Tokens |`);
lines.push(`|----|------|---|-------|--------|`);
currentFile = file;
tableOpen = true;
lastTime = '';
}
const icon = this.getTypeIcon(obs.type);
const time = this.formatTime(item.epoch);
const title = obs.title || 'Untitled';
const tokens = this.estimateTokens(obs.narrative);
const showTime = time !== lastTime;
const timeDisplay = showTime ? time : '″';
lastTime = time;
const anchorMarker = isAnchor ? ' ← **ANCHOR**' : '';
lines.push(`| #${obs.id} | ${timeDisplay} | ${icon} | ${title}${anchorMarker} | ~${tokens} |`);
}
}
if (tableOpen) {
lines.push('');
}
}
return lines.join('\n');
}
/**
* Get icon for observation type
*/
private getTypeIcon(type: string): string {
switch (type) {
case 'bugfix': return '🔴';
case 'feature': return '🟣';
case 'refactor': return '🔄';
case 'change': return '✅';
case 'discovery': return '🔵';
case 'decision': return '🧠';
default: return '•';
}
}
/**
* Format date for grouping (e.g., "Dec 7, 2025")
*/
private formatDate(epochMs: number): string {
const date = new Date(epochMs);
return date.toLocaleString('en-US', {
month: 'short',
day: 'numeric',
year: 'numeric'
});
}
/**
* Format time (e.g., "6:30 PM")
*/
private formatTime(epochMs: number): string {
const date = new Date(epochMs);
return date.toLocaleString('en-US', {
hour: 'numeric',
minute: '2-digit',
hour12: true
});
}
/**
* Format date and time (e.g., "Dec 7, 6:30 PM")
*/
private formatDateTime(epochMs: number): string {
const date = new Date(epochMs);
return date.toLocaleString('en-US', {
month: 'short',
day: 'numeric',
hour: 'numeric',
minute: '2-digit',
hour12: true
});
}
/**
* Estimate tokens from text length (~4 chars per token)
*/
private estimateTokens(text: string | null): number {
if (!text) return 0;
return Math.ceil(text.length / 4);
}
}
@@ -0,0 +1,96 @@
/**
* Session Event Broadcaster
*
* Provides semantic broadcast methods for session lifecycle events.
* Consolidates SSE broadcasting and processing status updates.
*/
import { SSEBroadcaster } from '../SSEBroadcaster.js';
import type { WorkerService } from '../../worker-service.js';
/**
 * Semantic wrapper around SSE broadcasting for session lifecycle events.
 * Each event is followed by a recomputed processing-status broadcast so
 * connected clients always see the current queue state.
 */
export class SessionEventBroadcaster {
  constructor(
    private sseBroadcaster: SSEBroadcaster,
    private workerService: WorkerService
  ) {}
  /** Re-broadcast the computed processing status (reflects queue depth). */
  private refreshProcessingStatus(): void {
    this.workerService.broadcastProcessingStatus();
  }
  /**
   * Announce a newly captured user prompt and flip the activity indicator
   * on, since work is about to begin.
   */
  broadcastNewPrompt(prompt: {
    id: number;
    claude_session_id: string;
    project: string;
    prompt_number: number;
    prompt_text: string;
    created_at_epoch: number;
  }): void {
    this.sseBroadcaster.broadcast({
      type: 'new_prompt',
      prompt
    });
    this.sseBroadcaster.broadcast({
      type: 'processing_status',
      isProcessing: true
    });
    this.refreshProcessingStatus();
  }
  /** Announce that a session has been initialized. */
  broadcastSessionStarted(sessionDbId: number, project: string): void {
    this.sseBroadcaster.broadcast({
      type: 'session_started',
      sessionDbId,
      project
    });
    this.refreshProcessingStatus();
  }
  /** Announce that an observation was queued for the session. */
  broadcastObservationQueued(sessionDbId: number): void {
    this.sseBroadcaster.broadcast({
      type: 'observation_queued',
      sessionDbId
    });
    this.refreshProcessingStatus();
  }
  /** Announce that a session finished processing. */
  broadcastSessionCompleted(sessionDbId: number): void {
    this.sseBroadcaster.broadcast({
      type: 'session_completed',
      timestamp: Date.now(),
      sessionDbId
    });
    this.refreshProcessingStatus();
  }
  /** A summarize request entered the queue; only the status changes. */
  broadcastSummarizeQueued(): void {
    this.refreshProcessingStatus();
  }
}
@@ -0,0 +1,82 @@
/**
* BaseRouteHandler
*
* Base class for all route handlers providing:
* - Automatic try-catch wrapping with error logging
* - Integer parameter validation
* - Required body parameter validation
* - Standard HTTP response helpers
* - Centralized error handling
*/
import { Request, Response } from 'express';
import { logger } from '../../../utils/logger.js';
/**
 * Shared base class for HTTP route handlers: error funneling, parameter
 * validation, and standard JSON error responses.
 */
export abstract class BaseRouteHandler {
  /**
   * Wrap a (possibly async) handler so that both synchronous throws and
   * promise rejections are routed into handleError.
   */
  protected wrapHandler(
    handler: (req: Request, res: Response) => void | Promise<void>
  ): (req: Request, res: Response) => void {
    return (req: Request, res: Response): void => {
      let outcome: void | Promise<void>;
      try {
        outcome = handler(req, res);
      } catch (err) {
        this.handleError(res, err as Error);
        return;
      }
      if (outcome instanceof Promise) {
        outcome.catch((err) => this.handleError(res, err as Error));
      }
    };
  }
  /**
   * Parse a route parameter as a base-10 integer.
   * Sends a 400 response and returns null when the value is not numeric.
   */
  protected parseIntParam(req: Request, res: Response, paramName: string): number | null {
    const parsed = Number.parseInt(req.params[paramName], 10);
    if (Number.isNaN(parsed)) {
      this.badRequest(res, `Invalid ${paramName}`);
      return null;
    }
    return parsed;
  }
  /**
   * Ensure every listed body field is present (neither null nor undefined).
   * Sends a 400 response naming the first missing field, if any.
   */
  protected validateRequired(req: Request, res: Response, params: string[]): boolean {
    const missing = params.find((param) => req.body[param] == null);
    if (missing !== undefined) {
      this.badRequest(res, `Missing ${missing}`);
      return false;
    }
    return true;
  }
  /** Send a 400 Bad Request JSON error. */
  protected badRequest(res: Response, message: string): void {
    res.status(400).json({ error: message });
  }
  /** Send a 404 Not Found JSON error. */
  protected notFound(res: Response, message: string): void {
    res.status(404).json({ error: message });
  }
  /** Log the failure and reply with a 500 carrying the error message. */
  protected handleError(res: Response, error: Error, context?: string): void {
    logger.failure('WORKER', context || 'Request failed', {}, error);
    res.status(500).json({ error: error.message });
  }
}
+89
View File
@@ -0,0 +1,89 @@
/**
* HTTP Middleware for Worker Service
*
* Extracted from WorkerService.ts for better organization.
* Handles request/response logging, CORS, JSON parsing, and static file serving.
*/
import express, { Request, Response, NextFunction, RequestHandler } from 'express';
import cors from 'cors';
import path from 'path';
import { getPackageRoot } from '../../../shared/paths.js';
import { logger } from '../../../utils/logger.js';
/**
* Create all middleware for the worker service
* @param summarizeRequestBody - Function to summarize request bodies for logging
* @returns Array of middleware functions
*/
export function createMiddleware(
summarizeRequestBody: (method: string, path: string, body: any) => string
): RequestHandler[] {
const middlewares: RequestHandler[] = [];
// JSON parsing with 50mb limit
middlewares.push(express.json({ limit: '50mb' }));
// CORS
middlewares.push(cors());
// HTTP request/response logging
middlewares.push((req: Request, res: Response, next: NextFunction) => {
// Skip logging for static assets and health checks
if (req.path.startsWith('/health') || req.path === '/' || req.path.includes('.')) {
return next();
}
const start = Date.now();
const requestId = `${req.method}-${Date.now()}`;
// Log incoming request with body summary
const bodySummary = summarizeRequestBody(req.method, req.path, req.body);
logger.info('HTTP', `${req.method} ${req.path}`, { requestId }, bodySummary);
// Capture response
const originalSend = res.send.bind(res);
res.send = function(body: any) {
const duration = Date.now() - start;
logger.info('HTTP', `${res.statusCode} ${req.path}`, { requestId, duration: `${duration}ms` });
return originalSend(body);
};
next();
});
// Serve static files for web UI (viewer-bundle.js, logos, fonts, etc.)
const packageRoot = getPackageRoot();
const uiDir = path.join(packageRoot, 'plugin', 'ui');
middlewares.push(express.static(uiDir));
return middlewares;
}
/**
 * Summarize request body for logging.
 * Keeps log lines short and avoids echoing sensitive or large payloads.
 */
export function summarizeRequestBody(method: string, path: string, body: any): string {
  // Nothing to summarize for absent or empty bodies.
  if (!body || Object.keys(body).length === 0) {
    return '';
  }
  // Session init payloads are intentionally not summarized.
  if (path.includes('/init')) {
    return '';
  }
  // Observation payloads: identify the tool that produced them.
  if (path.includes('/observations')) {
    const toolSummary = logger.formatTool(body.tool_name || '?', body.tool_input);
    return `tool=${toolSummary}`;
  }
  return path.includes('/summarize') ? 'requesting summary' : '';
}
@@ -0,0 +1,239 @@
/**
* Data Routes
*
* Handles data retrieval operations: observations, summaries, prompts, stats, processing status.
* All endpoints use direct database access via domain services.
*/
import express, { Request, Response } from 'express';
import path from 'path';
import { readFileSync, statSync, existsSync } from 'fs';
import { homedir } from 'os';
import { getPackageRoot } from '../../../../shared/paths.js';
import { getWorkerPort } from '../../../../shared/worker-utils.js';
import { PaginationHelper } from '../../PaginationHelper.js';
import { DatabaseManager } from '../../DatabaseManager.js';
import { SessionManager } from '../../SessionManager.js';
import { SSEBroadcaster } from '../../SSEBroadcaster.js';
import type { WorkerService } from '../../../worker-service.js';
import { BaseRouteHandler } from '../BaseRouteHandler.js';
export class DataRoutes extends BaseRouteHandler {
  /**
   * @param paginationHelper - Paginated reads for observations/summaries/prompts
   * @param dbManager       - Access to the session store and raw database
   * @param sessionManager  - Active session tracking and queue metrics
   * @param sseBroadcaster  - SSE client registry (used for stats)
   * @param workerService   - Owner service, used to broadcast computed status
   * @param startTime       - Worker start epoch (ms), used for uptime
   */
  constructor(
    private paginationHelper: PaginationHelper,
    private dbManager: DatabaseManager,
    private sessionManager: SessionManager,
    private sseBroadcaster: SSEBroadcaster,
    private workerService: WorkerService,
    private startTime: number
  ) {
    super();
  }
  /** Register all data-retrieval endpoints on the express app. */
  setupRoutes(app: express.Application): void {
    // Pagination endpoints
    app.get('/api/observations', this.handleGetObservations.bind(this));
    app.get('/api/summaries', this.handleGetSummaries.bind(this));
    app.get('/api/prompts', this.handleGetPrompts.bind(this));
    // Fetch by ID endpoints
    app.get('/api/observation/:id', this.handleGetObservationById.bind(this));
    app.get('/api/session/:id', this.handleGetSessionById.bind(this));
    app.get('/api/prompt/:id', this.handleGetPromptById.bind(this));
    // Metadata endpoints
    app.get('/api/stats', this.handleGetStats.bind(this));
    app.get('/api/projects', this.handleGetProjects.bind(this));
    // Processing status endpoints
    app.get('/api/processing-status', this.handleGetProcessingStatus.bind(this));
    app.post('/api/processing', this.handleSetProcessing.bind(this));
  }
  /**
   * Get paginated observations
   * GET /api/observations?offset=0&limit=20&project=...
   */
  private handleGetObservations = this.wrapHandler((req: Request, res: Response): void => {
    const { offset, limit, project } = this.parsePaginationParams(req);
    const result = this.paginationHelper.getObservations(offset, limit, project);
    res.json(result);
  });
  /**
   * Get paginated summaries
   * GET /api/summaries?offset=0&limit=20&project=...
   */
  private handleGetSummaries = this.wrapHandler((req: Request, res: Response): void => {
    const { offset, limit, project } = this.parsePaginationParams(req);
    const result = this.paginationHelper.getSummaries(offset, limit, project);
    res.json(result);
  });
  /**
   * Get paginated user prompts
   * GET /api/prompts?offset=0&limit=20&project=...
   */
  private handleGetPrompts = this.wrapHandler((req: Request, res: Response): void => {
    const { offset, limit, project } = this.parsePaginationParams(req);
    const result = this.paginationHelper.getPrompts(offset, limit, project);
    res.json(result);
  });
  /**
   * Get observation by ID
   * GET /api/observation/:id
   */
  private handleGetObservationById = this.wrapHandler((req: Request, res: Response): void => {
    const id = this.parseIntParam(req, res, 'id');
    if (id === null) return;
    const store = this.dbManager.getSessionStore();
    const observation = store.getObservationById(id);
    if (!observation) {
      this.notFound(res, `Observation #${id} not found`);
      return;
    }
    res.json(observation);
  });
  /**
   * Get session by ID
   * GET /api/session/:id
   */
  private handleGetSessionById = this.wrapHandler((req: Request, res: Response): void => {
    const id = this.parseIntParam(req, res, 'id');
    if (id === null) return;
    const store = this.dbManager.getSessionStore();
    const sessions = store.getSessionSummariesByIds([id]);
    if (sessions.length === 0) {
      this.notFound(res, `Session #${id} not found`);
      return;
    }
    res.json(sessions[0]);
  });
  /**
   * Get user prompt by ID
   * GET /api/prompt/:id
   */
  private handleGetPromptById = this.wrapHandler((req: Request, res: Response): void => {
    const id = this.parseIntParam(req, res, 'id');
    if (id === null) return;
    const store = this.dbManager.getSessionStore();
    const prompts = store.getUserPromptsByIds([id]);
    if (prompts.length === 0) {
      this.notFound(res, `Prompt #${id} not found`);
      return;
    }
    res.json(prompts[0]);
  });
  /**
   * Get database statistics (with worker metadata)
   * GET /api/stats
   */
  private handleGetStats = this.wrapHandler((req: Request, res: Response): void => {
    const db = this.dbManager.getSessionStore().db;
    // Read version from package.json
    const packageRoot = getPackageRoot();
    const packageJsonPath = path.join(packageRoot, 'package.json');
    const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf-8'));
    const version = packageJson.version;
    // Get database stats
    const totalObservations = db.prepare('SELECT COUNT(*) as count FROM observations').get() as { count: number };
    const totalSessions = db.prepare('SELECT COUNT(*) as count FROM sdk_sessions').get() as { count: number };
    const totalSummaries = db.prepare('SELECT COUNT(*) as count FROM session_summaries').get() as { count: number };
    // Get database file size and path
    const dbPath = path.join(homedir(), '.claude-mem', 'claude-mem.db');
    let dbSize = 0;
    if (existsSync(dbPath)) {
      dbSize = statSync(dbPath).size;
    }
    // Worker metadata
    const uptime = Math.floor((Date.now() - this.startTime) / 1000);
    const activeSessions = this.sessionManager.getActiveSessionCount();
    const sseClients = this.sseBroadcaster.getClientCount();
    res.json({
      worker: {
        version,
        uptime,
        activeSessions,
        sseClients,
        port: getWorkerPort()
      },
      database: {
        path: dbPath,
        size: dbSize,
        observations: totalObservations.count,
        sessions: totalSessions.count,
        summaries: totalSummaries.count
      }
    });
  });
  /**
   * Get list of distinct projects from observations
   * GET /api/projects
   */
  private handleGetProjects = this.wrapHandler((req: Request, res: Response): void => {
    const db = this.dbManager.getSessionStore().db;
    // Ordered by each project's most recent observation, newest first
    const rows = db.prepare(`
      SELECT DISTINCT project
      FROM observations
      WHERE project IS NOT NULL
      GROUP BY project
      ORDER BY MAX(created_at_epoch) DESC
    `).all() as Array<{ project: string }>;
    const projects = rows.map(row => row.project);
    res.json({ projects });
  });
  /**
   * Get current processing status
   * GET /api/processing-status
   */
  private handleGetProcessingStatus = this.wrapHandler((req: Request, res: Response): void => {
    const isProcessing = this.sessionManager.isAnySessionProcessing();
    const queueDepth = this.sessionManager.getTotalActiveWork(); // Includes queued + actively processing
    res.json({ isProcessing, queueDepth });
  });
  /**
   * Set processing status (called by hooks)
   * NOTE: This now broadcasts computed status based on active processing (ignores input)
   */
  private handleSetProcessing = this.wrapHandler((req: Request, res: Response): void => {
    // Broadcast current computed status (ignores manual input)
    this.workerService.broadcastProcessingStatus();
    const isProcessing = this.sessionManager.isAnySessionProcessing();
    res.json({ status: 'ok', isProcessing });
  });
  /**
   * Parse pagination parameters from request query.
   * Defaults: offset=0, limit=20; limit is capped at 100.
   */
  private parsePaginationParams(req: Request): { offset: number; limit: number; project?: string } {
    const offset = parseInt(req.query.offset as string, 10) || 0;
    const limit = Math.min(parseInt(req.query.limit as string, 10) || 20, 100); // Max 100
    const project = req.query.project as string | undefined;
    return { offset, limit, project };
  }
}
@@ -0,0 +1,360 @@
/**
* Search Routes
*
* Handles all search operations via SearchManager.
* All endpoints call SearchManager methods directly.
*/
import express, { Request, Response } from 'express';
import { SearchManager } from '../../SearchManager.js';
import { BaseRouteHandler } from '../BaseRouteHandler.js';
export class SearchRoutes extends BaseRouteHandler {
  constructor(
    private searchManager: SearchManager
  ) {
    super();
  }
  /** Register all search, context, timeline, and help endpoints. */
  setupRoutes(app: express.Application): void {
    // Unified endpoints (new consolidated API)
    app.get('/api/search', this.handleUnifiedSearch.bind(this));
    app.get('/api/timeline', this.handleUnifiedTimeline.bind(this));
    app.get('/api/decisions', this.handleDecisions.bind(this));
    app.get('/api/changes', this.handleChanges.bind(this));
    app.get('/api/how-it-works', this.handleHowItWorks.bind(this));
    // Backward compatibility endpoints
    app.get('/api/search/observations', this.handleSearchObservations.bind(this));
    app.get('/api/search/sessions', this.handleSearchSessions.bind(this));
    app.get('/api/search/prompts', this.handleSearchPrompts.bind(this));
    app.get('/api/search/by-concept', this.handleSearchByConcept.bind(this));
    app.get('/api/search/by-file', this.handleSearchByFile.bind(this));
    app.get('/api/search/by-type', this.handleSearchByType.bind(this));
    // Context endpoints
    app.get('/api/context/recent', this.handleGetRecentContext.bind(this));
    app.get('/api/context/timeline', this.handleGetContextTimeline.bind(this));
    app.get('/api/context/preview', this.handleContextPreview.bind(this));
    app.get('/api/context/inject', this.handleContextInject.bind(this));
    // Timeline and help endpoints
    app.get('/api/timeline/by-query', this.handleGetTimelineByQuery.bind(this));
    app.get('/api/search/help', this.handleSearchHelp.bind(this));
  }
  /**
   * Unified search (observations + sessions + prompts)
   * GET /api/search?query=...&type=observations&format=index&limit=20
   */
  private handleUnifiedSearch = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const result = await this.searchManager.search(req.query);
    res.json(result.content);
  });
  /**
   * Unified timeline (anchor or query-based)
   * GET /api/timeline?anchor=123 OR GET /api/timeline?query=...
   */
  private handleUnifiedTimeline = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const result = await this.searchManager.timeline(req.query);
    res.json(result.content);
  });
  /**
   * Semantic shortcut for finding decision observations
   * GET /api/decisions?format=index&limit=20
   */
  private handleDecisions = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const result = await this.searchManager.decisions(req.query);
    res.json(result.content);
  });
  /**
   * Semantic shortcut for finding change-related observations
   * GET /api/changes?format=index&limit=20
   */
  private handleChanges = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const result = await this.searchManager.changes(req.query);
    res.json(result.content);
  });
  /**
   * Semantic shortcut for finding "how it works" explanations
   * GET /api/how-it-works?format=index&limit=20
   */
  private handleHowItWorks = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const result = await this.searchManager.howItWorks(req.query);
    res.json(result.content);
  });
  /**
   * Search observations (use /api/search?type=observations instead)
   * GET /api/search/observations?query=...&format=index&limit=20&project=...
   */
  private handleSearchObservations = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const result = await this.searchManager.searchObservations(req.query);
    res.json(result.content);
  });
  /**
   * Search session summaries
   * GET /api/search/sessions?query=...&format=index&limit=20
   */
  private handleSearchSessions = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const result = await this.searchManager.searchSessions(req.query);
    res.json(result.content);
  });
  /**
   * Search user prompts
   * GET /api/search/prompts?query=...&format=index&limit=20
   */
  private handleSearchPrompts = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const result = await this.searchManager.searchUserPrompts(req.query);
    res.json(result.content);
  });
  /**
   * Search observations by concept
   * GET /api/search/by-concept?concept=discovery&format=index&limit=5
   */
  private handleSearchByConcept = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const result = await this.searchManager.findByConcept(req.query);
    res.json(result.content);
  });
  /**
   * Search by file path
   * GET /api/search/by-file?filePath=...&format=index&limit=10
   */
  private handleSearchByFile = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const result = await this.searchManager.findByFile(req.query);
    res.json(result.content);
  });
  /**
   * Search observations by type
   * GET /api/search/by-type?type=bugfix&format=index&limit=10
   */
  private handleSearchByType = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const result = await this.searchManager.findByType(req.query);
    res.json(result.content);
  });
  /**
   * Get recent context (summaries and observations for a project)
   * GET /api/context/recent?project=...&limit=3
   */
  private handleGetRecentContext = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const result = await this.searchManager.getRecentContext(req.query);
    res.json(result.content);
  });
  /**
   * Get context timeline around an anchor point
   * GET /api/context/timeline?anchor=123&depth_before=10&depth_after=10&project=...
   */
  private handleGetContextTimeline = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const result = await this.searchManager.getContextTimeline(req.query);
    res.json(result.content);
  });
  /**
   * Generate and send project context as plain text.
   *
   * Shared implementation for the preview and inject endpoints (which
   * previously duplicated this logic). The context generator is imported
   * lazily because it runs in the worker and needs database access.
   * generateContext derives the project name via path.basename(cwd), so
   * callers encode the project into a synthetic cwd.
   */
  private async sendGeneratedContext(
    res: Response,
    sessionId: string,
    cwd: string,
    useColors: boolean
  ): Promise<void> {
    const { generateContext } = await import('../../../context-generator.js');
    const contextText = await generateContext(
      {
        session_id: sessionId,
        cwd: cwd
      },
      useColors
    );
    // Return as plain text
    res.setHeader('Content-Type', 'text/plain; charset=utf-8');
    res.send(contextText);
  }
  /**
   * Generate context preview for settings modal
   * GET /api/context/preview?project=...
   */
  private handleContextPreview = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const projectName = req.query.project as string;
    if (!projectName) {
      this.badRequest(res, 'Project parameter is required');
      return;
    }
    // Preview always uses ANSI colors for terminal-style display
    await this.sendGeneratedContext(res, 'preview-' + Date.now(), `/preview/${projectName}`, true);
  });
  /**
   * Context injection endpoint for hooks
   * GET /api/context/inject?project=...&colors=true
   *
   * Returns pre-formatted context string ready for display.
   * Use colors=true for ANSI-colored terminal output.
   */
  private handleContextInject = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const projectName = req.query.project as string;
    const useColors = req.query.colors === 'true';
    if (!projectName) {
      this.badRequest(res, 'Project parameter is required');
      return;
    }
    await this.sendGeneratedContext(res, 'context-inject-' + Date.now(), `/context/${projectName}`, useColors);
  });
  /**
   * Get timeline by query (search first, then get timeline around best match)
   * GET /api/timeline/by-query?query=...&mode=auto&depth_before=10&depth_after=10
   */
  private handleGetTimelineByQuery = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const result = await this.searchManager.getTimelineByQuery(req.query);
    res.json(result.content);
  });
  /**
   * Get search help documentation
   * GET /api/search/help
   */
  private handleSearchHelp = this.wrapHandler((req: Request, res: Response): void => {
    res.json({
      title: 'Claude-Mem Search API',
      description: 'HTTP API for searching persistent memory',
      endpoints: [
        {
          path: '/api/search/observations',
          method: 'GET',
          description: 'Search observations using full-text search',
          parameters: {
            query: 'Search query (required)',
            format: 'Response format: "index" or "full" (default: "full")',
            limit: 'Number of results (default: 20)',
            project: 'Filter by project name (optional)'
          }
        },
        {
          path: '/api/search/sessions',
          method: 'GET',
          description: 'Search session summaries using full-text search',
          parameters: {
            query: 'Search query (required)',
            format: 'Response format: "index" or "full" (default: "full")',
            limit: 'Number of results (default: 20)'
          }
        },
        {
          path: '/api/search/prompts',
          method: 'GET',
          description: 'Search user prompts using full-text search',
          parameters: {
            query: 'Search query (required)',
            format: 'Response format: "index" or "full" (default: "full")',
            limit: 'Number of results (default: 20)',
            project: 'Filter by project name (optional)'
          }
        },
        {
          path: '/api/search/by-concept',
          method: 'GET',
          description: 'Find observations by concept tag',
          parameters: {
            concept: 'Concept tag (required): discovery, decision, bugfix, feature, refactor',
            format: 'Response format: "index" or "full" (default: "full")',
            limit: 'Number of results (default: 10)',
            project: 'Filter by project name (optional)'
          }
        },
        {
          path: '/api/search/by-file',
          method: 'GET',
          description: 'Find observations and sessions by file path',
          parameters: {
            filePath: 'File path or partial path (required)',
            format: 'Response format: "index" or "full" (default: "full")',
            limit: 'Number of results per type (default: 10)',
            project: 'Filter by project name (optional)'
          }
        },
        {
          path: '/api/search/by-type',
          method: 'GET',
          description: 'Find observations by type',
          parameters: {
            type: 'Observation type (required): discovery, decision, bugfix, feature, refactor',
            format: 'Response format: "index" or "full" (default: "full")',
            limit: 'Number of results (default: 10)',
            project: 'Filter by project name (optional)'
          }
        },
        {
          path: '/api/context/recent',
          method: 'GET',
          description: 'Get recent session context including summaries and observations',
          parameters: {
            project: 'Project name (default: current directory)',
            limit: 'Number of recent sessions (default: 3)'
          }
        },
        {
          path: '/api/context/timeline',
          method: 'GET',
          description: 'Get unified timeline around a specific point in time',
          parameters: {
            anchor: 'Anchor point: observation ID, session ID (e.g., "S123"), or ISO timestamp (required)',
            depth_before: 'Number of records before anchor (default: 10)',
            depth_after: 'Number of records after anchor (default: 10)',
            project: 'Filter by project name (optional)'
          }
        },
        {
          path: '/api/timeline/by-query',
          method: 'GET',
          description: 'Search for best match, then get timeline around it',
          parameters: {
            query: 'Search query (required)',
            mode: 'Search mode: "auto", "observations", or "sessions" (default: "auto")',
            depth_before: 'Number of records before match (default: 10)',
            depth_after: 'Number of records after match (default: 10)',
            project: 'Filter by project name (optional)'
          }
        },
        {
          path: '/api/search/help',
          method: 'GET',
          description: 'Get this help documentation'
        }
      ],
      examples: [
        'curl "http://localhost:37777/api/search/observations?query=authentication&format=index&limit=5"',
        'curl "http://localhost:37777/api/search/by-type?type=bugfix&limit=10"',
        'curl "http://localhost:37777/api/context/recent?project=claude-mem&limit=3"',
        'curl "http://localhost:37777/api/context/timeline?anchor=123&depth_before=5&depth_after=5"'
      ]
    });
  });
}
@@ -0,0 +1,390 @@
/**
* Session Routes
*
* Handles session lifecycle operations: initialization, observations, summarization, completion.
* These routes manage the flow of work through the Claude Agent SDK.
*/
import express, { Request, Response } from 'express';
import { getWorkerPort } from '../../../../shared/worker-utils.js';
import { logger } from '../../../../utils/logger.js';
import { stripMemoryTagsFromJson } from '../../../../utils/tag-stripping.js';
import { SessionManager } from '../../SessionManager.js';
import { DatabaseManager } from '../../DatabaseManager.js';
import { SDKAgent } from '../../SDKAgent.js';
import type { WorkerService } from '../../../worker-service.js';
import { BaseRouteHandler } from '../BaseRouteHandler.js';
import { SessionEventBroadcaster } from '../../events/SessionEventBroadcaster.js';
import { SessionCompletionHandler } from '../../session/SessionCompletionHandler.js';
import { PrivacyCheckValidator } from '../../validation/PrivacyCheckValidator.js';
export class SessionRoutes extends BaseRouteHandler {
  private completionHandler: SessionCompletionHandler;

  constructor(
    private sessionManager: SessionManager,
    private dbManager: DatabaseManager,
    private sdkAgent: SDKAgent,
    private eventBroadcaster: SessionEventBroadcaster,
    private workerService: WorkerService
  ) {
    super();
    // Completion logic is shared by three endpoints (DELETE, POST /complete
    // by DB id, POST /api/sessions/complete by Claude id) and lives in one
    // dedicated handler so all three follow the identical flow.
    this.completionHandler = new SessionCompletionHandler(
      sessionManager,
      dbManager,
      eventBroadcaster
    );
  }

  /**
   * Starts the SDK agent generator for a session and attaches the shared
   * lifecycle chain: log agent failures, clear the generator reference once
   * it settles, then re-broadcast processing status (may stop the spinner).
   *
   * Extracted so the init path and the auto-start path run the exact same
   * catch/finally chain instead of duplicating it.
   */
  private startGenerator(
    session: ReturnType<SessionManager['initializeSession']>,
    sessionDbId: number
  ): void {
    session.generatorPromise = this.sdkAgent.startSession(session, this.workerService)
      .catch(err => {
        logger.failure('SDK', 'SDK agent error', { sessionId: sessionDbId }, err);
      })
      .finally(() => {
        // Clear generator reference when completed
        logger.info('SESSION', `Generator finished`, { sessionId: sessionDbId });
        session.generatorPromise = null;
        // Broadcast status change (generator finished, may stop spinner)
        this.workerService.broadcastProcessingStatus();
      });
  }

  /**
   * Ensures SDK agent generator is running for a session
   * Auto-starts if not already running to process pending queue
   */
  private ensureGeneratorRunning(sessionDbId: number, source: string): void {
    const session = this.sessionManager.getSession(sessionDbId);
    if (session && !session.generatorPromise) {
      logger.info('SESSION', `Generator auto-starting (${source})`, {
        sessionId: sessionDbId,
        queueDepth: session.pendingMessages.length
      });
      this.startGenerator(session, sessionDbId);
    }
  }

  setupRoutes(app: express.Application): void {
    // Legacy session endpoints (use sessionDbId)
    app.post('/sessions/:sessionDbId/init', this.handleSessionInit.bind(this));
    app.post('/sessions/:sessionDbId/observations', this.handleObservations.bind(this));
    app.post('/sessions/:sessionDbId/summarize', this.handleSummarize.bind(this));
    app.get('/sessions/:sessionDbId/status', this.handleSessionStatus.bind(this));
    app.delete('/sessions/:sessionDbId', this.handleSessionDelete.bind(this));
    app.post('/sessions/:sessionDbId/complete', this.handleSessionComplete.bind(this));
    // New session endpoints (use claudeSessionId)
    app.post('/api/sessions/observations', this.handleObservationsByClaudeId.bind(this));
    app.post('/api/sessions/summarize', this.handleSummarizeByClaudeId.bind(this));
    app.post('/api/sessions/complete', this.handleSessionCompleteByClaudeId.bind(this));
  }

  /**
   * Initialize a new session
   *
   * Broadcasts the latest user prompt to SSE clients, kicks off a background
   * Chroma sync for it, then starts the SDK agent generator.
   */
  private handleSessionInit = this.wrapHandler((req: Request, res: Response): void => {
    const sessionDbId = this.parseIntParam(req, res, 'sessionDbId');
    if (sessionDbId === null) return;
    const { userPrompt, promptNumber } = req.body;
    const session = this.sessionManager.initializeSession(sessionDbId, userPrompt, promptNumber);
    // Get the latest user_prompt for this session to sync to Chroma
    const latestPrompt = this.dbManager.getSessionStore().getLatestUserPrompt(session.claudeSessionId);
    // Broadcast new prompt to SSE clients (for web UI)
    if (latestPrompt) {
      this.eventBroadcaster.broadcastNewPrompt({
        id: latestPrompt.id,
        claude_session_id: latestPrompt.claude_session_id,
        project: latestPrompt.project,
        prompt_number: latestPrompt.prompt_number,
        prompt_text: latestPrompt.prompt_text,
        created_at_epoch: latestPrompt.created_at_epoch
      });
      // Sync user prompt to Chroma with error logging. Fire-and-forget:
      // failures are logged but never block session initialization.
      const chromaStart = Date.now();
      const promptText = latestPrompt.prompt_text;
      this.dbManager.getChromaSync().syncUserPrompt(
        latestPrompt.id,
        latestPrompt.sdk_session_id,
        latestPrompt.project,
        promptText,
        latestPrompt.prompt_number,
        latestPrompt.created_at_epoch
      ).then(() => {
        const chromaDuration = Date.now() - chromaStart;
        const truncatedPrompt = promptText.length > 60
          ? promptText.substring(0, 60) + '...'
          : promptText;
        logger.debug('CHROMA', 'User prompt synced', {
          promptId: latestPrompt.id,
          duration: `${chromaDuration}ms`,
          prompt: truncatedPrompt
        });
      }).catch(err => {
        logger.error('CHROMA', 'Failed to sync user_prompt', {
          promptId: latestPrompt.id,
          sessionId: sessionDbId
        }, err);
      });
    }
    // Start SDK agent in background (pass worker ref for spinner control)
    logger.info('SESSION', 'Generator starting', {
      sessionId: sessionDbId,
      project: session.project,
      promptNum: session.lastPromptNumber
    });
    this.startGenerator(session, sessionDbId);
    // Broadcast session started event
    this.eventBroadcaster.broadcastSessionStarted(sessionDbId, session.project);
    res.json({ status: 'initialized', sessionDbId, port: getWorkerPort() });
  });

  /**
   * Queue observations for processing
   * CRITICAL: Ensures SDK agent is running to process the queue (ALWAYS SAVE EVERYTHING)
   */
  private handleObservations = this.wrapHandler((req: Request, res: Response): void => {
    const sessionDbId = this.parseIntParam(req, res, 'sessionDbId');
    if (sessionDbId === null) return;
    const { tool_name, tool_input, tool_response, prompt_number, cwd } = req.body;
    this.sessionManager.queueObservation(sessionDbId, {
      tool_name,
      tool_input,
      tool_response,
      prompt_number,
      cwd
    });
    // CRITICAL: Ensure SDK agent is running to consume the queue
    this.ensureGeneratorRunning(sessionDbId, 'observation');
    // Broadcast observation queued event
    this.eventBroadcaster.broadcastObservationQueued(sessionDbId);
    res.json({ status: 'queued' });
  });

  /**
   * Queue summarize request
   * CRITICAL: Ensures SDK agent is running to process the queue (ALWAYS SAVE EVERYTHING)
   */
  private handleSummarize = this.wrapHandler((req: Request, res: Response): void => {
    const sessionDbId = this.parseIntParam(req, res, 'sessionDbId');
    if (sessionDbId === null) return;
    const { last_user_message, last_assistant_message } = req.body;
    this.sessionManager.queueSummarize(sessionDbId, last_user_message, last_assistant_message);
    // CRITICAL: Ensure SDK agent is running to consume the queue
    this.ensureGeneratorRunning(sessionDbId, 'summarize');
    // Broadcast summarize queued event
    this.eventBroadcaster.broadcastSummarizeQueued();
    res.json({ status: 'queued' });
  });

  /**
   * Get session status (queue depth and uptime for an active session,
   * or { status: 'not_found' } when no in-memory session exists)
   */
  private handleSessionStatus = this.wrapHandler((req: Request, res: Response): void => {
    const sessionDbId = this.parseIntParam(req, res, 'sessionDbId');
    if (sessionDbId === null) return;
    const session = this.sessionManager.getSession(sessionDbId);
    if (!session) {
      res.json({ status: 'not_found' });
      return;
    }
    res.json({
      status: 'active',
      sessionDbId,
      project: session.project,
      queueLength: session.pendingMessages.length,
      uptime: Date.now() - session.startTime
    });
  });

  /**
   * Delete a session
   */
  private handleSessionDelete = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const sessionDbId = this.parseIntParam(req, res, 'sessionDbId');
    if (sessionDbId === null) return;
    await this.completionHandler.completeByDbId(sessionDbId);
    res.json({ status: 'deleted' });
  });

  /**
   * Complete a session (backward compatibility for cleanup-hook)
   * cleanup-hook expects POST /sessions/:sessionDbId/complete instead of DELETE
   */
  private handleSessionComplete = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const sessionDbId = this.parseIntParam(req, res, 'sessionDbId');
    if (sessionDbId === null) return;
    await this.completionHandler.completeByDbId(sessionDbId);
    res.json({ success: true });
  });

  /**
   * Queue observations by claudeSessionId (post-tool-use-hook uses this)
   * POST /api/sessions/observations
   * Body: { claudeSessionId, tool_name, tool_input, tool_response, cwd }
   */
  private handleObservationsByClaudeId = this.wrapHandler((req: Request, res: Response): void => {
    const { claudeSessionId, tool_name, tool_input, tool_response, cwd } = req.body;
    if (!claudeSessionId) {
      this.badRequest(res, 'Missing claudeSessionId');
      return;
    }
    const store = this.dbManager.getSessionStore();
    // Get or create session
    const sessionDbId = store.createSDKSession(claudeSessionId, '', '');
    const promptNumber = store.getPromptCounter(sessionDbId);
    // Privacy check: skip if user prompt was entirely private
    const userPrompt = PrivacyCheckValidator.checkUserPromptPrivacy(
      store,
      claudeSessionId,
      promptNumber,
      'observation',
      sessionDbId,
      { tool_name }
    );
    if (!userPrompt) {
      res.json({ status: 'skipped', reason: 'private' });
      return;
    }
    // Strip memory tags from tool_input and tool_response. Serialization
    // failures degrade to an error placeholder instead of dropping the event.
    let cleanedToolInput = '{}';
    let cleanedToolResponse = '{}';
    try {
      cleanedToolInput = tool_input !== undefined
        ? stripMemoryTagsFromJson(JSON.stringify(tool_input))
        : '{}';
    } catch (error) {
      cleanedToolInput = '{"error": "Failed to serialize tool_input"}';
    }
    try {
      cleanedToolResponse = tool_response !== undefined
        ? stripMemoryTagsFromJson(JSON.stringify(tool_response))
        : '{}';
    } catch (error) {
      cleanedToolResponse = '{"error": "Failed to serialize tool_response"}';
    }
    // Queue observation
    this.sessionManager.queueObservation(sessionDbId, {
      tool_name,
      tool_input: cleanedToolInput,
      tool_response: cleanedToolResponse,
      prompt_number: promptNumber,
      cwd: cwd || ''
    });
    // Ensure SDK agent is running
    this.ensureGeneratorRunning(sessionDbId, 'observation');
    // Broadcast observation queued event
    this.eventBroadcaster.broadcastObservationQueued(sessionDbId);
    res.json({ status: 'queued' });
  });

  /**
   * Queue summarize by claudeSessionId (summary-hook uses this)
   * POST /api/sessions/summarize
   * Body: { claudeSessionId, last_user_message, last_assistant_message }
   *
   * Checks privacy, queues summarize request for SDK agent
   */
  private handleSummarizeByClaudeId = this.wrapHandler((req: Request, res: Response): void => {
    const { claudeSessionId, last_user_message, last_assistant_message } = req.body;
    if (!claudeSessionId) {
      this.badRequest(res, 'Missing claudeSessionId');
      return;
    }
    const store = this.dbManager.getSessionStore();
    // Get or create session
    const sessionDbId = store.createSDKSession(claudeSessionId, '', '');
    const promptNumber = store.getPromptCounter(sessionDbId);
    // Privacy check: skip if user prompt was entirely private
    const userPrompt = PrivacyCheckValidator.checkUserPromptPrivacy(
      store,
      claudeSessionId,
      promptNumber,
      'summarize',
      sessionDbId
    );
    if (!userPrompt) {
      res.json({ status: 'skipped', reason: 'private' });
      return;
    }
    // Queue summarize
    this.sessionManager.queueSummarize(sessionDbId, last_user_message || '', last_assistant_message);
    // Ensure SDK agent is running
    this.ensureGeneratorRunning(sessionDbId, 'summarize');
    // Broadcast summarize queued event
    this.eventBroadcaster.broadcastSummarizeQueued();
    res.json({ status: 'queued' });
  });

  /**
   * Complete session by claudeSessionId (cleanup-hook uses this)
   * POST /api/sessions/complete
   * Body: { claudeSessionId }
   *
   * Marks session complete, stops SDK agent, broadcasts status
   */
  private handleSessionCompleteByClaudeId = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const { claudeSessionId } = req.body;
    if (!claudeSessionId) {
      this.badRequest(res, 'Missing claudeSessionId');
      return;
    }
    const found = await this.completionHandler.completeByClaudeId(claudeSessionId);
    if (!found) {
      // No active session - nothing to clean up (may have already been completed)
      res.json({ success: true, message: 'No active session found' });
      return;
    }
    res.json({ success: true });
  });
}
@@ -0,0 +1,325 @@
/**
* Settings Routes
*
* Handles settings management, MCP toggle, and branch switching.
* Settings are stored in ~/.claude-mem/settings.json
*/
import express, { Request, Response } from 'express';
import path from 'path';
import { readFileSync, writeFileSync, existsSync, renameSync } from 'fs';
import { homedir } from 'os';
import { getPackageRoot } from '../../../../shared/paths.js';
import { logger } from '../../../../utils/logger.js';
import { SettingsManager } from '../../SettingsManager.js';
import { getBranchInfo, switchBranch, pullUpdates } from '../../BranchManager.js';
import {
OBSERVATION_TYPES,
OBSERVATION_CONCEPTS,
ObservationType,
ObservationConcept
} from '../../../../constants/observation-metadata.js';
import { BaseRouteHandler } from '../BaseRouteHandler.js';
import { SettingsDefaultsManager } from '../../settings/SettingsDefaultsManager.js';
export class SettingsRoutes extends BaseRouteHandler {
  constructor(
    private settingsManager: SettingsManager
  ) {
    super();
  }

  setupRoutes(app: express.Application): void {
    // Settings endpoints
    app.get('/api/settings', this.handleGetSettings.bind(this));
    app.post('/api/settings', this.handleUpdateSettings.bind(this));
    // MCP toggle endpoints
    app.get('/api/mcp/status', this.handleGetMcpStatus.bind(this));
    app.post('/api/mcp/toggle', this.handleToggleMcp.bind(this));
    // Branch switching endpoints
    app.get('/api/branch/status', this.handleGetBranchStatus.bind(this));
    app.post('/api/branch/switch', this.handleSwitchBranch.bind(this));
    app.post('/api/branch/update', this.handleUpdateBranch.bind(this));
  }

  /**
   * GET /api/settings - environment settings from ~/.claude-mem/settings.json,
   * merged over built-in defaults by SettingsDefaultsManager.
   */
  private handleGetSettings = this.wrapHandler((req: Request, res: Response): void => {
    const settingsPath = path.join(homedir(), '.claude-mem', 'settings.json');
    const settings = SettingsDefaultsManager.loadFromFile(settingsPath);
    res.json(settings);
  });

  /**
   * POST /api/settings - validate and persist settings to ~/.claude-mem/settings.json.
   *
   * Values are validated whenever the key is PRESENT in the body (not just
   * truthy), so empty strings and nulls are rejected up front instead of being
   * persisted and later parsed to NaN (e.g. a broken worker port).
   */
  private handleUpdateSettings = this.wrapHandler((req: Request, res: Response): void => {
    // Validate CLAUDE_MEM_CONTEXT_OBSERVATIONS
    if (req.body.CLAUDE_MEM_CONTEXT_OBSERVATIONS !== undefined) {
      const obsCount = parseInt(req.body.CLAUDE_MEM_CONTEXT_OBSERVATIONS, 10);
      if (isNaN(obsCount) || obsCount < 1 || obsCount > 200) {
        res.status(400).json({
          success: false,
          error: 'CLAUDE_MEM_CONTEXT_OBSERVATIONS must be between 1 and 200'
        });
        return;
      }
    }
    // Validate CLAUDE_MEM_WORKER_PORT
    if (req.body.CLAUDE_MEM_WORKER_PORT !== undefined) {
      const port = parseInt(req.body.CLAUDE_MEM_WORKER_PORT, 10);
      if (isNaN(port) || port < 1024 || port > 65535) {
        res.status(400).json({
          success: false,
          error: 'CLAUDE_MEM_WORKER_PORT must be between 1024 and 65535'
        });
        return;
      }
    }
    // Validate context settings
    const validation = this.validateContextSettings(req.body);
    if (!validation.valid) {
      res.status(400).json({
        success: false,
        error: validation.error
      });
      return;
    }
    // Read existing settings. A corrupt file is recreated rather than
    // permanently 500-ing every update attempt.
    const settingsPath = path.join(homedir(), '.claude-mem', 'settings.json');
    let settings: any = { env: {} };
    if (existsSync(settingsPath)) {
      try {
        settings = JSON.parse(readFileSync(settingsPath, 'utf-8'));
        if (!settings.env) {
          settings.env = {};
        }
      } catch {
        logger.info('WORKER', 'settings.json unreadable, recreating');
        settings = { env: {} };
      }
    }
    // Update all settings from request body
    const settingKeys = [
      'CLAUDE_MEM_MODEL',
      'CLAUDE_MEM_CONTEXT_OBSERVATIONS',
      'CLAUDE_MEM_WORKER_PORT',
      'CLAUDE_MEM_CONTEXT_SHOW_READ_TOKENS',
      'CLAUDE_MEM_CONTEXT_SHOW_WORK_TOKENS',
      'CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_AMOUNT',
      'CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_PERCENT',
      'CLAUDE_MEM_CONTEXT_OBSERVATION_TYPES',
      'CLAUDE_MEM_CONTEXT_OBSERVATION_CONCEPTS',
      'CLAUDE_MEM_CONTEXT_FULL_COUNT',
      'CLAUDE_MEM_CONTEXT_FULL_FIELD',
      'CLAUDE_MEM_CONTEXT_SESSION_COUNT',
      'CLAUDE_MEM_CONTEXT_SHOW_LAST_SUMMARY',
      'CLAUDE_MEM_CONTEXT_SHOW_LAST_MESSAGE',
    ];
    for (const key of settingKeys) {
      if (req.body[key] !== undefined) {
        settings.env[key] = req.body[key];
      }
    }
    // Write back
    writeFileSync(settingsPath, JSON.stringify(settings, null, 2), 'utf-8');
    logger.info('WORKER', 'Settings updated');
    res.json({ success: true, message: 'Settings updated successfully' });
  });

  /**
   * GET /api/mcp/status - Check if MCP search server is enabled
   */
  private handleGetMcpStatus = this.wrapHandler((req: Request, res: Response): void => {
    const enabled = this.isMcpEnabled();
    res.json({ enabled });
  });

  /**
   * POST /api/mcp/toggle - Toggle MCP search server on/off
   * Body: { enabled: boolean }
   */
  private handleToggleMcp = this.wrapHandler((req: Request, res: Response): void => {
    const { enabled } = req.body;
    if (typeof enabled !== 'boolean') {
      this.badRequest(res, 'enabled must be a boolean');
      return;
    }
    this.toggleMcp(enabled);
    // Report the actual post-toggle state, not the requested one
    res.json({ success: true, enabled: this.isMcpEnabled() });
  });

  /**
   * GET /api/branch/status - Get current branch information
   */
  private handleGetBranchStatus = this.wrapHandler((req: Request, res: Response): void => {
    const info = getBranchInfo();
    res.json(info);
  });

  /**
   * POST /api/branch/switch - Switch to a different branch
   * Body: { branch: "main" | "beta/7.0" }
   */
  private handleSwitchBranch = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    const { branch } = req.body;
    if (!branch) {
      res.status(400).json({ success: false, error: 'Missing branch parameter' });
      return;
    }
    // Validate branch name against an allow-list (this value reaches git)
    const allowedBranches = ['main', 'beta/7.0'];
    if (!allowedBranches.includes(branch)) {
      res.status(400).json({
        success: false,
        error: `Invalid branch. Allowed: ${allowedBranches.join(', ')}`
      });
      return;
    }
    logger.info('WORKER', 'Branch switch requested', { branch });
    const result = await switchBranch(branch);
    if (result.success) {
      // Schedule worker restart after response is sent
      setTimeout(() => {
        logger.info('WORKER', 'Restarting worker after branch switch');
        process.exit(0); // PM2 will restart the worker
      }, 1000);
    }
    res.json(result);
  });

  /**
   * POST /api/branch/update - Pull latest updates for current branch
   */
  private handleUpdateBranch = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
    logger.info('WORKER', 'Branch update requested');
    const result = await pullUpdates();
    if (result.success) {
      // Schedule worker restart after response is sent
      setTimeout(() => {
        logger.info('WORKER', 'Restarting worker after branch update');
        process.exit(0); // PM2 will restart the worker
      }, 1000);
    }
    res.json(result);
  });

  /**
   * Validate context settings from request body.
   * A key is validated whenever it is present (not undefined); absent keys
   * are skipped. Empty tokens inside comma-separated lists are ignored.
   */
  private validateContextSettings(settings: any): { valid: boolean; error?: string } {
    // Validate boolean string values
    const booleanSettings = [
      'CLAUDE_MEM_CONTEXT_SHOW_READ_TOKENS',
      'CLAUDE_MEM_CONTEXT_SHOW_WORK_TOKENS',
      'CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_AMOUNT',
      'CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_PERCENT',
      'CLAUDE_MEM_CONTEXT_SHOW_LAST_SUMMARY',
      'CLAUDE_MEM_CONTEXT_SHOW_LAST_MESSAGE',
    ];
    for (const key of booleanSettings) {
      if (settings[key] !== undefined && !['true', 'false'].includes(settings[key])) {
        return { valid: false, error: `${key} must be "true" or "false"` };
      }
    }
    // Validate FULL_COUNT (0-20)
    if (settings.CLAUDE_MEM_CONTEXT_FULL_COUNT !== undefined) {
      const count = parseInt(settings.CLAUDE_MEM_CONTEXT_FULL_COUNT, 10);
      if (isNaN(count) || count < 0 || count > 20) {
        return { valid: false, error: 'CLAUDE_MEM_CONTEXT_FULL_COUNT must be between 0 and 20' };
      }
    }
    // Validate SESSION_COUNT (1-50)
    if (settings.CLAUDE_MEM_CONTEXT_SESSION_COUNT !== undefined) {
      const count = parseInt(settings.CLAUDE_MEM_CONTEXT_SESSION_COUNT, 10);
      if (isNaN(count) || count < 1 || count > 50) {
        return { valid: false, error: 'CLAUDE_MEM_CONTEXT_SESSION_COUNT must be between 1 and 50' };
      }
    }
    // Validate FULL_FIELD
    if (settings.CLAUDE_MEM_CONTEXT_FULL_FIELD !== undefined) {
      if (!['narrative', 'facts'].includes(settings.CLAUDE_MEM_CONTEXT_FULL_FIELD)) {
        return { valid: false, error: 'CLAUDE_MEM_CONTEXT_FULL_FIELD must be "narrative" or "facts"' };
      }
    }
    // Validate observation types
    if (settings.CLAUDE_MEM_CONTEXT_OBSERVATION_TYPES !== undefined) {
      const types = settings.CLAUDE_MEM_CONTEXT_OBSERVATION_TYPES.split(',').map((t: string) => t.trim());
      for (const type of types) {
        if (type && !OBSERVATION_TYPES.includes(type as ObservationType)) {
          return { valid: false, error: `Invalid observation type: ${type}. Valid types: ${OBSERVATION_TYPES.join(', ')}` };
        }
      }
    }
    // Validate observation concepts
    if (settings.CLAUDE_MEM_CONTEXT_OBSERVATION_CONCEPTS !== undefined) {
      const concepts = settings.CLAUDE_MEM_CONTEXT_OBSERVATION_CONCEPTS.split(',').map((c: string) => c.trim());
      for (const concept of concepts) {
        if (concept && !OBSERVATION_CONCEPTS.includes(concept as ObservationConcept)) {
          return { valid: false, error: `Invalid observation concept: ${concept}. Valid concepts: ${OBSERVATION_CONCEPTS.join(', ')}` };
        }
      }
    }
    return { valid: true };
  }

  /**
   * Check if MCP search server is enabled (presence of plugin/.mcp.json)
   */
  private isMcpEnabled(): boolean {
    const packageRoot = getPackageRoot();
    const mcpPath = path.join(packageRoot, 'plugin', '.mcp.json');
    return existsSync(mcpPath);
  }

  /**
   * Toggle MCP search server (rename .mcp.json <-> .mcp.json.disabled)
   */
  private toggleMcp(enabled: boolean): void {
    try {
      const packageRoot = getPackageRoot();
      const mcpPath = path.join(packageRoot, 'plugin', '.mcp.json');
      const mcpDisabledPath = path.join(packageRoot, 'plugin', '.mcp.json.disabled');
      if (enabled && existsSync(mcpDisabledPath)) {
        // Enable: rename .mcp.json.disabled -> .mcp.json
        renameSync(mcpDisabledPath, mcpPath);
        logger.info('WORKER', 'MCP search server enabled');
      } else if (!enabled && existsSync(mcpPath)) {
        // Disable: rename .mcp.json -> .mcp.json.disabled
        renameSync(mcpPath, mcpDisabledPath);
        logger.info('WORKER', 'MCP search server disabled');
      } else {
        logger.debug('WORKER', 'MCP toggle no-op (already in desired state)', { enabled });
      }
    } catch (error) {
      logger.failure('WORKER', 'Failed to toggle MCP', { enabled }, error as Error);
      throw error;
    }
  }
}
@@ -0,0 +1,79 @@
/**
* Viewer Routes
*
* Handles health check, viewer UI, and SSE stream endpoints.
* These are used by the web viewer UI at http://localhost:37777
*/
import express, { Request, Response } from 'express';
import path from 'path';
import { readFileSync } from 'fs';
import { getPackageRoot } from '../../../../shared/paths.js';
import { SSEBroadcaster } from '../../SSEBroadcaster.js';
import { DatabaseManager } from '../../DatabaseManager.js';
import { SessionManager } from '../../SessionManager.js';
import { BaseRouteHandler } from '../BaseRouteHandler.js';
export class ViewerRoutes extends BaseRouteHandler {
constructor(
private sseBroadcaster: SSEBroadcaster,
private dbManager: DatabaseManager,
private sessionManager: SessionManager
) {
super();
}
setupRoutes(app: express.Application): void {
app.get('/health', this.handleHealth.bind(this));
app.get('/', this.handleViewerUI.bind(this));
app.get('/stream', this.handleSSEStream.bind(this));
}
/**
* Health check endpoint
*/
private handleHealth = this.wrapHandler((req: Request, res: Response): void => {
res.json({ status: 'ok', timestamp: Date.now() });
});
/**
* Serve viewer UI
*/
private handleViewerUI = this.wrapHandler((req: Request, res: Response): void => {
const packageRoot = getPackageRoot();
const viewerPath = path.join(packageRoot, 'plugin', 'ui', 'viewer.html');
const html = readFileSync(viewerPath, 'utf-8');
res.setHeader('Content-Type', 'text/html');
res.send(html);
});
/**
* SSE stream endpoint
*/
private handleSSEStream = this.wrapHandler((req: Request, res: Response): void => {
// Setup SSE headers
res.setHeader('Content-Type', 'text/event-stream');
res.setHeader('Cache-Control', 'no-cache');
res.setHeader('Connection', 'keep-alive');
// Add client to broadcaster
this.sseBroadcaster.addClient(res);
// Send initial_load event with projects list
const allProjects = this.dbManager.getSessionStore().getAllProjects();
this.sseBroadcaster.broadcast({
type: 'initial_load',
projects: allProjects,
timestamp: Date.now()
});
// Send initial processing status (based on queue depth + active generators)
const isProcessing = this.sessionManager.isAnySessionProcessing();
const queueDepth = this.sessionManager.getTotalActiveWork(); // Includes queued + actively processing
this.sseBroadcaster.broadcast({
type: 'processing_status',
isProcessing,
queueDepth
});
});
}
@@ -0,0 +1,62 @@
/**
* Session Completion Handler
*
* Consolidates session completion logic to eliminate duplication across
* three different completion endpoints (DELETE, POST by DB ID, POST by Claude ID).
*
* All completion flows follow the same pattern:
* 1. Delete session from SessionManager (aborts SDK agent)
* 2. Mark session complete in database
* 3. Broadcast session completed event
*/
import { SessionManager } from '../SessionManager.js';
import { DatabaseManager } from '../DatabaseManager.js';
import { SessionEventBroadcaster } from '../events/SessionEventBroadcaster.js';
export class SessionCompletionHandler {
  constructor(
    private sessionManager: SessionManager,
    private dbManager: DatabaseManager,
    private eventBroadcaster: SessionEventBroadcaster
  ) {}

  /**
   * Complete a session identified by its database id.
   *
   * Runs the canonical three-step completion flow: remove the session from
   * the SessionManager (which aborts its SDK agent), mark it complete in the
   * database, then notify listeners via the event broadcaster.
   */
  async completeByDbId(sessionDbId: number): Promise<void> {
    await this.sessionManager.deleteSession(sessionDbId);
    this.dbManager.markSessionComplete(sessionDbId);
    this.eventBroadcaster.broadcastSessionCompleted(sessionDbId);
  }

  /**
   * Complete a session identified by its Claude session id, resolving it to
   * a database id first and then delegating to completeByDbId.
   *
   * @returns true when an active session was found and completed; false when
   *          there was nothing to clean up (e.g. already completed).
   */
  async completeByClaudeId(claudeSessionId: string): Promise<boolean> {
    const activeSession = this.dbManager
      .getSessionStore()
      .findActiveSDKSession(claudeSessionId);
    if (!activeSession) {
      return false;
    }
    await this.completeByDbId(activeSession.id);
    return true;
  }
}
@@ -0,0 +1,110 @@
/**
* SettingsDefaultsManager
*
* Single source of truth for all default configuration values.
* Provides methods to get defaults with optional environment variable overrides.
*/
import { readFileSync, existsSync } from 'fs';
import { DEFAULT_OBSERVATION_TYPES_STRING, DEFAULT_OBSERVATION_CONCEPTS_STRING } from '../../../constants/observation-metadata.js';
/**
 * Shape of every claude-mem setting. All values are stored as strings —
 * settings.json's `env` block mirrors environment variables — and numeric or
 * boolean settings are parsed on read (see getInt()/getBool() on the manager).
 */
export interface SettingsDefaults {
  CLAUDE_MEM_MODEL: string;
  CLAUDE_MEM_CONTEXT_OBSERVATIONS: string;
  CLAUDE_MEM_WORKER_PORT: string;
  // Token Economics
  CLAUDE_MEM_CONTEXT_SHOW_READ_TOKENS: string;
  CLAUDE_MEM_CONTEXT_SHOW_WORK_TOKENS: string;
  CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_AMOUNT: string;
  CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_PERCENT: string;
  // Observation Filtering (comma-separated lists)
  CLAUDE_MEM_CONTEXT_OBSERVATION_TYPES: string;
  CLAUDE_MEM_CONTEXT_OBSERVATION_CONCEPTS: string;
  // Display Configuration
  CLAUDE_MEM_CONTEXT_FULL_COUNT: string;
  CLAUDE_MEM_CONTEXT_FULL_FIELD: string;
  CLAUDE_MEM_CONTEXT_SESSION_COUNT: string;
  // Feature Toggles ("true"/"false" strings)
  CLAUDE_MEM_CONTEXT_SHOW_LAST_SUMMARY: string;
  CLAUDE_MEM_CONTEXT_SHOW_LAST_MESSAGE: string;
}
export class SettingsDefaultsManager {
  /**
   * Default values for all settings. Everything is a string because settings
   * mirror environment variables; use getInt()/getBool() to parse.
   */
  private static readonly DEFAULTS: SettingsDefaults = {
    CLAUDE_MEM_MODEL: 'claude-haiku-4-5',
    CLAUDE_MEM_CONTEXT_OBSERVATIONS: '50',
    CLAUDE_MEM_WORKER_PORT: '37777',
    // Token Economics
    CLAUDE_MEM_CONTEXT_SHOW_READ_TOKENS: 'true',
    CLAUDE_MEM_CONTEXT_SHOW_WORK_TOKENS: 'true',
    CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_AMOUNT: 'true',
    CLAUDE_MEM_CONTEXT_SHOW_SAVINGS_PERCENT: 'true',
    // Observation Filtering
    CLAUDE_MEM_CONTEXT_OBSERVATION_TYPES: DEFAULT_OBSERVATION_TYPES_STRING,
    CLAUDE_MEM_CONTEXT_OBSERVATION_CONCEPTS: DEFAULT_OBSERVATION_CONCEPTS_STRING,
    // Display Configuration
    CLAUDE_MEM_CONTEXT_FULL_COUNT: '5',
    CLAUDE_MEM_CONTEXT_FULL_FIELD: 'narrative',
    CLAUDE_MEM_CONTEXT_SESSION_COUNT: '10',
    // Feature Toggles
    CLAUDE_MEM_CONTEXT_SHOW_LAST_SUMMARY: 'true',
    CLAUDE_MEM_CONTEXT_SHOW_LAST_MESSAGE: 'false',
  };

  /**
   * Get all defaults as a fresh object (copy, so callers can't mutate DEFAULTS)
   */
  static getAllDefaults(): SettingsDefaults {
    return { ...this.DEFAULTS };
  }

  /**
   * Get a default value with optional environment variable override.
   * Uses || deliberately: an empty-string env var falls back to the default.
   */
  static get(key: keyof SettingsDefaults): string {
    return process.env[key] || this.DEFAULTS[key];
  }

  /**
   * Get an integer default value with optional environment variable override
   */
  static getInt(key: keyof SettingsDefaults): number {
    const value = this.get(key);
    return parseInt(value, 10);
  }

  /**
   * Get a boolean default value with optional environment variable override
   */
  static getBool(key: keyof SettingsDefaults): boolean {
    const value = this.get(key);
    return value === 'true';
  }

  /**
   * Load settings from file with fallback to defaults.
   *
   * Never throws: a missing, unreadable, or corrupt settings file yields the
   * built-in defaults. Callers like getWorkerPort() run inside hooks that
   * must not hard-fail on a bad settings.json.
   */
  static loadFromFile(settingsPath: string): SettingsDefaults {
    // Merge file settings over defaults
    const result: SettingsDefaults = { ...this.DEFAULTS };
    try {
      if (!existsSync(settingsPath)) {
        return result;
      }
      const settingsData = readFileSync(settingsPath, 'utf-8');
      const settings = JSON.parse(settingsData);
      const env = settings.env || {};
      for (const key of Object.keys(this.DEFAULTS) as Array<keyof SettingsDefaults>) {
        if (env[key] !== undefined) {
          result[key] = env[key];
        }
      }
    } catch {
      // Corrupt or unreadable settings file: fall back to defaults rather
      // than crashing every caller.
    }
    return result;
  }
}
@@ -0,0 +1,41 @@
import { SessionStore } from '../../sqlite/SessionStore.js';
import { logger } from '../../../utils/logger.js';
/**
* Validates user prompt privacy for session operations
*
* Centralizes privacy checks to avoid duplicate validation logic across route handlers.
* If user prompt was entirely private (stripped to empty string), we skip processing.
*/
export class PrivacyCheckValidator {
  /**
   * Check whether the user prompt for this (session, prompt) pair is public.
   *
   * A prompt that is missing or whitespace-only is treated as entirely
   * private: the skip is logged at debug level and null is returned so the
   * caller can short-circuit the operation.
   *
   * @param store - SessionStore instance
   * @param claudeSessionId - Claude session ID
   * @param promptNumber - Prompt number within session
   * @param operationType - Operation being validated ('observation' or 'summarize')
   * @param sessionDbId - Database id, used only for log context
   * @param additionalContext - Extra fields merged into the log entry
   * @returns The user prompt text if public, null if private
   */
  static checkUserPromptPrivacy(
    store: SessionStore,
    claudeSessionId: string,
    promptNumber: number,
    operationType: 'observation' | 'summarize',
    sessionDbId: number,
    additionalContext?: Record<string, any>
  ): string | null {
    const promptText = store.getUserPrompt(claudeSessionId, promptNumber);
    // Non-empty after trimming -> public; return it unmodified.
    if (promptText?.trim()) {
      return promptText;
    }
    logger.debug('HOOK', `Skipping ${operationType} - user prompt was entirely private`, {
      sessionId: sessionDbId,
      promptNumber,
      ...additionalContext
    });
    return null;
  }
}
+4 -12
View File
@@ -1,8 +1,8 @@
import path from "path";
import { homedir } from "os";
import { existsSync, readFileSync } from "fs";
import { spawnSync } from "child_process";
import { getPackageRoot } from "./paths.js";
import { SettingsDefaultsManager } from "../services/worker/settings/SettingsDefaultsManager.js";
// Named constants for health checks
const HEALTH_CHECK_TIMEOUT_MS = 100;
@@ -14,17 +14,9 @@ const WORKER_STARTUP_RETRIES = 10;
* Priority: ~/.claude-mem/settings.json > env var > default
*/
export function getWorkerPort(): number {
try {
const settingsPath = path.join(homedir(), '.claude-mem', 'settings.json');
if (existsSync(settingsPath)) {
const settings = JSON.parse(readFileSync(settingsPath, 'utf-8'));
const port = parseInt(settings.env?.CLAUDE_MEM_WORKER_PORT, 10);
if (!isNaN(port)) return port;
}
} catch {
// Fall through to env var or default
}
return parseInt(process.env.CLAUDE_MEM_WORKER_PORT || '37777', 10);
const settingsPath = path.join(homedir(), '.claude-mem', 'settings.json');
const settings = SettingsDefaultsManager.loadFromFile(settingsPath);
return parseInt(settings.CLAUDE_MEM_WORKER_PORT, 10);
}
/**
+138
View File
@@ -0,0 +1,138 @@
/**
* TypeScript types for database query results
* Provides type safety for better-sqlite3 query results
*/
/**
* Schema information from sqlite3 PRAGMA table_info
*/
export interface TableColumnInfo {
  cid: number;               // column position within the table (0-based)
  name: string;              // column name
  type: string;              // declared column type from the schema
  notnull: number;           // 1 if the column has a NOT NULL constraint, else 0
  dflt_value: string | null; // default value expression, or null when none
  pk: number;                // 1-based position within the primary key; 0 if not part of it
}
/**
* Index information from sqlite3 PRAGMA index_list
*/
export interface IndexInfo {
  seq: number;     // rank of the index in PRAGMA index_list output
  name: string;    // index name
  unique: number;  // 1 if the index is UNIQUE, else 0
  origin: string;  // 'c' = CREATE INDEX, 'u' = UNIQUE constraint, 'pk' = PRIMARY KEY
  partial: number; // 1 if this is a partial index, else 0
}
/**
* Table name from sqlite_master
*/
export interface TableNameRow {
  name: string; // table name as stored in sqlite_master
}
/**
* Schema version record
*/
export interface SchemaVersion {
  version: number; // current migration/schema version number
}
/**
* SDK Session database record
*/
export interface SdkSessionRecord {
  id: number;                      // primary key (the "sessionDbId" used by routes)
  claude_session_id: string;       // id of the Claude Code session this record tracks
  sdk_session_id: string | null;   // SDK agent session id; null until the agent starts
  project: string;                 // project name the session belongs to
  user_prompt: string | null;      // initial user prompt, if captured
  started_at: string;              // start time as a formatted string
  started_at_epoch: number;        // start time as an epoch timestamp (units per SessionStore — confirm)
  completed_at: string | null;     // completion time, null while active
  completed_at_epoch: number | null;
  status: 'active' | 'completed' | 'failed';
  worker_port?: number;            // port of the worker that owns this session, when recorded
  prompt_counter?: number;         // running count of prompts seen in this session
}
/**
* Observation database record
*/
export interface ObservationRecord {
  id: number;                // primary key
  sdk_session_id: string;    // owning SDK session
  project: string;           // project the observation belongs to
  text: string | null;       // observation body
  type: 'decision' | 'bugfix' | 'feature' | 'refactor' | 'discovery' | 'change';
  created_at: string;        // creation time as a formatted string
  created_at_epoch: number;  // creation time as an epoch timestamp
  title?: string;            // short display title, when set
  concept?: string;          // observation concept tag (see observation-metadata constants)
  source_files?: string;     // related file paths, when recorded
  prompt_number?: number;    // prompt within the session that produced this
  discovery_tokens?: number; // token count attributed to discovery — confirm semantics in writer
}
/**
 * Session Summary database record.
 */
export interface SessionSummaryRecord {
  /** Primary-key row id. */
  id: number;
  /** SDK session this summary belongs to. */
  sdk_session_id: string;
  /** Project the summary was generated for. */
  project: string;
  /** What the user asked for, or null. */
  request: string | null;
  /** What was investigated, or null. */
  investigated: string | null;
  /** What was learned, or null. */
  learned: string | null;
  /** What was completed, or null. */
  completed: string | null;
  /** Suggested follow-up work, or null. */
  next_steps: string | null;
  /** Creation time as a string timestamp. */
  created_at: string;
  /** Creation time as an epoch number — units not visible here; confirm against writer. */
  created_at_epoch: number;
  /** Prompt number within the session, when recorded. */
  prompt_number?: number;
  /** Token count attributed to discovery, when recorded. */
  discovery_tokens?: number;
}
/**
 * User Prompt database record.
 */
export interface UserPromptRecord {
  /** Primary-key row id. */
  id: number;
  /** Claude session the prompt was entered in. */
  claude_session_id: string;
  /** Sequence number of the prompt within the session. */
  prompt_number: number;
  /** Raw prompt text. */
  prompt_text: string;
  /** Creation time as a string timestamp. */
  created_at: string;
  /** Creation time as an epoch number — units not visible here; confirm against writer. */
  created_at_epoch: number;
}
/**
 * Latest user prompt joined with its session row (combines prompt and session columns).
 */
export interface LatestPromptResult {
  /** Prompt row id. */
  id: number;
  /** Claude session the prompt was entered in. */
  claude_session_id: string;
  /** SDK session id from the joined session row. */
  sdk_session_id: string;
  /** Project from the joined session row. */
  project: string;
  /** Sequence number of the prompt within the session. */
  prompt_number: number;
  /** Raw prompt text. */
  prompt_text: string;
  /** Creation time as an epoch number — units not visible here; confirm against writer. */
  created_at_epoch: number;
}
/**
 * Observation with context (for time-based queries).
 *
 * NOTE(review): field-for-field identical to ObservationRecord except `type`
 * is widened to plain string — consider deriving from ObservationRecord
 * (e.g. Omit<ObservationRecord, 'type'> & { type: string }) to avoid drift.
 */
export interface ObservationWithContext {
  /** Primary-key row id. */
  id: number;
  /** SDK session this observation belongs to. */
  sdk_session_id: string;
  /** Project the observation was captured in. */
  project: string;
  /** Observation body text, or null if absent. */
  text: string | null;
  /** Category of the observation (unconstrained string here, unlike ObservationRecord). */
  type: string;
  /** Creation time as a string timestamp. */
  created_at: string;
  /** Creation time as an epoch number — units not visible here; confirm against writer. */
  created_at_epoch: number;
  /** Short title, when recorded. */
  title?: string;
  /** Concept tag, when recorded. */
  concept?: string;
  /** Related source files — serialized form not visible here; confirm. */
  source_files?: string;
  /** Prompt number within the session, when recorded. */
  prompt_number?: number;
  /** Token count attributed to discovery, when recorded. */
  discovery_tokens?: number;
}
+125
View File
@@ -0,0 +1,125 @@
/**
* Happy Path Test: Context Injection (SessionStart)
*
* Tests that when a session starts, the context hook can retrieve
* formatted context from the worker containing recent observations.
*/
import { describe, it, expect, beforeEach, vi } from 'vitest';
import { sampleObservation, featureObservation } from '../helpers/scenarios.js';
describe('Context Injection (SessionStart)', () => {
  const WORKER_PORT = 37777;
  const PROJECT_NAME = 'claude-mem';

  /** Builds the worker's context-injection URL, with optional extra query params. */
  const contextUrl = (extraParams = '') =>
    `http://127.0.0.1:${WORKER_PORT}/api/context/inject?project=${encodeURIComponent(PROJECT_NAME)}${extraParams}`;

  /** Stubs global.fetch to resolve with a 200 plain-text response body. */
  const mockTextResponse = (text: string) => {
    global.fetch = vi.fn().mockResolvedValue({
      ok: true,
      status: 200,
      text: async () => text
    });
  };

  beforeEach(() => {
    vi.clearAllMocks();
  });

  it('returns formatted context when observations exist', async () => {
    // Happy path: Session starts → Hook calls worker → Worker queries
    // database → Returns formatted context.
    const mockContext = `# [claude-mem] recent context
## Recent Work (2 observations)
### [bugfix] Fixed parser bug
The XML parser was not handling empty tags correctly.
Files: /project/src/parser.ts
### [feature] Added search functionality
Implemented full-text search using FTS5.
Files: /project/src/services/search.ts`;
    mockTextResponse(mockContext);

    // Execute: Call context endpoint (what the hook does)
    const response = await fetch(contextUrl());

    // Verify: Response is successful
    expect(response.ok).toBe(true);
    expect(response.status).toBe(200);

    // Verify: Context contains observations
    const text = await response.text();
    expect(text).toContain('recent context');
    expect(text).toContain('Fixed parser bug');
    expect(text).toContain('Added search functionality');
    expect(text).toContain('bugfix');
    expect(text).toContain('feature');
  });

  it('returns fallback message when worker is down', async () => {
    // Setup: Mock fetch to simulate worker not available
    global.fetch = vi.fn().mockRejectedValue(new Error('ECONNREFUSED'));

    // Execute & Verify: the request must reject with a connection error.
    // (A bare try/catch here would pass vacuously if fetch did NOT throw.)
    await expect(fetch(contextUrl())).rejects.toThrow('ECONNREFUSED');

    // The hook should handle this gracefully and return a fallback message
    // (This would be tested in hook-specific tests, not the worker endpoint tests)
  });

  it('handles empty observations gracefully', async () => {
    // Setup: Mock fetch to simulate no observations available
    const emptyContext = `# [claude-mem] recent context
No observations found for this project.`;
    mockTextResponse(emptyContext);

    // Execute: Call context endpoint
    const response = await fetch(contextUrl());

    // Verify: Returns success with empty message
    expect(response.ok).toBe(true);
    const text = await response.text();
    expect(text).toContain('No observations found');
  });

  it('supports colored output when requested', async () => {
    // Setup: Mock fetch to simulate colored response
    const coloredContext = `# [claude-mem] recent context
## Recent Work (1 observation)
### \x1b[33m[bugfix]\x1b[0m Fixed parser bug
The XML parser was not handling empty tags correctly.`;
    mockTextResponse(coloredContext);

    // Execute: Call context endpoint with colors parameter
    const response = await fetch(contextUrl('&colors=true'));

    // Verify: Response contains ANSI color codes
    expect(response.ok).toBe(true);
    const text = await response.text();
    expect(text).toContain('\x1b['); // ANSI escape code
  });
});
@@ -0,0 +1,283 @@
/**
* Happy Path Test: Observation Capture (PostToolUse)
*
* Tests that tool usage is captured and queued for SDK processing.
* This is the core functionality of claude-mem - turning tool usage
* into compressed observations.
*/
import { describe, it, expect, beforeEach, vi } from 'vitest';
import {
bashCommandScenario,
readFileScenario,
writeFileScenario,
editFileScenario,
grepScenario,
sessionScenario
} from '../helpers/scenarios.js';
describe('Observation Capture (PostToolUse)', () => {
  const WORKER_PORT = 37777;
  const OBSERVATIONS_URL = `http://127.0.0.1:${WORKER_PORT}/api/sessions/observations`;

  /** Stubs global.fetch with a 200 response whose JSON body is `payload`. */
  const mockJsonResponse = (payload: unknown) => {
    global.fetch = vi.fn().mockResolvedValue({
      ok: true,
      status: 200,
      json: async () => payload
    });
  };

  /** POSTs a tool-use scenario to the worker, mirroring what the save hook sends. */
  const postObservation = (
    scenario: { tool_name: string; tool_input: unknown; tool_response: unknown },
    cwd = '/project'
  ) =>
    fetch(OBSERVATIONS_URL, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        claudeSessionId: sessionScenario.claudeSessionId,
        tool_name: scenario.tool_name,
        tool_input: scenario.tool_input,
        tool_response: scenario.tool_response,
        cwd
      })
    });

  beforeEach(() => {
    vi.clearAllMocks();
  });

  it('captures Bash command observation', async () => {
    mockJsonResponse({ status: 'queued' });

    // Execute: Send Bash tool observation
    const response = await postObservation(bashCommandScenario, '/project/claude-mem');

    // Verify: Observation queued successfully
    expect(response.ok).toBe(true);
    const result = await response.json();
    expect(result.status).toBe('queued');

    // Verify: Correct data sent to worker
    const fetchCall = (global.fetch as any).mock.calls[0];
    const requestBody = JSON.parse(fetchCall[1].body);
    expect(requestBody.tool_name).toBe('Bash');
    expect(requestBody.tool_input.command).toBe('git status');
  });

  it('captures Read file observation', async () => {
    mockJsonResponse({ status: 'queued' });

    // Execute: Send Read tool observation
    const response = await postObservation(readFileScenario);

    // Verify: Observation queued successfully
    expect(response.ok).toBe(true);
    const result = await response.json();
    expect(result.status).toBe('queued');

    // Verify: File path captured correctly
    const fetchCall = (global.fetch as any).mock.calls[0];
    const requestBody = JSON.parse(fetchCall[1].body);
    expect(requestBody.tool_name).toBe('Read');
    expect(requestBody.tool_input.file_path).toContain('index.ts');
  });

  it('captures Write file observation', async () => {
    mockJsonResponse({ status: 'queued' });

    // Execute: Send Write tool observation
    const response = await postObservation(writeFileScenario);

    // Verify: Observation queued successfully
    expect(response.ok).toBe(true);
    const result = await response.json();
    expect(result.status).toBe('queued');
  });

  it('captures Edit file observation', async () => {
    mockJsonResponse({ status: 'queued' });

    // Execute: Send Edit tool observation
    const response = await postObservation(editFileScenario);

    // Verify: Observation queued successfully
    expect(response.ok).toBe(true);
    const result = await response.json();
    expect(result.status).toBe('queued');

    // Verify: Edit details captured
    const fetchCall = (global.fetch as any).mock.calls[0];
    const requestBody = JSON.parse(fetchCall[1].body);
    expect(requestBody.tool_name).toBe('Edit');
    expect(requestBody.tool_input.old_string).toBe('const PORT = 3000;');
    expect(requestBody.tool_input.new_string).toBe('const PORT = 8080;');
  });

  it('captures Grep search observation', async () => {
    mockJsonResponse({ status: 'queued' });

    // Execute: Send Grep tool observation
    const response = await postObservation(grepScenario);

    // Verify: Observation queued successfully
    expect(response.ok).toBe(true);
    const result = await response.json();
    expect(result.status).toBe('queued');
  });

  it('handles rapid succession of observations (burst mode)', async () => {
    // Setup: Mock worker to accept all observations, handing out sequential ids
    let observationCount = 0;
    global.fetch = vi.fn().mockImplementation(async () => {
      const currentId = ++observationCount;
      return {
        ok: true,
        status: 200,
        json: async () => ({ status: 'queued', observationId: currentId })
      };
    });

    // Execute: Send 5 observations rapidly (simulates active coding session)
    const observations = [
      bashCommandScenario,
      readFileScenario,
      writeFileScenario,
      editFileScenario,
      grepScenario
    ];
    const responses = await Promise.all(observations.map(obs => postObservation(obs)));

    // Verify: All observations queued successfully
    expect(responses.every(r => r.ok)).toBe(true);
    expect(observationCount).toBe(5);

    // Verify: Each got unique ID
    const results = await Promise.all(responses.map(r => r.json()));
    const ids = results.map(r => r.observationId);
    expect(new Set(ids).size).toBe(5); // All IDs unique
  });

  it('preserves tool metadata in observation', async () => {
    mockJsonResponse({ status: 'queued' });

    const complexTool = {
      tool_name: 'Task',
      tool_input: {
        subagent_type: 'Explore',
        prompt: 'Find authentication code',
        description: 'Search for auth'
      },
      tool_response: {
        result: 'Found auth in /src/auth.ts',
        files_analyzed: ['/src/auth.ts', '/src/login.ts']
      }
    };

    // Execute: Send complex tool observation
    await postObservation(complexTool);

    // Verify: All metadata preserved in request
    const fetchCall = (global.fetch as any).mock.calls[0];
    const requestBody = JSON.parse(fetchCall[1].body);
    expect(requestBody.tool_name).toBe('Task');
    expect(requestBody.tool_input.subagent_type).toBe('Explore');
    expect(requestBody.tool_response.files_analyzed).toHaveLength(2);
  });
});
+328
View File
@@ -0,0 +1,328 @@
/**
* Happy Path Test: Search (MCP Tools)
*
* Tests that the search functionality correctly finds and returns
* stored observations matching user queries.
*/
import { describe, it, expect, beforeEach, vi } from 'vitest';
import { sampleObservation, featureObservation } from '../helpers/scenarios.js';
describe('Search (MCP Tools)', () => {
  const WORKER_PORT = 37777;
  const BASE_URL = `http://127.0.0.1:${WORKER_PORT}`;

  /** Stubs global.fetch with a 200 response whose JSON body is `payload`. */
  const mockJsonResponse = (payload: unknown) => {
    global.fetch = vi.fn().mockResolvedValue({
      ok: true,
      status: 200,
      json: async () => payload
    });
  };

  beforeEach(() => {
    vi.clearAllMocks();
  });

  it('finds observations matching query', async () => {
    // Happy path: User asks "what did we do?" → Search skill queries worker →
    // Worker searches database → Returns relevant observations.
    const searchResults = [
      {
        id: 1,
        title: 'Parser bugfix',
        content: 'Fixed XML parsing issue with self-closing tags',
        type: 'bugfix',
        created_at: '2024-01-01T10:00:00Z'
      },
      {
        id: 2,
        title: 'Parser optimization',
        content: 'Improved parser performance by 50%',
        type: 'feature',
        created_at: '2024-01-02T10:00:00Z'
      }
    ];
    mockJsonResponse({ results: searchResults, total: 2 });

    // Execute: Search for "parser"
    const response = await fetch(`${BASE_URL}/api/search?query=parser&project=claude-mem`);

    // Verify: Found matching observations
    expect(response.ok).toBe(true);
    const data = await response.json();
    expect(data.results).toHaveLength(2);
    expect(data.results[0].title).toContain('Parser');
    expect(data.results[1].title).toContain('Parser');
  });

  it('returns empty results when no matches found', async () => {
    mockJsonResponse({ results: [], total: 0 });

    // Execute: Search for non-existent term
    const response = await fetch(`${BASE_URL}/api/search?query=nonexistent&project=claude-mem`);

    // Verify: Returns empty results gracefully
    expect(response.ok).toBe(true);
    const data = await response.json();
    expect(data.results).toHaveLength(0);
    expect(data.total).toBe(0);
  });

  it('supports filtering by observation type', async () => {
    const bugfixResults = [
      {
        id: 1,
        title: 'Fixed parser bug',
        type: 'bugfix',
        created_at: '2024-01-01T10:00:00Z'
      }
    ];
    mockJsonResponse({ results: bugfixResults, total: 1 });

    // Execute: Search for bugfixes only
    const response = await fetch(`${BASE_URL}/api/search/by-type?type=bugfix&project=claude-mem`);

    // Verify: Returns only bugfixes
    expect(response.ok).toBe(true);
    const data = await response.json();
    expect(data.results).toHaveLength(1);
    expect(data.results[0].type).toBe('bugfix');
  });

  it('supports filtering by concept tags', async () => {
    const conceptResults = [
      {
        id: 1,
        title: 'How parser works',
        concepts: ['how-it-works', 'parser'],
        created_at: '2024-01-01T10:00:00Z'
      }
    ];
    mockJsonResponse({ results: conceptResults, total: 1 });

    // Execute: Search by concept
    const response = await fetch(
      `${BASE_URL}/api/search/by-concept?concept=how-it-works&project=claude-mem`
    );

    // Verify: Returns observations with that concept
    expect(response.ok).toBe(true);
    const data = await response.json();
    expect(data.results).toHaveLength(1);
    expect(data.results[0].concepts).toContain('how-it-works');
  });

  it('supports pagination for large result sets', async () => {
    const page1Results = Array.from({ length: 20 }, (_, i) => ({
      id: i + 1,
      title: `Observation ${i + 1}`,
      created_at: '2024-01-01T10:00:00Z'
    }));
    mockJsonResponse({
      results: page1Results,
      total: 50,
      page: 1,
      limit: 20
    });

    // Execute: Search with pagination
    const response = await fetch(
      `${BASE_URL}/api/search?query=observation&project=claude-mem&limit=20&offset=0`
    );

    // Verify: Returns paginated results
    expect(response.ok).toBe(true);
    const data = await response.json();
    expect(data.results).toHaveLength(20);
    expect(data.total).toBe(50);
    expect(data.page).toBe(1);
  });

  it('supports date range filtering', async () => {
    const recentResults = [
      {
        id: 5,
        title: 'Recent observation',
        created_at: '2024-01-05T10:00:00Z'
      }
    ];
    mockJsonResponse({ results: recentResults, total: 1 });

    // Execute: Search with date range
    const response = await fetch(
      `${BASE_URL}/api/search?query=observation&project=claude-mem&dateStart=2024-01-05&dateEnd=2024-01-06`
    );

    // Verify: Returns observations in date range
    expect(response.ok).toBe(true);
    const data = await response.json();
    expect(data.results).toHaveLength(1);
    expect(data.results[0].created_at).toContain('2024-01-05');
  });

  it('returns observations with file references', async () => {
    const fileResults = [
      {
        id: 1,
        title: 'Updated parser',
        files: ['src/parser.ts', 'tests/parser.test.ts'],
        created_at: '2024-01-01T10:00:00Z'
      }
    ];
    mockJsonResponse({ results: fileResults, total: 1 });

    // Execute: Search
    const response = await fetch(`${BASE_URL}/api/search?query=parser&project=claude-mem`);

    // Verify: File references included
    expect(response.ok).toBe(true);
    const data = await response.json();
    expect(data.results[0].files).toHaveLength(2);
    expect(data.results[0].files).toContain('src/parser.ts');
  });

  it('supports semantic search ranking', async () => {
    const rankedResults = [
      {
        id: 2,
        title: 'Parser bug fix',
        content: 'Fixed critical parser bug',
        relevance: 0.95
      },
      {
        id: 5,
        title: 'Parser documentation',
        content: 'Updated parser docs',
        relevance: 0.72
      },
      {
        id: 10,
        title: 'Mentioned parser briefly',
        content: 'Also updated the parser',
        relevance: 0.45
      }
    ];
    mockJsonResponse({
      results: rankedResults,
      total: 3,
      orderBy: 'relevance'
    });

    // Execute: Search with relevance ordering
    const response = await fetch(
      `${BASE_URL}/api/search?query=parser+bug&project=claude-mem&orderBy=relevance`
    );

    // Verify: Results ordered by relevance
    expect(response.ok).toBe(true);
    const data = await response.json();
    expect(data.results).toHaveLength(3);
    expect(data.results[0].relevance).toBeGreaterThan(data.results[1].relevance);
    expect(data.results[1].relevance).toBeGreaterThan(data.results[2].relevance);
  });

  it('handles special characters in search queries', async () => {
    mockJsonResponse({ results: [], total: 0 });

    // Execute: Search with special characters
    const queries = [
      'function*',
      'variable: string',
      'array[0]',
      'path/to/file',
      'tag<content>',
      'price $99'
    ];
    for (const query of queries) {
      await fetch(`${BASE_URL}/api/search?query=${encodeURIComponent(query)}&project=claude-mem`);
    }

    // Verify: All queries processed without error
    expect(global.fetch).toHaveBeenCalledTimes(queries.length);
  });

  it('supports project-specific search', async () => {
    const projectResults = [
      {
        id: 1,
        title: 'Claude-mem feature',
        project: 'claude-mem',
        created_at: '2024-01-01T10:00:00Z'
      }
    ];
    mockJsonResponse({ results: projectResults, total: 1 });

    // Execute: Search specific project
    const response = await fetch(`${BASE_URL}/api/search?query=feature&project=claude-mem`);

    // Verify: Returns only results from that project
    expect(response.ok).toBe(true);
    const data = await response.json();
    expect(data.results).toHaveLength(1);
    expect(data.results[0].project).toBe('claude-mem');
  });
});
+246
View File
@@ -0,0 +1,246 @@
/**
* Happy Path Test: Session Cleanup (SessionEnd)
*
* Tests that when a session ends, the worker marks it complete
* and performs necessary cleanup operations.
*/
import { describe, it, expect, beforeEach, vi } from 'vitest';
import { sessionScenario } from '../helpers/scenarios.js';
describe('Session Cleanup (SessionEnd)', () => {
  const WORKER_PORT = 37777;
  const COMPLETE_URL = `http://127.0.0.1:${WORKER_PORT}/api/sessions/complete`;

  /** Stubs global.fetch with a JSON response; ok/status are overridable for error cases. */
  const mockJsonResponse = (payload: unknown, { ok = true, status = 200 } = {}) => {
    global.fetch = vi.fn().mockResolvedValue({
      ok,
      status,
      json: async () => payload
    });
  };

  /** POSTs a session-complete payload to the worker (what cleanup-hook does). */
  const completeSession = (body: Record<string, unknown>) =>
    fetch(COMPLETE_URL, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(body)
    });

  beforeEach(() => {
    vi.clearAllMocks();
  });

  it('marks session complete and stops SDK agent', async () => {
    // Happy path: Session ends → Hook notifies worker → Worker marks session
    // complete → SDK agent stopped → Resources cleaned up.
    mockJsonResponse({ status: 'completed' });

    // Execute: Send complete request (what cleanup-hook does)
    const response = await completeSession({
      claudeSessionId: sessionScenario.claudeSessionId,
      reason: 'user_exit'
    });

    // Verify: Session marked complete
    expect(response.ok).toBe(true);
    const result = await response.json();
    expect(result.status).toBe('completed');

    // Verify: Correct data sent to worker
    const fetchCall = (global.fetch as any).mock.calls[0];
    const requestBody = JSON.parse(fetchCall[1].body);
    expect(requestBody.claudeSessionId).toBe(sessionScenario.claudeSessionId);
    expect(requestBody.reason).toBe('user_exit');
  });

  it('handles missing session ID gracefully', async () => {
    mockJsonResponse({ error: 'Missing claudeSessionId' }, { ok: false, status: 400 });

    // Execute: Send complete request without session ID
    const response = await completeSession({ reason: 'user_exit' });

    // Verify: Returns error
    expect(response.ok).toBe(false);
    expect(response.status).toBe(400);
    const error = await response.json();
    expect(error.error).toContain('Missing claudeSessionId');
  });

  it('handles different session end reasons', async () => {
    // Setup: Track all cleanup requests
    const cleanupRequests: any[] = [];
    global.fetch = vi.fn().mockImplementation(async (url, options) => {
      const body = JSON.parse(options.body);
      cleanupRequests.push(body);
      return {
        ok: true,
        status: 200,
        json: async () => ({ status: 'completed' })
      };
    });

    // Test different end reasons
    const reasons = [
      'user_exit', // User explicitly ended session
      'timeout', // Session timed out
      'error', // Error occurred
      'restart', // Session restarting
      'clear' // User cleared context
    ];

    // Execute: Send cleanup for each reason
    for (const reason of reasons) {
      await completeSession({ claudeSessionId: `session-${reason}`, reason });
    }

    // Verify: All cleanup requests processed
    expect(cleanupRequests.length).toBe(5);
    expect(cleanupRequests.map(r => r.reason)).toEqual(reasons);
  });

  it('completes multiple sessions independently', async () => {
    // Setup: Track session completions
    const completedSessions: string[] = [];
    global.fetch = vi.fn().mockImplementation(async (url, options) => {
      const body = JSON.parse(options.body);
      completedSessions.push(body.claudeSessionId);
      return {
        ok: true,
        status: 200,
        json: async () => ({ status: 'completed' })
      };
    });

    const sessions = [
      'session-abc-123',
      'session-def-456',
      'session-ghi-789'
    ];

    // Execute: Complete multiple sessions
    for (const sessionId of sessions) {
      await completeSession({ claudeSessionId: sessionId, reason: 'user_exit' });
    }

    // Verify: All sessions completed
    expect(completedSessions).toEqual(sessions);
  });

  it('handles cleanup when session not found', async () => {
    mockJsonResponse({ error: 'Session not found' }, { ok: false, status: 404 });

    // Execute: Try to complete non-existent session
    const response = await completeSession({
      claudeSessionId: 'non-existent-session',
      reason: 'user_exit'
    });

    // Verify: Returns 404 (graceful handling)
    expect(response.ok).toBe(false);
    expect(response.status).toBe(404);
  });

  it('supports optional metadata in cleanup request', async () => {
    mockJsonResponse({ status: 'completed' });

    // Execute: Send cleanup with additional metadata
    await completeSession({
      claudeSessionId: sessionScenario.claudeSessionId,
      reason: 'user_exit',
      duration_seconds: 1800,
      observations_count: 25,
      project: 'claude-mem'
    });

    // Verify: Metadata included in request
    const fetchCall = (global.fetch as any).mock.calls[0];
    const requestBody = JSON.parse(fetchCall[1].body);
    expect(requestBody.duration_seconds).toBe(1800);
    expect(requestBody.observations_count).toBe(25);
    expect(requestBody.project).toBe('claude-mem');
  });

  it('handles worker being down during cleanup', async () => {
    // Setup: Mock worker unreachable
    global.fetch = vi.fn().mockRejectedValue(new Error('ECONNREFUSED'));

    // Execute & Verify: the request must reject with a connection error
    // (idiomatic replacement for the manual expect(true).toBe(false) fail-marker).
    await expect(
      completeSession({
        claudeSessionId: sessionScenario.claudeSessionId,
        reason: 'user_exit'
      })
    ).rejects.toThrow('ECONNREFUSED');

    // The hook should log this but not fail the session end
    // (This graceful degradation would be tested in hook-specific tests)
  });
});
+181
View File
@@ -0,0 +1,181 @@
/**
* Happy Path Test: Session Initialization
*
* Tests that when a user's first tool use occurs, the session is
* created in the database and observations can be queued.
*/
import { describe, it, expect, beforeEach, vi } from 'vitest';
import { bashCommandScenario, sessionScenario } from '../helpers/scenarios.js';
describe('Session Initialization (UserPromptSubmit)', () => {
  const WORKER_PORT = 37777;
  const OBSERVATIONS_URL = `http://127.0.0.1:${WORKER_PORT}/api/sessions/observations`;

  /** POSTs an observation payload to the worker (what save-hook does). */
  const postObservation = (body: Record<string, unknown>) =>
    fetch(OBSERVATIONS_URL, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(body)
    });

  beforeEach(() => {
    vi.clearAllMocks();
  });

  it('creates session when first observation is sent', async () => {
    // Happy path: User types first prompt → Tool runs → Hook sends observation →
    // Worker creates session → Observation queued for SDK processing.
    global.fetch = vi.fn().mockResolvedValue({
      ok: true,
      status: 200,
      json: async () => ({ status: 'queued', sessionId: 1 })
    });

    // Execute: Send first observation (what save-hook does)
    const response = await postObservation({
      claudeSessionId: sessionScenario.claudeSessionId,
      tool_name: bashCommandScenario.tool_name,
      tool_input: bashCommandScenario.tool_input,
      tool_response: bashCommandScenario.tool_response,
      cwd: '/project/claude-mem'
    });

    // Verify: Session created and observation queued
    expect(response.ok).toBe(true);
    const result = await response.json();
    expect(result.status).toBe('queued');
    expect(result.sessionId).toBeDefined();

    // Verify: fetch was called with correct endpoint and data
    expect(global.fetch).toHaveBeenCalledWith(
      OBSERVATIONS_URL,
      expect.objectContaining({
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: expect.stringContaining(sessionScenario.claudeSessionId)
      })
    );
  });

  it('handles missing claudeSessionId gracefully', async () => {
    // Setup: Mock error response for missing session ID
    global.fetch = vi.fn().mockResolvedValue({
      ok: false,
      status: 400,
      json: async () => ({ error: 'Missing claudeSessionId' })
    });

    // Execute: Send observation without session ID
    const response = await postObservation({
      tool_name: 'Bash',
      tool_input: { command: 'ls' },
      tool_response: { stdout: 'file.txt' }
    });

    // Verify: Returns 400 error
    expect(response.ok).toBe(false);
    expect(response.status).toBe(400);
    const error = await response.json();
    expect(error.error).toContain('Missing claudeSessionId');
  });

  it('queues multiple observations for the same session', async () => {
    // Setup: Each queued observation gets a sequential id
    let callCount = 0;
    global.fetch = vi.fn().mockImplementation(async () => {
      const currentId = ++callCount;
      return {
        ok: true,
        status: 200,
        json: async () => ({ status: 'queued', observationId: currentId })
      };
    });

    const sessionId = sessionScenario.claudeSessionId;

    // Execute: Send multiple observations for the same session
    const obs1 = await postObservation({
      claudeSessionId: sessionId,
      tool_name: 'Read',
      tool_input: { file_path: '/test.ts' },
      tool_response: { content: 'code...' }
    });
    const obs2 = await postObservation({
      claudeSessionId: sessionId,
      tool_name: 'Edit',
      tool_input: { file_path: '/test.ts', old_string: 'old', new_string: 'new' },
      tool_response: { success: true }
    });

    // Verify: Both observations were queued successfully
    expect(obs1.ok).toBe(true);
    expect(obs2.ok).toBe(true);
    const result1 = await obs1.json();
    const result2 = await obs2.json();
    expect(result1.status).toBe('queued');
    expect(result2.status).toBe('queued');
    expect(result1.observationId).toBe(1);
    expect(result2.observationId).toBe(2);
  });

  it('includes project context from cwd', async () => {
    // Setup: Mock successful response
    global.fetch = vi.fn().mockResolvedValue({
      ok: true,
      status: 200,
      json: async () => ({ status: 'queued' })
    });

    const projectPath = '/Users/alice/projects/my-app';

    // Execute: Send observation with cwd
    await postObservation({
      claudeSessionId: sessionScenario.claudeSessionId,
      tool_name: 'Bash',
      tool_input: { command: 'npm test' },
      tool_response: { stdout: 'PASS', exit_code: 0 },
      cwd: projectPath
    });

    // Verify: Request includes cwd
    expect(global.fetch).toHaveBeenCalledWith(
      expect.any(String),
      expect.objectContaining({
        body: expect.stringContaining(projectPath)
      })
    );
  });
});
+247
View File
@@ -0,0 +1,247 @@
/**
* Happy Path Test: Session Summary (Stop)
*
* Tests that when a user pauses or stops a session, the SDK
* generates a summary from the conversation context.
*/
import { describe, it, expect, beforeEach, vi } from 'vitest';
import { sessionSummaryScenario, sessionScenario } from '../helpers/scenarios.js';
describe('Session Summary (Stop)', () => {
const WORKER_PORT = 37777;
beforeEach(() => {
vi.clearAllMocks();
});
it('generates summary from last messages', async () => {
  // Happy path: user stops/pauses -> hook posts the final exchange ->
  // worker queues it for the SDK -> SDK generates and saves the summary.
  global.fetch = vi.fn().mockResolvedValue({
    ok: true,
    status: 200,
    json: async () => ({ status: 'queued' })
  });

  // This is exactly what summary-hook sends when the session stops.
  const payload = {
    claudeSessionId: sessionSummaryScenario.claudeSessionId,
    last_user_message: sessionSummaryScenario.last_user_message,
    last_assistant_message: sessionSummaryScenario.last_assistant_message,
    cwd: '/project/claude-mem'
  };
  const res = await fetch(
    `http://127.0.0.1:${WORKER_PORT}/api/sessions/summarize`,
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(payload)
    }
  );

  // The worker acknowledges by queueing the summary job.
  expect(res.ok).toBe(true);
  const ack = await res.json();
  expect(ack.status).toBe('queued');

  // The conversation tail must reach the worker unchanged.
  const [, sentOptions] = (global.fetch as any).mock.calls[0];
  const sentBody = JSON.parse(sentOptions.body);
  expect(sentBody.last_user_message).toBe('Thanks, that fixed it!');
  expect(sentBody.last_assistant_message).toContain('parser');
});
it('handles missing session ID gracefully', async () => {
  // The worker rejects summarize requests that omit claudeSessionId.
  global.fetch = vi.fn().mockResolvedValue({
    ok: false,
    status: 400,
    json: async () => ({ error: 'Missing claudeSessionId' })
  });

  // Post a summarize request with no session id at all.
  const res = await fetch(
    `http://127.0.0.1:${WORKER_PORT}/api/sessions/summarize`,
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        last_user_message: 'Some message',
        last_assistant_message: 'Some response'
      })
    }
  );

  // Expect a 400 carrying a descriptive error payload.
  expect(res.ok).toBe(false);
  expect(res.status).toBe(400);
  const body = await res.json();
  expect(body.error).toContain('Missing claudeSessionId');
});
it('generates summary for different conversation types', async () => {
  // Record every summarize payload the worker receives.
  const received: any[] = [];
  global.fetch = vi.fn().mockImplementation(async (url, options) => {
    const payload = JSON.parse(options.body);
    received.push(payload);
    return {
      ok: true,
      status: 200,
      json: async () => ({ status: 'queued', summaryId: received.length })
    };
  });

  // Four representative ways a session can end.
  const endings = [
    {
      type: 'bug_fix',
      user: 'Thanks for fixing the parser bug!',
      assistant: 'I fixed the XML parser to handle self-closing tags in src/parser.ts:42.'
    },
    {
      type: 'feature_addition',
      user: 'Perfect! The search feature works great.',
      assistant: 'I added FTS5 full-text search in src/services/search.ts.'
    },
    {
      type: 'exploration',
      user: 'That helps me understand the codebase better.',
      assistant: 'The authentication flow uses JWT tokens stored in localStorage.'
    },
    {
      type: 'refactoring',
      user: 'Much cleaner now!',
      assistant: 'I refactored the duplicate code into a shared utility function in src/utils/helpers.ts.'
    }
  ];

  // Each ending is posted as its own summarize request.
  for (const ending of endings) {
    await fetch(
      `http://127.0.0.1:${WORKER_PORT}/api/sessions/summarize`,
      {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          claudeSessionId: `session-${ending.type}`,
          last_user_message: ending.user,
          last_assistant_message: ending.assistant,
          cwd: '/project'
        })
      }
    );
  }

  // Every conversation type was queued with its own message intact.
  expect(received.length).toBe(4);
  expect(received[0].last_user_message).toContain('parser bug');
  expect(received[1].last_user_message).toContain('search');
  expect(received[2].last_user_message).toContain('understand');
  expect(received[3].last_user_message).toContain('cleaner');
});
it('preserves long conversation context', async () => {
  // The worker simply queues whatever it is given.
  global.fetch = vi.fn().mockResolvedValue({
    ok: true,
    status: 200,
    json: async () => ({ status: 'queued' })
  });

  // A realistic multi-step assistant reply; it must reach the worker untruncated.
  const longAssistantMessage = `I've fixed the bug in the parser. Here's what I did:
1. Added null check for empty tags in src/parser.ts:42
2. Updated the regex pattern to handle self-closing tags
3. Added unit tests to verify the fix works
4. Ran the test suite and confirmed all tests pass
The issue was that the parser wasn't handling XML tags like <tag/> correctly.
It was only expecting <tag></tag> format. Now it handles both formats.`;

  const res = await fetch(
    `http://127.0.0.1:${WORKER_PORT}/api/sessions/summarize`,
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        claudeSessionId: sessionScenario.claudeSessionId,
        last_user_message: 'Thanks for the detailed explanation!',
        last_assistant_message: longAssistantMessage,
        cwd: '/project'
      })
    }
  );

  expect(res.ok).toBe(true);

  // Inspect the exact payload that went over the wire.
  const [, sentOptions] = (global.fetch as any).mock.calls[0];
  const sentBody = JSON.parse(sentOptions.body);
  expect(sentBody.last_assistant_message.length).toBeGreaterThan(200);
  expect(sentBody.last_assistant_message).toContain('parser.ts:42');
  expect(sentBody.last_assistant_message).toContain('self-closing tags');
});
it('handles empty or minimal messages gracefully', async () => {
  global.fetch = vi.fn().mockResolvedValue({
    ok: true,
    status: 200,
    json: async () => ({ status: 'queued' })
  });

  // Even a terse "Thanks!" / "Done." exchange should be accepted and queued.
  const res = await fetch(
    `http://127.0.0.1:${WORKER_PORT}/api/sessions/summarize`,
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        claudeSessionId: sessionScenario.claudeSessionId,
        last_user_message: 'Thanks!',
        last_assistant_message: 'Done.',
        cwd: '/project'
      })
    }
  );

  expect(res.ok).toBe(true);
  const ack = await res.json();
  expect(ack.status).toBe('queued');
});
it('includes project context from cwd', async () => {
  global.fetch = vi.fn().mockResolvedValue({
    ok: true,
    status: 200,
    json: async () => ({ status: 'queued' })
  });
  const projectPath = '/Users/alice/projects/my-app';

  // The hook forwards its working directory so the worker can attribute
  // the summary to the right project.
  await fetch(
    `http://127.0.0.1:${WORKER_PORT}/api/sessions/summarize`,
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        claudeSessionId: sessionScenario.claudeSessionId,
        last_user_message: 'Great!',
        last_assistant_message: 'Fixed the bug.',
        cwd: projectPath
      })
    }
  );

  // The outgoing request body carries the project path verbatim.
  const [, sentOptions] = (global.fetch as any).mock.calls[0];
  const sentBody = JSON.parse(sentOptions.body);
  expect(sentBody.cwd).toBe(projectPath);
});
});
+82
View File
@@ -0,0 +1,82 @@
/**
* Reusable mock factories for testing dependencies.
*/
import { vi } from 'vitest';
/**
 * Build a fetch mock that resolves with an HTTP 200 response whose
 * json()/text() yield the supplied payload (defaults to { success: true }).
 */
export const mockFetchSuccess = (data: any = { success: true }) => {
  const okResponse = {
    ok: true,
    status: 200,
    json: async () => data,
    text: async () => JSON.stringify(data)
  };
  return vi.fn().mockResolvedValue(okResponse);
};
/**
 * Build a fetch mock that rejects outright, simulating the worker
 * process being down (connection refused).
 */
export const mockFetchWorkerDown = () => {
  const connectionError = new Error('ECONNREFUSED');
  return vi.fn().mockRejectedValue(connectionError);
};
/**
 * Build a fetch mock that resolves with an HTTP 500 response,
 * simulating a worker-side failure.
 */
export const mockFetchServerError = () => {
  const errorResponse = {
    ok: false,
    status: 500,
    json: async () => ({ error: 'Internal Server Error' }),
    text: async () => 'Internal Server Error'
  };
  return vi.fn().mockResolvedValue(errorResponse);
};
/**
 * Mock database operations
 *
 * Stubs for the worker's database layer. ID-returning methods default to
 * returning 1, list-returning methods default to empty arrays, and the
 * rest are bare vi.fn() stubs to be configured per-test.
 */
export const mockDb = {
  createSDKSession: vi.fn().mockReturnValue(1),   // id of the created session row
  addObservation: vi.fn().mockReturnValue(1),     // id of the inserted observation
  getObservationById: vi.fn(),
  getObservations: vi.fn().mockReturnValue([]),
  searchObservations: vi.fn().mockReturnValue([]),
  markSessionCompleted: vi.fn(),
  getSession: vi.fn(),
  getSessions: vi.fn().mockReturnValue([]),
};
/**
 * Mock SDK agent
 *
 * Stubs for the SDK agent's lifecycle and processing entry points.
 * All methods are unconfigured vi.fn() stubs; set behavior per-test.
 */
export const mockSdkAgent = {
  startSession: vi.fn(),
  stopSession: vi.fn(),
  processObservation: vi.fn(),
  generateSummary: vi.fn(),
};
/**
 * Mock session manager
 *
 * Stubs for the worker's session manager (queueing and session CRUD).
 * All methods are unconfigured vi.fn() stubs; set behavior per-test.
 */
export const mockSessionManager = {
  queueObservation: vi.fn(),
  queueSummarize: vi.fn(),
  getSession: vi.fn(),
  createSession: vi.fn(),
  completeSession: vi.fn(),
};
/**
 * Helper to reset all mocks
 *
 * Clears recorded calls/results on every mock in this module.
 * vi.clearAllMocks() invokes mockClear() on every mock created via
 * vi.fn() — which covers mockDb, mockSdkAgent, and mockSessionManager —
 * so the previous per-object mockClear() loops were redundant.
 * Note this clears call history only; configured return values
 * (mockReturnValue) are preserved.
 */
export const resetAllMocks = () => {
  vi.clearAllMocks();
};
+107
View File
@@ -0,0 +1,107 @@
/**
* Real-world test scenarios extracted from actual claude-mem usage.
* These represent typical tool usage patterns that generate observations.
*/
// A real Bash command observation: a clean `git status` run with its
// captured stdout and a zero exit code.
export const bashCommandScenario = {
  tool_name: 'Bash',
  tool_input: {
    command: 'git status',
    description: 'Check git status'
  },
  tool_response: {
    stdout: 'On branch main\nnothing to commit, working tree clean',
    exit_code: 0
  }
};
// A real Read file observation: fetching a source file's contents.
export const readFileScenario = {
  tool_name: 'Read',
  tool_input: {
    file_path: '/project/src/index.ts'
  },
  tool_response: {
    content: 'export function main() { console.log("Hello"); }'
  }
};
// A real Write file observation: creating a new config file.
export const writeFileScenario = {
  tool_name: 'Write',
  tool_input: {
    file_path: '/project/src/config.ts',
    content: 'export const API_KEY = "test";'
  },
  tool_response: {
    success: true
  }
};
// A real Edit file observation: an exact old_string -> new_string replacement.
export const editFileScenario = {
  tool_name: 'Edit',
  tool_input: {
    file_path: '/project/src/app.ts',
    old_string: 'const PORT = 3000;',
    new_string: 'const PORT = 8080;'
  },
  tool_response: {
    success: true
  }
};
// A real Grep search observation: a regex search returning two matches.
export const grepScenario = {
  tool_name: 'Grep',
  tool_input: {
    pattern: 'function.*main',
    path: '/project/src'
  },
  tool_response: {
    matches: [
      'src/index.ts:10:export function main() {',
      'src/cli.ts:5:function mainCli() {'
    ]
  }
};
// A real session with prompts. Its claudeSessionId deliberately matches
// sessionSummaryScenario below so the two can be combined in lifecycle tests.
export const sessionScenario = {
  claudeSessionId: 'abc-123-def-456',
  project: 'claude-mem',
  userPrompt: 'Help me fix the bug in the parser'
};
// Another session scenario, for a different project/session id.
export const sessionWithBuildScenario = {
  claudeSessionId: 'xyz-789-ghi-012',
  project: 'my-app',
  userPrompt: 'Run the build and fix any type errors'
};
// Test observation data: a bugfix-type observation as the SDK would store it.
export const sampleObservation = {
  title: 'Fixed parser bug',
  type: 'bugfix' as const,
  content: 'The XML parser was not handling empty tags correctly. Added check for self-closing tags.',
  files: ['/project/src/parser.ts'],
  concepts: ['bugfix', 'parser', 'xml']
};
// Another observation: a feature-type observation with files and concepts.
export const featureObservation = {
  title: 'Added search functionality',
  type: 'feature' as const,
  content: 'Implemented full-text search using FTS5 for observations and sessions.',
  files: ['/project/src/services/search.ts'],
  concepts: ['feature', 'search', 'fts5']
};
// Session summary scenario: the final user/assistant exchange that the
// summary hook forwards on Stop (same claudeSessionId as sessionScenario).
export const sessionSummaryScenario = {
  claudeSessionId: 'abc-123-def-456',
  last_user_message: 'Thanks, that fixed it!',
  last_assistant_message: 'The bug was in the parser. I added a check for self-closing tags in src/parser.ts:42.'
};
+352
View File
@@ -0,0 +1,352 @@
/**
* Integration Test: Full Observation Lifecycle
*
* Tests the complete flow from tool usage to observation storage
* and retrieval through search. This validates that all components
* work together correctly.
*/
import { describe, it, expect, beforeEach, vi } from 'vitest';
import {
bashCommandScenario,
sessionScenario,
sampleObservation
} from '../helpers/scenarios.js';
describe('Full Observation Lifecycle', () => {
const WORKER_PORT = 37777;
let sessionId: string;
beforeEach(() => {
vi.clearAllMocks();
sessionId = sessionScenario.claudeSessionId;
});
it('observation flows from hook to database to search', async () => {
  /**
   * This integration test simulates the complete happy path:
   *
   * 1. Session starts → Context injected
   * 2. User types prompt → First tool runs
   * 3. Tool result captured → Observation queued
   * 4. SDK processes → Observation saved
   * 5. Search finds observation
   * 6. Session ends → Cleanup
   *
   * NOTE(review): global.fetch is re-mocked before every step, so the
   * statement order below is load-bearing — each mockResolvedValueOnce
   * belongs to exactly the next fetch call.
   */
  // === Step 1: Context Injection (SessionStart) ===
  global.fetch = vi.fn().mockResolvedValueOnce({
    ok: true,
    status: 200,
    text: async () => '# [claude-mem] recent context\n\nNo observations yet.'
  });
  const contextResponse = await fetch(
    `http://127.0.0.1:${WORKER_PORT}/api/context/inject?project=claude-mem`
  );
  expect(contextResponse.ok).toBe(true);
  const contextText = await contextResponse.text();
  expect(contextText).toContain('recent context');
  // === Step 2 & 3: Tool runs, Observation captured (PostToolUse) ===
  global.fetch = vi.fn().mockResolvedValueOnce({
    ok: true,
    status: 200,
    json: async () => ({ status: 'queued', observationId: 1 })
  });
  const observationResponse = await fetch(
    `http://127.0.0.1:${WORKER_PORT}/api/sessions/observations`,
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        claudeSessionId: sessionId,
        tool_name: bashCommandScenario.tool_name,
        tool_input: bashCommandScenario.tool_input,
        tool_response: bashCommandScenario.tool_response,
        cwd: '/project/claude-mem'
      })
    }
  );
  expect(observationResponse.ok).toBe(true);
  const obsResult = await observationResponse.json();
  expect(obsResult.status).toBe('queued');
  // === Step 4: Simulate SDK processing and saving observation ===
  // In a real flow, the SDK would process the tool data and generate an
  // observation. For this test, we simulate the observation being saved
  // to the database (the search mock below returns it as if persisted).
  // === Step 5: Search finds the observation ===
  global.fetch = vi.fn().mockResolvedValueOnce({
    ok: true,
    status: 200,
    json: async () => ({
      results: [
        {
          id: 1,
          title: 'Git status check',
          content: 'Checked repository status, working tree clean',
          type: 'discovery',
          files: [],
          created_at: new Date().toISOString()
        }
      ],
      total: 1
    })
  });
  const searchResponse = await fetch(
    `http://127.0.0.1:${WORKER_PORT}/api/search?query=git+status&project=claude-mem`
  );
  expect(searchResponse.ok).toBe(true);
  const searchResults = await searchResponse.json();
  expect(searchResults.results).toHaveLength(1);
  expect(searchResults.results[0].title).toContain('Git');
  // === Step 6: Session summary (Stop) ===
  global.fetch = vi.fn().mockResolvedValueOnce({
    ok: true,
    status: 200,
    json: async () => ({ status: 'queued' })
  });
  const summaryResponse = await fetch(
    `http://127.0.0.1:${WORKER_PORT}/api/sessions/summarize`,
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        claudeSessionId: sessionId,
        last_user_message: 'Thanks!',
        last_assistant_message: 'Checked git status successfully.',
        cwd: '/project/claude-mem'
      })
    }
  );
  expect(summaryResponse.ok).toBe(true);
  // === Step 7: Session cleanup (SessionEnd) ===
  global.fetch = vi.fn().mockResolvedValueOnce({
    ok: true,
    status: 200,
    json: async () => ({ status: 'completed' })
  });
  const cleanupResponse = await fetch(
    `http://127.0.0.1:${WORKER_PORT}/api/sessions/complete`,
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        claudeSessionId: sessionId,
        reason: 'user_exit'
      })
    }
  );
  expect(cleanupResponse.ok).toBe(true);
  // Verify: All steps completed successfully
  // NOTE(review): this final assertion only sees the *last* mock (step 7's)
  // because global.fetch was replaced at every step; the per-step expect()
  // calls above are the real checks.
  expect(global.fetch).toHaveBeenCalled();
});
it('handles multiple observations in a single session', async () => {
  /**
   * Tests a more realistic session with multiple tool uses
   * and observations being generated.
   */
  // Every observation POST body recorded for this session.
  const recorded: any[] = [];
  let nextObservationId = 0;

  // Route mock: observation POSTs are queued and recorded; search GETs echo
  // one result per recorded observation; anything else is a 200 no-op.
  global.fetch = vi.fn().mockImplementation(async (url: string, options?: any) => {
    if (url.includes('/api/sessions/observations') && options?.method === 'POST') {
      nextObservationId += 1;
      recorded.push(JSON.parse(options.body));
      return {
        ok: true,
        status: 200,
        json: async () => ({ status: 'queued', observationId: nextObservationId })
      };
    }
    if (url.includes('/api/search')) {
      return {
        ok: true,
        status: 200,
        json: async () => ({
          results: recorded.map((obs, i) => ({
            id: i + 1,
            title: `Observation ${i + 1}`,
            content: `Tool: ${obs.tool_name}`,
            type: 'discovery',
            created_at: new Date().toISOString()
          })),
          total: recorded.length
        })
      };
    }
    return { ok: true, status: 200, json: async () => ({}) };
  });

  // Five distinct tool uses within one session.
  const toolUses = [
    { name: 'Bash', input: { command: 'npm test' } },
    { name: 'Read', input: { file_path: '/src/index.ts' } },
    { name: 'Edit', input: { file_path: '/src/index.ts', old_string: 'old', new_string: 'new' } },
    { name: 'Grep', input: { pattern: 'function', path: '/src' } },
    { name: 'Write', input: { file_path: '/src/new.ts', content: 'code' } }
  ];

  // Post one observation per tool use, asserting each is accepted.
  for (const toolUse of toolUses) {
    const res = await fetch(
      `http://127.0.0.1:${WORKER_PORT}/api/sessions/observations`,
      {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          claudeSessionId: sessionId,
          tool_name: toolUse.name,
          tool_input: toolUse.input,
          tool_response: { success: true },
          cwd: '/project'
        })
      }
    );
    expect(res.ok).toBe(true);
  }

  // All five were queued, in order.
  expect(recorded).toHaveLength(5);
  expect(recorded.map(o => o.tool_name)).toEqual(['Bash', 'Read', 'Edit', 'Grep', 'Write']);

  // And search surfaces every one of them.
  const searchRes = await fetch(
    `http://127.0.0.1:${WORKER_PORT}/api/search?query=observation&project=test-project`
  );
  const found = await searchRes.json();
  expect(found.results).toHaveLength(5);
});
it('preserves context across session lifecycle', async () => {
  /**
   * Tests that observations from one session can be found
   * when starting a new session in the same project.
   */
  // Session 1: Create some observations
  // Route mock: observation POSTs are acknowledged as queued; context-inject
  // GETs return a canned context block "remembered" from session 1.
  global.fetch = vi.fn().mockImplementation(async (url: string, options?: any) => {
    if (url.includes('/api/sessions/observations')) {
      return {
        ok: true,
        status: 200,
        json: async () => ({ status: 'queued', observationId: 1 })
      };
    }
    if (url.includes('/api/context/inject')) {
      // The template literal's continuation lines are flush-left on purpose:
      // their exact text is the context markdown the hook would emit.
      return {
        ok: true,
        status: 200,
        text: async () => `# [test-project] recent context
## Recent Work (1 observation)
### [bugfix] Fixed parser bug
The XML parser now handles self-closing tags correctly.
Files: /src/parser.ts`
      };
    }
    return { ok: true, status: 200, json: async () => ({}) };
  });
  // Session 1: Add observation
  await fetch(
    `http://127.0.0.1:${WORKER_PORT}/api/sessions/observations`,
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        claudeSessionId: 'session-1',
        tool_name: 'Edit',
        tool_input: { file_path: '/src/parser.ts' },
        tool_response: { success: true },
        cwd: '/project/test-project'
      })
    }
  );
  // Session 2: Start new session, should see context from session 1
  const contextResponse = await fetch(
    `http://127.0.0.1:${WORKER_PORT}/api/context/inject?project=test-project`
  );
  const context = await contextResponse.text();
  // Verify: Context includes previous session's work
  expect(context).toContain('Fixed parser bug');
  expect(context).toContain('parser.ts');
});
it('handles error recovery gracefully', async () => {
  /**
   * Tests that the system continues to work even if some
   * operations fail along the way.
   */
  // The mock fails exactly once (transient 500), then succeeds thereafter.
  let attempts = 0;
  global.fetch = vi.fn().mockImplementation(async () => {
    attempts += 1;
    if (attempts === 1) {
      return {
        ok: false,
        status: 500,
        json: async () => ({ error: 'Temporary error' })
      };
    }
    return {
      ok: true,
      status: 200,
      json: async () => ({ status: 'queued' })
    };
  });

  // Both attempts send the identical observation payload.
  const sendObservation = () =>
    fetch(
      `http://127.0.0.1:${WORKER_PORT}/api/sessions/observations`,
      {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          claudeSessionId: sessionId,
          tool_name: 'Bash',
          tool_input: { command: 'test' },
          tool_response: {},
          cwd: '/project'
        })
      }
    );

  // First attempt hits the transient error...
  const firstAttempt = await sendObservation();
  expect(firstAttempt.ok).toBe(false);

  // ...and an identical retry goes through.
  const secondAttempt = await sendObservation();
  expect(secondAttempt.ok).toBe(true);
});
});
+15
View File
@@ -0,0 +1,15 @@
import { defineConfig } from 'vitest/config';

// These suites are written for node's built-in test runner (node:test)
// and cannot be executed by vitest, so they are excluded here.
const NODE_TEST_RUNNER_FILES = [
  'tests/strip-memory-tags.test.ts',
  'tests/user-prompt-tag-stripping.test.ts'
];

export default defineConfig({
  test: {
    globals: true,
    include: ['tests/**/*.test.ts'],
    exclude: ['**/node_modules/**', '**/dist/**', ...NODE_TEST_RUNNER_FILES],
  },
});