Refactor hooks and worker service for improved error handling and initialization

- Removed try-catch blocks in new-hook, save-hook, and summary-hook for cleaner flow.
- Enhanced error handling in save-hook and summary-hook to throw errors instead of logging and returning.
- Introduced ensureWorkerRunning utility to manage worker service lifecycle and health checks.
- Replaced dynamic port allocation with a fixed port (37777) for the worker service.
- Simplified path management and removed unused port allocator utility.
- Added database schema initialization for fresh installations and improved migration handling.
This commit is contained in:
Alex Newman
2025-10-19 00:57:49 -04:00
parent cf9d1d4a0b
commit 7ff611feb5
23 changed files with 832 additions and 434 deletions
+2 -1
View File
@@ -6,4 +6,5 @@ node_modules/
*.tmp
*.temp
.claude/
plugin/data/
plugin/data/
plugin/data.backup/
+11 -11
View File
@@ -344,8 +344,8 @@ Context is stored in SQLite database. Location varies by version:
Query the database directly:
```bash
# v4.0+ - find your plugin directory first
sqlite3 path/to/plugin/data/claude-mem.db
# v4.0+ uses ~/.claude-mem directory
sqlite3 ~/.claude-mem/claude-mem.db
# View recent sessions
SELECT session_id, project, created_at, status FROM sdk_sessions ORDER BY created_at DESC LIMIT 10;
@@ -660,14 +660,14 @@ cat plugin/hooks/hooks.json | jq .
**Problem**: Context not appearing
```bash
# Check if summaries exist (adjust path for v4.0+)
sqlite3 path/to/plugin/data/claude-mem.db "SELECT COUNT(*) FROM session_summaries;"
# Check if summaries exist
sqlite3 ~/.claude-mem/claude-mem.db "SELECT COUNT(*) FROM session_summaries;"
# View recent sessions
npm run test:context:verbose
# Check database integrity
sqlite3 path/to/plugin/data/claude-mem.db "PRAGMA integrity_check;"
sqlite3 ~/.claude-mem/claude-mem.db "PRAGMA integrity_check;"
```
### Database Issues
@@ -678,12 +678,12 @@ sqlite3 path/to/plugin/data/claude-mem.db "PRAGMA integrity_check;"
# Close all connections
pm2 stop claude-mem-worker
# Check for stale locks (v4.0+ path)
lsof path/to/plugin/data/claude-mem.db
# Check for stale locks
lsof ~/.claude-mem/claude-mem.db
# Backup and recreate (nuclear option) - adjust paths for v4.0+
cp path/to/plugin/data/claude-mem.db path/to/plugin/data/claude-mem.db.backup
rm path/to/plugin/data/claude-mem.db
# Backup and recreate (nuclear option)
cp ~/.claude-mem/claude-mem.db ~/.claude-mem/claude-mem.db.backup
rm ~/.claude-mem/claude-mem.db
npm run worker:start # Will recreate schema
```
@@ -702,7 +702,7 @@ npm run worker:logs
Check correlation IDs to trace observations through the pipeline:
```bash
sqlite3 path/to/plugin/data/claude-mem.db "SELECT correlation_id, tool_name, created_at FROM observations WHERE session_id = 'YOUR_SESSION_ID' ORDER BY created_at;"
sqlite3 ~/.claude-mem/claude-mem.db "SELECT correlation_id, tool_name, created_at FROM observations WHERE session_id = 'YOUR_SESSION_ID' ORDER BY created_at;"
```
---
+93 -31
View File
File diff suppressed because one or more lines are too long
+1 -1
View File
@@ -31,7 +31,7 @@ module.exports = {
env: {
NODE_ENV: 'production',
CLAUDE_MEM_WORKER_PORT: 0, // Dynamic port allocation
CLAUDE_MEM_WORKER_PORT: 37777, // Fixed port for reliability
FORCE_COLOR: '1'
},
+1 -1
View File
@@ -1,6 +1,6 @@
{
"name": "claude-mem",
"version": "4.0.0",
"version": "4.0.1",
"description": "Memory compression system for Claude Code - persist context across sessions",
"keywords": [
"claude",
+71 -9
View File
@@ -1,7 +1,69 @@
#!/usr/bin/env node
import N from"better-sqlite3";import{join as i,dirname as H,basename as $}from"path";import{homedir as S}from"os";import{existsSync as W,mkdirSync as I}from"fs";var C=()=>process.env.CLAUDE_PLUGIN_ROOT?i(process.env.CLAUDE_PLUGIN_ROOT,"data"):process.env.CLAUDE_MEM_DATA_DIR?process.env.CLAUDE_MEM_DATA_DIR:i(S(),".claude-mem"),c=C(),m=process.env.CLAUDE_CONFIG_DIR||i(S(),".claude"),G=i(c,"archives"),q=i(c,"logs"),K=i(c,"trash"),Y=i(c,"backups"),J=i(c,"settings.json"),R=i(c,"claude-mem.db"),V=i(m,"settings.json"),Q=i(m,"commands"),z=i(m,"CLAUDE.md");function L(o){I(o,{recursive:!0})}var _=(n=>(n[n.DEBUG=0]="DEBUG",n[n.INFO=1]="INFO",n[n.WARN=2]="WARN",n[n.ERROR=3]="ERROR",n[n.SILENT=4]="SILENT",n))(_||{}),E=class{level;useColor;constructor(){let e=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=_[e]??1,this.useColor=process.stdout.isTTY??!1}correlationId(e,t){return`obs-${e}-${t}`}sessionId(e){return`session-${e}`}formatData(e){if(e==null)return"";if(typeof e=="string")return e;if(typeof e=="number"||typeof e=="boolean")return e.toString();if(typeof e=="object"){if(e instanceof Error)return this.level===0?`${e.message}
${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Object.keys(e);return t.length===0?"{}":t.length<=3?JSON.stringify(e):`{${t.length} keys: ${t.slice(0,3).join(", ")}...}`}return String(e)}formatTool(e,t){if(!t)return e;try{let s=typeof t=="string"?JSON.parse(t):t;if(e==="Bash"&&s.command){let r=s.command.length>50?s.command.substring(0,50)+"...":s.command;return`${e}(${r})`}if(e==="Read"&&s.file_path){let r=s.file_path.split("/").pop()||s.file_path;return`${e}(${r})`}if(e==="Edit"&&s.file_path){let r=s.file_path.split("/").pop()||s.file_path;return`${e}(${r})`}if(e==="Write"&&s.file_path){let r=s.file_path.split("/").pop()||s.file_path;return`${e}(${r})`}return e}catch{return e}}log(e,t,s,r,n){if(e<this.level)return;let d=new Date().toISOString().replace("T"," ").substring(0,23),p=_[e].padEnd(5),T=t.padEnd(6),a="";r?.correlationId?a=`[${r.correlationId}] `:r?.sessionId&&(a=`[session-${r.sessionId}] `);let l="";n!=null&&(this.level===0&&typeof n=="object"?l=`
`+JSON.stringify(n,null,2):l=" "+this.formatData(n));let b="";if(r){let{sessionId:y,sdkSessionId:x,correlationId:U,...h}=r;Object.keys(h).length>0&&(b=` {${Object.entries(h).map(([O,v])=>`${O}=${v}`).join(", ")}}`)}let f=`[${d}] [${p}] [${T}] ${a}${s}${b}${l}`;e===3?console.error(f):console.log(f)}debug(e,t,s,r){this.log(0,e,t,s,r)}info(e,t,s,r){this.log(1,e,t,s,r)}warn(e,t,s,r){this.log(2,e,t,s,r)}error(e,t,s,r){this.log(3,e,t,s,r)}dataIn(e,t,s,r){this.info(e,`\u2192 ${t}`,s,r)}dataOut(e,t,s,r){this.info(e,`\u2190 ${t}`,s,r)}success(e,t,s,r){this.info(e,`\u2713 ${t}`,s,r)}failure(e,t,s,r){this.error(e,`\u2717 ${t}`,s,r)}timing(e,t,s,r){this.info(e,`\u23F1 ${t}`,r,{duration:`${s}ms`})}},A=new E;var u=class{db;constructor(){L(c),this.db=new N(R),this.db.pragma("journal_mode = WAL"),this.db.pragma("synchronous = NORMAL"),this.db.pragma("foreign_keys = ON"),this.ensureWorkerPortColumn(),this.ensurePromptTrackingColumns(),this.removeSessionSummariesUniqueConstraint(),this.addObservationHierarchicalFields(),this.makeObservationsTextNullable()}ensureWorkerPortColumn(){try{this.db.pragma("table_info(sdk_sessions)").some(s=>s.name==="worker_port")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER"),console.error("[SessionStore] Added worker_port column to sdk_sessions table"))}catch(e){console.error("[SessionStore] Migration error:",e.message)}}ensurePromptTrackingColumns(){try{this.db.pragma("table_info(sdk_sessions)").some(a=>a.name==="prompt_counter")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN prompt_counter INTEGER DEFAULT 0"),console.error("[SessionStore] Added prompt_counter column to sdk_sessions table")),this.db.pragma("table_info(observations)").some(a=>a.name==="prompt_number")||(this.db.exec("ALTER TABLE observations ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to observations 
table")),this.db.pragma("table_info(session_summaries)").some(a=>a.name==="prompt_number")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to session_summaries table"));let T=this.db.pragma("index_list(session_summaries)").some(a=>a.unique===1)}catch(e){console.error("[SessionStore] Prompt tracking migration error:",e.message)}}removeSessionSummariesUniqueConstraint(){try{if(!this.db.pragma("index_list(session_summaries)").some(s=>s.unique===1))return;console.error("[SessionStore] Removing UNIQUE constraint from session_summaries.sdk_session_id..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
import x from"better-sqlite3";import{join as i,dirname as C,basename as P}from"path";import{homedir as h}from"os";import{existsSync as j,mkdirSync as D}from"fs";import{fileURLToPath as k}from"url";function y(){return typeof __dirname<"u"?__dirname:C(k(import.meta.url))}var W=y(),a=process.env.CLAUDE_MEM_DATA_DIR||i(h(),".claude-mem"),m=process.env.CLAUDE_CONFIG_DIR||i(h(),".claude"),K=i(a,"archives"),Y=i(a,"logs"),q=i(a,"trash"),V=i(a,"backups"),J=i(a,"settings.json"),I=i(a,"claude-mem.db"),Q=i(m,"settings.json"),z=i(m,"commands"),Z=i(m,"CLAUDE.md");function R(o){D(o,{recursive:!0})}var u=(n=>(n[n.DEBUG=0]="DEBUG",n[n.INFO=1]="INFO",n[n.WARN=2]="WARN",n[n.ERROR=3]="ERROR",n[n.SILENT=4]="SILENT",n))(u||{}),T=class{level;useColor;constructor(){let e=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=u[e]??1,this.useColor=process.stdout.isTTY??!1}correlationId(e,t){return`obs-${e}-${t}`}sessionId(e){return`session-${e}`}formatData(e){if(e==null)return"";if(typeof e=="string")return e;if(typeof e=="number"||typeof e=="boolean")return e.toString();if(typeof e=="object"){if(e instanceof Error)return this.level===0?`${e.message}
${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Object.keys(e);return t.length===0?"{}":t.length<=3?JSON.stringify(e):`{${t.length} keys: ${t.slice(0,3).join(", ")}...}`}return String(e)}formatTool(e,t){if(!t)return e;try{let s=typeof t=="string"?JSON.parse(t):t;if(e==="Bash"&&s.command){let r=s.command.length>50?s.command.substring(0,50)+"...":s.command;return`${e}(${r})`}if(e==="Read"&&s.file_path){let r=s.file_path.split("/").pop()||s.file_path;return`${e}(${r})`}if(e==="Edit"&&s.file_path){let r=s.file_path.split("/").pop()||s.file_path;return`${e}(${r})`}if(e==="Write"&&s.file_path){let r=s.file_path.split("/").pop()||s.file_path;return`${e}(${r})`}return e}catch{return e}}log(e,t,s,r,n){if(e<this.level)return;let d=new Date().toISOString().replace("T"," ").substring(0,23),p=u[e].padEnd(5),c=t.padEnd(6),_="";r?.correlationId?_=`[${r.correlationId}] `:r?.sessionId&&(_=`[session-${r.sessionId}] `);let l="";n!=null&&(this.level===0&&typeof n=="object"?l=`
`+JSON.stringify(n,null,2):l=" "+this.formatData(n));let S="";if(r){let{sessionId:U,sdkSessionId:X,correlationId:w,...b}=r;Object.keys(b).length>0&&(S=` {${Object.entries(b).map(([v,A])=>`${v}=${A}`).join(", ")}}`)}let N=`[${d}] [${p}] [${c}] ${_}${s}${S}${l}`;e===3?console.error(N):console.log(N)}debug(e,t,s,r){this.log(0,e,t,s,r)}info(e,t,s,r){this.log(1,e,t,s,r)}warn(e,t,s,r){this.log(2,e,t,s,r)}error(e,t,s,r){this.log(3,e,t,s,r)}dataIn(e,t,s,r){this.info(e,`\u2192 ${t}`,s,r)}dataOut(e,t,s,r){this.info(e,`\u2190 ${t}`,s,r)}success(e,t,s,r){this.info(e,`\u2713 ${t}`,s,r)}failure(e,t,s,r){this.error(e,`\u2717 ${t}`,s,r)}timing(e,t,s,r){this.info(e,`\u23F1 ${t}`,r,{duration:`${s}ms`})}},O=new T;var E=class{db;constructor(){R(a),this.db=new x(I),this.db.pragma("journal_mode = WAL"),this.db.pragma("synchronous = NORMAL"),this.db.pragma("foreign_keys = ON"),this.initializeSchema(),this.ensureWorkerPortColumn(),this.ensurePromptTrackingColumns(),this.removeSessionSummariesUniqueConstraint(),this.addObservationHierarchicalFields(),this.makeObservationsTextNullable()}initializeSchema(){try{this.db.exec(`
CREATE TABLE IF NOT EXISTS schema_versions (
id INTEGER PRIMARY KEY,
version INTEGER UNIQUE NOT NULL,
applied_at TEXT NOT NULL
)
`);let e=this.db.prepare("SELECT version FROM schema_versions ORDER BY version").all();(e.length>0?Math.max(...e.map(s=>s.version)):0)===0&&(console.error("[SessionStore] Initializing fresh database with migration004..."),this.db.exec(`
CREATE TABLE IF NOT EXISTS sdk_sessions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
claude_session_id TEXT UNIQUE NOT NULL,
sdk_session_id TEXT UNIQUE,
project TEXT NOT NULL,
user_prompt TEXT,
started_at TEXT NOT NULL,
started_at_epoch INTEGER NOT NULL,
completed_at TEXT,
completed_at_epoch INTEGER,
status TEXT CHECK(status IN ('active', 'completed', 'failed')) NOT NULL DEFAULT 'active'
);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_claude_id ON sdk_sessions(claude_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_sdk_id ON sdk_sessions(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_project ON sdk_sessions(project);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_status ON sdk_sessions(status);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_started ON sdk_sessions(started_at_epoch DESC);
CREATE TABLE IF NOT EXISTS observations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT NOT NULL,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery')),
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_observations_sdk_session ON observations(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_observations_project ON observations(project);
CREATE INDEX IF NOT EXISTS idx_observations_type ON observations(type);
CREATE INDEX IF NOT EXISTS idx_observations_created ON observations(created_at_epoch DESC);
CREATE TABLE IF NOT EXISTS session_summaries (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT UNIQUE NOT NULL,
project TEXT NOT NULL,
request TEXT,
investigated TEXT,
learned TEXT,
completed TEXT,
next_steps TEXT,
files_read TEXT,
files_edited TEXT,
notes TEXT,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_session_summaries_project ON session_summaries(project);
CREATE INDEX IF NOT EXISTS idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.prepare("INSERT INTO schema_versions (version, applied_at) VALUES (?, ?)").run(4,new Date().toISOString()),console.error("[SessionStore] Migration004 applied successfully"))}catch(e){throw console.error("[SessionStore] Schema initialization error:",e.message),e}}ensureWorkerPortColumn(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(5))return;this.db.pragma("table_info(sdk_sessions)").some(r=>r.name==="worker_port")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER"),console.error("[SessionStore] Added worker_port column to sdk_sessions table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(5,new Date().toISOString())}catch(e){console.error("[SessionStore] Migration error:",e.message)}}ensurePromptTrackingColumns(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(6))return;this.db.pragma("table_info(sdk_sessions)").some(c=>c.name==="prompt_counter")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN prompt_counter INTEGER DEFAULT 0"),console.error("[SessionStore] Added prompt_counter column to sdk_sessions table")),this.db.pragma("table_info(observations)").some(c=>c.name==="prompt_number")||(this.db.exec("ALTER TABLE observations ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to observations table")),this.db.pragma("table_info(session_summaries)").some(c=>c.name==="prompt_number")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to session_summaries table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(6,new Date().toISOString())}catch(e){console.error("[SessionStore] Prompt tracking migration error:",e.message)}}removeSessionSummariesUniqueConstraint(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = 
?").get(7))return;if(!this.db.pragma("index_list(session_summaries)").some(r=>r.unique===1)){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString());return}console.error("[SessionStore] Removing UNIQUE constraint from session_summaries.sdk_session_id..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE session_summaries_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
@@ -29,7 +91,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
CREATE INDEX idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX idx_session_summaries_project ON session_summaries(project);
CREATE INDEX idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.exec("COMMIT"),console.error("[SessionStore] Successfully removed UNIQUE constraint from session_summaries.sdk_session_id")}catch(s){throw this.db.exec("ROLLBACK"),s}}catch(e){console.error("[SessionStore] Migration error (remove UNIQUE constraint):",e.message)}}addObservationHierarchicalFields(){try{if(this.db.pragma("table_info(observations)").some(s=>s.name==="title"))return;console.error("[SessionStore] Adding hierarchical fields to observations table..."),this.db.exec(`
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString()),console.error("[SessionStore] Successfully removed UNIQUE constraint from session_summaries.sdk_session_id")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (remove UNIQUE constraint):",e.message)}}addObservationHierarchicalFields(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(8))return;if(this.db.pragma("table_info(observations)").some(r=>r.name==="title")){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString());return}console.error("[SessionStore] Adding hierarchical fields to observations table..."),this.db.exec(`
ALTER TABLE observations ADD COLUMN title TEXT;
ALTER TABLE observations ADD COLUMN subtitle TEXT;
ALTER TABLE observations ADD COLUMN facts TEXT;
@@ -37,7 +99,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
ALTER TABLE observations ADD COLUMN concepts TEXT;
ALTER TABLE observations ADD COLUMN files_read TEXT;
ALTER TABLE observations ADD COLUMN files_modified TEXT;
`),console.error("[SessionStore] Successfully added hierarchical fields to observations table")}catch(e){console.error("[SessionStore] Migration error (add hierarchical fields):",e.message)}}makeObservationsTextNullable(){try{let t=this.db.pragma("table_info(observations)").find(s=>s.name==="text");if(!t||t.notnull===0)return;console.error("[SessionStore] Making observations.text nullable..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
`),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString()),console.error("[SessionStore] Successfully added hierarchical fields to observations table")}catch(e){console.error("[SessionStore] Migration error (add hierarchical fields):",e.message)}}makeObservationsTextNullable(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(9))return;let s=this.db.pragma("table_info(observations)").find(r=>r.name==="text");if(!s||s.notnull===0){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString());return}console.error("[SessionStore] Making observations.text nullable..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE observations_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
@@ -67,7 +129,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
CREATE INDEX idx_observations_project ON observations(project);
CREATE INDEX idx_observations_type ON observations(type);
CREATE INDEX idx_observations_created ON observations(created_at_epoch DESC);
`),this.db.exec("COMMIT"),console.error("[SessionStore] Successfully made observations.text nullable")}catch(s){throw this.db.exec("ROLLBACK"),s}}catch(e){console.error("[SessionStore] Migration error (make text nullable):",e.message)}}getRecentSummaries(e,t=10){return this.db.prepare(`
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString()),console.error("[SessionStore] Successfully made observations.text nullable")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (make text nullable):",e.message)}}getRecentSummaries(e,t=10){return this.db.prepare(`
SELECT
request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, prompt_number, created_at
@@ -146,7 +208,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
UPDATE sdk_sessions
SET sdk_session_id = ?
WHERE id = ? AND sdk_session_id IS NULL
`).run(t,e).changes===0?(A.debug("DB","sdk_session_id already set, skipping update",{sessionId:e,sdkSessionId:t}),!1):!0}setWorkerPort(e,t){this.db.prepare(`
`).run(t,e).changes===0?(O.debug("DB","sdk_session_id already set, skipping update",{sessionId:e,sdkSessionId:t}),!1):!0}setWorkerPort(e,t){this.db.prepare(`
UPDATE sdk_sessions
SET worker_port = ?
WHERE id = ?
@@ -177,5 +239,5 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
UPDATE sdk_sessions
SET status = 'failed', completed_at = ?, completed_at_epoch = ?
WHERE status = 'active'
`).run(e.toISOString(),t).changes}close(){this.db.close()}};async function k(o){try{console.error("[claude-mem cleanup] Hook fired",{input:o?{session_id:o.session_id,cwd:o.cwd,reason:o.reason}:null}),o||(console.log("No input provided - this script is designed to run as a Claude Code SessionEnd hook"),console.log(`
Expected input format:`),console.log(JSON.stringify({session_id:"string",cwd:"string",transcript_path:"string",hook_event_name:"SessionEnd",reason:"exit"},null,2)),process.exit(0));let{session_id:e,reason:t}=o;console.error("[claude-mem cleanup] Searching for active SDK session",{session_id:e,reason:t});let s=new u,r=s.findActiveSDKSession(e);if(r||(console.error("[claude-mem cleanup] No active SDK session found",{session_id:e}),s.close(),console.log('{"continue": true, "suppressOutput": true}'),process.exit(0)),console.error("[claude-mem cleanup] Active SDK session found",{session_id:r.id,sdk_session_id:r.sdk_session_id,project:r.project,worker_port:r.worker_port}),r.worker_port)try{let n=await fetch(`http://127.0.0.1:${r.worker_port}/sessions/${r.id}`,{method:"DELETE",signal:AbortSignal.timeout(5e3)});n.ok?console.error("[claude-mem cleanup] Session deleted successfully via HTTP"):console.error("[claude-mem cleanup] Failed to delete session:",await n.text())}catch(n){console.error("[claude-mem cleanup] HTTP DELETE error:",n.message)}else console.error("[claude-mem cleanup] No worker port, cannot send DELETE request");try{s.markSessionFailed(r.id),console.error("[claude-mem cleanup] Session marked as failed in database")}catch(n){console.error("[claude-mem cleanup] Failed to mark session as failed:",n)}s.close(),console.error("[claude-mem cleanup] Cleanup completed successfully"),console.log('{"continue": true, "suppressOutput": true}'),process.exit(0)}catch(e){console.error("[claude-mem cleanup] Unexpected error in hook",{error:e.message,stack:e.stack,name:e.name}),console.log('{"continue": true, "suppressOutput": true}'),process.exit(0)}}import{stdin as D}from"process";var g="";D.on("data",o=>g+=o);D.on("end",async()=>{try{let o=g.trim()?JSON.parse(g):void 0;await k(o)}catch(o){console.error(`[claude-mem cleanup-hook error: ${o.message}]`),console.log('{"continue": true, "suppressOutput": true}'),process.exit(0)}});
`).run(e.toISOString(),t).changes}close(){this.db.close()}};async function f(o){try{console.error("[claude-mem cleanup] Hook fired",{input:o?{session_id:o.session_id,cwd:o.cwd,reason:o.reason}:null}),o||(console.log("No input provided - this script is designed to run as a Claude Code SessionEnd hook"),console.log(`
Expected input format:`),console.log(JSON.stringify({session_id:"string",cwd:"string",transcript_path:"string",hook_event_name:"SessionEnd",reason:"exit"},null,2)),process.exit(0));let{session_id:e,reason:t}=o;console.error("[claude-mem cleanup] Searching for active SDK session",{session_id:e,reason:t});let s=new E,r=s.findActiveSDKSession(e);if(r||(console.error("[claude-mem cleanup] No active SDK session found",{session_id:e}),s.close(),console.log('{"continue": true, "suppressOutput": true}'),process.exit(0)),console.error("[claude-mem cleanup] Active SDK session found",{session_id:r.id,sdk_session_id:r.sdk_session_id,project:r.project,worker_port:r.worker_port}),r.worker_port)try{let n=await fetch(`http://127.0.0.1:${r.worker_port}/sessions/${r.id}`,{method:"DELETE",signal:AbortSignal.timeout(5e3)});n.ok?console.error("[claude-mem cleanup] Session deleted successfully via HTTP"):console.error("[claude-mem cleanup] Failed to delete session:",await n.text())}catch(n){console.error("[claude-mem cleanup] HTTP DELETE error:",n.message)}else console.error("[claude-mem cleanup] No worker port, cannot send DELETE request");try{s.markSessionFailed(r.id),console.error("[claude-mem cleanup] Session marked as failed in database")}catch(n){console.error("[claude-mem cleanup] Failed to mark session as failed:",n)}s.close(),console.error("[claude-mem cleanup] Cleanup completed successfully"),console.log('{"continue": true, "suppressOutput": true}'),process.exit(0)}catch(e){console.error("[claude-mem cleanup] Unexpected error in hook",{error:e.message,stack:e.stack,name:e.name}),console.log('{"continue": true, "suppressOutput": true}'),process.exit(0)}}import{stdin as L}from"process";var g="";L.on("data",o=>g+=o);L.on("end",async()=>{try{let o=g.trim()?JSON.parse(g):void 0;await f(o)}catch(o){console.error(`[claude-mem cleanup-hook error: ${o.message}]`),console.log('{"continue": true, "suppressOutput": true}'),process.exit(0)}});
+77 -15
View File
@@ -1,7 +1,69 @@
#!/usr/bin/env node
var _=(i=>typeof require<"u"?require:typeof Proxy<"u"?new Proxy(i,{get:(e,t)=>(typeof require<"u"?require:e)[t]}):i)(function(i){if(typeof require<"u")return require.apply(this,arguments);throw Error('Dynamic require of "'+i+'" is not supported')});import b from"path";import{existsSync as T}from"fs";import{spawn as I}from"child_process";import j from"better-sqlite3";import{join as d,dirname as E,basename as J}from"path";import{homedir as O}from"os";import{existsSync as w,mkdirSync as M}from"fs";import{fileURLToPath as $}from"url";var P=()=>process.env.CLAUDE_PLUGIN_ROOT?d(process.env.CLAUDE_PLUGIN_ROOT,"data"):process.env.CLAUDE_MEM_DATA_DIR?process.env.CLAUDE_MEM_DATA_DIR:d(O(),".claude-mem"),u=P(),g=process.env.CLAUDE_CONFIG_DIR||d(O(),".claude"),z=d(u,"archives"),Z=d(u,"logs"),ee=d(u,"trash"),se=d(u,"backups"),te=d(u,"settings.json"),v=d(u,"claude-mem.db"),re=d(g,"settings.json"),ne=d(g,"commands"),oe=d(g,"CLAUDE.md");function y(){return d(u,"worker.port")}function D(i){M(i,{recursive:!0})}function N(){try{let t=_.resolve("claude-mem/package.json");return E(t)}catch{}let i=$(import.meta.url),e=E(i);for(let t=0;t<10;t++){let s=d(e,"package.json");if(w(s)&&_(s).name==="claude-mem")return e;let n=E(e);if(n===e)break;e=n}throw new Error("Cannot locate claude-mem package root. Ensure claude-mem is properly installed.")}var h=(r=>(r[r.DEBUG=0]="DEBUG",r[r.INFO=1]="INFO",r[r.WARN=2]="WARN",r[r.ERROR=3]="ERROR",r[r.SILENT=4]="SILENT",r))(h||{}),f=class{level;useColor;constructor(){let e=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=h[e]??1,this.useColor=process.stdout.isTTY??!1}correlationId(e,t){return`obs-${e}-${t}`}sessionId(e){return`session-${e}`}formatData(e){if(e==null)return"";if(typeof e=="string")return e;if(typeof e=="number"||typeof e=="boolean")return e.toString();if(typeof e=="object"){if(e instanceof Error)return this.level===0?`${e.message}
${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Object.keys(e);return t.length===0?"{}":t.length<=3?JSON.stringify(e):`{${t.length} keys: ${t.slice(0,3).join(", ")}...}`}return String(e)}formatTool(e,t){if(!t)return e;try{let s=typeof t=="string"?JSON.parse(t):t;if(e==="Bash"&&s.command){let n=s.command.length>50?s.command.substring(0,50)+"...":s.command;return`${e}(${n})`}if(e==="Read"&&s.file_path){let n=s.file_path.split("/").pop()||s.file_path;return`${e}(${n})`}if(e==="Edit"&&s.file_path){let n=s.file_path.split("/").pop()||s.file_path;return`${e}(${n})`}if(e==="Write"&&s.file_path){let n=s.file_path.split("/").pop()||s.file_path;return`${e}(${n})`}return e}catch{return e}}log(e,t,s,n,r){if(e<this.level)return;let a=new Date().toISOString().replace("T"," ").substring(0,23),o=h[e].padEnd(5),m=t.padEnd(6),c="";n?.correlationId?c=`[${n.correlationId}] `:n?.sessionId&&(c=`[session-${n.sessionId}] `);let p="";r!=null&&(this.level===0&&typeof r=="object"?p=`
`+JSON.stringify(r,null,2):p=" "+this.formatData(r));let L="";if(n){let{sessionId:B,sdkSessionId:W,correlationId:H,...k}=n;Object.keys(k).length>0&&(L=` {${Object.entries(k).map(([x,U])=>`${x}=${U}`).join(", ")}}`)}let A=`[${a}] [${o}] [${m}] ${c}${s}${L}${p}`;e===3?console.error(A):console.log(A)}debug(e,t,s,n){this.log(0,e,t,s,n)}info(e,t,s,n){this.log(1,e,t,s,n)}warn(e,t,s,n){this.log(2,e,t,s,n)}error(e,t,s,n){this.log(3,e,t,s,n)}dataIn(e,t,s,n){this.info(e,`\u2192 ${t}`,s,n)}dataOut(e,t,s,n){this.info(e,`\u2190 ${t}`,s,n)}success(e,t,s,n){this.info(e,`\u2713 ${t}`,s,n)}failure(e,t,s,n){this.error(e,`\u2717 ${t}`,s,n)}timing(e,t,s,n){this.info(e,`\u23F1 ${t}`,n,{duration:`${s}ms`})}},C=new f;var l=class{db;constructor(){D(u),this.db=new j(v),this.db.pragma("journal_mode = WAL"),this.db.pragma("synchronous = NORMAL"),this.db.pragma("foreign_keys = ON"),this.ensureWorkerPortColumn(),this.ensurePromptTrackingColumns(),this.removeSessionSummariesUniqueConstraint(),this.addObservationHierarchicalFields(),this.makeObservationsTextNullable()}ensureWorkerPortColumn(){try{this.db.pragma("table_info(sdk_sessions)").some(s=>s.name==="worker_port")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER"),console.error("[SessionStore] Added worker_port column to sdk_sessions table"))}catch(e){console.error("[SessionStore] Migration error:",e.message)}}ensurePromptTrackingColumns(){try{this.db.pragma("table_info(sdk_sessions)").some(c=>c.name==="prompt_counter")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN prompt_counter INTEGER DEFAULT 0"),console.error("[SessionStore] Added prompt_counter column to sdk_sessions table")),this.db.pragma("table_info(observations)").some(c=>c.name==="prompt_number")||(this.db.exec("ALTER TABLE observations ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to observations 
table")),this.db.pragma("table_info(session_summaries)").some(c=>c.name==="prompt_number")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to session_summaries table"));let m=this.db.pragma("index_list(session_summaries)").some(c=>c.unique===1)}catch(e){console.error("[SessionStore] Prompt tracking migration error:",e.message)}}removeSessionSummariesUniqueConstraint(){try{if(!this.db.pragma("index_list(session_summaries)").some(s=>s.unique===1))return;console.error("[SessionStore] Removing UNIQUE constraint from session_summaries.sdk_session_id..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
import j from"path";import $ from"better-sqlite3";import{join as c,dirname as X,basename as q}from"path";import{homedir as O}from"os";import{existsSync as z,mkdirSync as w}from"fs";import{fileURLToPath as M}from"url";function F(){return typeof __dirname<"u"?__dirname:X(M(import.meta.url))}var P=F(),u=process.env.CLAUDE_MEM_DATA_DIR||c(O(),".claude-mem"),_=process.env.CLAUDE_CONFIG_DIR||c(O(),".claude"),ee=c(u,"archives"),se=c(u,"logs"),te=c(u,"trash"),re=c(u,"backups"),ne=c(u,"settings.json"),I=c(u,"claude-mem.db"),oe=c(_,"settings.json"),ie=c(_,"commands"),ae=c(_,"CLAUDE.md");function L(a){w(a,{recursive:!0})}function v(){return c(P,"..","..")}var l=(n=>(n[n.DEBUG=0]="DEBUG",n[n.INFO=1]="INFO",n[n.WARN=2]="WARN",n[n.ERROR=3]="ERROR",n[n.SILENT=4]="SILENT",n))(l||{}),T=class{level;useColor;constructor(){let e=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=l[e]??1,this.useColor=process.stdout.isTTY??!1}correlationId(e,t){return`obs-${e}-${t}`}sessionId(e){return`session-${e}`}formatData(e){if(e==null)return"";if(typeof e=="string")return e;if(typeof e=="number"||typeof e=="boolean")return e.toString();if(typeof e=="object"){if(e instanceof Error)return this.level===0?`${e.message}
${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Object.keys(e);return t.length===0?"{}":t.length<=3?JSON.stringify(e):`{${t.length} keys: ${t.slice(0,3).join(", ")}...}`}return String(e)}formatTool(e,t){if(!t)return e;try{let s=typeof t=="string"?JSON.parse(t):t;if(e==="Bash"&&s.command){let r=s.command.length>50?s.command.substring(0,50)+"...":s.command;return`${e}(${r})`}if(e==="Read"&&s.file_path){let r=s.file_path.split("/").pop()||s.file_path;return`${e}(${r})`}if(e==="Edit"&&s.file_path){let r=s.file_path.split("/").pop()||s.file_path;return`${e}(${r})`}if(e==="Write"&&s.file_path){let r=s.file_path.split("/").pop()||s.file_path;return`${e}(${r})`}return e}catch{return e}}log(e,t,s,r,n){if(e<this.level)return;let i=new Date().toISOString().replace("T"," ").substring(0,23),o=l[e].padEnd(5),d=t.padEnd(6),E="";r?.correlationId?E=`[${r.correlationId}] `:r?.sessionId&&(E=`[session-${r.sessionId}] `);let p="";n!=null&&(this.level===0&&typeof n=="object"?p=`
`+JSON.stringify(n,null,2):p=" "+this.formatData(n));let b="";if(r){let{sessionId:H,sdkSessionId:B,correlationId:G,...R}=r;Object.keys(R).length>0&&(b=` {${Object.entries(R).map(([x,U])=>`${x}=${U}`).join(", ")}}`)}let N=`[${i}] [${o}] [${d}] ${E}${s}${b}${p}`;e===3?console.error(N):console.log(N)}debug(e,t,s,r){this.log(0,e,t,s,r)}info(e,t,s,r){this.log(1,e,t,s,r)}warn(e,t,s,r){this.log(2,e,t,s,r)}error(e,t,s,r){this.log(3,e,t,s,r)}dataIn(e,t,s,r){this.info(e,`\u2192 ${t}`,s,r)}dataOut(e,t,s,r){this.info(e,`\u2190 ${t}`,s,r)}success(e,t,s,r){this.info(e,`\u2713 ${t}`,s,r)}failure(e,t,s,r){this.error(e,`\u2717 ${t}`,s,r)}timing(e,t,s,r){this.info(e,`\u23F1 ${t}`,r,{duration:`${s}ms`})}},A=new T;var m=class{db;constructor(){L(u),this.db=new $(I),this.db.pragma("journal_mode = WAL"),this.db.pragma("synchronous = NORMAL"),this.db.pragma("foreign_keys = ON"),this.initializeSchema(),this.ensureWorkerPortColumn(),this.ensurePromptTrackingColumns(),this.removeSessionSummariesUniqueConstraint(),this.addObservationHierarchicalFields(),this.makeObservationsTextNullable()}initializeSchema(){try{this.db.exec(`
CREATE TABLE IF NOT EXISTS schema_versions (
id INTEGER PRIMARY KEY,
version INTEGER UNIQUE NOT NULL,
applied_at TEXT NOT NULL
)
`);let e=this.db.prepare("SELECT version FROM schema_versions ORDER BY version").all();(e.length>0?Math.max(...e.map(s=>s.version)):0)===0&&(console.error("[SessionStore] Initializing fresh database with migration004..."),this.db.exec(`
CREATE TABLE IF NOT EXISTS sdk_sessions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
claude_session_id TEXT UNIQUE NOT NULL,
sdk_session_id TEXT UNIQUE,
project TEXT NOT NULL,
user_prompt TEXT,
started_at TEXT NOT NULL,
started_at_epoch INTEGER NOT NULL,
completed_at TEXT,
completed_at_epoch INTEGER,
status TEXT CHECK(status IN ('active', 'completed', 'failed')) NOT NULL DEFAULT 'active'
);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_claude_id ON sdk_sessions(claude_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_sdk_id ON sdk_sessions(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_project ON sdk_sessions(project);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_status ON sdk_sessions(status);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_started ON sdk_sessions(started_at_epoch DESC);
CREATE TABLE IF NOT EXISTS observations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT NOT NULL,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery')),
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_observations_sdk_session ON observations(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_observations_project ON observations(project);
CREATE INDEX IF NOT EXISTS idx_observations_type ON observations(type);
CREATE INDEX IF NOT EXISTS idx_observations_created ON observations(created_at_epoch DESC);
CREATE TABLE IF NOT EXISTS session_summaries (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT UNIQUE NOT NULL,
project TEXT NOT NULL,
request TEXT,
investigated TEXT,
learned TEXT,
completed TEXT,
next_steps TEXT,
files_read TEXT,
files_edited TEXT,
notes TEXT,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_session_summaries_project ON session_summaries(project);
CREATE INDEX IF NOT EXISTS idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.prepare("INSERT INTO schema_versions (version, applied_at) VALUES (?, ?)").run(4,new Date().toISOString()),console.error("[SessionStore] Migration004 applied successfully"))}catch(e){throw console.error("[SessionStore] Schema initialization error:",e.message),e}}ensureWorkerPortColumn(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(5))return;this.db.pragma("table_info(sdk_sessions)").some(r=>r.name==="worker_port")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER"),console.error("[SessionStore] Added worker_port column to sdk_sessions table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(5,new Date().toISOString())}catch(e){console.error("[SessionStore] Migration error:",e.message)}}ensurePromptTrackingColumns(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(6))return;this.db.pragma("table_info(sdk_sessions)").some(d=>d.name==="prompt_counter")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN prompt_counter INTEGER DEFAULT 0"),console.error("[SessionStore] Added prompt_counter column to sdk_sessions table")),this.db.pragma("table_info(observations)").some(d=>d.name==="prompt_number")||(this.db.exec("ALTER TABLE observations ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to observations table")),this.db.pragma("table_info(session_summaries)").some(d=>d.name==="prompt_number")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to session_summaries table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(6,new Date().toISOString())}catch(e){console.error("[SessionStore] Prompt tracking migration error:",e.message)}}removeSessionSummariesUniqueConstraint(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = 
?").get(7))return;if(!this.db.pragma("index_list(session_summaries)").some(r=>r.unique===1)){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString());return}console.error("[SessionStore] Removing UNIQUE constraint from session_summaries.sdk_session_id..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE session_summaries_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
@@ -29,7 +91,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
CREATE INDEX idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX idx_session_summaries_project ON session_summaries(project);
CREATE INDEX idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.exec("COMMIT"),console.error("[SessionStore] Successfully removed UNIQUE constraint from session_summaries.sdk_session_id")}catch(s){throw this.db.exec("ROLLBACK"),s}}catch(e){console.error("[SessionStore] Migration error (remove UNIQUE constraint):",e.message)}}addObservationHierarchicalFields(){try{if(this.db.pragma("table_info(observations)").some(s=>s.name==="title"))return;console.error("[SessionStore] Adding hierarchical fields to observations table..."),this.db.exec(`
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString()),console.error("[SessionStore] Successfully removed UNIQUE constraint from session_summaries.sdk_session_id")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (remove UNIQUE constraint):",e.message)}}addObservationHierarchicalFields(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(8))return;if(this.db.pragma("table_info(observations)").some(r=>r.name==="title")){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString());return}console.error("[SessionStore] Adding hierarchical fields to observations table..."),this.db.exec(`
ALTER TABLE observations ADD COLUMN title TEXT;
ALTER TABLE observations ADD COLUMN subtitle TEXT;
ALTER TABLE observations ADD COLUMN facts TEXT;
@@ -37,7 +99,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
ALTER TABLE observations ADD COLUMN concepts TEXT;
ALTER TABLE observations ADD COLUMN files_read TEXT;
ALTER TABLE observations ADD COLUMN files_modified TEXT;
`),console.error("[SessionStore] Successfully added hierarchical fields to observations table")}catch(e){console.error("[SessionStore] Migration error (add hierarchical fields):",e.message)}}makeObservationsTextNullable(){try{let t=this.db.pragma("table_info(observations)").find(s=>s.name==="text");if(!t||t.notnull===0)return;console.error("[SessionStore] Making observations.text nullable..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
`),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString()),console.error("[SessionStore] Successfully added hierarchical fields to observations table")}catch(e){console.error("[SessionStore] Migration error (add hierarchical fields):",e.message)}}makeObservationsTextNullable(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(9))return;let s=this.db.pragma("table_info(observations)").find(r=>r.name==="text");if(!s||s.notnull===0){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString());return}console.error("[SessionStore] Making observations.text nullable..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE observations_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
@@ -67,7 +129,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
CREATE INDEX idx_observations_project ON observations(project);
CREATE INDEX idx_observations_type ON observations(type);
CREATE INDEX idx_observations_created ON observations(created_at_epoch DESC);
`),this.db.exec("COMMIT"),console.error("[SessionStore] Successfully made observations.text nullable")}catch(s){throw this.db.exec("ROLLBACK"),s}}catch(e){console.error("[SessionStore] Migration error (make text nullable):",e.message)}}getRecentSummaries(e,t=10){return this.db.prepare(`
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString()),console.error("[SessionStore] Successfully made observations.text nullable")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (make text nullable):",e.message)}}getRecentSummaries(e,t=10){return this.db.prepare(`
SELECT
request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, prompt_number, created_at
@@ -138,15 +200,15 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
SELECT prompt_counter FROM sdk_sessions WHERE id = ?
`).get(e)?.prompt_counter||1}getPromptCounter(e){return this.db.prepare(`
SELECT prompt_counter FROM sdk_sessions WHERE id = ?
`).get(e)?.prompt_counter||0}createSDKSession(e,t,s){let n=new Date,r=n.getTime();return this.db.prepare(`
`).get(e)?.prompt_counter||0}createSDKSession(e,t,s){let r=new Date,n=r.getTime();return this.db.prepare(`
INSERT INTO sdk_sessions
(claude_session_id, project, user_prompt, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, 'active')
`).run(e,t,s,n.toISOString(),r).lastInsertRowid}updateSDKSessionId(e,t){return this.db.prepare(`
`).run(e,t,s,r.toISOString(),n).lastInsertRowid}updateSDKSessionId(e,t){return this.db.prepare(`
UPDATE sdk_sessions
SET sdk_session_id = ?
WHERE id = ? AND sdk_session_id IS NULL
`).run(t,e).changes===0?(C.debug("DB","sdk_session_id already set, skipping update",{sessionId:e,sdkSessionId:t}),!1):!0}setWorkerPort(e,t){this.db.prepare(`
`).run(t,e).changes===0?(A.debug("DB","sdk_session_id already set, skipping update",{sessionId:e,sdkSessionId:t}),!1):!0}setWorkerPort(e,t){this.db.prepare(`
UPDATE sdk_sessions
SET worker_port = ?
WHERE id = ?
@@ -155,17 +217,17 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
FROM sdk_sessions
WHERE id = ?
LIMIT 1
`).get(e)?.worker_port||null}storeObservation(e,t,s,n){let r=new Date,a=r.getTime();this.db.prepare(`
`).get(e)?.worker_port||null}storeObservation(e,t,s,r){let n=new Date,i=n.getTime();this.db.prepare(`
INSERT INTO observations
(sdk_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(e,t,s.type,s.title,s.subtitle,JSON.stringify(s.facts),s.narrative,JSON.stringify(s.concepts),JSON.stringify(s.files_read),JSON.stringify(s.files_modified),n||null,r.toISOString(),a)}storeSummary(e,t,s,n){let r=new Date,a=r.getTime();this.db.prepare(`
`).run(e,t,s.type,s.title,s.subtitle,JSON.stringify(s.facts),s.narrative,JSON.stringify(s.concepts),JSON.stringify(s.files_read),JSON.stringify(s.files_modified),r||null,n.toISOString(),i)}storeSummary(e,t,s,r){let n=new Date,i=n.getTime();this.db.prepare(`
INSERT INTO session_summaries
(sdk_session_id, project, request, investigated, learned, completed,
next_steps, notes, prompt_number, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(e,t,s.request,s.investigated,s.learned,s.completed,s.next_steps,s.notes,n||null,r.toISOString(),a)}markSessionCompleted(e){let t=new Date,s=t.getTime();this.db.prepare(`
`).run(e,t,s.request,s.investigated,s.learned,s.completed,s.next_steps,s.notes,r||null,n.toISOString(),i)}markSessionCompleted(e){let t=new Date,s=t.getTime();this.db.prepare(`
UPDATE sdk_sessions
SET status = 'completed', completed_at = ?, completed_at_epoch = ?
WHERE id = ?
@@ -177,7 +239,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
UPDATE sdk_sessions
SET status = 'failed', completed_at = ?, completed_at_epoch = ?
WHERE status = 'active'
`).run(e.toISOString(),t).changes}close(){this.db.close()}};function F(){try{let i=y();if(T(i))return;console.error("[claude-mem] Worker not running, starting...");let e=N(),t=b.join(e,"dist","worker-service.cjs");if(!T(t)){console.error(`[claude-mem] Worker service not found at ${t}`);return}let s=b.join(e,"ecosystem.config.cjs");if(T(s))try{I("pm2",["start",s],{detached:!0,stdio:"ignore",cwd:e}).unref(),console.error("[claude-mem] Worker started with PM2");return}catch{console.error("[claude-mem] PM2 not available, using direct spawn")}I("node",[t],{detached:!0,stdio:"ignore",env:{...process.env,NODE_ENV:"production"}}).unref(),console.error("[claude-mem] Worker started in background")}catch(i){console.error(`[claude-mem] Failed to start worker: ${i.message}`)}}function S(i){F();let e=i?.cwd??process.cwd(),t=e?b.basename(e):"unknown-project",s=new l;try{let n=s.getRecentSessionsWithStatus(t,3);if(n.length===0)return`# Recent Session Context
`).run(e.toISOString(),t).changes}close(){this.db.close()}};import C from"path";import{existsSync as k}from"fs";import{spawn as g}from"child_process";var h=parseInt(process.env.CLAUDE_MEM_WORKER_PORT||"37777",10),W=`http://127.0.0.1:${h}/health`;async function D(){try{return(await fetch(W,{signal:AbortSignal.timeout(500)})).ok}catch{return!1}}async function y(){try{if(await D())return!0;console.error("[claude-mem] Worker not responding, starting...");let a=v(),e=C.join(a,"dist","worker-service.cjs");if(!k(e))return console.error(`[claude-mem] Worker service not found at ${e}`),!1;let t=C.join(a,"ecosystem.config.cjs");if(k(t))try{g("pm2",["start",t],{detached:!0,stdio:"ignore",cwd:a}).unref(),console.error("[claude-mem] Worker started with PM2")}catch{console.error("[claude-mem] PM2 not available, using direct spawn"),g("node",[e],{detached:!0,stdio:"ignore",env:{...process.env,NODE_ENV:"production",CLAUDE_MEM_WORKER_PORT:h.toString()}}).unref(),console.error("[claude-mem] Worker started in background")}else g("node",[e],{detached:!0,stdio:"ignore",env:{...process.env,NODE_ENV:"production",CLAUDE_MEM_WORKER_PORT:h.toString()}}).unref(),console.error("[claude-mem] Worker started in background");for(let s=0;s<3;s++)if(await new Promise(r=>setTimeout(r,500)),await D())return console.error("[claude-mem] Worker is healthy"),!0;return console.error("[claude-mem] Worker failed to become healthy after startup"),!1}catch(a){return console.error(`[claude-mem] Failed to start worker: ${a.message}`),!1}}function S(a){y();let e=a?.cwd??process.cwd(),t=e?j.basename(e):"unknown-project",s=new m;try{let r=s.getRecentSessionsWithStatus(t,3);if(r.length===0)return`# Recent Session Context
No previous sessions found for this project yet.`;let r=[];r.push("# Recent Session Context"),r.push(""),r.push(`Showing last ${n.length} session(s) for **${t}**:`),r.push("");for(let a of n)if(a.sdk_session_id){if(r.push("---"),r.push(""),a.has_summary){let o=s.getSummaryForSession(a.sdk_session_id);if(o){let m=o.prompt_number?` (Prompt #${o.prompt_number})`:"";if(r.push(`**Summary${m}**`),r.push(""),o.request&&r.push(`**Request:** ${o.request}`),o.completed&&r.push(`**Completed:** ${o.completed}`),o.learned&&r.push(`**Learned:** ${o.learned}`),o.next_steps&&r.push(`**Next Steps:** ${o.next_steps}`),o.files_read)try{let p=JSON.parse(o.files_read);Array.isArray(p)&&p.length>0&&r.push(`**Files Read:** ${p.join(", ")}`)}catch{o.files_read.trim()&&r.push(`**Files Read:** ${o.files_read}`)}if(o.files_edited)try{let p=JSON.parse(o.files_edited);Array.isArray(p)&&p.length>0&&r.push(`**Files Edited:** ${p.join(", ")}`)}catch{o.files_edited.trim()&&r.push(`**Files Edited:** ${o.files_edited}`)}let c=new Date(o.created_at).toLocaleString();r.push(`**Date:** ${c}`)}}else if(a.status==="active"){r.push("**In Progress**"),r.push(""),a.user_prompt&&r.push(`**Request:** ${a.user_prompt}`);let o=s.getObservationsForSession(a.sdk_session_id);if(o.length>0){r.push(""),r.push(`**Observations (${o.length}):**`);for(let c of o)r.push(`- ${c.title}`)}else r.push(""),r.push("*No observations yet*");r.push(""),r.push("**Status:** Active - summary pending");let m=new Date(a.started_at).toLocaleString();r.push(`**Date:** ${m}`)}else{let o=a.status==="failed"?"stopped":a.status;r.push(`**${o.charAt(0).toUpperCase()+o.slice(1)}**`),r.push(""),a.user_prompt&&r.push(`**Request:** ${a.user_prompt}`),r.push(""),r.push(`**Status:** ${o} - no summary available`);let m=new Date(a.started_at).toLocaleString();r.push(`**Date:** ${m}`)}r.push("")}return r.join(`
`)}finally{s.close()}}import{stdin as R}from"process";try{if(R.isTTY){let e={hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:S()}};console.log(JSON.stringify(e)),process.exit(0)}else{let i="";R.on("data",e=>i+=e),R.on("end",()=>{let e=i.trim()?JSON.parse(i):void 0,s={hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:S(e)}};console.log(JSON.stringify(s)),process.exit(0)})}}catch(i){console.error(`[claude-mem context-hook error: ${i.message}]`),process.exit(0)}
No previous sessions found for this project yet.`;let n=[];n.push("# Recent Session Context"),n.push(""),n.push(`Showing last ${r.length} session(s) for **${t}**:`),n.push("");for(let i of r)if(i.sdk_session_id){if(n.push("---"),n.push(""),i.has_summary){let o=s.getSummaryForSession(i.sdk_session_id);if(o){let d=o.prompt_number?` (Prompt #${o.prompt_number})`:"";if(n.push(`**Summary${d}**`),n.push(""),o.request&&n.push(`**Request:** ${o.request}`),o.completed&&n.push(`**Completed:** ${o.completed}`),o.learned&&n.push(`**Learned:** ${o.learned}`),o.next_steps&&n.push(`**Next Steps:** ${o.next_steps}`),o.files_read)try{let p=JSON.parse(o.files_read);Array.isArray(p)&&p.length>0&&n.push(`**Files Read:** ${p.join(", ")}`)}catch{o.files_read.trim()&&n.push(`**Files Read:** ${o.files_read}`)}if(o.files_edited)try{let p=JSON.parse(o.files_edited);Array.isArray(p)&&p.length>0&&n.push(`**Files Edited:** ${p.join(", ")}`)}catch{o.files_edited.trim()&&n.push(`**Files Edited:** ${o.files_edited}`)}let E=new Date(o.created_at).toLocaleString();n.push(`**Date:** ${E}`)}}else if(i.status==="active"){n.push("**In Progress**"),n.push(""),i.user_prompt&&n.push(`**Request:** ${i.user_prompt}`);let o=s.getObservationsForSession(i.sdk_session_id);if(o.length>0){n.push(""),n.push(`**Observations (${o.length}):**`);for(let E of o)n.push(`- ${E.title}`)}else n.push(""),n.push("*No observations yet*");n.push(""),n.push("**Status:** Active - summary pending");let d=new Date(i.started_at).toLocaleString();n.push(`**Date:** ${d}`)}else{let o=i.status==="failed"?"stopped":i.status;n.push(`**${o.charAt(0).toUpperCase()+o.slice(1)}**`),n.push(""),i.user_prompt&&n.push(`**Request:** ${i.user_prompt}`),n.push(""),n.push(`**Status:** ${o} - no summary available`);let d=new Date(i.started_at).toLocaleString();n.push(`**Date:** ${d}`)}n.push("")}return n.join(`
`)}finally{s.close()}}import{stdin as f}from"process";try{if(f.isTTY){let e={hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:S()}};console.log(JSON.stringify(e)),process.exit(0)}else{let a="";f.on("data",e=>a+=e),f.on("end",()=>{let e=a.trim()?JSON.parse(a):void 0,s={hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:S(e)}};console.log(JSON.stringify(s)),process.exit(0)})}}catch(a){console.error(`[claude-mem context-hook error: ${a.message}]`),process.exit(0)}
+73 -11
View File
@@ -1,7 +1,69 @@
#!/usr/bin/env node
import U from"path";import x from"better-sqlite3";import{join as p,dirname as W,basename as G}from"path";import{homedir as R}from"os";import{existsSync as Y,mkdirSync as v}from"fs";var y=()=>process.env.CLAUDE_PLUGIN_ROOT?p(process.env.CLAUDE_PLUGIN_ROOT,"data"):process.env.CLAUDE_MEM_DATA_DIR?process.env.CLAUDE_MEM_DATA_DIR:p(R(),".claude-mem"),m=y(),g=process.env.CLAUDE_CONFIG_DIR||p(R(),".claude"),V=p(m,"archives"),Q=p(m,"logs"),z=p(m,"trash"),Z=p(m,"backups"),ee=p(m,"settings.json"),O=p(m,"claude-mem.db"),se=p(g,"settings.json"),te=p(g,"commands"),re=p(g,"CLAUDE.md");function k(){return p(m,"worker.port")}function A(o){v(o,{recursive:!0})}var b=(n=>(n[n.DEBUG=0]="DEBUG",n[n.INFO=1]="INFO",n[n.WARN=2]="WARN",n[n.ERROR=3]="ERROR",n[n.SILENT=4]="SILENT",n))(b||{}),T=class{level;useColor;constructor(){let e=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=b[e]??1,this.useColor=process.stdout.isTTY??!1}correlationId(e,s){return`obs-${e}-${s}`}sessionId(e){return`session-${e}`}formatData(e){if(e==null)return"";if(typeof e=="string")return e;if(typeof e=="number"||typeof e=="boolean")return e.toString();if(typeof e=="object"){if(e instanceof Error)return this.level===0?`${e.message}
${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Object.keys(e);return s.length===0?"{}":s.length<=3?JSON.stringify(e):`{${s.length} keys: ${s.slice(0,3).join(", ")}...}`}return String(e)}formatTool(e,s){if(!s)return e;try{let t=typeof s=="string"?JSON.parse(s):s;if(e==="Bash"&&t.command){let r=t.command.length>50?t.command.substring(0,50)+"...":t.command;return`${e}(${r})`}if(e==="Read"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Edit"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Write"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}return e}catch{return e}}log(e,s,t,r,n){if(e<this.level)return;let a=new Date().toISOString().replace("T"," ").substring(0,23),i=b[e].padEnd(5),d=s.padEnd(6),c="";r?.correlationId?c=`[${r.correlationId}] `:r?.sessionId&&(c=`[session-${r.sessionId}] `);let u="";n!=null&&(this.level===0&&typeof n=="object"?u=`
`+JSON.stringify(n,null,2):u=" "+this.formatData(n));let l="";if(r){let{sessionId:M,sdkSessionId:H,correlationId:$,...h}=r;Object.keys(h).length>0&&(l=` {${Object.entries(h).map(([D,I])=>`${D}=${I}`).join(", ")}}`)}let S=`[${a}] [${i}] [${d}] ${c}${t}${l}${u}`;e===3?console.error(S):console.log(S)}debug(e,s,t,r){this.log(0,e,s,t,r)}info(e,s,t,r){this.log(1,e,s,t,r)}warn(e,s,t,r){this.log(2,e,s,t,r)}error(e,s,t,r){this.log(3,e,s,t,r)}dataIn(e,s,t,r){this.info(e,`\u2192 ${s}`,t,r)}dataOut(e,s,t,r){this.info(e,`\u2190 ${s}`,t,r)}success(e,s,t,r){this.info(e,`\u2713 ${s}`,t,r)}failure(e,s,t,r){this.error(e,`\u2717 ${s}`,t,r)}timing(e,s,t,r){this.info(e,`\u23F1 ${s}`,r,{duration:`${t}ms`})}},L=new T;var _=class{db;constructor(){A(m),this.db=new x(O),this.db.pragma("journal_mode = WAL"),this.db.pragma("synchronous = NORMAL"),this.db.pragma("foreign_keys = ON"),this.ensureWorkerPortColumn(),this.ensurePromptTrackingColumns(),this.removeSessionSummariesUniqueConstraint(),this.addObservationHierarchicalFields(),this.makeObservationsTextNullable()}ensureWorkerPortColumn(){try{this.db.pragma("table_info(sdk_sessions)").some(t=>t.name==="worker_port")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER"),console.error("[SessionStore] Added worker_port column to sdk_sessions table"))}catch(e){console.error("[SessionStore] Migration error:",e.message)}}ensurePromptTrackingColumns(){try{this.db.pragma("table_info(sdk_sessions)").some(c=>c.name==="prompt_counter")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN prompt_counter INTEGER DEFAULT 0"),console.error("[SessionStore] Added prompt_counter column to sdk_sessions table")),this.db.pragma("table_info(observations)").some(c=>c.name==="prompt_number")||(this.db.exec("ALTER TABLE observations ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to observations 
table")),this.db.pragma("table_info(session_summaries)").some(c=>c.name==="prompt_number")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to session_summaries table"));let d=this.db.pragma("index_list(session_summaries)").some(c=>c.unique===1)}catch(e){console.error("[SessionStore] Prompt tracking migration error:",e.message)}}removeSessionSummariesUniqueConstraint(){try{if(!this.db.pragma("index_list(session_summaries)").some(t=>t.unique===1))return;console.error("[SessionStore] Removing UNIQUE constraint from session_summaries.sdk_session_id..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
import K from"path";import j from"better-sqlite3";import{join as a,dirname as P,basename as z}from"path";import{homedir as h}from"os";import{existsSync as te,mkdirSync as F}from"fs";import{fileURLToPath as H}from"url";function W(){return typeof __dirname<"u"?__dirname:P(H(import.meta.url))}var $=W(),E=process.env.CLAUDE_MEM_DATA_DIR||a(h(),".claude-mem"),g=process.env.CLAUDE_CONFIG_DIR||a(h(),".claude"),ne=a(E,"archives"),oe=a(E,"logs"),ie=a(E,"trash"),ae=a(E,"backups"),de=a(E,"settings.json"),O=a(E,"claude-mem.db"),pe=a(g,"settings.json"),ce=a(g,"commands"),ue=a(g,"CLAUDE.md");function I(o){F(o,{recursive:!0})}function L(){return a($,"..","..")}var S=(n=>(n[n.DEBUG=0]="DEBUG",n[n.INFO=1]="INFO",n[n.WARN=2]="WARN",n[n.ERROR=3]="ERROR",n[n.SILENT=4]="SILENT",n))(S||{}),b=class{level;useColor;constructor(){let e=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=S[e]??1,this.useColor=process.stdout.isTTY??!1}correlationId(e,s){return`obs-${e}-${s}`}sessionId(e){return`session-${e}`}formatData(e){if(e==null)return"";if(typeof e=="string")return e;if(typeof e=="number"||typeof e=="boolean")return e.toString();if(typeof e=="object"){if(e instanceof Error)return this.level===0?`${e.message}
${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Object.keys(e);return s.length===0?"{}":s.length<=3?JSON.stringify(e):`{${s.length} keys: ${s.slice(0,3).join(", ")}...}`}return String(e)}formatTool(e,s){if(!s)return e;try{let t=typeof s=="string"?JSON.parse(s):s;if(e==="Bash"&&t.command){let r=t.command.length>50?t.command.substring(0,50)+"...":t.command;return`${e}(${r})`}if(e==="Read"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Edit"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Write"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}return e}catch{return e}}log(e,s,t,r,n){if(e<this.level)return;let c=new Date().toISOString().replace("T"," ").substring(0,23),i=S[e].padEnd(5),d=s.padEnd(6),_="";r?.correlationId?_=`[${r.correlationId}] `:r?.sessionId&&(_=`[session-${r.sessionId}] `);let m="";n!=null&&(this.level===0&&typeof n=="object"?m=`
`+JSON.stringify(n,null,2):m=" "+this.formatData(n));let p="";if(r){let{sessionId:Y,sdkSessionId:q,correlationId:V,...f}=r;Object.keys(f).length>0&&(p=` {${Object.entries(f).map(([X,M])=>`${X}=${M}`).join(", ")}}`)}let u=`[${c}] [${i}] [${d}] ${_}${t}${p}${m}`;e===3?console.error(u):console.log(u)}debug(e,s,t,r){this.log(0,e,s,t,r)}info(e,s,t,r){this.log(1,e,s,t,r)}warn(e,s,t,r){this.log(2,e,s,t,r)}error(e,s,t,r){this.log(3,e,s,t,r)}dataIn(e,s,t,r){this.info(e,`\u2192 ${s}`,t,r)}dataOut(e,s,t,r){this.info(e,`\u2190 ${s}`,t,r)}success(e,s,t,r){this.info(e,`\u2713 ${s}`,t,r)}failure(e,s,t,r){this.error(e,`\u2717 ${s}`,t,r)}timing(e,s,t,r){this.info(e,`\u23F1 ${s}`,r,{duration:`${t}ms`})}},v=new b;var l=class{db;constructor(){I(E),this.db=new j(O),this.db.pragma("journal_mode = WAL"),this.db.pragma("synchronous = NORMAL"),this.db.pragma("foreign_keys = ON"),this.initializeSchema(),this.ensureWorkerPortColumn(),this.ensurePromptTrackingColumns(),this.removeSessionSummariesUniqueConstraint(),this.addObservationHierarchicalFields(),this.makeObservationsTextNullable()}initializeSchema(){try{this.db.exec(`
CREATE TABLE IF NOT EXISTS schema_versions (
id INTEGER PRIMARY KEY,
version INTEGER UNIQUE NOT NULL,
applied_at TEXT NOT NULL
)
`);let e=this.db.prepare("SELECT version FROM schema_versions ORDER BY version").all();(e.length>0?Math.max(...e.map(t=>t.version)):0)===0&&(console.error("[SessionStore] Initializing fresh database with migration004..."),this.db.exec(`
CREATE TABLE IF NOT EXISTS sdk_sessions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
claude_session_id TEXT UNIQUE NOT NULL,
sdk_session_id TEXT UNIQUE,
project TEXT NOT NULL,
user_prompt TEXT,
started_at TEXT NOT NULL,
started_at_epoch INTEGER NOT NULL,
completed_at TEXT,
completed_at_epoch INTEGER,
status TEXT CHECK(status IN ('active', 'completed', 'failed')) NOT NULL DEFAULT 'active'
);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_claude_id ON sdk_sessions(claude_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_sdk_id ON sdk_sessions(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_project ON sdk_sessions(project);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_status ON sdk_sessions(status);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_started ON sdk_sessions(started_at_epoch DESC);
CREATE TABLE IF NOT EXISTS observations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT NOT NULL,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery')),
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_observations_sdk_session ON observations(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_observations_project ON observations(project);
CREATE INDEX IF NOT EXISTS idx_observations_type ON observations(type);
CREATE INDEX IF NOT EXISTS idx_observations_created ON observations(created_at_epoch DESC);
CREATE TABLE IF NOT EXISTS session_summaries (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT UNIQUE NOT NULL,
project TEXT NOT NULL,
request TEXT,
investigated TEXT,
learned TEXT,
completed TEXT,
next_steps TEXT,
files_read TEXT,
files_edited TEXT,
notes TEXT,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_session_summaries_project ON session_summaries(project);
CREATE INDEX IF NOT EXISTS idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.prepare("INSERT INTO schema_versions (version, applied_at) VALUES (?, ?)").run(4,new Date().toISOString()),console.error("[SessionStore] Migration004 applied successfully"))}catch(e){throw console.error("[SessionStore] Schema initialization error:",e.message),e}}ensureWorkerPortColumn(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(5))return;this.db.pragma("table_info(sdk_sessions)").some(r=>r.name==="worker_port")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER"),console.error("[SessionStore] Added worker_port column to sdk_sessions table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(5,new Date().toISOString())}catch(e){console.error("[SessionStore] Migration error:",e.message)}}ensurePromptTrackingColumns(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(6))return;this.db.pragma("table_info(sdk_sessions)").some(d=>d.name==="prompt_counter")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN prompt_counter INTEGER DEFAULT 0"),console.error("[SessionStore] Added prompt_counter column to sdk_sessions table")),this.db.pragma("table_info(observations)").some(d=>d.name==="prompt_number")||(this.db.exec("ALTER TABLE observations ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to observations table")),this.db.pragma("table_info(session_summaries)").some(d=>d.name==="prompt_number")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to session_summaries table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(6,new Date().toISOString())}catch(e){console.error("[SessionStore] Prompt tracking migration error:",e.message)}}removeSessionSummariesUniqueConstraint(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = 
?").get(7))return;if(!this.db.pragma("index_list(session_summaries)").some(r=>r.unique===1)){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString());return}console.error("[SessionStore] Removing UNIQUE constraint from session_summaries.sdk_session_id..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE session_summaries_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
@@ -29,7 +91,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
CREATE INDEX idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX idx_session_summaries_project ON session_summaries(project);
CREATE INDEX idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.exec("COMMIT"),console.error("[SessionStore] Successfully removed UNIQUE constraint from session_summaries.sdk_session_id")}catch(t){throw this.db.exec("ROLLBACK"),t}}catch(e){console.error("[SessionStore] Migration error (remove UNIQUE constraint):",e.message)}}addObservationHierarchicalFields(){try{if(this.db.pragma("table_info(observations)").some(t=>t.name==="title"))return;console.error("[SessionStore] Adding hierarchical fields to observations table..."),this.db.exec(`
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString()),console.error("[SessionStore] Successfully removed UNIQUE constraint from session_summaries.sdk_session_id")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (remove UNIQUE constraint):",e.message)}}addObservationHierarchicalFields(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(8))return;if(this.db.pragma("table_info(observations)").some(r=>r.name==="title")){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString());return}console.error("[SessionStore] Adding hierarchical fields to observations table..."),this.db.exec(`
ALTER TABLE observations ADD COLUMN title TEXT;
ALTER TABLE observations ADD COLUMN subtitle TEXT;
ALTER TABLE observations ADD COLUMN facts TEXT;
@@ -37,7 +99,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
ALTER TABLE observations ADD COLUMN concepts TEXT;
ALTER TABLE observations ADD COLUMN files_read TEXT;
ALTER TABLE observations ADD COLUMN files_modified TEXT;
`),console.error("[SessionStore] Successfully added hierarchical fields to observations table")}catch(e){console.error("[SessionStore] Migration error (add hierarchical fields):",e.message)}}makeObservationsTextNullable(){try{let s=this.db.pragma("table_info(observations)").find(t=>t.name==="text");if(!s||s.notnull===0)return;console.error("[SessionStore] Making observations.text nullable..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
`),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString()),console.error("[SessionStore] Successfully added hierarchical fields to observations table")}catch(e){console.error("[SessionStore] Migration error (add hierarchical fields):",e.message)}}makeObservationsTextNullable(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(9))return;let t=this.db.pragma("table_info(observations)").find(r=>r.name==="text");if(!t||t.notnull===0){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString());return}console.error("[SessionStore] Making observations.text nullable..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE observations_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
@@ -67,7 +129,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
CREATE INDEX idx_observations_project ON observations(project);
CREATE INDEX idx_observations_type ON observations(type);
CREATE INDEX idx_observations_created ON observations(created_at_epoch DESC);
`),this.db.exec("COMMIT"),console.error("[SessionStore] Successfully made observations.text nullable")}catch(t){throw this.db.exec("ROLLBACK"),t}}catch(e){console.error("[SessionStore] Migration error (make text nullable):",e.message)}}getRecentSummaries(e,s=10){return this.db.prepare(`
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString()),console.error("[SessionStore] Successfully made observations.text nullable")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (make text nullable):",e.message)}}getRecentSummaries(e,s=10){return this.db.prepare(`
SELECT
request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, prompt_number, created_at
@@ -146,7 +208,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
UPDATE sdk_sessions
SET sdk_session_id = ?
WHERE id = ? AND sdk_session_id IS NULL
`).run(s,e).changes===0?(L.debug("DB","sdk_session_id already set, skipping update",{sessionId:e,sdkSessionId:s}),!1):!0}setWorkerPort(e,s){this.db.prepare(`
`).run(s,e).changes===0?(v.debug("DB","sdk_session_id already set, skipping update",{sessionId:e,sdkSessionId:s}),!1):!0}setWorkerPort(e,s){this.db.prepare(`
UPDATE sdk_sessions
SET worker_port = ?
WHERE id = ?
@@ -155,17 +217,17 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
FROM sdk_sessions
WHERE id = ?
LIMIT 1
`).get(e)?.worker_port||null}storeObservation(e,s,t,r){let n=new Date,a=n.getTime();this.db.prepare(`
`).get(e)?.worker_port||null}storeObservation(e,s,t,r){let n=new Date,c=n.getTime();this.db.prepare(`
INSERT INTO observations
(sdk_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(e,s,t.type,t.title,t.subtitle,JSON.stringify(t.facts),t.narrative,JSON.stringify(t.concepts),JSON.stringify(t.files_read),JSON.stringify(t.files_modified),r||null,n.toISOString(),a)}storeSummary(e,s,t,r){let n=new Date,a=n.getTime();this.db.prepare(`
`).run(e,s,t.type,t.title,t.subtitle,JSON.stringify(t.facts),t.narrative,JSON.stringify(t.concepts),JSON.stringify(t.files_read),JSON.stringify(t.files_modified),r||null,n.toISOString(),c)}storeSummary(e,s,t,r){let n=new Date,c=n.getTime();this.db.prepare(`
INSERT INTO session_summaries
(sdk_session_id, project, request, investigated, learned, completed,
next_steps, notes, prompt_number, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(e,s,t.request,t.investigated,t.learned,t.completed,t.next_steps,t.notes,r||null,n.toISOString(),a)}markSessionCompleted(e){let s=new Date,t=s.getTime();this.db.prepare(`
`).run(e,s,t.request,t.investigated,t.learned,t.completed,t.next_steps,t.notes,r||null,n.toISOString(),c)}markSessionCompleted(e){let s=new Date,t=s.getTime();this.db.prepare(`
UPDATE sdk_sessions
SET status = 'completed', completed_at = ?, completed_at_epoch = ?
WHERE id = ?
@@ -177,4 +239,4 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
UPDATE sdk_sessions
SET status = 'failed', completed_at = ?, completed_at_epoch = ?
WHERE status = 'active'
`).run(e.toISOString(),s).changes}close(){this.db.close()}};function w(o,e,s){return o==="PreCompact"?e?{continue:!0,suppressOutput:!0}:{continue:!1,stopReason:s.reason||"Pre-compact operation failed",suppressOutput:!0}:o==="SessionStart"?e&&s.context?{continue:!0,suppressOutput:!0,hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:s.context}}:{continue:!0,suppressOutput:!0}:o==="UserPromptSubmit"||o==="PostToolUse"?{continue:!0,suppressOutput:!0}:o==="Stop"?{continue:!0,suppressOutput:!0}:{continue:e,suppressOutput:!0,...s.reason&&!e?{stopReason:s.reason}:{}}}function E(o,e,s={}){let t=w(o,e,s);return JSON.stringify(t)}async function P(){let{readFileSync:o,existsSync:e}=await import("fs"),s=k();if(!e(s))return null;try{let t=o(s,"utf8").trim();return parseInt(t,10)}catch{return null}}async function C(o){if(!o)throw new Error("newHook requires input");let{session_id:e,cwd:s,prompt:t}=o,r=U.basename(s),n=new _;try{let a=n.findActiveSDKSession(e),i,d=!1;if(a){i=a.id;let u=n.incrementPromptCounter(i);console.error(`[new-hook] Continuing session ${i}, prompt #${u}`)}else{let u=n.findAnySDKSession(e);if(u){i=u.id,n.reactivateSession(i,t);let l=n.incrementPromptCounter(i);d=!0,console.error(`[new-hook] Reactivated session ${i}, prompt #${l}`)}else{i=n.createSDKSession(e,r,t);let l=n.incrementPromptCounter(i);d=!0,console.error(`[new-hook] Created new session ${i}, prompt #${l}`)}}let c=await P();if(!c){console.error("[new-hook] Worker service not running. 
Start with: npm run worker:start"),console.log(E("UserPromptSubmit",!0));return}if(d){let u=await fetch(`http://127.0.0.1:${c}/sessions/${i}/init`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({project:r,userPrompt:t}),signal:AbortSignal.timeout(5e3)});u.ok||console.error("[new-hook] Failed to init session:",await u.text())}console.log(E("UserPromptSubmit",!0))}catch(a){console.error("[new-hook] FATAL ERROR:",a.message),console.error("[new-hook] Stack:",a.stack),console.error("[new-hook] Full error:",JSON.stringify(a,Object.getOwnPropertyNames(a))),console.log(E("UserPromptSubmit",!0))}finally{n.close()}}import{stdin as N}from"process";var f="";N.on("data",o=>f+=o);N.on("end",async()=>{try{let o=f.trim()?JSON.parse(f):void 0;await C(o),process.exit(0)}catch(o){console.error(`[claude-mem new-hook error: ${o.message}]`),console.log('{"continue": true, "suppressOutput": true}'),process.exit(0)}});
`).run(e.toISOString(),s).changes}close(){this.db.close()}};function B(o,e,s){return o==="PreCompact"?e?{continue:!0,suppressOutput:!0}:{continue:!1,stopReason:s.reason||"Pre-compact operation failed",suppressOutput:!0}:o==="SessionStart"?e&&s.context?{continue:!0,suppressOutput:!0,hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:s.context}}:{continue:!0,suppressOutput:!0}:o==="UserPromptSubmit"||o==="PostToolUse"?{continue:!0,suppressOutput:!0}:o==="Stop"?{continue:!0,suppressOutput:!0}:{continue:e,suppressOutput:!0,...s.reason&&!e?{stopReason:s.reason}:{}}}function A(o,e,s={}){let t=B(o,e,s);return JSON.stringify(t)}import k from"path";import{existsSync as C}from"fs";import{spawn as N}from"child_process";var T=parseInt(process.env.CLAUDE_MEM_WORKER_PORT||"37777",10),G=`http://127.0.0.1:${T}/health`;async function D(){try{return(await fetch(G,{signal:AbortSignal.timeout(500)})).ok}catch{return!1}}async function y(){try{if(await D())return!0;console.error("[claude-mem] Worker not responding, starting...");let o=L(),e=k.join(o,"dist","worker-service.cjs");if(!C(e))return console.error(`[claude-mem] Worker service not found at ${e}`),!1;let s=k.join(o,"ecosystem.config.cjs");if(C(s))try{N("pm2",["start",s],{detached:!0,stdio:"ignore",cwd:o}).unref(),console.error("[claude-mem] Worker started with PM2")}catch{console.error("[claude-mem] PM2 not available, using direct spawn"),N("node",[e],{detached:!0,stdio:"ignore",env:{...process.env,NODE_ENV:"production",CLAUDE_MEM_WORKER_PORT:T.toString()}}).unref(),console.error("[claude-mem] Worker started in background")}else N("node",[e],{detached:!0,stdio:"ignore",env:{...process.env,NODE_ENV:"production",CLAUDE_MEM_WORKER_PORT:T.toString()}}).unref(),console.error("[claude-mem] Worker started in background");for(let t=0;t<3;t++)if(await new Promise(r=>setTimeout(r,500)),await D())return console.error("[claude-mem] Worker is healthy"),!0;return console.error("[claude-mem] Worker failed to become healthy 
after startup"),!1}catch(o){return console.error(`[claude-mem] Failed to start worker: ${o.message}`),!1}}function x(){return T}async function U(o){if(!o)throw new Error("newHook requires input");let{session_id:e,cwd:s,prompt:t}=o,r=K.basename(s),n=new l;try{let c=n.findActiveSDKSession(e),i,d=!1;if(c){i=c.id;let p=n.incrementPromptCounter(i);console.error(`[new-hook] Continuing session ${i}, prompt #${p}`)}else{let p=n.findAnySDKSession(e);if(p){i=p.id,n.reactivateSession(i,t);let u=n.incrementPromptCounter(i);d=!0,console.error(`[new-hook] Reactivated session ${i}, prompt #${u}`)}else{i=n.createSDKSession(e,r,t);let u=n.incrementPromptCounter(i);d=!0,console.error(`[new-hook] Created new session ${i}, prompt #${u}`)}}if(!await y())throw new Error("Worker service failed to start or become healthy");let m=x();if(d){let p=await fetch(`http://127.0.0.1:${m}/sessions/${i}/init`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({project:r,userPrompt:t}),signal:AbortSignal.timeout(5e3)});if(!p.ok){let u=await p.text();throw new Error(`Failed to initialize session: ${p.status} ${u}`)}}console.log(A("UserPromptSubmit",!0))}finally{n.close()}}import{stdin as w}from"process";var R="";w.on("data",o=>R+=o);w.on("end",async()=>{let o=R.trim()?JSON.parse(R):void 0;await U(o),process.exit(0)});
+70 -8
View File
@@ -1,7 +1,69 @@
#!/usr/bin/env node
import y from"better-sqlite3";import{join as c,dirname as F,basename as j}from"path";import{homedir as O}from"os";import{existsSync as q,mkdirSync as C}from"fs";var D=()=>process.env.CLAUDE_PLUGIN_ROOT?c(process.env.CLAUDE_PLUGIN_ROOT,"data"):process.env.CLAUDE_MEM_DATA_DIR?process.env.CLAUDE_MEM_DATA_DIR:c(O(),".claude-mem"),p=D(),g=process.env.CLAUDE_CONFIG_DIR||c(O(),".claude"),J=c(p,"archives"),Y=c(p,"logs"),V=c(p,"trash"),Q=c(p,"backups"),z=c(p,"settings.json"),L=c(p,"claude-mem.db"),Z=c(g,"settings.json"),ee=c(g,"commands"),se=c(g,"CLAUDE.md");function A(n){C(n,{recursive:!0})}var T=(o=>(o[o.DEBUG=0]="DEBUG",o[o.INFO=1]="INFO",o[o.WARN=2]="WARN",o[o.ERROR=3]="ERROR",o[o.SILENT=4]="SILENT",o))(T||{}),b=class{level;useColor;constructor(){let e=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=T[e]??1,this.useColor=process.stdout.isTTY??!1}correlationId(e,s){return`obs-${e}-${s}`}sessionId(e){return`session-${e}`}formatData(e){if(e==null)return"";if(typeof e=="string")return e;if(typeof e=="number"||typeof e=="boolean")return e.toString();if(typeof e=="object"){if(e instanceof Error)return this.level===0?`${e.message}
${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Object.keys(e);return s.length===0?"{}":s.length<=3?JSON.stringify(e):`{${s.length} keys: ${s.slice(0,3).join(", ")}...}`}return String(e)}formatTool(e,s){if(!s)return e;try{let t=typeof s=="string"?JSON.parse(s):s;if(e==="Bash"&&t.command){let r=t.command.length>50?t.command.substring(0,50)+"...":t.command;return`${e}(${r})`}if(e==="Read"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Edit"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Write"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}return e}catch{return e}}log(e,s,t,r,o){if(e<this.level)return;let i=new Date().toISOString().replace("T"," ").substring(0,23),d=T[e].padEnd(5),_=s.padEnd(6),a="";r?.correlationId?a=`[${r.correlationId}] `:r?.sessionId&&(a=`[session-${r.sessionId}] `);let m="";o!=null&&(this.level===0&&typeof o=="object"?m=`
`+JSON.stringify(o,null,2):m=" "+this.formatData(o));let S="";if(r){let{sessionId:P,sdkSessionId:w,correlationId:M,...R}=r;Object.keys(R).length>0&&(S=` {${Object.entries(R).map(([v,N])=>`${v}=${N}`).join(", ")}}`)}let h=`[${i}] [${d}] [${_}] ${a}${t}${S}${m}`;e===3?console.error(h):console.log(h)}debug(e,s,t,r){this.log(0,e,s,t,r)}info(e,s,t,r){this.log(1,e,s,t,r)}warn(e,s,t,r){this.log(2,e,s,t,r)}error(e,s,t,r){this.log(3,e,s,t,r)}dataIn(e,s,t,r){this.info(e,`\u2192 ${s}`,t,r)}dataOut(e,s,t,r){this.info(e,`\u2190 ${s}`,t,r)}success(e,s,t,r){this.info(e,`\u2713 ${s}`,t,r)}failure(e,s,t,r){this.error(e,`\u2717 ${s}`,t,r)}timing(e,s,t,r){this.info(e,`\u23F1 ${s}`,r,{duration:`${t}ms`})}},u=new b;var E=class{db;constructor(){A(p),this.db=new y(L),this.db.pragma("journal_mode = WAL"),this.db.pragma("synchronous = NORMAL"),this.db.pragma("foreign_keys = ON"),this.ensureWorkerPortColumn(),this.ensurePromptTrackingColumns(),this.removeSessionSummariesUniqueConstraint(),this.addObservationHierarchicalFields(),this.makeObservationsTextNullable()}ensureWorkerPortColumn(){try{this.db.pragma("table_info(sdk_sessions)").some(t=>t.name==="worker_port")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER"),console.error("[SessionStore] Added worker_port column to sdk_sessions table"))}catch(e){console.error("[SessionStore] Migration error:",e.message)}}ensurePromptTrackingColumns(){try{this.db.pragma("table_info(sdk_sessions)").some(a=>a.name==="prompt_counter")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN prompt_counter INTEGER DEFAULT 0"),console.error("[SessionStore] Added prompt_counter column to sdk_sessions table")),this.db.pragma("table_info(observations)").some(a=>a.name==="prompt_number")||(this.db.exec("ALTER TABLE observations ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to observations 
table")),this.db.pragma("table_info(session_summaries)").some(a=>a.name==="prompt_number")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to session_summaries table"));let _=this.db.pragma("index_list(session_summaries)").some(a=>a.unique===1)}catch(e){console.error("[SessionStore] Prompt tracking migration error:",e.message)}}removeSessionSummariesUniqueConstraint(){try{if(!this.db.pragma("index_list(session_summaries)").some(t=>t.unique===1))return;console.error("[SessionStore] Removing UNIQUE constraint from session_summaries.sdk_session_id..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
import U from"better-sqlite3";import{join as a,dirname as k,basename as G}from"path";import{homedir as I}from"os";import{existsSync as K,mkdirSync as D}from"fs";import{fileURLToPath as y}from"url";function x(){return typeof __dirname<"u"?__dirname:k(y(import.meta.url))}var q=x(),c=process.env.CLAUDE_MEM_DATA_DIR||a(I(),".claude-mem"),T=process.env.CLAUDE_CONFIG_DIR||a(I(),".claude"),V=a(c,"archives"),J=a(c,"logs"),Q=a(c,"trash"),z=a(c,"backups"),Z=a(c,"settings.json"),f=a(c,"claude-mem.db"),ee=a(T,"settings.json"),se=a(T,"commands"),te=a(T,"CLAUDE.md");function h(n){D(n,{recursive:!0})}var S=(o=>(o[o.DEBUG=0]="DEBUG",o[o.INFO=1]="INFO",o[o.WARN=2]="WARN",o[o.ERROR=3]="ERROR",o[o.SILENT=4]="SILENT",o))(S||{}),g=class{level;useColor;constructor(){let e=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=S[e]??1,this.useColor=process.stdout.isTTY??!1}correlationId(e,s){return`obs-${e}-${s}`}sessionId(e){return`session-${e}`}formatData(e){if(e==null)return"";if(typeof e=="string")return e;if(typeof e=="number"||typeof e=="boolean")return e.toString();if(typeof e=="object"){if(e instanceof Error)return this.level===0?`${e.message}
${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Object.keys(e);return s.length===0?"{}":s.length<=3?JSON.stringify(e):`{${s.length} keys: ${s.slice(0,3).join(", ")}...}`}return String(e)}formatTool(e,s){if(!s)return e;try{let t=typeof s=="string"?JSON.parse(s):s;if(e==="Bash"&&t.command){let r=t.command.length>50?t.command.substring(0,50)+"...":t.command;return`${e}(${r})`}if(e==="Read"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Edit"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Write"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}return e}catch{return e}}log(e,s,t,r,o){if(e<this.level)return;let i=new Date().toISOString().replace("T"," ").substring(0,23),u=S[e].padEnd(5),d=s.padEnd(6),p="";r?.correlationId?p=`[${r.correlationId}] `:r?.sessionId&&(p=`[session-${r.sessionId}] `);let _="";o!=null&&(this.level===0&&typeof o=="object"?_=`
`+JSON.stringify(o,null,2):_=" "+this.formatData(o));let N="";if(r){let{sessionId:M,sdkSessionId:P,correlationId:F,...R}=r;Object.keys(R).length>0&&(N=` {${Object.entries(R).map(([A,C])=>`${A}=${C}`).join(", ")}}`)}let O=`[${i}] [${u}] [${d}] ${p}${t}${N}${_}`;e===3?console.error(O):console.log(O)}debug(e,s,t,r){this.log(0,e,s,t,r)}info(e,s,t,r){this.log(1,e,s,t,r)}warn(e,s,t,r){this.log(2,e,s,t,r)}error(e,s,t,r){this.log(3,e,s,t,r)}dataIn(e,s,t,r){this.info(e,`\u2192 ${s}`,t,r)}dataOut(e,s,t,r){this.info(e,`\u2190 ${s}`,t,r)}success(e,s,t,r){this.info(e,`\u2713 ${s}`,t,r)}failure(e,s,t,r){this.error(e,`\u2717 ${s}`,t,r)}timing(e,s,t,r){this.info(e,`\u23F1 ${s}`,r,{duration:`${t}ms`})}},E=new g;var m=class{db;constructor(){h(c),this.db=new U(f),this.db.pragma("journal_mode = WAL"),this.db.pragma("synchronous = NORMAL"),this.db.pragma("foreign_keys = ON"),this.initializeSchema(),this.ensureWorkerPortColumn(),this.ensurePromptTrackingColumns(),this.removeSessionSummariesUniqueConstraint(),this.addObservationHierarchicalFields(),this.makeObservationsTextNullable()}initializeSchema(){try{this.db.exec(`
CREATE TABLE IF NOT EXISTS schema_versions (
id INTEGER PRIMARY KEY,
version INTEGER UNIQUE NOT NULL,
applied_at TEXT NOT NULL
)
`);let e=this.db.prepare("SELECT version FROM schema_versions ORDER BY version").all();(e.length>0?Math.max(...e.map(t=>t.version)):0)===0&&(console.error("[SessionStore] Initializing fresh database with migration004..."),this.db.exec(`
CREATE TABLE IF NOT EXISTS sdk_sessions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
claude_session_id TEXT UNIQUE NOT NULL,
sdk_session_id TEXT UNIQUE,
project TEXT NOT NULL,
user_prompt TEXT,
started_at TEXT NOT NULL,
started_at_epoch INTEGER NOT NULL,
completed_at TEXT,
completed_at_epoch INTEGER,
status TEXT CHECK(status IN ('active', 'completed', 'failed')) NOT NULL DEFAULT 'active'
);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_claude_id ON sdk_sessions(claude_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_sdk_id ON sdk_sessions(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_project ON sdk_sessions(project);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_status ON sdk_sessions(status);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_started ON sdk_sessions(started_at_epoch DESC);
CREATE TABLE IF NOT EXISTS observations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT NOT NULL,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery')),
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_observations_sdk_session ON observations(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_observations_project ON observations(project);
CREATE INDEX IF NOT EXISTS idx_observations_type ON observations(type);
CREATE INDEX IF NOT EXISTS idx_observations_created ON observations(created_at_epoch DESC);
CREATE TABLE IF NOT EXISTS session_summaries (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT UNIQUE NOT NULL,
project TEXT NOT NULL,
request TEXT,
investigated TEXT,
learned TEXT,
completed TEXT,
next_steps TEXT,
files_read TEXT,
files_edited TEXT,
notes TEXT,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_session_summaries_project ON session_summaries(project);
CREATE INDEX IF NOT EXISTS idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.prepare("INSERT INTO schema_versions (version, applied_at) VALUES (?, ?)").run(4,new Date().toISOString()),console.error("[SessionStore] Migration004 applied successfully"))}catch(e){throw console.error("[SessionStore] Schema initialization error:",e.message),e}}ensureWorkerPortColumn(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(5))return;this.db.pragma("table_info(sdk_sessions)").some(r=>r.name==="worker_port")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER"),console.error("[SessionStore] Added worker_port column to sdk_sessions table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(5,new Date().toISOString())}catch(e){console.error("[SessionStore] Migration error:",e.message)}}ensurePromptTrackingColumns(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(6))return;this.db.pragma("table_info(sdk_sessions)").some(d=>d.name==="prompt_counter")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN prompt_counter INTEGER DEFAULT 0"),console.error("[SessionStore] Added prompt_counter column to sdk_sessions table")),this.db.pragma("table_info(observations)").some(d=>d.name==="prompt_number")||(this.db.exec("ALTER TABLE observations ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to observations table")),this.db.pragma("table_info(session_summaries)").some(d=>d.name==="prompt_number")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to session_summaries table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(6,new Date().toISOString())}catch(e){console.error("[SessionStore] Prompt tracking migration error:",e.message)}}removeSessionSummariesUniqueConstraint(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = 
?").get(7))return;if(!this.db.pragma("index_list(session_summaries)").some(r=>r.unique===1)){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString());return}console.error("[SessionStore] Removing UNIQUE constraint from session_summaries.sdk_session_id..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE session_summaries_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
@@ -29,7 +91,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
CREATE INDEX idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX idx_session_summaries_project ON session_summaries(project);
CREATE INDEX idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.exec("COMMIT"),console.error("[SessionStore] Successfully removed UNIQUE constraint from session_summaries.sdk_session_id")}catch(t){throw this.db.exec("ROLLBACK"),t}}catch(e){console.error("[SessionStore] Migration error (remove UNIQUE constraint):",e.message)}}addObservationHierarchicalFields(){try{if(this.db.pragma("table_info(observations)").some(t=>t.name==="title"))return;console.error("[SessionStore] Adding hierarchical fields to observations table..."),this.db.exec(`
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString()),console.error("[SessionStore] Successfully removed UNIQUE constraint from session_summaries.sdk_session_id")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (remove UNIQUE constraint):",e.message)}}addObservationHierarchicalFields(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(8))return;if(this.db.pragma("table_info(observations)").some(r=>r.name==="title")){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString());return}console.error("[SessionStore] Adding hierarchical fields to observations table..."),this.db.exec(`
ALTER TABLE observations ADD COLUMN title TEXT;
ALTER TABLE observations ADD COLUMN subtitle TEXT;
ALTER TABLE observations ADD COLUMN facts TEXT;
@@ -37,7 +99,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
ALTER TABLE observations ADD COLUMN concepts TEXT;
ALTER TABLE observations ADD COLUMN files_read TEXT;
ALTER TABLE observations ADD COLUMN files_modified TEXT;
`),console.error("[SessionStore] Successfully added hierarchical fields to observations table")}catch(e){console.error("[SessionStore] Migration error (add hierarchical fields):",e.message)}}makeObservationsTextNullable(){try{let s=this.db.pragma("table_info(observations)").find(t=>t.name==="text");if(!s||s.notnull===0)return;console.error("[SessionStore] Making observations.text nullable..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
`),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString()),console.error("[SessionStore] Successfully added hierarchical fields to observations table")}catch(e){console.error("[SessionStore] Migration error (add hierarchical fields):",e.message)}}makeObservationsTextNullable(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(9))return;let t=this.db.pragma("table_info(observations)").find(r=>r.name==="text");if(!t||t.notnull===0){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString());return}console.error("[SessionStore] Making observations.text nullable..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE observations_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
@@ -67,7 +129,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
CREATE INDEX idx_observations_project ON observations(project);
CREATE INDEX idx_observations_type ON observations(type);
CREATE INDEX idx_observations_created ON observations(created_at_epoch DESC);
`),this.db.exec("COMMIT"),console.error("[SessionStore] Successfully made observations.text nullable")}catch(t){throw this.db.exec("ROLLBACK"),t}}catch(e){console.error("[SessionStore] Migration error (make text nullable):",e.message)}}getRecentSummaries(e,s=10){return this.db.prepare(`
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString()),console.error("[SessionStore] Successfully made observations.text nullable")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (make text nullable):",e.message)}}getRecentSummaries(e,s=10){return this.db.prepare(`
SELECT
request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, prompt_number, created_at
@@ -146,7 +208,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
UPDATE sdk_sessions
SET sdk_session_id = ?
WHERE id = ? AND sdk_session_id IS NULL
`).run(s,e).changes===0?(u.debug("DB","sdk_session_id already set, skipping update",{sessionId:e,sdkSessionId:s}),!1):!0}setWorkerPort(e,s){this.db.prepare(`
`).run(s,e).changes===0?(E.debug("DB","sdk_session_id already set, skipping update",{sessionId:e,sdkSessionId:s}),!1):!0}setWorkerPort(e,s){this.db.prepare(`
UPDATE sdk_sessions
SET worker_port = ?
WHERE id = ?
@@ -177,4 +239,4 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Obje
UPDATE sdk_sessions
SET status = 'failed', completed_at = ?, completed_at_epoch = ?
WHERE status = 'active'
`).run(e.toISOString(),s).changes}close(){this.db.close()}};function x(n,e,s){return n==="PreCompact"?e?{continue:!0,suppressOutput:!0}:{continue:!1,stopReason:s.reason||"Pre-compact operation failed",suppressOutput:!0}:n==="SessionStart"?e&&s.context?{continue:!0,suppressOutput:!0,hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:s.context}}:{continue:!0,suppressOutput:!0}:n==="UserPromptSubmit"||n==="PostToolUse"?{continue:!0,suppressOutput:!0}:n==="Stop"?{continue:!0,suppressOutput:!0}:{continue:e,suppressOutput:!0,...s.reason&&!e?{stopReason:s.reason}:{}}}function l(n,e,s={}){let t=x(n,e,s);return JSON.stringify(t)}var U=new Set(["ListMcpResourcesTool"]);async function k(n){if(!n)throw new Error("saveHook requires input");let{session_id:e,tool_name:s,tool_input:t,tool_output:r}=n;if(U.has(s)){console.log(l("PostToolUse",!0));return}let o=new E,i=o.findActiveSDKSession(e);if(!i){o.close(),console.log(l("PostToolUse",!0));return}if(!i.worker_port){o.close(),u.error("HOOK","No worker port for session",{sessionId:i.id}),console.log(l("PostToolUse",!0));return}let d=o.getPromptCounter(i.id);o.close();let _=u.formatTool(s,t);try{u.dataIn("HOOK",`PostToolUse: ${_}`,{sessionId:i.id,workerPort:i.worker_port});let a=await fetch(`http://127.0.0.1:${i.worker_port}/sessions/${i.id}/observations`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({tool_name:s,tool_input:t!==void 0?JSON.stringify(t):"{}",tool_output:r!==void 0?JSON.stringify(r):"{}",prompt_number:d}),signal:AbortSignal.timeout(2e3)});if(a.ok)u.debug("HOOK","Observation sent successfully",{sessionId:i.id,toolName:s});else{let m=await a.text();u.failure("HOOK","Failed to send observation",{sessionId:i.id,status:a.status},m)}}catch(a){u.failure("HOOK","Error sending observation",{sessionId:i.id},a)}finally{console.log(l("PostToolUse",!0))}}import{stdin as I}from"process";var f="";I.on("data",n=>f+=n);I.on("end",async()=>{try{let n=f.trim()?JSON.parse(f):void 0;await 
k(n),process.exit(0)}catch(n){console.error(`[claude-mem save-hook error: ${n.message}]`),console.log('{"continue": true, "suppressOutput": true}'),process.exit(0)}});
`).run(e.toISOString(),s).changes}close(){this.db.close()}};function X(n,e,s){return n==="PreCompact"?e?{continue:!0,suppressOutput:!0}:{continue:!1,stopReason:s.reason||"Pre-compact operation failed",suppressOutput:!0}:n==="SessionStart"?e&&s.context?{continue:!0,suppressOutput:!0,hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:s.context}}:{continue:!0,suppressOutput:!0}:n==="UserPromptSubmit"||n==="PostToolUse"?{continue:!0,suppressOutput:!0}:n==="Stop"?{continue:!0,suppressOutput:!0}:{continue:e,suppressOutput:!0,...s.reason&&!e?{stopReason:s.reason}:{}}}function l(n,e,s={}){let t=X(n,e,s);return JSON.stringify(t)}var w=new Set(["ListMcpResourcesTool"]);async function L(n){if(!n)throw new Error("saveHook requires input");let{session_id:e,tool_name:s,tool_input:t,tool_output:r}=n;if(w.has(s)){console.log(l("PostToolUse",!0));return}let o=new m,i=o.findActiveSDKSession(e);if(!i){o.close(),console.log(l("PostToolUse",!0));return}if(!i.worker_port)throw o.close(),E.error("HOOK","No worker port for session",{sessionId:i.id}),new Error("No worker port for session - session may not be properly initialized");let u=o.getPromptCounter(i.id);o.close();let d=E.formatTool(s,t);E.dataIn("HOOK",`PostToolUse: ${d}`,{sessionId:i.id,workerPort:i.worker_port});let p=await fetch(`http://127.0.0.1:${i.worker_port}/sessions/${i.id}/observations`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({tool_name:s,tool_input:t!==void 0?JSON.stringify(t):"{}",tool_output:r!==void 0?JSON.stringify(r):"{}",prompt_number:u}),signal:AbortSignal.timeout(2e3)});if(!p.ok){let _=await p.text();throw E.failure("HOOK","Failed to send observation",{sessionId:i.id,status:p.status},_),new Error(`Failed to send observation to worker: ${p.status} ${_}`)}E.debug("HOOK","Observation sent successfully",{sessionId:i.id,toolName:s}),console.log(l("PostToolUse",!0))}import{stdin as v}from"process";var b="";v.on("data",n=>b+=n);v.on("end",async()=>{let 
n=b.trim()?JSON.parse(b):void 0;await L(n),process.exit(0)});
File diff suppressed because one or more lines are too long
+83 -21
View File
@@ -1,7 +1,69 @@
#!/usr/bin/env node
import v from"better-sqlite3";import{join as i,dirname as X,basename as F}from"path";import{homedir as O}from"os";import{existsSync as q,mkdirSync as N}from"fs";var D=()=>process.env.CLAUDE_PLUGIN_ROOT?i(process.env.CLAUDE_PLUGIN_ROOT,"data"):process.env.CLAUDE_MEM_DATA_DIR?process.env.CLAUDE_MEM_DATA_DIR:i(O(),".claude-mem"),c=D(),E=process.env.CLAUDE_CONFIG_DIR||i(O(),".claude"),K=i(c,"archives"),J=i(c,"logs"),Y=i(c,"trash"),V=i(c,"backups"),Q=i(c,"settings.json"),L=i(c,"claude-mem.db"),z=i(E,"settings.json"),Z=i(E,"commands"),ee=i(E,"CLAUDE.md");function A(o){N(o,{recursive:!0})}var g=(n=>(n[n.DEBUG=0]="DEBUG",n[n.INFO=1]="INFO",n[n.WARN=2]="WARN",n[n.ERROR=3]="ERROR",n[n.SILENT=4]="SILENT",n))(g||{}),T=class{level;useColor;constructor(){let e=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=g[e]??1,this.useColor=process.stdout.isTTY??!1}correlationId(e,t){return`obs-${e}-${t}`}sessionId(e){return`session-${e}`}formatData(e){if(e==null)return"";if(typeof e=="string")return e;if(typeof e=="number"||typeof e=="boolean")return e.toString();if(typeof e=="object"){if(e instanceof Error)return this.level===0?`${e.message}
${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Object.keys(e);return t.length===0?"{}":t.length<=3?JSON.stringify(e):`{${t.length} keys: ${t.slice(0,3).join(", ")}...}`}return String(e)}formatTool(e,t){if(!t)return e;try{let s=typeof t=="string"?JSON.parse(t):t;if(e==="Bash"&&s.command){let r=s.command.length>50?s.command.substring(0,50)+"...":s.command;return`${e}(${r})`}if(e==="Read"&&s.file_path){let r=s.file_path.split("/").pop()||s.file_path;return`${e}(${r})`}if(e==="Edit"&&s.file_path){let r=s.file_path.split("/").pop()||s.file_path;return`${e}(${r})`}if(e==="Write"&&s.file_path){let r=s.file_path.split("/").pop()||s.file_path;return`${e}(${r})`}return e}catch{return e}}log(e,t,s,r,n){if(e<this.level)return;let u=new Date().toISOString().replace("T"," ").substring(0,23),d=g[e].padEnd(5),f=t.padEnd(6),a="";r?.correlationId?a=`[${r.correlationId}] `:r?.sessionId&&(a=`[session-${r.sessionId}] `);let _="";n!=null&&(this.level===0&&typeof n=="object"?_=`
`+JSON.stringify(n,null,2):_=" "+this.formatData(n));let S="";if(r){let{sessionId:U,sdkSessionId:w,correlationId:P,...R}=r;Object.keys(R).length>0&&(S=` {${Object.entries(R).map(([C,y])=>`${C}=${y}`).join(", ")}}`)}let h=`[${u}] [${d}] [${f}] ${a}${s}${S}${_}`;e===3?console.error(h):console.log(h)}debug(e,t,s,r){this.log(0,e,t,s,r)}info(e,t,s,r){this.log(1,e,t,s,r)}warn(e,t,s,r){this.log(2,e,t,s,r)}error(e,t,s,r){this.log(3,e,t,s,r)}dataIn(e,t,s,r){this.info(e,`\u2192 ${t}`,s,r)}dataOut(e,t,s,r){this.info(e,`\u2190 ${t}`,s,r)}success(e,t,s,r){this.info(e,`\u2713 ${t}`,s,r)}failure(e,t,s,r){this.error(e,`\u2717 ${t}`,s,r)}timing(e,t,s,r){this.info(e,`\u23F1 ${t}`,r,{duration:`${s}ms`})}},p=new T;var m=class{db;constructor(){A(c),this.db=new v(L),this.db.pragma("journal_mode = WAL"),this.db.pragma("synchronous = NORMAL"),this.db.pragma("foreign_keys = ON"),this.ensureWorkerPortColumn(),this.ensurePromptTrackingColumns(),this.removeSessionSummariesUniqueConstraint(),this.addObservationHierarchicalFields(),this.makeObservationsTextNullable()}ensureWorkerPortColumn(){try{this.db.pragma("table_info(sdk_sessions)").some(s=>s.name==="worker_port")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER"),console.error("[SessionStore] Added worker_port column to sdk_sessions table"))}catch(e){console.error("[SessionStore] Migration error:",e.message)}}ensurePromptTrackingColumns(){try{this.db.pragma("table_info(sdk_sessions)").some(a=>a.name==="prompt_counter")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN prompt_counter INTEGER DEFAULT 0"),console.error("[SessionStore] Added prompt_counter column to sdk_sessions table")),this.db.pragma("table_info(observations)").some(a=>a.name==="prompt_number")||(this.db.exec("ALTER TABLE observations ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to observations 
table")),this.db.pragma("table_info(session_summaries)").some(a=>a.name==="prompt_number")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to session_summaries table"));let f=this.db.pragma("index_list(session_summaries)").some(a=>a.unique===1)}catch(e){console.error("[SessionStore] Prompt tracking migration error:",e.message)}}removeSessionSummariesUniqueConstraint(){try{if(!this.db.pragma("index_list(session_summaries)").some(s=>s.unique===1))return;console.error("[SessionStore] Removing UNIQUE constraint from session_summaries.sdk_session_id..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
import U from"better-sqlite3";import{join as i,dirname as k,basename as B}from"path";import{homedir as I}from"os";import{existsSync as W,mkdirSync as D}from"fs";import{fileURLToPath as y}from"url";function x(){return typeof __dirname<"u"?__dirname:k(y(import.meta.url))}var Y=x(),d=process.env.CLAUDE_MEM_DATA_DIR||i(I(),".claude-mem"),l=process.env.CLAUDE_CONFIG_DIR||i(I(),".claude"),q=i(d,"archives"),V=i(d,"logs"),J=i(d,"trash"),Q=i(d,"backups"),z=i(d,"settings.json"),f=i(d,"claude-mem.db"),Z=i(l,"settings.json"),ee=i(l,"commands"),se=i(l,"CLAUDE.md");function h(o){D(o,{recursive:!0})}var T=(n=>(n[n.DEBUG=0]="DEBUG",n[n.INFO=1]="INFO",n[n.WARN=2]="WARN",n[n.ERROR=3]="ERROR",n[n.SILENT=4]="SILENT",n))(T||{}),S=class{level;useColor;constructor(){let e=process.env.CLAUDE_MEM_LOG_LEVEL?.toUpperCase()||"INFO";this.level=T[e]??1,this.useColor=process.stdout.isTTY??!1}correlationId(e,s){return`obs-${e}-${s}`}sessionId(e){return`session-${e}`}formatData(e){if(e==null)return"";if(typeof e=="string")return e;if(typeof e=="number"||typeof e=="boolean")return e.toString();if(typeof e=="object"){if(e instanceof Error)return this.level===0?`${e.message}
${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let s=Object.keys(e);return s.length===0?"{}":s.length<=3?JSON.stringify(e):`{${s.length} keys: ${s.slice(0,3).join(", ")}...}`}return String(e)}formatTool(e,s){if(!s)return e;try{let t=typeof s=="string"?JSON.parse(s):s;if(e==="Bash"&&t.command){let r=t.command.length>50?t.command.substring(0,50)+"...":t.command;return`${e}(${r})`}if(e==="Read"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Edit"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}if(e==="Write"&&t.file_path){let r=t.file_path.split("/").pop()||t.file_path;return`${e}(${r})`}return e}catch{return e}}log(e,s,t,r,n){if(e<this.level)return;let a=new Date().toISOString().replace("T"," ").substring(0,23),E=T[e].padEnd(5),p=s.padEnd(6),m="";r?.correlationId?m=`[${r.correlationId}] `:r?.sessionId&&(m=`[session-${r.sessionId}] `);let _="";n!=null&&(this.level===0&&typeof n=="object"?_=`
`+JSON.stringify(n,null,2):_=" "+this.formatData(n));let N="";if(r){let{sessionId:w,sdkSessionId:M,correlationId:F,...R}=r;Object.keys(R).length>0&&(N=` {${Object.entries(R).map(([A,C])=>`${A}=${C}`).join(", ")}}`)}let O=`[${a}] [${E}] [${p}] ${m}${t}${N}${_}`;e===3?console.error(O):console.log(O)}debug(e,s,t,r){this.log(0,e,s,t,r)}info(e,s,t,r){this.log(1,e,s,t,r)}warn(e,s,t,r){this.log(2,e,s,t,r)}error(e,s,t,r){this.log(3,e,s,t,r)}dataIn(e,s,t,r){this.info(e,`\u2192 ${s}`,t,r)}dataOut(e,s,t,r){this.info(e,`\u2190 ${s}`,t,r)}success(e,s,t,r){this.info(e,`\u2713 ${s}`,t,r)}failure(e,s,t,r){this.error(e,`\u2717 ${s}`,t,r)}timing(e,s,t,r){this.info(e,`\u23F1 ${s}`,r,{duration:`${t}ms`})}},c=new S;var u=class{db;constructor(){h(d),this.db=new U(f),this.db.pragma("journal_mode = WAL"),this.db.pragma("synchronous = NORMAL"),this.db.pragma("foreign_keys = ON"),this.initializeSchema(),this.ensureWorkerPortColumn(),this.ensurePromptTrackingColumns(),this.removeSessionSummariesUniqueConstraint(),this.addObservationHierarchicalFields(),this.makeObservationsTextNullable()}initializeSchema(){try{this.db.exec(`
CREATE TABLE IF NOT EXISTS schema_versions (
id INTEGER PRIMARY KEY,
version INTEGER UNIQUE NOT NULL,
applied_at TEXT NOT NULL
)
`);let e=this.db.prepare("SELECT version FROM schema_versions ORDER BY version").all();(e.length>0?Math.max(...e.map(t=>t.version)):0)===0&&(console.error("[SessionStore] Initializing fresh database with migration004..."),this.db.exec(`
CREATE TABLE IF NOT EXISTS sdk_sessions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
claude_session_id TEXT UNIQUE NOT NULL,
sdk_session_id TEXT UNIQUE,
project TEXT NOT NULL,
user_prompt TEXT,
started_at TEXT NOT NULL,
started_at_epoch INTEGER NOT NULL,
completed_at TEXT,
completed_at_epoch INTEGER,
status TEXT CHECK(status IN ('active', 'completed', 'failed')) NOT NULL DEFAULT 'active'
);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_claude_id ON sdk_sessions(claude_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_sdk_id ON sdk_sessions(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_project ON sdk_sessions(project);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_status ON sdk_sessions(status);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_started ON sdk_sessions(started_at_epoch DESC);
CREATE TABLE IF NOT EXISTS observations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT NOT NULL,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery')),
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_observations_sdk_session ON observations(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_observations_project ON observations(project);
CREATE INDEX IF NOT EXISTS idx_observations_type ON observations(type);
CREATE INDEX IF NOT EXISTS idx_observations_created ON observations(created_at_epoch DESC);
CREATE TABLE IF NOT EXISTS session_summaries (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT UNIQUE NOT NULL,
project TEXT NOT NULL,
request TEXT,
investigated TEXT,
learned TEXT,
completed TEXT,
next_steps TEXT,
files_read TEXT,
files_edited TEXT,
notes TEXT,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_session_summaries_project ON session_summaries(project);
CREATE INDEX IF NOT EXISTS idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.prepare("INSERT INTO schema_versions (version, applied_at) VALUES (?, ?)").run(4,new Date().toISOString()),console.error("[SessionStore] Migration004 applied successfully"))}catch(e){throw console.error("[SessionStore] Schema initialization error:",e.message),e}}ensureWorkerPortColumn(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(5))return;this.db.pragma("table_info(sdk_sessions)").some(r=>r.name==="worker_port")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER"),console.error("[SessionStore] Added worker_port column to sdk_sessions table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(5,new Date().toISOString())}catch(e){console.error("[SessionStore] Migration error:",e.message)}}ensurePromptTrackingColumns(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(6))return;this.db.pragma("table_info(sdk_sessions)").some(p=>p.name==="prompt_counter")||(this.db.exec("ALTER TABLE sdk_sessions ADD COLUMN prompt_counter INTEGER DEFAULT 0"),console.error("[SessionStore] Added prompt_counter column to sdk_sessions table")),this.db.pragma("table_info(observations)").some(p=>p.name==="prompt_number")||(this.db.exec("ALTER TABLE observations ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to observations table")),this.db.pragma("table_info(session_summaries)").some(p=>p.name==="prompt_number")||(this.db.exec("ALTER TABLE session_summaries ADD COLUMN prompt_number INTEGER"),console.error("[SessionStore] Added prompt_number column to session_summaries table")),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(6,new Date().toISOString())}catch(e){console.error("[SessionStore] Prompt tracking migration error:",e.message)}}removeSessionSummariesUniqueConstraint(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = 
?").get(7))return;if(!this.db.pragma("index_list(session_summaries)").some(r=>r.unique===1)){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString());return}console.error("[SessionStore] Removing UNIQUE constraint from session_summaries.sdk_session_id..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE session_summaries_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
@@ -29,7 +91,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
CREATE INDEX idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX idx_session_summaries_project ON session_summaries(project);
CREATE INDEX idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`),this.db.exec("COMMIT"),console.error("[SessionStore] Successfully removed UNIQUE constraint from session_summaries.sdk_session_id")}catch(s){throw this.db.exec("ROLLBACK"),s}}catch(e){console.error("[SessionStore] Migration error (remove UNIQUE constraint):",e.message)}}addObservationHierarchicalFields(){try{if(this.db.pragma("table_info(observations)").some(s=>s.name==="title"))return;console.error("[SessionStore] Adding hierarchical fields to observations table..."),this.db.exec(`
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(7,new Date().toISOString()),console.error("[SessionStore] Successfully removed UNIQUE constraint from session_summaries.sdk_session_id")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (remove UNIQUE constraint):",e.message)}}addObservationHierarchicalFields(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(8))return;if(this.db.pragma("table_info(observations)").some(r=>r.name==="title")){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString());return}console.error("[SessionStore] Adding hierarchical fields to observations table..."),this.db.exec(`
ALTER TABLE observations ADD COLUMN title TEXT;
ALTER TABLE observations ADD COLUMN subtitle TEXT;
ALTER TABLE observations ADD COLUMN facts TEXT;
@@ -37,7 +99,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
ALTER TABLE observations ADD COLUMN concepts TEXT;
ALTER TABLE observations ADD COLUMN files_read TEXT;
ALTER TABLE observations ADD COLUMN files_modified TEXT;
`),console.error("[SessionStore] Successfully added hierarchical fields to observations table")}catch(e){console.error("[SessionStore] Migration error (add hierarchical fields):",e.message)}}makeObservationsTextNullable(){try{let t=this.db.pragma("table_info(observations)").find(s=>s.name==="text");if(!t||t.notnull===0)return;console.error("[SessionStore] Making observations.text nullable..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
`),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(8,new Date().toISOString()),console.error("[SessionStore] Successfully added hierarchical fields to observations table")}catch(e){console.error("[SessionStore] Migration error (add hierarchical fields):",e.message)}}makeObservationsTextNullable(){try{if(this.db.prepare("SELECT version FROM schema_versions WHERE version = ?").get(9))return;let t=this.db.pragma("table_info(observations)").find(r=>r.name==="text");if(!t||t.notnull===0){this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString());return}console.error("[SessionStore] Making observations.text nullable..."),this.db.exec("BEGIN TRANSACTION");try{this.db.exec(`
CREATE TABLE observations_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
@@ -67,7 +129,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
CREATE INDEX idx_observations_project ON observations(project);
CREATE INDEX idx_observations_type ON observations(type);
CREATE INDEX idx_observations_created ON observations(created_at_epoch DESC);
`),this.db.exec("COMMIT"),console.error("[SessionStore] Successfully made observations.text nullable")}catch(s){throw this.db.exec("ROLLBACK"),s}}catch(e){console.error("[SessionStore] Migration error (make text nullable):",e.message)}}getRecentSummaries(e,t=10){return this.db.prepare(`
`),this.db.exec("COMMIT"),this.db.prepare("INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)").run(9,new Date().toISOString()),console.error("[SessionStore] Successfully made observations.text nullable")}catch(r){throw this.db.exec("ROLLBACK"),r}}catch(e){console.error("[SessionStore] Migration error (make text nullable):",e.message)}}getRecentSummaries(e,s=10){return this.db.prepare(`
SELECT
request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, prompt_number, created_at
@@ -75,13 +137,13 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
WHERE project = ?
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e,t)}getRecentObservations(e,t=20){return this.db.prepare(`
`).all(e,s)}getRecentObservations(e,s=20){return this.db.prepare(`
SELECT type, text, prompt_number, created_at
FROM observations
WHERE project = ?
ORDER BY created_at_epoch DESC
LIMIT ?
`).all(e,t)}getRecentSessionsWithStatus(e,t=3){return this.db.prepare(`
`).all(e,s)}getRecentSessionsWithStatus(e,s=3){return this.db.prepare(`
SELECT * FROM (
SELECT
s.sdk_session_id,
@@ -98,7 +160,7 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
LIMIT ?
)
ORDER BY started_at_epoch ASC
`).all(e,t)}getObservationsForSession(e){return this.db.prepare(`
`).all(e,s)}getObservationsForSession(e){return this.db.prepare(`
SELECT title, subtitle, type, prompt_number
FROM observations
WHERE sdk_session_id = ?
@@ -126,11 +188,11 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
FROM sdk_sessions
WHERE claude_session_id = ?
LIMIT 1
`).get(e)||null}reactivateSession(e,t){this.db.prepare(`
`).get(e)||null}reactivateSession(e,s){this.db.prepare(`
UPDATE sdk_sessions
SET status = 'active', user_prompt = ?, worker_port = NULL
WHERE id = ?
`).run(t,e)}incrementPromptCounter(e){return this.db.prepare(`
`).run(s,e)}incrementPromptCounter(e){return this.db.prepare(`
UPDATE sdk_sessions
SET prompt_counter = COALESCE(prompt_counter, 0) + 1
WHERE id = ?
@@ -138,43 +200,43 @@ ${e.stack}`:e.message;if(Array.isArray(e))return`[${e.length} items]`;let t=Obje
SELECT prompt_counter FROM sdk_sessions WHERE id = ?
`).get(e)?.prompt_counter||1}getPromptCounter(e){return this.db.prepare(`
SELECT prompt_counter FROM sdk_sessions WHERE id = ?
`).get(e)?.prompt_counter||0}createSDKSession(e,t,s){let r=new Date,n=r.getTime();return this.db.prepare(`
`).get(e)?.prompt_counter||0}createSDKSession(e,s,t){let r=new Date,n=r.getTime();return this.db.prepare(`
INSERT INTO sdk_sessions
(claude_session_id, project, user_prompt, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, 'active')
`).run(e,t,s,r.toISOString(),n).lastInsertRowid}updateSDKSessionId(e,t){return this.db.prepare(`
`).run(e,s,t,r.toISOString(),n).lastInsertRowid}updateSDKSessionId(e,s){return this.db.prepare(`
UPDATE sdk_sessions
SET sdk_session_id = ?
WHERE id = ? AND sdk_session_id IS NULL
`).run(t,e).changes===0?(p.debug("DB","sdk_session_id already set, skipping update",{sessionId:e,sdkSessionId:t}),!1):!0}setWorkerPort(e,t){this.db.prepare(`
`).run(s,e).changes===0?(c.debug("DB","sdk_session_id already set, skipping update",{sessionId:e,sdkSessionId:s}),!1):!0}setWorkerPort(e,s){this.db.prepare(`
UPDATE sdk_sessions
SET worker_port = ?
WHERE id = ?
`).run(t,e)}getWorkerPort(e){return this.db.prepare(`
`).run(s,e)}getWorkerPort(e){return this.db.prepare(`
SELECT worker_port
FROM sdk_sessions
WHERE id = ?
LIMIT 1
`).get(e)?.worker_port||null}storeObservation(e,t,s,r){let n=new Date,u=n.getTime();this.db.prepare(`
`).get(e)?.worker_port||null}storeObservation(e,s,t,r){let n=new Date,a=n.getTime();this.db.prepare(`
INSERT INTO observations
(sdk_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(e,t,s.type,s.title,s.subtitle,JSON.stringify(s.facts),s.narrative,JSON.stringify(s.concepts),JSON.stringify(s.files_read),JSON.stringify(s.files_modified),r||null,n.toISOString(),u)}storeSummary(e,t,s,r){let n=new Date,u=n.getTime();this.db.prepare(`
`).run(e,s,t.type,t.title,t.subtitle,JSON.stringify(t.facts),t.narrative,JSON.stringify(t.concepts),JSON.stringify(t.files_read),JSON.stringify(t.files_modified),r||null,n.toISOString(),a)}storeSummary(e,s,t,r){let n=new Date,a=n.getTime();this.db.prepare(`
INSERT INTO session_summaries
(sdk_session_id, project, request, investigated, learned, completed,
next_steps, notes, prompt_number, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(e,t,s.request,s.investigated,s.learned,s.completed,s.next_steps,s.notes,r||null,n.toISOString(),u)}markSessionCompleted(e){let t=new Date,s=t.getTime();this.db.prepare(`
`).run(e,s,t.request,t.investigated,t.learned,t.completed,t.next_steps,t.notes,r||null,n.toISOString(),a)}markSessionCompleted(e){let s=new Date,t=s.getTime();this.db.prepare(`
UPDATE sdk_sessions
SET status = 'completed', completed_at = ?, completed_at_epoch = ?
WHERE id = ?
`).run(t.toISOString(),s,e)}markSessionFailed(e){let t=new Date,s=t.getTime();this.db.prepare(`
`).run(s.toISOString(),t,e)}markSessionFailed(e){let s=new Date,t=s.getTime();this.db.prepare(`
UPDATE sdk_sessions
SET status = 'failed', completed_at = ?, completed_at_epoch = ?
WHERE id = ?
`).run(t.toISOString(),s,e)}cleanupOrphanedSessions(){let e=new Date,t=e.getTime();return this.db.prepare(`
`).run(s.toISOString(),t,e)}cleanupOrphanedSessions(){let e=new Date,s=e.getTime();return this.db.prepare(`
UPDATE sdk_sessions
SET status = 'failed', completed_at = ?, completed_at_epoch = ?
WHERE status = 'active'
`).run(e.toISOString(),t).changes}close(){this.db.close()}};function x(o,e,t){return o==="PreCompact"?e?{continue:!0,suppressOutput:!0}:{continue:!1,stopReason:t.reason||"Pre-compact operation failed",suppressOutput:!0}:o==="SessionStart"?e&&t.context?{continue:!0,suppressOutput:!0,hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:t.context}}:{continue:!0,suppressOutput:!0}:o==="UserPromptSubmit"||o==="PostToolUse"?{continue:!0,suppressOutput:!0}:o==="Stop"?{continue:!0,suppressOutput:!0}:{continue:e,suppressOutput:!0,...t.reason&&!e?{stopReason:t.reason}:{}}}function l(o,e,t={}){let s=x(o,e,t);return JSON.stringify(s)}async function k(o){if(!o)throw new Error("summaryHook requires input");let{session_id:e}=o,t=new m,s=t.findActiveSDKSession(e);if(!s){t.close(),console.log(l("Stop",!0));return}if(!s.worker_port){t.close(),p.error("HOOK","No worker port for session",{sessionId:s.id}),console.log(l("Stop",!0));return}let r=t.getPromptCounter(s.id);t.close();try{p.dataIn("HOOK","Stop: Requesting summary",{sessionId:s.id,workerPort:s.worker_port,promptNumber:r});let n=await fetch(`http://127.0.0.1:${s.worker_port}/sessions/${s.id}/summarize`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({prompt_number:r}),signal:AbortSignal.timeout(2e3)});if(n.ok)p.debug("HOOK","Summary request sent successfully",{sessionId:s.id});else{let u=await n.text();p.failure("HOOK","Failed to generate summary",{sessionId:s.id,status:n.status},u)}}catch(n){p.failure("HOOK","Error requesting summary",{sessionId:s.id},n)}finally{console.log(l("Stop",!0))}}import{stdin as I}from"process";var b="";I.on("data",o=>b+=o);I.on("end",async()=>{try{let o=b.trim()?JSON.parse(b):void 0;await k(o),process.exit(0)}catch(o){console.error(`[claude-mem summary-hook error: ${o.message}]`),console.log('{"continue": true, "suppressOutput": true}'),process.exit(0)}});
`).run(e.toISOString(),s).changes}close(){this.db.close()}};function X(o,e,s){return o==="PreCompact"?e?{continue:!0,suppressOutput:!0}:{continue:!1,stopReason:s.reason||"Pre-compact operation failed",suppressOutput:!0}:o==="SessionStart"?e&&s.context?{continue:!0,suppressOutput:!0,hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:s.context}}:{continue:!0,suppressOutput:!0}:o==="UserPromptSubmit"||o==="PostToolUse"?{continue:!0,suppressOutput:!0}:o==="Stop"?{continue:!0,suppressOutput:!0}:{continue:e,suppressOutput:!0,...s.reason&&!e?{stopReason:s.reason}:{}}}function g(o,e,s={}){let t=X(o,e,s);return JSON.stringify(t)}async function L(o){if(!o)throw new Error("summaryHook requires input");let{session_id:e}=o,s=new u,t=s.findActiveSDKSession(e);if(!t){s.close(),console.log(g("Stop",!0));return}if(!t.worker_port)throw s.close(),c.error("HOOK","No worker port for session",{sessionId:t.id}),new Error("No worker port for session - session may not be properly initialized");let r=s.getPromptCounter(t.id);s.close(),c.dataIn("HOOK","Stop: Requesting summary",{sessionId:t.id,workerPort:t.worker_port,promptNumber:r});let n=await fetch(`http://127.0.0.1:${t.worker_port}/sessions/${t.id}/summarize`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({prompt_number:r}),signal:AbortSignal.timeout(2e3)});if(!n.ok){let a=await n.text();throw c.failure("HOOK","Failed to generate summary",{sessionId:t.id,status:n.status},a),new Error(`Failed to request summary from worker: ${n.status} ${a}`)}c.debug("HOOK","Summary request sent successfully",{sessionId:t.id}),console.log(g("Stop",!0))}import{stdin as v}from"process";var b="";v.on("data",o=>b+=o);v.on("end",async()=>{let o=b.trim()?JSON.parse(b):void 0;await L(o),process.exit(0)});
+3 -9
View File
@@ -11,13 +11,7 @@ import { stdin } from 'process';
let input = '';
stdin.on('data', (chunk) => input += chunk);
stdin.on('end', async () => {
try {
const parsed = input.trim() ? JSON.parse(input) : undefined;
await newHook(parsed);
process.exit(0);
} catch (error: any) {
console.error(`[claude-mem new-hook error: ${error.message}]`);
console.log('{"continue": true, "suppressOutput": true}');
process.exit(0);
}
const parsed = input.trim() ? JSON.parse(input) : undefined;
await newHook(parsed);
process.exit(0);
});
+3 -9
View File
@@ -11,13 +11,7 @@ import { stdin } from 'process';
let input = '';
stdin.on('data', (chunk) => input += chunk);
stdin.on('end', async () => {
try {
const parsed = input.trim() ? JSON.parse(input) : undefined;
await saveHook(parsed);
process.exit(0);
} catch (error: any) {
console.error(`[claude-mem save-hook error: ${error.message}]`);
console.log('{"continue": true, "suppressOutput": true}');
process.exit(0);
}
const parsed = input.trim() ? JSON.parse(input) : undefined;
await saveHook(parsed);
process.exit(0);
});
+3 -9
View File
@@ -11,13 +11,7 @@ import { stdin } from 'process';
let input = '';
stdin.on('data', (chunk) => input += chunk);
stdin.on('end', async () => {
try {
const parsed = input.trim() ? JSON.parse(input) : undefined;
await summaryHook(parsed);
process.exit(0);
} catch (error: any) {
console.error(`[claude-mem summary-hook error: ${error.message}]`);
console.log('{"continue": true, "suppressOutput": true}');
process.exit(0);
}
const parsed = input.trim() ? JSON.parse(input) : undefined;
await summaryHook(parsed);
process.exit(0);
});
+1 -58
View File
@@ -1,8 +1,6 @@
import path from 'path';
import { existsSync } from 'fs';
import { spawn } from 'child_process';
import { SessionStore } from '../services/sqlite/SessionStore.js';
import { getWorkerPortFilePath, getPackageRoot } from '../shared/paths.js';
import { ensureWorkerRunning } from '../shared/worker-utils.js';
export interface SessionStartInput {
session_id?: string;
@@ -13,61 +11,6 @@ export interface SessionStartInput {
[key: string]: any;
}
/**
* Ensure worker service is running
* Auto-starts worker if not running (v4.0.0 feature)
*/
// Best-effort worker bootstrap for the SessionStart hook.
// Presence of the port file is treated as "worker is running"; no health
// probe is performed here. All failures are logged and swallowed so the
// hook never blocks Claude.
function ensureWorkerRunning(): void {
try {
const portFile = getWorkerPortFilePath();
// Check if worker is already running
if (existsSync(portFile)) {
// Worker appears to be running (port file exists)
// NOTE(review): a stale port file left by a crashed worker makes this a
// false positive — confirm the file is cleaned up on worker shutdown.
return;
}
console.error('[claude-mem] Worker not running, starting...');
// Find worker service path
const packageRoot = getPackageRoot();
const workerPath = path.join(packageRoot, 'dist', 'worker-service.cjs');
if (!existsSync(workerPath)) {
// Bundled worker missing (broken install); give up without failing the hook.
console.error(`[claude-mem] Worker service not found at ${workerPath}`);
return;
}
// Try to start with PM2 first (preferred for production)
const ecosystemPath = path.join(packageRoot, 'ecosystem.config.cjs');
if (existsSync(ecosystemPath)) {
try {
// Detached + ignored stdio + unref(): fire-and-forget so the hook can exit.
spawn('pm2', ['start', ecosystemPath], {
detached: true,
stdio: 'ignore',
cwd: packageRoot
}).unref();
console.error('[claude-mem] Worker started with PM2');
return;
} catch (pm2Error) {
// NOTE(review): spawn() reports a missing pm2 binary via an async 'error'
// event, not a synchronous throw — this catch likely never fires, so the
// direct-spawn fallback below only runs when no ecosystem config exists.
// Verify the fallback is actually reachable on hosts without pm2.
console.error('[claude-mem] PM2 not available, using direct spawn');
}
}
// Fallback: spawn worker directly
spawn('node', [workerPath], {
detached: true,
stdio: 'ignore',
env: { ...process.env, NODE_ENV: 'production' }
}).unref();
console.error('[claude-mem] Worker started in background');
} catch (error: any) {
// Don't fail the hook if worker start fails
console.error(`[claude-mem] Failed to start worker: ${error.message}`);
}
}
/**
* Context Hook - SessionStart
* Shows user what happened in recent sessions
+10 -33
View File
@@ -1,7 +1,7 @@
import path from 'path';
import { SessionStore } from '../services/sqlite/SessionStore.js';
import { createHookResponse } from './hook-response.js';
import { getWorkerPortFilePath } from '../shared/paths.js';
import { ensureWorkerRunning, getWorkerPort } from '../shared/worker-utils.js';
export interface UserPromptSubmitInput {
session_id: string;
@@ -10,26 +10,6 @@ export interface UserPromptSubmitInput {
[key: string]: any;
}
/**
* Get worker service port from file
*/
/**
 * Discover the worker service port from its port file.
 *
 * @returns the parsed port number, or null when the file is absent or
 *          cannot be read.
 */
async function getWorkerPort(): Promise<number | null> {
  const fs = await import('fs');
  const portFilePath = getWorkerPortFilePath();
  if (!fs.existsSync(portFilePath)) {
    return null;
  }
  try {
    const contents = fs.readFileSync(portFilePath, 'utf8');
    return parseInt(contents.trim(), 10);
  } catch {
    // Unreadable port file is treated the same as a missing one.
    return null;
  }
}
/**
* New Hook - UserPromptSubmit
* Initializes SDK memory session via HTTP POST to worker service
@@ -74,14 +54,15 @@ export async function newHook(input?: UserPromptSubmitInput): Promise<void> {
}
}
// Find worker service port
const port = await getWorkerPort();
if (!port) {
console.error('[new-hook] Worker service not running. Start with: npm run worker:start');
console.log(createHookResponse('UserPromptSubmit', true)); // Don't block Claude
return;
// Ensure worker service is running (v4.0.0 auto-start)
const workerReady = await ensureWorkerRunning();
if (!workerReady) {
throw new Error('Worker service failed to start or become healthy');
}
// Get fixed port
const port = getWorkerPort();
// Only initialize worker on new sessions
if (isNewSession) {
// Initialize session via HTTP
@@ -93,16 +74,12 @@ export async function newHook(input?: UserPromptSubmitInput): Promise<void> {
});
if (!response.ok) {
console.error('[new-hook] Failed to init session:', await response.text());
const errorText = await response.text();
throw new Error(`Failed to initialize session: ${response.status} ${errorText}`);
}
}
console.log(createHookResponse('UserPromptSubmit', true));
} catch (error: any) {
console.error('[new-hook] FATAL ERROR:', error.message);
console.error('[new-hook] Stack:', error.stack);
console.error('[new-hook] Full error:', JSON.stringify(error, Object.getOwnPropertyNames(error)));
console.log(createHookResponse('UserPromptSubmit', true)); // Don't block Claude
} finally {
db.close();
}
+27 -32
View File
@@ -44,8 +44,7 @@ export async function saveHook(input?: PostToolUseInput): Promise<void> {
if (!session.worker_port) {
db.close();
logger.error('HOOK', 'No worker port for session', { sessionId: session.id });
console.log(createHookResponse('PostToolUse', true));
return;
throw new Error('No worker port for session - session may not be properly initialized');
}
// Get current prompt number for this session
@@ -54,36 +53,32 @@ export async function saveHook(input?: PostToolUseInput): Promise<void> {
const toolStr = logger.formatTool(tool_name, tool_input);
try {
logger.dataIn('HOOK', `PostToolUse: ${toolStr}`, {
logger.dataIn('HOOK', `PostToolUse: ${toolStr}`, {
sessionId: session.id,
workerPort: session.worker_port
});
const response = await fetch(`http://127.0.0.1:${session.worker_port}/sessions/${session.id}/observations`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
tool_name,
tool_input: tool_input !== undefined ? JSON.stringify(tool_input) : '{}',
tool_output: tool_output !== undefined ? JSON.stringify(tool_output) : '{}',
prompt_number: promptNumber
}),
signal: AbortSignal.timeout(2000)
});
if (!response.ok) {
const errorText = await response.text();
logger.failure('HOOK', 'Failed to send observation', {
sessionId: session.id,
workerPort: session.worker_port
});
const response = await fetch(`http://127.0.0.1:${session.worker_port}/sessions/${session.id}/observations`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
tool_name,
tool_input: tool_input !== undefined ? JSON.stringify(tool_input) : '{}',
tool_output: tool_output !== undefined ? JSON.stringify(tool_output) : '{}',
prompt_number: promptNumber
}),
signal: AbortSignal.timeout(2000)
});
if (!response.ok) {
const errorText = await response.text();
logger.failure('HOOK', 'Failed to send observation', {
sessionId: session.id,
status: response.status
}, errorText);
} else {
logger.debug('HOOK', 'Observation sent successfully', { sessionId: session.id, toolName: tool_name });
}
} catch (error: any) {
logger.failure('HOOK', 'Error sending observation', { sessionId: session.id }, error);
} finally {
console.log(createHookResponse('PostToolUse', true));
status: response.status
}, errorText);
throw new Error(`Failed to send observation to worker: ${response.status} ${errorText}`);
}
logger.debug('HOOK', 'Observation sent successfully', { sessionId: session.id, toolName: tool_name });
console.log(createHookResponse('PostToolUse', true));
}
+23 -28
View File
@@ -30,40 +30,35 @@ export async function summaryHook(input?: StopInput): Promise<void> {
if (!session.worker_port) {
db.close();
logger.error('HOOK', 'No worker port for session', { sessionId: session.id });
console.log(createHookResponse('Stop', true));
return;
throw new Error('No worker port for session - session may not be properly initialized');
}
// Get current prompt number
const promptNumber = db.getPromptCounter(session.id);
db.close();
try {
logger.dataIn('HOOK', 'Stop: Requesting summary', {
logger.dataIn('HOOK', 'Stop: Requesting summary', {
sessionId: session.id,
workerPort: session.worker_port,
promptNumber
});
const response = await fetch(`http://127.0.0.1:${session.worker_port}/sessions/${session.id}/summarize`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ prompt_number: promptNumber }),
signal: AbortSignal.timeout(2000)
});
if (!response.ok) {
const errorText = await response.text();
logger.failure('HOOK', 'Failed to generate summary', {
sessionId: session.id,
workerPort: session.worker_port,
promptNumber
});
const response = await fetch(`http://127.0.0.1:${session.worker_port}/sessions/${session.id}/summarize`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ prompt_number: promptNumber }),
signal: AbortSignal.timeout(2000)
});
if (!response.ok) {
const errorText = await response.text();
logger.failure('HOOK', 'Failed to generate summary', {
sessionId: session.id,
status: response.status
}, errorText);
} else {
logger.debug('HOOK', 'Summary request sent successfully', { sessionId: session.id });
}
} catch (error: any) {
logger.failure('HOOK', 'Error requesting summary', { sessionId: session.id }, error);
} finally {
console.log(createHookResponse('Stop', true));
status: response.status
}, errorText);
throw new Error(`Failed to request summary from worker: ${response.status} ${errorText}`);
}
logger.debug('HOOK', 'Summary request sent successfully', { sessionId: session.id });
console.log(createHookResponse('Stop', true));
}
+140 -11
View File
@@ -18,6 +18,9 @@ export class SessionStore {
this.db.pragma('synchronous = NORMAL');
this.db.pragma('foreign_keys = ON');
// Initialize schema if needed (fresh database)
this.initializeSchema();
// Run migrations
this.ensureWorkerPortColumn();
this.ensurePromptTrackingColumns();
@@ -27,10 +30,108 @@ export class SessionStore {
}
/**
* Ensure worker_port column exists (migration)
* Initialize database schema using migrations (migration004)
* This runs the core SDK tables migration if no tables exist
*/
// Bootstrap the schema-version bookkeeping table and, on a completely fresh
// database (no recorded migrations), apply migration004 which creates the
// three core tables: sdk_sessions, observations, session_summaries.
// The constructor then runs migrations 5+ on top of this baseline, so the
// observation columns created here (e.g. NOT NULL text, no title/facts)
// reflect the migration-4 shape and are presumably upgraded by the later
// hierarchical-fields / nullable-text migrations — confirm ordering.
private initializeSchema(): void {
try {
// Create schema_versions table if it doesn't exist
this.db.exec(`
CREATE TABLE IF NOT EXISTS schema_versions (
id INTEGER PRIMARY KEY,
version INTEGER UNIQUE NOT NULL,
applied_at TEXT NOT NULL
)
`);
// Get applied migrations
const appliedVersions = this.db.prepare('SELECT version FROM schema_versions ORDER BY version').all() as Array<{version: number}>;
// maxApplied === 0 means no migration rows exist: brand-new database.
const maxApplied = appliedVersions.length > 0 ? Math.max(...appliedVersions.map(v => v.version)) : 0;
// Only run migration004 if no migrations have been applied
// This creates the sdk_sessions, observations, and session_summaries tables
if (maxApplied === 0) {
// console.error is used as the logging channel throughout SessionStore.
console.error('[SessionStore] Initializing fresh database with migration004...');
// Migration004: SDK agent architecture tables
this.db.exec(`
CREATE TABLE IF NOT EXISTS sdk_sessions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
claude_session_id TEXT UNIQUE NOT NULL,
sdk_session_id TEXT UNIQUE,
project TEXT NOT NULL,
user_prompt TEXT,
started_at TEXT NOT NULL,
started_at_epoch INTEGER NOT NULL,
completed_at TEXT,
completed_at_epoch INTEGER,
status TEXT CHECK(status IN ('active', 'completed', 'failed')) NOT NULL DEFAULT 'active'
);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_claude_id ON sdk_sessions(claude_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_sdk_id ON sdk_sessions(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_project ON sdk_sessions(project);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_status ON sdk_sessions(status);
CREATE INDEX IF NOT EXISTS idx_sdk_sessions_started ON sdk_sessions(started_at_epoch DESC);
CREATE TABLE IF NOT EXISTS observations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT NOT NULL,
project TEXT NOT NULL,
text TEXT NOT NULL,
type TEXT NOT NULL CHECK(type IN ('decision', 'bugfix', 'feature', 'refactor', 'discovery')),
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_observations_sdk_session ON observations(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_observations_project ON observations(project);
CREATE INDEX IF NOT EXISTS idx_observations_type ON observations(type);
CREATE INDEX IF NOT EXISTS idx_observations_created ON observations(created_at_epoch DESC);
CREATE TABLE IF NOT EXISTS session_summaries (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sdk_session_id TEXT UNIQUE NOT NULL,
project TEXT NOT NULL,
request TEXT,
investigated TEXT,
learned TEXT,
completed TEXT,
next_steps TEXT,
files_read TEXT,
files_edited TEXT,
notes TEXT,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
FOREIGN KEY(sdk_session_id) REFERENCES sdk_sessions(sdk_session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_session_summaries_sdk_session ON session_summaries(sdk_session_id);
CREATE INDEX IF NOT EXISTS idx_session_summaries_project ON session_summaries(project);
CREATE INDEX IF NOT EXISTS idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
`);
// Record migration004 as applied
this.db.prepare('INSERT INTO schema_versions (version, applied_at) VALUES (?, ?)').run(4, new Date().toISOString());
console.error('[SessionStore] Migration004 applied successfully');
}
} catch (error: any) {
// Schema initialization is fatal: rethrow so the store is never used
// with a half-created schema.
console.error('[SessionStore] Schema initialization error:', error.message);
throw error;
}
}
/**
* Ensure worker_port column exists (migration 5)
*/
private ensureWorkerPortColumn(): void {
try {
// Check if migration already applied
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(5) as {version: number} | undefined;
if (applied) return;
// Check if column exists
const tableInfo = this.db.pragma('table_info(sdk_sessions)');
const hasWorkerPort = (tableInfo as any[]).some((col: any) => col.name === 'worker_port');
@@ -39,16 +140,23 @@ export class SessionStore {
this.db.exec('ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER');
console.error('[SessionStore] Added worker_port column to sdk_sessions table');
}
// Record migration
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(5, new Date().toISOString());
} catch (error: any) {
console.error('[SessionStore] Migration error:', error.message);
}
}
/**
* Ensure prompt tracking columns exist (migration 006)
* Ensure prompt tracking columns exist (migration 6)
*/
private ensurePromptTrackingColumns(): void {
try {
// Check if migration already applied
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(6) as {version: number} | undefined;
if (applied) return;
// Check sdk_sessions for prompt_counter
const sessionsInfo = this.db.pragma('table_info(sdk_sessions)');
const hasPromptCounter = (sessionsInfo as any[]).some((col: any) => col.name === 'prompt_counter');
@@ -76,27 +184,29 @@ export class SessionStore {
console.error('[SessionStore] Added prompt_number column to session_summaries table');
}
// Remove UNIQUE constraint on session_summaries.sdk_session_id
// SQLite doesn't support dropping constraints, so we need to check if it exists first
const summariesIndexes = this.db.pragma('index_list(session_summaries)');
const hasUniqueConstraint = (summariesIndexes as any[]).some((idx: any) => idx.unique === 1);
// Record migration
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(6, new Date().toISOString());
} catch (error: any) {
console.error('[SessionStore] Prompt tracking migration error:', error.message);
}
}
/**
* Remove UNIQUE constraint from session_summaries.sdk_session_id (migration 007)
* Remove UNIQUE constraint from session_summaries.sdk_session_id (migration 7)
*/
private removeSessionSummariesUniqueConstraint(): void {
try {
// Check if migration already applied
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(7) as {version: number} | undefined;
if (applied) return;
// Check if UNIQUE constraint exists
const summariesIndexes = this.db.pragma('index_list(session_summaries)');
const hasUniqueConstraint = (summariesIndexes as any[]).some((idx: any) => idx.unique === 1);
if (!hasUniqueConstraint) {
// Already migrated
// Already migrated (no constraint exists)
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(7, new Date().toISOString());
return;
}
@@ -152,6 +262,9 @@ export class SessionStore {
// Commit transaction
this.db.exec('COMMIT');
// Record migration
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(7, new Date().toISOString());
console.error('[SessionStore] Successfully removed UNIQUE constraint from session_summaries.sdk_session_id');
} catch (error: any) {
// Rollback on error
@@ -164,16 +277,21 @@ export class SessionStore {
}
/**
* Add hierarchical fields to observations table (migration 008)
* Add hierarchical fields to observations table (migration 8)
*/
private addObservationHierarchicalFields(): void {
try {
// Check if migration already applied
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(8) as {version: number} | undefined;
if (applied) return;
// Check if new fields already exist
const tableInfo = this.db.pragma('table_info(observations)');
const hasTitle = (tableInfo as any[]).some((col: any) => col.name === 'title');
if (hasTitle) {
// Already migrated
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(8, new Date().toISOString());
return;
}
@@ -190,6 +308,9 @@ export class SessionStore {
ALTER TABLE observations ADD COLUMN files_modified TEXT;
`);
// Record migration
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(8, new Date().toISOString());
console.error('[SessionStore] Successfully added hierarchical fields to observations table');
} catch (error: any) {
console.error('[SessionStore] Migration error (add hierarchical fields):', error.message);
@@ -197,17 +318,22 @@ export class SessionStore {
}
/**
* Make observations.text nullable (migration 009)
* Make observations.text nullable (migration 9)
* The text field is deprecated in favor of structured fields (title, subtitle, narrative, etc.)
*/
private makeObservationsTextNullable(): void {
try {
// Check if migration already applied
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(9) as {version: number} | undefined;
if (applied) return;
// Check if text column is already nullable
const tableInfo = this.db.pragma('table_info(observations)');
const textColumn = (tableInfo as any[]).find((col: any) => col.name === 'text');
if (!textColumn || textColumn.notnull === 0) {
// Already migrated or text column doesn't exist
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(9, new Date().toISOString());
return;
}
@@ -265,6 +391,9 @@ export class SessionStore {
// Commit transaction
this.db.exec('COMMIT');
// Record migration
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(9, new Date().toISOString());
console.error('[SessionStore] Successfully made observations.text nullable');
} catch (error: any) {
// Rollback on error
+11 -20
View File
@@ -10,12 +10,12 @@ import { SessionStore } from './sqlite/SessionStore.js';
import { buildInitPrompt, buildObservationPrompt, buildFinalizePrompt } from '../sdk/prompts.js';
import { parseObservations, parseSummary } from '../sdk/parser.js';
import type { SDKSession } from '../sdk/prompts.js';
import { findAvailablePort } from '../utils/port-allocator.js';
import { logger } from '../utils/logger.js';
import { getWorkerPortFilePath, ensureAllDataDirs } from '../shared/paths.js';
import { ensureAllDataDirs } from '../shared/paths.js';
const MODEL = 'claude-sonnet-4-5';
const DISALLOWED_TOOLS = ['Glob', 'Grep', 'ListMcpResourcesTool', 'WebSearch'];
const FIXED_PORT = parseInt(process.env.CLAUDE_MEM_WORKER_PORT || '37777', 10);
interface ObservationMessage {
type: 'observation';
@@ -69,13 +69,7 @@ class WorkerService {
}
async start(): Promise<void> {
// Find available port
const port = await findAvailablePort();
if (!port) {
throw new Error('No available ports in range 37000-37999');
}
this.port = port;
this.port = FIXED_PORT;
// Clean up orphaned sessions from previous worker instances
const db = new SessionStore();
@@ -87,18 +81,15 @@ class WorkerService {
}
return new Promise((resolve, reject) => {
this.app.listen(port, '127.0.0.1', () => {
logger.info('SYSTEM', `Worker started`, { port, pid: process.pid, activeSessions: this.sessions.size });
// Write port to file for hooks to discover
const { writeFileSync } = require('fs');
ensureAllDataDirs(); // Ensure data directory exists
const portFile = getWorkerPortFilePath();
writeFileSync(portFile, port.toString(), 'utf8');
logger.info('SYSTEM', `Port file written to ${portFile}`);
this.app.listen(FIXED_PORT, '127.0.0.1', () => {
logger.info('SYSTEM', `Worker started`, { port: FIXED_PORT, pid: process.pid, activeSessions: this.sessions.size });
resolve();
}).on('error', reject);
}).on('error', (err: any) => {
if (err.code === 'EADDRINUSE') {
logger.error('SYSTEM', `Port ${FIXED_PORT} already in use - worker may already be running`);
}
reject(err);
});
});
}
+18 -49
View File
@@ -4,26 +4,25 @@ import { existsSync, mkdirSync } from 'fs';
import { execSync } from 'child_process';
import { fileURLToPath } from 'url';
// Get __dirname that works in both ESM (hooks) and CJS (worker) contexts
/**
 * Resolve the directory of the current module in both build flavors:
 * CJS (worker bundle) defines __dirname; ESM (hooks) exposes
 * import.meta.url instead.
 *
 * NOTE(review): `import.meta` is a syntax error in a file compiled as pure
 * CJS — presumably the bundler rewrites or tolerates this; confirm.
 */
function getDirname(): string {
// CJS context - __dirname exists
if (typeof __dirname !== 'undefined') {
return __dirname;
}
// ESM context - use import.meta.url
return dirname(fileURLToPath(import.meta.url));
}
const _dirname = getDirname();
/**
* Simple path configuration for claude-mem
* Standard paths based on Claude Code conventions
*
* v4.0.0: Data directory now uses CLAUDE_PLUGIN_ROOT when available
*/
// Base directories
// Priority: CLAUDE_PLUGIN_ROOT/data > CLAUDE_MEM_DATA_DIR > ~/.claude-mem
/**
 * Resolve the claude-mem data directory.
 * Priority: CLAUDE_PLUGIN_ROOT/data > CLAUDE_MEM_DATA_DIR > ~/.claude-mem
 * (empty-string environment values are treated as unset).
 */
const getDataDir = (): string => {
  const { CLAUDE_PLUGIN_ROOT, CLAUDE_MEM_DATA_DIR } = process.env;
  if (CLAUDE_PLUGIN_ROOT) {
    return join(CLAUDE_PLUGIN_ROOT, 'data');
  }
  return CLAUDE_MEM_DATA_DIR ? CLAUDE_MEM_DATA_DIR : join(homedir(), '.claude-mem');
};
export const DATA_DIR = getDataDir();
export const DATA_DIR = process.env.CLAUDE_MEM_DATA_DIR || join(homedir(), '.claude-mem');
export const CLAUDE_CONFIG_DIR = process.env.CLAUDE_CONFIG_DIR || join(homedir(), '.claude');
// Data subdirectories
@@ -53,13 +52,6 @@ export function getWorkerSocketPath(sessionId: number): string {
return join(DATA_DIR, `worker-${sessionId}.sock`);
}
/**
* Get worker port file path
*/
export function getWorkerPortFilePath(): string {
return join(DATA_DIR, 'worker.port');
}
/**
* Ensure a directory exists
*/
@@ -103,36 +95,13 @@ export function getCurrentProjectName(): string {
}
/**
* Find package root directory (for install command)
* Find package root directory
*
* Works because bundled hooks are in plugin/scripts/,
* so package root is always two levels up
*/
export function getPackageRoot(): string {
// Method 1: Try require.resolve for package.json
try {
const packageJsonPath = require.resolve('claude-mem/package.json');
return dirname(packageJsonPath);
} catch {
// Continue to next method
}
// Method 2: Walk up from current module location
const currentFile = fileURLToPath(import.meta.url);
let currentDir = dirname(currentFile);
for (let i = 0; i < 10; i++) {
const packageJsonPath = join(currentDir, 'package.json');
if (existsSync(packageJsonPath)) {
const packageJson = require(packageJsonPath);
if (packageJson.name === 'claude-mem') {
return currentDir;
}
}
const parentDir = dirname(currentDir);
if (parentDir === currentDir) break;
currentDir = parentDir;
}
throw new Error('Cannot locate claude-mem package root. Ensure claude-mem is properly installed.');
return join(_dirname, '..', '..');
}
/**
+107
View File
@@ -0,0 +1,107 @@
import path from 'path';
import { existsSync } from 'fs';
import { spawn } from 'child_process';
import { getPackageRoot } from './paths.js';

// Worker listens on a fixed, env-overridable port (mirrors worker-service).
const FIXED_PORT = parseInt(process.env.CLAUDE_MEM_WORKER_PORT || '37777', 10);
const HEALTH_CHECK_URL = `http://127.0.0.1:${FIXED_PORT}/health`;

/**
 * Check if worker is responding by hitting health endpoint.
 * Short 500ms timeout so hooks never hang waiting on a dead worker.
 */
async function checkWorkerHealth(): Promise<boolean> {
  try {
    const response = await fetch(HEALTH_CHECK_URL, {
      signal: AbortSignal.timeout(500)
    });
    return response.ok;
  } catch {
    return false;
  }
}

/**
 * Spawn the worker service directly with node (detached, fire-and-forget).
 * An 'error' listener is mandatory: spawn() delivers launch failures
 * (e.g. ENOENT) as an async event, and an unhandled 'error' event would
 * crash the calling hook process.
 */
function spawnWorkerDirect(workerPath: string): void {
  const child = spawn('node', [workerPath], {
    detached: true,
    stdio: 'ignore',
    env: { ...process.env, NODE_ENV: 'production', CLAUDE_MEM_WORKER_PORT: FIXED_PORT.toString() }
  });
  child.on('error', (err: Error) => {
    console.error(`[claude-mem] Failed to spawn worker: ${err.message}`);
  });
  child.unref();
  console.error('[claude-mem] Worker started in background');
}

/**
 * Ensure worker service is running with retry logic
 * Auto-starts worker if not running (v4.0.0 feature)
 *
 * @returns true if worker is responding, false if failed to start
 */
export async function ensureWorkerRunning(): Promise<boolean> {
  try {
    // Fast path: worker already healthy.
    if (await checkWorkerHealth()) {
      return true;
    }

    console.error('[claude-mem] Worker not responding, starting...');

    // Find worker service path
    const packageRoot = getPackageRoot();
    const workerPath = path.join(packageRoot, 'dist', 'worker-service.cjs');

    if (!existsSync(workerPath)) {
      console.error(`[claude-mem] Worker service not found at ${workerPath}`);
      return false;
    }

    // Try to start with PM2 first (preferred for production).
    // BUG FIX: spawn() does NOT throw synchronously when the pm2 binary is
    // missing — the failure arrives as an async 'error' event, so the old
    // try/catch fallback could never run (and the unhandled 'error' event
    // could crash the hook). Listen for 'error' and fall back there instead.
    const ecosystemPath = path.join(packageRoot, 'ecosystem.config.cjs');
    if (existsSync(ecosystemPath)) {
      const pm2 = spawn('pm2', ['start', ecosystemPath], {
        detached: true,
        stdio: 'ignore',
        cwd: packageRoot
      });
      pm2.on('error', () => {
        console.error('[claude-mem] PM2 not available, using direct spawn');
        spawnWorkerDirect(workerPath);
      });
      pm2.unref();
      console.error('[claude-mem] Worker started with PM2');
    } else {
      // No PM2 config, spawn directly
      spawnWorkerDirect(workerPath);
    }

    // Wait for worker to become healthy (retry 3 times with 500ms delay)
    for (let i = 0; i < 3; i++) {
      await new Promise(resolve => setTimeout(resolve, 500));
      if (await checkWorkerHealth()) {
        console.error('[claude-mem] Worker is healthy');
        return true;
      }
    }

    console.error('[claude-mem] Worker failed to become healthy after startup');
    return false;
  } catch (error: any) {
    // Never propagate: callers treat false as "worker unavailable".
    console.error(`[claude-mem] Failed to start worker: ${error.message}`);
    return false;
  }
}

/**
 * Check if worker is currently running
 */
export async function isWorkerRunning(): Promise<boolean> {
  return checkWorkerHealth();
}

/**
 * Get the worker port number (fixed port)
 */
export function getWorkerPort(): number {
  return FIXED_PORT;
}
-63
View File
@@ -1,63 +0,0 @@
import net from 'net';

/**
 * Port Allocator Utility
 * Finds available ports dynamically for worker service
 */

const PORT_RANGE_START = 37000;
const PORT_RANGE_END = 37999;

/**
 * Check if a port is available by attempting a loopback bind.
 * FIX: the old handler had an if/else on EADDRINUSE whose branches both
 * resolved false — any listen error (EADDRINUSE, EACCES, ...) means the
 * port is unusable, so a single handler is equivalent and clearer.
 */
function isPortAvailable(port: number): Promise<boolean> {
  return new Promise((resolve) => {
    const server = net.createServer();
    server.once('error', () => {
      resolve(false);
    });
    server.once('listening', () => {
      // Release the probe socket before reporting success.
      server.close();
      resolve(true);
    });
    server.listen(port, '127.0.0.1');
  });
}

/**
 * Find an available port in the configured range
 * Returns a port number or null if none available
 */
export async function findAvailablePort(): Promise<number | null> {
  // Try random ports first (faster for sparse allocation)
  for (let i = 0; i < 10; i++) {
    const randomPort = Math.floor(Math.random() * (PORT_RANGE_END - PORT_RANGE_START + 1)) + PORT_RANGE_START;
    if (await isPortAvailable(randomPort)) {
      return randomPort;
    }
  }

  // Fall back to sequential search
  for (let port = PORT_RANGE_START; port <= PORT_RANGE_END; port++) {
    if (await isPortAvailable(port)) {
      return port;
    }
  }

  return null;
}

/**
 * Check if a specific port is available
 */
export async function checkPort(port: number): Promise<boolean> {
  return isPortAvailable(port);
}