Release v3.6.3

Published from npm package build
Source: https://github.com/thedotmack/claude-mem-source
This commit is contained in:
Alex Newman
2025-09-11 17:15:50 -04:00
parent c4eb2e2dc9
commit 97807494fd
43 changed files with 10632 additions and 286 deletions
+718
View File
@@ -0,0 +1,718 @@
import { OptionValues } from 'commander';
import { query } from '@anthropic-ai/claude-code';
import fs from 'fs';
import path from 'path';
import { getClaudePath } from '../shared/settings.js';
import { execSync } from 'child_process';
// A single changelog line item, persisted as one JSON object per line in
// ~/.claude-mem/projects/<project>-changelog.jsonl and rendered into
// CHANGELOG.md grouped by version and type.
interface ChangelogEntry {
version: string; // semver string the entry belongs to, e.g. "3.6.1"
date: string; // YYYY-MM-DD shown in the CHANGELOG.md version heading
type: 'Added' | 'Changed' | 'Fixed' | 'Removed' | 'Deprecated' | 'Security'; // Keep a Changelog category
description: string; // user-facing description of the change
timestamp: string; // source memory timestamp when available, else generation time
generatedAt?: string; // When this changelog entry was created
}
// One memory returned by the Claude-driven search phase, attributed to the
// version whose npm publish window contains its timestamp.
interface MemorySearchResult {
  version: string; // version this memory was attributed to
  text: string; // memory content used to derive changelog entries
  // Raw metadata object from the memory store. Typed as `unknown` rather than
  // `any` so any future property access must be narrowed first; the code in
  // this file never reads individual fields from it, so this is safe.
  metadata: unknown;
}
/**
 * Generate changelog entries for one or more published versions and render
 * CHANGELOG.md (Keep a Changelog layout).
 *
 * Pipeline:
 *  1. Resolve project name/version from package.json.
 *  2. Compute the list of versions to cover (--generate / --historical).
 *  3. Build per-version time windows from npm publish times (or --start/--end).
 *  4. Phase 1: ask Claude (with MCP chroma tools) to find memories per window.
 *  5. Phase 2: ask Claude to turn memories into categorized entries.
 *  6. Persist entries to the per-project JSONL log and re-render CHANGELOG.md.
 *
 * Exits the process with code 1 on any unrecoverable condition.
 *
 * @param options commander flags: update, generate, historical, start, end,
 *                verbose, preview
 */
export async function changelog(options: OptionValues): Promise<void> {
  try {
    // Handle --update flag to regenerate CHANGELOG.md from JSONL
    if (options.update) {
      await updateChangelogFromJsonl(options);
      return;
    }
    // Get current version and project name from package.json
    const packageJsonPath = path.join(process.cwd(), 'package.json');
    let currentVersion = 'unknown';
    let projectName = 'unknown';
    if (fs.existsSync(packageJsonPath)) {
      try {
        const packageData = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8'));
        currentVersion = packageData.version || 'unknown';
        projectName = packageData.name || path.basename(process.cwd());
      } catch (e) {
        // Unreadable/invalid package.json: fall back to the directory name.
        projectName = path.basename(process.cwd());
      }
    }
    // Calculate versions to search for based on flags
    const versionsToSearch: string[] = [];
    let historicalCount = options.historical || 1; // Default to current version only
    // Handle --generate flag for specific version
    if (options.generate) {
      versionsToSearch.push(options.generate);
      historicalCount = 1; // Single version mode
      console.log(`🎯 Generating changelog for specific version: ${options.generate}`);
    } else if (currentVersion !== 'unknown') {
      // Normal mode: use current version or historical versions
      const parts = currentVersion.split('.');
      if (parts.length === 3) {
        let major = parseInt(parts[0]);
        let minor = parseInt(parts[1]);
        let patch = parseInt(parts[2]);
        // Walk backwards one version per iteration, collecting version strings.
        for (let i = 0; i < historicalCount; i++) {
          versionsToSearch.push(`${major}.${minor}.${patch}`);
          // Decrement version
          // NOTE(review): crossing a minor boundary resets patch to 9, i.e.
          // this assumes patch numbers never exceed 9 — versions like x.y.10
          // would be skipped. Confirm against the project's release history.
          if (patch === 0) {
            if (minor === 0) {
              // Can't go lower than x.0.0
              break;
            }
            minor--;
            patch = 9;
          } else {
            patch--;
          }
        }
      }
    }
    if (versionsToSearch.length === 0) {
      console.log('⚠️ Could not determine versions to search. Please check package.json');
      process.exit(1);
    }
    // Check if current version already has a changelog entry
    const projectChangelogDir = path.join(
      process.env.HOME || process.env.USERPROFILE || '',
      '.claude-mem',
      'projects'
    );
    const changelogJsonlPath = path.join(projectChangelogDir, `${projectName}-changelog.jsonl`);
    let hasCurrentVersion = false;
    if (fs.existsSync(changelogJsonlPath)) {
      const existingLines = fs.readFileSync(changelogJsonlPath, 'utf-8').split('\n').filter(l => l.trim());
      for (const line of existingLines) {
        try {
          const entry = JSON.parse(line);
          if (entry.version === currentVersion) {
            hasCurrentVersion = true;
          }
        } catch (e) {
          // Skip invalid lines
        }
      }
      // In default single-version mode, refuse to double-generate entries for
      // the current version and explain the intended release workflow instead.
      if (!options.historical && !options.generate && historicalCount === 1) {
        if (hasCurrentVersion) {
          console.log(`❌ Version ${currentVersion} already has changelog entries.`);
          console.log('\n📝 Workflow:');
          console.log(' 1. Make your code updates');
          console.log(' 2. Build and test: bun run build');
          console.log(' 3. Bump version: npm version patch');
          console.log(' 4. Generate changelog: claude-mem changelog');
          console.log(' 5. Commit and push\n');
          console.log(`💡 Or use --historical 1 to regenerate this version's changelog`);
          process.exit(1);
        }
      }
    }
    // Get npm publish times for all versions we need
    let versionTimeRanges: Array<{version: string, startTime: string, endTime: string}> = [];
    // Check if custom time range is provided
    if (options.start && options.end) {
      // Use custom time range for the specified version
      const version = options.generate || currentVersion;
      versionTimeRanges.push({
        version,
        startTime: options.start,
        endTime: options.end
      });
      console.log(`📅 Using custom time range for ${version}:`);
      console.log(` Start: ${new Date(options.start).toLocaleString()}`);
      console.log(` End: ${new Date(options.end).toLocaleString()}`);
    } else {
      try {
        // `npm view <pkg> time --json` returns a map of version -> ISO publish
        // time (plus "created"/"modified" keys).
        const npmTimeData = execSync(`npm view ${projectName} time --json`, {
          encoding: 'utf-8',
          timeout: 5000
        });
        const publishTimes = JSON.parse(npmTimeData);
        // For historical mode, we need one extra previous version to get proper time ranges
        // E.g., for 3 versions, we need 4 timestamps to create 3 ranges
        let extraPrevVersion = '';
        if (historicalCount > 1) {
          // Get the version before our oldest version in the search list
          const oldestVersion = versionsToSearch[versionsToSearch.length - 1];
          const parts = oldestVersion.split('.');
          const major = parseInt(parts[0]);
          const minor = parseInt(parts[1]);
          const patch = parseInt(parts[2]);
          if (patch > 0) {
            extraPrevVersion = `${major}.${minor}.${patch - 1}`;
          } else if (minor > 0) {
            // Look for highest patch of previous minor
            const prevMinorPrefix = `${major}.${minor - 1}.`;
            const prevMinorVersions = Object.keys(publishTimes)
              .filter(v => v.startsWith(prevMinorPrefix))
              .sort((a, b) => {
                const aPatch = parseInt(a.split('.')[2] || '0');
                const bPatch = parseInt(b.split('.')[2] || '0');
                return bPatch - aPatch;
              });
            if (prevMinorVersions.length > 0) {
              extraPrevVersion = prevMinorVersions[0];
            }
          } else if (major > 0) {
            // Look for highest version of previous major
            const prevMajorPrefix = `${major - 1}.`;
            const prevMajorVersions = Object.keys(publishTimes)
              .filter(v => v.startsWith(prevMajorPrefix))
              .sort((a, b) => {
                const [, aMinor, aPatch] = a.split('.').map(Number);
                const [, bMinor, bPatch] = b.split('.').map(Number);
                if (aMinor !== bMinor) return bMinor - aMinor;
                return bPatch - aPatch;
              });
            if (prevMajorVersions.length > 0) {
              extraPrevVersion = prevMajorVersions[0];
            }
          }
          if (options.verbose && extraPrevVersion && publishTimes[extraPrevVersion]) {
            console.log(`📍 Using ${extraPrevVersion} as start boundary for time ranges`);
          }
        }
        // Build time ranges for each version
        for (let i = 0; i < versionsToSearch.length; i++) {
          const version = versionsToSearch[i];
          // Start time:
          // - For the first (newest) version, use the publish time of the version before it
          // - For middle versions, use the publish time of the next version in our list
          // - For the last (oldest) version, use the extra previous version we found
          let startTime = '2000-01-01T00:00:00Z'; // Default to old date
          if (i === 0) {
            // First (newest) version - find its immediate predecessor
            const versionParts = version.split('.');
            const major = parseInt(versionParts[0]);
            const minor = parseInt(versionParts[1]);
            const patch = parseInt(versionParts[2]);
            let prevVersion = '';
            if (patch > 0) {
              prevVersion = `${major}.${minor}.${patch - 1}`;
            } else if (minor > 0) {
              // Look for highest patch of previous minor
              const prevMinorPrefix = `${major}.${minor - 1}.`;
              const prevMinorVersions = Object.keys(publishTimes)
                .filter(v => v.startsWith(prevMinorPrefix))
                .sort((a, b) => {
                  const aPatch = parseInt(a.split('.')[2] || '0');
                  const bPatch = parseInt(b.split('.')[2] || '0');
                  return bPatch - aPatch;
                });
              if (prevMinorVersions.length > 0) {
                prevVersion = prevMinorVersions[0];
              }
            }
            // NOTE(review): unlike the extraPrevVersion lookup above, this
            // branch has no previous-major fallback for x.0.0 versions, so
            // startTime stays at the 2000-01-01 default in that case — confirm
            // that is intended.
            if (publishTimes[prevVersion]) {
              startTime = publishTimes[prevVersion];
            }
          } else if (i < versionsToSearch.length - 1) {
            // Middle versions - use the next version in our list
            const prevVersionInList = versionsToSearch[i + 1];
            if (publishTimes[prevVersionInList]) {
              startTime = publishTimes[prevVersionInList];
            }
          } else {
            // Last (oldest) version - use the extra previous version
            if (extraPrevVersion && publishTimes[extraPrevVersion]) {
              startTime = publishTimes[extraPrevVersion];
            }
          }
          // End time is this version's publish time (or now for unreleased)
          let endTime = publishTimes[version] || new Date().toISOString();
          versionTimeRanges.push({ version, startTime, endTime });
          if (options.verbose) {
            console.log(`📅 Version ${version}: ${new Date(startTime).toLocaleString()} - ${new Date(endTime).toLocaleString()}`);
          }
        }
        // Always log what we're doing for single version
        if (historicalCount === 1) {
          const latestRange = versionTimeRanges[0];
          if (latestRange) {
            console.log(`📦 Using npm time range for ${latestRange.version}: ${new Date(latestRange.startTime).toLocaleString()} - ${new Date(latestRange.endTime).toLocaleString()}`);
          }
        }
      } catch (e) {
        console.log('❌ Could not fetch npm publish times. Cannot proceed without time ranges.');
        process.exit(1);
      }
    }
    console.log(`🔍 Searching memories for versions: ${versionsToSearch.join(', ')}`);
    console.log(`📦 Project: ${projectName}\n`);
    // Phase 1: Search for version-related memories using MCP tools
    // ALWAYS use time range search - no other method
    // (The prompt body below is intentionally flush-left: those lines are part
    // of the template string sent to Claude.)
    const searchPrompt = versionTimeRanges.length > 0 ?
`You are helping generate a changelog by searching for memories within specific time ranges for multiple versions.
PROJECT: ${projectName}
VERSION TIME RANGES:
${versionTimeRanges.map(r => `- Version ${r.version}: ${new Date(r.startTime).toLocaleDateString()} to ${new Date(r.endTime).toLocaleDateString()}`).join('\n')}
YOUR TASK:
Use mcp__claude-mem__chroma_query_documents to search for memories for each version time range.
SEARCH STRATEGY:
${versionTimeRanges.map(r => {
  const startDate = new Date(r.startTime);
  const endDate = new Date(r.endTime);
  // Generate all date prefixes between start and end
  const datePrefixes: string[] = [];
  const currentDate = new Date(startDate);
  while (currentDate <= endDate) {
    // Add day prefix like "2025-09-09"
    const dayPrefix = currentDate.toISOString().split('T')[0];
    datePrefixes.push(dayPrefix);
    currentDate.setDate(currentDate.getDate() + 1);
  }
  return `
Version ${r.version} (${new Date(r.startTime).toLocaleDateString()} to ${new Date(r.endTime).toLocaleDateString()}):
1. Search for memories from these dates: ${datePrefixes.join(', ')}
2. Make multiple calls to mcp__claude-mem__chroma_query_documents:
- collection_name: "claude_memories"
- query_texts: Include the project name AND date in each query:
* "${projectName} ${datePrefixes[0]} feature"
* "${projectName} ${datePrefixes[0]} fix"
* "${projectName} ${datePrefixes[0]} change"
* "${projectName} ${datePrefixes[0]} improvement"
* "${projectName} ${datePrefixes[0]} refactor"
- n_results: 50
3. The date in the query text helps semantic search find memories from that day
4. Assign memories to this version if their timestamp falls within:
- Start: ${r.startTime}
- End: ${r.endTime}`;
}).join('\n')}
IMPORTANT:
- Always include project name and date in query_texts for best results
- Semantic search will naturally find memories near those dates
- Group returned memories by version based on their timestamp metadata
Return a JSON object with this structure:
{
"memories": [
{
"version": "version_number",
"text": "memory content",
"metadata": {metadata object with timestamp},
"relevance": "high/medium/low"
}
]
}
Group memories by the version they belong to based on timestamp.
Start searching now.` :
`ERROR: No time ranges available. This should never happen.`;
    if (versionTimeRanges.length === 0) {
      console.log('❌ No time ranges available. Cannot search memories.');
      process.exit(1);
    }
    if (options.verbose) {
      console.log('📝 Calling Claude to search memories...');
    }
    // Call Claude with MCP tools to search memories
    const searchResponse = await query({
      prompt: searchPrompt,
      options: {
        allowedTools: [
          'mcp__claude-mem__chroma_query_documents',
          'mcp__claude-mem__chroma_get_documents'
        ],
        pathToClaudeCodeExecutable: getClaudePath()
      }
    });
    // Extract memories from response.
    // query() yields a stream of messages; concatenate all assistant text.
    let memoriesJson = '';
    if (searchResponse && typeof searchResponse === 'object' && Symbol.asyncIterator in searchResponse) {
      for await (const message of searchResponse) {
        if (message?.type === 'assistant' && message?.message?.content) {
          const content = message.message.content;
          if (typeof content === 'string') {
            memoriesJson += content;
          } else if (Array.isArray(content)) {
            for (const block of content) {
              if (block.type === 'text' && block.text) {
                memoriesJson += block.text;
              }
            }
          }
        }
      }
    }
    // Parse memories
    let memories: MemorySearchResult[] = [];
    try {
      // Extract JSON from response (might be wrapped in markdown)
      const jsonMatch = memoriesJson.match(/```json\n([\s\S]*?)\n```/) ||
        memoriesJson.match(/\{[\s\S]*\}/);
      if (jsonMatch) {
        const parsed = JSON.parse(jsonMatch[1] || jsonMatch[0]);
        if (parsed.memories && Array.isArray(parsed.memories)) {
          memories = parsed.memories;
        }
      }
    } catch (e) {
      console.error('⚠️ Could not parse memory search results:', e);
    }
    if (memories.length === 0) {
      console.log('\n⚠️ No version-related memories found. Try compressing more sessions first.');
      process.exit(1);
    }
    console.log(`✅ Found ${memories.length} version-related memories\n`);
    // Get system date for accuracy
    // NOTE(review): shells out to the POSIX `date` command — confirm Windows
    // support is not required here.
    const systemDate = execSync('date "+%Y-%m-%d %H:%M:%S %Z"').toString().trim();
    const todayStr = systemDate.split(' ')[0]; // YYYY-MM-DD format
    // Phase 2: Generate changelog entries from memories
    const changelogPrompt = `Analyze these memories and generate changelog entries.
PROJECT: ${projectName}
DATE: ${todayStr}
MEMORIES BY VERSION:
${versionsToSearch.map(version => {
  const versionMemories = memories.filter(m => m.version === version);
  if (versionMemories.length === 0) return `### Version ${version}\nNo memories found.`;
  return `### Version ${version} (${versionMemories.length} memories):
${versionMemories.map((m, i) => `${i + 1}. ${m.text}`).join('\n')}`;
}).join('\n\n')}
INSTRUCTIONS:
1. Extract concrete changes, fixes, and additions from the memories
2. Categorize each change as: Added, Changed, Fixed, Removed, Deprecated, or Security
3. Write clear, user-facing descriptions
4. Start each entry with an action verb
5. Focus on what matters to users, not internal implementation details
Return ONLY a JSON array with this structure:
[
{
"version": "3.6.1",
"type": "Added",
"description": "New feature description"
},
{
"version": "3.6.1",
"type": "Fixed",
"description": "Bug fix description"
}
]`;
    console.log('🔄 Generating changelog entries...');
    // Call Claude to generate changelog entries (no tools needed this time)
    const changelogResponse = await query({
      prompt: changelogPrompt,
      options: {
        allowedTools: [],
        pathToClaudeCodeExecutable: getClaudePath()
      }
    });
    // Extract JSON from response
    let entriesJson = '';
    if (changelogResponse && typeof changelogResponse === 'object' && Symbol.asyncIterator in changelogResponse) {
      for await (const message of changelogResponse) {
        if (message?.type === 'assistant' && message?.message?.content) {
          const content = message.message.content;
          if (typeof content === 'string') {
            entriesJson += content;
          } else if (Array.isArray(content)) {
            for (const block of content) {
              if (block.type === 'text' && block.text) {
                entriesJson += block.text;
              }
            }
          }
        }
      }
    }
    // Parse changelog entries
    let entries: ChangelogEntry[] = [];
    try {
      // Extract JSON (might be wrapped in markdown)
      const jsonMatch = entriesJson.match(/```json\n([\s\S]*?)\n```/) ||
        entriesJson.match(/\[[\s\S]*\]/);
      if (jsonMatch) {
        const parsed = JSON.parse(jsonMatch[1] || jsonMatch[0]);
        if (Array.isArray(parsed)) {
          const generatedAt = new Date().toISOString();
          entries = parsed.map(e => ({
            ...e,
            date: todayStr,
            timestamp: e.timestamp || generatedAt, // Memory timestamp if available
            generatedAt: generatedAt // When this changelog was generated
          }));
        }
      }
    } catch (e) {
      console.error('⚠️ Could not parse changelog entries:', e);
    }
    if (entries.length === 0) {
      console.log('⚠️ No changelog entries generated.');
      process.exit(1);
    }
    // Ensure project changelog directory exists
    if (!fs.existsSync(projectChangelogDir)) {
      fs.mkdirSync(projectChangelogDir, { recursive: true });
    }
    // Save entries to project JSONL file
    console.log(`\n💾 Saving ${entries.length} changelog entries to ${path.basename(changelogJsonlPath)}`);
    // When using --historical or --generate, remove old entries for the versions being regenerated
    if ((options.historical && historicalCount > 1) || options.generate) {
      let existingEntries: ChangelogEntry[] = [];
      if (fs.existsSync(changelogJsonlPath)) {
        const lines = fs.readFileSync(changelogJsonlPath, 'utf-8').split('\n').filter(l => l.trim());
        for (const line of lines) {
          try {
            const entry = JSON.parse(line);
            // Keep entries that are NOT in the versions we're regenerating
            if (!versionsToSearch.includes(entry.version)) {
              existingEntries.push(entry);
            }
          } catch (e) {
            // Skip invalid lines
          }
        }
      }
      // Rewrite the file with filtered entries plus new ones
      const allEntries = [...existingEntries, ...entries];
      const jsonlContent = allEntries.map(entry => JSON.stringify(entry)).join('\n') + '\n';
      fs.writeFileSync(changelogJsonlPath, jsonlContent);
      console.log(`🔄 Regenerated entries for versions: ${versionsToSearch.join(', ')}`);
    } else {
      // Append new entries to JSONL
      const jsonlContent = entries.map(entry => JSON.stringify(entry)).join('\n') + '\n';
      fs.appendFileSync(changelogJsonlPath, jsonlContent);
    }
    // Now generate markdown from all JSONL entries
    console.log('\n📝 Generating CHANGELOG.md from entries...');
    // Read all entries from JSONL
    let allEntries: ChangelogEntry[] = [];
    if (fs.existsSync(changelogJsonlPath)) {
      const lines = fs.readFileSync(changelogJsonlPath, 'utf-8').split('\n').filter(l => l.trim());
      for (const line of lines) {
        try {
          allEntries.push(JSON.parse(line));
        } catch (e) {
          // Skip invalid lines
        }
      }
    }
    // Group entries by version
    const entriesByVersion = new Map<string, ChangelogEntry[]>();
    for (const entry of allEntries) {
      if (!entriesByVersion.has(entry.version)) {
        entriesByVersion.set(entry.version, []);
      }
      entriesByVersion.get(entry.version)!.push(entry);
    }
    // Generate markdown
    let markdown = '# Changelog\n\nAll notable changes to this project will be documented in this file.\n\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).\n\n';
    // Sort versions in descending order
    const sortedVersions = Array.from(entriesByVersion.keys()).sort((a, b) => {
      const aParts = a.split('.').map(Number);
      const bParts = b.split('.').map(Number);
      for (let i = 0; i < 3; i++) {
        if (aParts[i] !== bParts[i]) return bParts[i] - aParts[i];
      }
      return 0;
    });
    for (const version of sortedVersions) {
      const versionEntries = entriesByVersion.get(version)!;
      const date = versionEntries[0].date || todayStr;
      markdown += `\n## [${version}] - ${date}\n\n`;
      // Group by type
      const types: Array<ChangelogEntry['type']> = ['Added', 'Changed', 'Fixed', 'Removed', 'Deprecated', 'Security'];
      for (const type of types) {
        const typeEntries = versionEntries.filter(e => e.type === type);
        if (typeEntries.length > 0) {
          markdown += `### ${type}\n`;
          for (const entry of typeEntries) {
            markdown += `- ${entry.description}\n`;
          }
          markdown += '\n';
        }
      }
    }
    // Write the CHANGELOG.md
    const changelogPath = path.join(process.cwd(), 'CHANGELOG.md');
    fs.writeFileSync(changelogPath, markdown);
    console.log(`✅ Generated CHANGELOG.md with ${allEntries.length} total entries across ${entriesByVersion.size} versions!`);
    if (options.preview) {
      console.log('\n📄 Preview:\n');
      console.log(markdown.split('\n').slice(0, 30).join('\n'));
      if (markdown.split('\n').length > 30) {
        console.log('\n... (truncated for preview)');
      }
    }
  } catch (error) {
    console.error('❌ Error generating changelog:', error instanceof Error ? error.message : error);
    if (error instanceof Error && error.stack) {
      console.error('Stack:', error.stack);
    }
    process.exit(1);
  }
}
/**
 * Re-render CHANGELOG.md purely from the project's JSONL entry log (the
 * `--update` flag). No memory search or Claude calls happen here — this is a
 * deterministic re-render of previously generated entries.
 *
 * Fixes vs. previous revision:
 *  - version headings fall back to today's date when an entry has no `date`
 *    (matching the fallback the generator path applies);
 *  - the semver sort treats missing/non-numeric parts as 0 instead of
 *    producing NaN comparisons for malformed version strings.
 *
 * Exits the process with code 1 when no entries exist.
 *
 * @param options commander flags; only `preview` is consulted here
 */
async function updateChangelogFromJsonl(options: OptionValues): Promise<void> {
  try {
    // Resolve the project name from package.json, falling back to the cwd name.
    const packageJsonPath = path.join(process.cwd(), 'package.json');
    let projectName = 'unknown';
    if (fs.existsSync(packageJsonPath)) {
      try {
        const packageData = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8'));
        projectName = packageData.name || path.basename(process.cwd());
      } catch (e) {
        projectName = path.basename(process.cwd());
      }
    }
    const projectChangelogDir = path.join(
      process.env.HOME || process.env.USERPROFILE || '',
      '.claude-mem',
      'projects'
    );
    const changelogJsonlPath = path.join(projectChangelogDir, `${projectName}-changelog.jsonl`);
    if (!fs.existsSync(changelogJsonlPath)) {
      console.log('❌ No changelog entries found. Generate some first with: claude-mem changelog');
      process.exit(1);
    }
    console.log('📝 Updating CHANGELOG.md from JSONL entries...');
    // Read every valid JSONL entry; corrupt lines are skipped silently.
    let allEntries: ChangelogEntry[] = [];
    const lines = fs.readFileSync(changelogJsonlPath, 'utf-8').split('\n').filter(l => l.trim());
    for (const line of lines) {
      try {
        allEntries.push(JSON.parse(line));
      } catch (e) {
        // Skip invalid lines
      }
    }
    if (allEntries.length === 0) {
      console.log('❌ No valid entries found in JSONL file');
      process.exit(1);
    }
    // Group entries by version
    const entriesByVersion = new Map<string, ChangelogEntry[]>();
    for (const entry of allEntries) {
      if (!entriesByVersion.has(entry.version)) {
        entriesByVersion.set(entry.version, []);
      }
      entriesByVersion.get(entry.version)!.push(entry);
    }
    // Generate markdown
    let markdown = '# Changelog\n\nAll notable changes to this project will be documented in this file.\n\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).\n\n';
    // Sort versions in descending semver order. Missing/non-numeric parts are
    // coerced to 0 so malformed versions cannot poison the sort with NaN.
    const sortedVersions = Array.from(entriesByVersion.keys()).sort((a, b) => {
      const aParts = a.split('.').map(Number);
      const bParts = b.split('.').map(Number);
      for (let i = 0; i < 3; i++) {
        const aPart = aParts[i] || 0;
        const bPart = bParts[i] || 0;
        if (aPart !== bPart) return bPart - aPart;
      }
      return 0;
    });
    for (const version of sortedVersions) {
      const versionEntries = entriesByVersion.get(version)!;
      // Fall back to today's date when an entry predates the `date` field,
      // mirroring the `|| todayStr` fallback in the generator path.
      const date = versionEntries[0].date || new Date().toISOString().split('T')[0];
      markdown += `\n## [${version}] - ${date}\n\n`;
      // Group by Keep a Changelog category, in canonical order.
      const types: Array<ChangelogEntry['type']> = ['Added', 'Changed', 'Fixed', 'Removed', 'Deprecated', 'Security'];
      for (const type of types) {
        const typeEntries = versionEntries.filter(e => e.type === type);
        if (typeEntries.length > 0) {
          markdown += `### ${type}\n`;
          for (const entry of typeEntries) {
            markdown += `- ${entry.description}\n`;
          }
          markdown += '\n';
        }
      }
    }
    // Write the CHANGELOG.md
    const changelogPath = path.join(process.cwd(), 'CHANGELOG.md');
    fs.writeFileSync(changelogPath, markdown);
    console.log(`✅ Updated CHANGELOG.md with ${allEntries.length} entries across ${entriesByVersion.size} versions!`);
    if (options.preview) {
      console.log('\n📄 Preview:\n');
      console.log(markdown.split('\n').slice(0, 30).join('\n'));
      if (markdown.split('\n').length > 30) {
        console.log('\n... (truncated for preview)');
      }
    }
  } catch (error) {
    console.error('❌ Error updating changelog:', error instanceof Error ? error.message : error);
    process.exit(1);
  }
}
+43
View File
@@ -0,0 +1,43 @@
import { OptionValues } from 'commander';
import { basename, dirname } from 'path';
import {
createLoadingMessage,
createCompletionMessage,
createOperationSummary,
createUserFriendlyError
} from '../prompts/templates/context/ContextTemplates.js';
/**
 * Compress a session transcript into the claude-mem archive.
 *
 * @param transcript path to a .jsonl transcript file; when omitted, a
 *                   user-friendly error is printed and the call returns
 * @param options    `verbose` toggles compressor logging; `sessionId`
 *                   overrides the id derived from the transcript filename
 * @throws rethrows any compressor failure after printing a friendly message
 */
export async function compress(transcript?: string, options: OptionValues = {}): Promise<void> {
  console.log(createLoadingMessage('compressing'));

  // Guard clause: nothing to do without a transcript path.
  if (!transcript) {
    console.log(createUserFriendlyError('Compression', 'No transcript file provided', 'Please provide a path to a transcript file'));
    return;
  }

  try {
    const startedAt = Date.now();

    // Deferred import: the compressor module is only loaded when a
    // compression actually runs.
    const { TranscriptCompressor } = await import('../core/compression/TranscriptCompressor.js');
    const engine = new TranscriptCompressor({
      verbose: options.verbose || false
    });

    const sessionId = options.sessionId || basename(transcript, '.jsonl');
    const archivePath = await engine.compress(transcript, sessionId);
    const elapsed = Date.now() - startedAt;

    console.log(createCompletionMessage('Compression', undefined, `Session archived as ${basename(archivePath)}`));
    console.log(createOperationSummary('compress', { count: 1, duration: elapsed, details: `Session: ${sessionId}` }));
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    console.log(createUserFriendlyError(
      'Compression',
      errorMessage,
      'Check that the transcript file exists and you have write permissions'
    ));
    throw error; // Re-throw to maintain existing error handling behavior
  }
}
+146
View File
@@ -0,0 +1,146 @@
/**
* Hook command handlers for binary distribution
* These execute the actual hook logic embedded in the binary
*/
import { basename, sep } from 'path';
import { compress } from './compress.js';
import { loadContext } from './load-context.js';
/**
 * Pre-compact hook handler.
 * Buffers the Claude Code hook payload from stdin, resolves the transcript
 * path (JSON `transcript_path`, raw stdin, $TRANSCRIPT_PATH, or argv), and
 * compresses that transcript before compaction discards it.
 */
export async function preCompactHook(): Promise<void> {
  try {
    // Claude Code delivers a JSON payload on stdin; buffer it in full first.
    process.stdin.setEncoding('utf8');
    const chunks: string[] = [];
    for await (const chunk of process.stdin) {
      chunks.push(chunk);
    }
    const inputData = chunks.join('');

    // Prefer transcript_path from the JSON payload; when parsing fails,
    // treat the raw input as a literal path.
    let transcriptPath: string | undefined;
    if (inputData) {
      try {
        transcriptPath = JSON.parse(inputData).transcript_path;
      } catch {
        transcriptPath = inputData.trim();
      }
    }

    // Last resorts: environment variable, then first CLI argument.
    transcriptPath = transcriptPath || process.env.TRANSCRIPT_PATH || process.argv[2];

    if (!transcriptPath) {
      // Nothing to compress — report everything we looked at for debugging.
      console.log('🗜️ Compressing session transcript...');
      console.log('❌ No transcript path provided to pre-compact hook');
      console.log('Hook data received:', inputData || 'none');
      console.log('Environment TRANSCRIPT_PATH:', process.env.TRANSCRIPT_PATH || 'not set');
      console.log('Command line args:', process.argv.slice(2));
      return;
    }

    await compress(transcriptPath, { dryRun: false });
  } catch (error: any) {
    console.error('Pre-compact hook failed:', error.message);
    process.exit(1);
  }
}
/**
 * Session-start hook handler.
 * Reads the hook payload from stdin, derives the project name from the
 * payload's `cwd` (falling back to the process cwd), and loads recent
 * context for the new session.
 */
export async function sessionStartHook(): Promise<void> {
  try {
    // Buffer the entire stdin payload before parsing.
    process.stdin.setEncoding('utf8');
    const chunks: string[] = [];
    for await (const chunk of process.stdin) {
      chunks.push(chunk);
    }
    const inputData = chunks.join('');

    // Derive the project name from the hook's working directory, if present.
    let project: string | undefined;
    if (inputData) {
      try {
        const hookData = JSON.parse(inputData);
        project = hookData.cwd ? basename(hookData.cwd) : undefined;
      } catch (parseError) {
        // Malformed payload: log it and continue without project filtering.
        console.error('Failed to parse session-start hook data:', parseError);
      }
    }

    // Fallback: the directory this process was launched from.
    project = project || basename(process.cwd());

    await loadContext({ format: 'session-start', count: '10', project });
  } catch (error: any) {
    console.error('Session-start hook failed:', error.message);
    process.exit(1);
  }
}
/**
 * Session-end hook handler.
 * When the session ends via /clear (payload reason === 'clear'), compresses
 * the transcript before Claude Code deletes it; otherwise just acknowledges.
 */
export async function sessionEndHook(): Promise<void> {
  try {
    // Buffer the entire stdin payload before parsing.
    process.stdin.setEncoding('utf8');
    const pieces: string[] = [];
    for await (const piece of process.stdin) {
      pieces.push(piece);
    }
    const inputData = pieces.join('');

    if (inputData) {
      try {
        const hookData = JSON.parse(inputData);
        // Only a /clear with a known transcript warrants compression.
        const endedByClear = hookData.reason === 'clear' && hookData.transcript_path;
        if (endedByClear) {
          console.log('🗜️ Compressing current session before /clear...');
          await compress(hookData.transcript_path, { dryRun: false });
        }
      } catch (parseError) {
        // Malformed payload: log but never fail the hook over it.
        console.error('Failed to parse hook data:', parseError);
      }
    }

    console.log('Session ended successfully');
  } catch (error: any) {
    console.error('Session-end hook failed:', error.message);
    process.exit(1);
  }
}
+541
View File
@@ -0,0 +1,541 @@
#!/usr/bin/env node
import * as p from '@clack/prompts';
import path from 'path';
import fs from 'fs';
import os from 'os';
import chalk from 'chalk';
import { TranscriptCompressor } from '../core/compression/TranscriptCompressor.js';
import { TitleGenerator, TitleGenerationRequest } from '../core/titles/TitleGenerator.js';
// Metadata parsed from the first line of a conversation transcript (.jsonl),
// with file-stat fallbacks (see scanConversations).
interface ConversationMetadata {
sessionId: string; // session id from the transcript, or the filename stem
timestamp: string; // ISO timestamp of the first record, or file mtime fallback
messageCount: number; // number of JSONL lines in the transcript
branch?: string; // git branch recorded in the transcript, when present
cwd: string; // working directory recorded in the transcript, or project dir
fileSize: number; // transcript size in bytes
}
// A conversation enriched with location and display/sort fields for the
// interactive import picker.
interface ConversationItem extends ConversationMetadata {
filePath: string; // absolute path to the transcript file
projectName: string; // name of the containing project directory
parsedDate: Date; // timestamp parsed into a Date (with filename fallback)
relativeDate: string; // human-readable age, e.g. "3h ago"
}
// Render a byte count as a compact human-readable size: whole bytes below
// 1 KiB, otherwise KiB/MiB with one decimal place.
function formatFileSize(bytes: number): string {
  const KB = 1024;
  const MB = KB * 1024;
  if (bytes < KB) return `${bytes}B`;
  return bytes < MB
    ? `${(bytes / KB).toFixed(1)}KB`
    : `${(bytes / MB).toFixed(1)}MB`;
}
// Render a date as a coarse age relative to now: "just now", then minutes,
// hours, days, weeks, months (30-day), and years (365-day) buckets.
function formatRelativeDate(date: Date): string {
  const elapsedMs = Date.now() - date.getTime();
  const minutes = Math.floor(elapsedMs / 60000);
  if (minutes < 1) return 'just now';
  if (minutes < 60) return `${minutes}m ago`;
  const hours = Math.floor(elapsedMs / 3600000);
  if (hours < 24) return `${hours}h ago`;
  const days = Math.floor(elapsedMs / 86400000);
  if (days < 7) return `${days}d ago`;
  if (days < 30) return `${Math.floor(days / 7)}w ago`;
  if (days < 365) return `${Math.floor(days / 30)}mo ago`;
  return `${Math.floor(days / 365)}y ago`;
}
// Resolve a conversation's date. Tries, in order: the recorded timestamp
// string, a YYYY-MM-DD_HH-MM-SS pattern embedded in the file path (taken as
// local time), and finally the file's mtime.
function parseTimestamp(timestamp: string, fallbackPath: string): Date {
  const direct = new Date(timestamp);
  if (!isNaN(direct.getTime())) return direct;

  // Fallback: try to extract a stamp from the filename.
  const stamp = fallbackPath.match(/(\d{4})-(\d{2})-(\d{2})_(\d{2})-(\d{2})-(\d{2})/);
  if (stamp) {
    const [year, month, day, hour, minute, second] = stamp.slice(1).map(Number);
    return new Date(year, month - 1, day, hour, minute, second);
  }

  // Last resort: the file's modification time.
  return fs.statSync(fallbackPath).mtime;
}
// Pull the first user-authored message text out of a transcript for use as a
// display title. Handles both string content and content-block arrays;
// returns 'Conversation' when nothing usable is found or the file is
// unreadable.
function extractFirstUserMessage(filePath: string): string {
  const FALLBACK = 'Conversation';
  try {
    const raw = fs.readFileSync(filePath, 'utf-8');
    for (const line of raw.trim().split('\n').filter(Boolean)) {
      try {
        const record = JSON.parse(line);
        if (record.type !== 'user' || !record.message?.content) continue;
        const body = record.message.content;
        if (typeof body === 'string') {
          return body.trim();
        }
        if (Array.isArray(body)) {
          // Join all text blocks, skipping tool-use and other block types.
          const text = body
            .filter((part: any) => part.type === 'text')
            .map((part: any) => part.text)
            .join(' ');
          if (text.trim()) return text.trim();
        }
      } catch {
        // Unparseable line — keep scanning.
      }
    }
    return FALLBACK;
  } catch {
    return FALLBACK;
  }
}
// Collect the session ids already recorded in the claude-mem index
// (~/.claude-mem/claude-mem-index.jsonl) so previously imported
// conversations can be skipped. Returns an empty set when no index exists.
async function loadImportedSessions(): Promise<Set<string>> {
  const imported = new Set<string>();
  const indexPath = path.join(os.homedir(), '.claude-mem', 'claude-mem-index.jsonl');
  if (!fs.existsSync(indexPath)) return imported;

  const records = fs.readFileSync(indexPath, 'utf-8').trim().split('\n').filter(Boolean);
  for (const record of records) {
    try {
      const parsed = JSON.parse(record);
      // Accept both session_id (index format) and sessionId (legacy format).
      const id = parsed.session_id || parsed.sessionId;
      if (id) imported.add(id);
    } catch {
      // Unparseable line — ignore and continue.
    }
  }
  return imported;
}
/**
 * Scans ~/.claude/projects for conversation transcripts (*.jsonl) that have
 * not yet been imported, building display metadata for each.
 * Returns the un-imported conversations plus a count of files skipped
 * because their session ID already appears in the claude-mem index.
 * Files that cannot be read or whose first line is not valid JSON are
 * silently ignored.
 */
async function scanConversations(): Promise<{ conversations: ConversationItem[]; skippedCount: number }> {
  const claudeDir = path.join(os.homedir(), '.claude', 'projects');
  if (!fs.existsSync(claudeDir)) {
    return { conversations: [], skippedCount: 0 };
  }
  // Each subdirectory of ~/.claude/projects is one project.
  const projects = fs.readdirSync(claudeDir)
    .filter(dir => fs.statSync(path.join(claudeDir, dir)).isDirectory());
  const conversations: ConversationItem[] = [];
  const importedSessionIds = await loadImportedSessions();
  let skippedCount = 0;
  for (const project of projects) {
    const projectDir = path.join(claudeDir, project);
    const files = fs.readdirSync(projectDir)
      .filter(file => file.endsWith('.jsonl'))
      .map(file => path.join(projectDir, file));
    for (const filePath of files) {
      try {
        const content = fs.readFileSync(filePath, 'utf-8');
        const lines = content.trim().split('\n').filter(Boolean);
        // Parse first line for metadata
        const firstLine = JSON.parse(lines[0]);
        const messageCount = lines.length;
        const stats = fs.statSync(filePath);
        const fileSize = stats.size;
        const metadata: ConversationMetadata = {
          // Fall back to the filename when the transcript carries no sessionId.
          sessionId: firstLine.sessionId || path.basename(filePath, '.jsonl'),
          timestamp: firstLine.timestamp || stats.mtime.toISOString(),
          messageCount,
          branch: firstLine.branch,
          cwd: firstLine.cwd || projectDir,
          fileSize
        };
        // Skip if already imported
        if (importedSessionIds.has(metadata.sessionId)) {
          skippedCount++;
          continue;
        }
        const projectName = path.basename(path.dirname(filePath));
        const parsedDate = parseTimestamp(metadata.timestamp, filePath);
        const relativeDate = formatRelativeDate(parsedDate);
        conversations.push({
          filePath,
          ...metadata,
          projectName,
          parsedDate,
          relativeDate
        });
      } catch {} // unreadable or unparseable transcript — skip it
    }
  }
  return { conversations, skippedCount };
}
/**
 * Interactive CLI flow for importing historical Claude Code conversations
 * into claude-mem. Scans ~/.claude/projects for un-imported transcripts,
 * lets the user pick conversations via one of four modes (browse by project,
 * whole project, most recent, or search), then compresses and indexes the
 * selection via processImport.
 *
 * @param options.verbose - print per-conversation details after each import
 * @param options.multi   - in browse mode, use multi-select instead of the
 *                          one-at-a-time continuous import loop
 */
export async function importHistory(options: { verbose?: boolean; multi?: boolean } = {}) {
  console.clear();
  p.intro(chalk.bgCyan.black(' CLAUDE-MEM IMPORT '));
  const s = p.spinner();
  s.start('Scanning conversation history');
  const { conversations, skippedCount } = await scanConversations();
  // Nothing new to import: explain whether everything was already imported
  // or no transcripts exist at all.
  if (conversations.length === 0) {
    s.stop('No new conversations found');
    const message = skippedCount > 0
      ? `All ${skippedCount} conversation${skippedCount === 1 ? ' is' : 's are'} already imported.`
      : 'No conversations found.';
    p.outro(chalk.yellow(message));
    return;
  }
  // Sort by date (newest first)
  conversations.sort((a, b) => b.parsedDate.getTime() - a.parsedDate.getTime());
  const statusMessage = skippedCount > 0
    ? `Found ${conversations.length} new conversation${conversations.length === 1 ? '' : 's'} (${skippedCount} already imported)`
    : `Found ${conversations.length} new conversation${conversations.length === 1 ? '' : 's'}`;
  s.stop(statusMessage);
  // Group conversations by project for better organization
  const projectGroups = conversations.reduce((acc, conv) => {
    if (!acc[conv.projectName]) acc[conv.projectName] = [];
    acc[conv.projectName].push(conv);
    return acc;
  }, {} as Record<string, ConversationItem[]>);
  // Create selection options
  const importMode = await p.select({
    message: 'How would you like to import?',
    options: [
      { value: 'browse', label: 'Browse by Project', hint: 'Select project then conversations' },
      { value: 'project', label: 'Import Entire Project', hint: 'Select project and import all conversations' },
      { value: 'recent', label: 'Recent Conversations', hint: 'Import most recent across all projects' },
      { value: 'search', label: 'Search', hint: 'Search for specific conversations' }
    ]
  });
  if (p.isCancel(importMode)) {
    p.cancel('Import cancelled');
    return;
  }
  let selectedConversations: ConversationItem[] = [];
  if (importMode === 'browse') {
    // Project selection — projects ordered by their newest conversation.
    const projectOptions = Object.entries(projectGroups)
      .sort((a, b) => b[1][0].parsedDate.getTime() - a[1][0].parsedDate.getTime())
      .map(([project, convs]) => ({
        value: project,
        label: project,
        hint: `${convs.length} conversation${convs.length === 1 ? '' : 's'}, latest: ${convs[0].relativeDate}`
      }));
    const selectedProject = await p.select({
      message: 'Select a project',
      options: projectOptions
    });
    if (p.isCancel(selectedProject)) {
      p.cancel('Import cancelled');
      return;
    }
    const projectConvs = projectGroups[selectedProject as string];
    // Ask about title generation
    const generateTitles = await p.confirm({
      message: 'Would you like to generate titles for easier browsing?',
      initialValue: false
    });
    if (p.isCancel(generateTitles)) {
      p.cancel('Import cancelled');
      return;
    }
    if (generateTitles) {
      await processTitleGeneration(projectConvs, selectedProject as string);
    }
    // Conversation selection within project.
    // NOTE(review): the label concatenates relativeDate and messageCount with
    // no separator — a "• " may have been lost upstream; confirm against the
    // original source before release.
    const titleGenerator = new TitleGenerator();
    const convOptions = projectConvs.map(conv => {
      const title = titleGenerator.getTitleForSession(conv.sessionId);
      const displayTitle = title ? `"${title}" • ` : '';
      return {
        value: conv.sessionId,
        label: `${displayTitle}${conv.relativeDate}${conv.messageCount} messages • ${formatFileSize(conv.fileSize)}`,
        hint: conv.branch ? `branch: ${conv.branch}` : undefined
      };
    });
    if (options.multi) {
      // Multi-select: pick any subset, then fall through to the shared
      // processImport call at the bottom of this function.
      const selected = await p.multiselect({
        message: `Select conversations from ${selectedProject} (Space to select, Enter to confirm)`,
        options: convOptions,
        required: false
      });
      if (p.isCancel(selected)) {
        p.cancel('Import cancelled');
        return;
      }
      const selectedIds = selected as string[];
      selectedConversations = projectConvs.filter(c => selectedIds.includes(c.sessionId));
    } else {
      // Single select with continuous import: import one at a time until the
      // user picks "Done" or all conversations are imported. This branch
      // imports inline and returns early (skipping the shared tail below).
      let continueImporting = true;
      const importedInSession = new Set<string>();
      while (continueImporting && projectConvs.length > importedInSession.size) {
        const availableConvs = projectConvs.filter(c => !importedInSession.has(c.sessionId));
        if (availableConvs.length === 0) break;
        // Rebuilt each pass so freshly generated titles are reflected.
        const titleGenerator = new TitleGenerator();
        const convOptions = availableConvs.map(conv => {
          const title = titleGenerator.getTitleForSession(conv.sessionId);
          const displayTitle = title ? `"${title}" • ` : '';
          return {
            value: conv.sessionId,
            label: `${displayTitle}${conv.relativeDate}${conv.messageCount} messages • ${formatFileSize(conv.fileSize)}`,
            hint: conv.branch ? `branch: ${conv.branch}` : undefined
          };
        });
        const selected = await p.select({
          message: `Select a conversation (${importedInSession.size}/${projectConvs.length} imported)`,
          options: [
            ...convOptions,
            { value: 'done', label: '✅ Done importing', hint: 'Exit import mode' }
          ]
        });
        if (p.isCancel(selected) || selected === 'done') {
          continueImporting = false;
          break;
        }
        const conv = availableConvs.find(c => c.sessionId === selected);
        if (conv) {
          selectedConversations = [conv];
          await processImport(selectedConversations, options.verbose);
          importedInSession.add(conv.sessionId);
        }
      }
      if (importedInSession.size > 0) {
        p.outro(chalk.green(`✅ Imported ${importedInSession.size} conversation${importedInSession.size === 1 ? '' : 's'}`));
      } else {
        p.outro(chalk.yellow('No conversations imported'));
      }
      return;
    }
  } else if (importMode === 'project') {
    // Project selection for importing entire project
    const projectOptions = Object.entries(projectGroups)
      .sort((a, b) => b[1][0].parsedDate.getTime() - a[1][0].parsedDate.getTime())
      .map(([project, convs]) => ({
        value: project,
        label: project,
        hint: `${convs.length} conversation${convs.length === 1 ? '' : 's'}, latest: ${convs[0].relativeDate}`
      }));
    const selectedProject = await p.select({
      message: 'Select a project to import all conversations',
      options: projectOptions
    });
    if (p.isCancel(selectedProject)) {
      p.cancel('Import cancelled');
      return;
    }
    const projectConvs = projectGroups[selectedProject as string];
    // Ask about title generation
    const generateTitles = await p.confirm({
      message: 'Would you like to generate titles for easier browsing?',
      initialValue: false
    });
    if (p.isCancel(generateTitles)) {
      p.cancel('Import cancelled');
      return;
    }
    if (generateTitles) {
      await processTitleGeneration(projectConvs, selectedProject as string);
    }
    const confirm = await p.confirm({
      message: `Import all ${projectConvs.length} conversation${projectConvs.length === 1 ? '' : 's'} from ${selectedProject}?`
    });
    if (p.isCancel(confirm) || !confirm) {
      p.cancel('Import cancelled');
      return;
    }
    selectedConversations = projectConvs;
  } else if (importMode === 'recent') {
    // Import the N most recent conversations across every project.
    const limit = await p.text({
      message: 'How many recent conversations?',
      placeholder: '10',
      initialValue: '10',
      validate: (value) => {
        const num = parseInt(value);
        if (isNaN(num) || num < 1) return 'Please enter a valid number';
        if (num > conversations.length) return `Only ${conversations.length} available`;
      }
    });
    if (p.isCancel(limit)) {
      p.cancel('Import cancelled');
      return;
    }
    const count = parseInt(limit as string);
    // conversations is already sorted newest-first (see sort above).
    selectedConversations = conversations.slice(0, count);
  } else if (importMode === 'search') {
    const searchTerm = await p.text({
      message: 'Search conversations (project name or session ID)',
      placeholder: 'Enter search term'
    });
    if (p.isCancel(searchTerm)) {
      p.cancel('Import cancelled');
      return;
    }
    // Case-insensitive substring match on project, session ID, or branch.
    const term = (searchTerm as string).toLowerCase();
    const matches = conversations.filter(c =>
      c.projectName.toLowerCase().includes(term) ||
      c.sessionId.toLowerCase().includes(term) ||
      (c.branch && c.branch.toLowerCase().includes(term))
    );
    if (matches.length === 0) {
      p.outro(chalk.yellow('No matching conversations found'));
      return;
    }
    const titleGenerator = new TitleGenerator();
    const matchOptions = matches.map(conv => {
      const title = titleGenerator.getTitleForSession(conv.sessionId);
      const displayTitle = title ? `"${title}" • ` : '';
      return {
        value: conv.sessionId,
        label: `${displayTitle}${conv.projectName}${conv.relativeDate}${conv.messageCount} msgs`,
        hint: formatFileSize(conv.fileSize)
      };
    });
    const selected = await p.multiselect({
      message: `Found ${matches.length} matches. Select to import:`,
      options: matchOptions,
      required: false
    });
    if (p.isCancel(selected)) {
      p.cancel('Import cancelled');
      return;
    }
    const selectedIds = selected as string[];
    selectedConversations = matches.filter(c => selectedIds.includes(c.sessionId));
  }
  // Process the import (shared tail for all modes except the continuous
  // single-select loop, which returns early above).
  if (selectedConversations.length > 0) {
    await processImport(selectedConversations, options.verbose);
    p.outro(chalk.green(`✅ Successfully imported ${selectedConversations.length} conversation${selectedConversations.length === 1 ? '' : 's'}`));
  } else {
    p.outro(chalk.yellow('No conversations selected for import'));
  }
}
async function processTitleGeneration(conversations: ConversationItem[], projectName: string) {
const titleGenerator = new TitleGenerator();
const existingTitles = titleGenerator.getExistingTitles();
// Filter conversations that don't have titles yet
const conversationsNeedingTitles = conversations.filter(conv => !existingTitles.has(conv.sessionId));
if (conversationsNeedingTitles.length === 0) {
p.note('All conversations already have titles!', 'Title Generation');
return;
}
const s = p.spinner();
s.start(`Generating titles for ${conversationsNeedingTitles.length} conversations...`);
const requests: TitleGenerationRequest[] = conversationsNeedingTitles.map(conv => ({
sessionId: conv.sessionId,
projectName: projectName,
firstMessage: extractFirstUserMessage(conv.filePath)
}));
try {
await titleGenerator.batchGenerateTitles(requests);
s.stop(`✅ Generated ${conversationsNeedingTitles.length} titles`);
} catch (error) {
s.stop(`❌ Failed to generate titles`);
console.error(chalk.red(`Error: ${error}`));
}
}
/**
 * Compresses and imports each selected conversation in sequence, showing a
 * spinner with per-item progress. Individual failures are reported and do
 * not abort the remaining imports.
 */
async function processImport(conversations: ConversationItem[], verbose?: boolean) {
  const spinner = p.spinner();
  for (let index = 0; index < conversations.length; index++) {
    const item = conversations[index];
    // Only show "[i/n]" when importing more than one conversation.
    const progress = conversations.length > 1 ? `[${index + 1}/${conversations.length}] ` : '';
    spinner.start(`${progress}Importing ${item.projectName} (${item.relativeDate})`);
    try {
      // Derive the project name from the conversation's working directory.
      const projectName = path.basename(item.cwd);
      const compressor = new TranscriptCompressor();
      await compressor.compress(item.filePath, item.sessionId, projectName);
      spinner.stop(`${progress}Imported ${item.projectName} (${item.messageCount} messages)`);
      if (verbose) {
        p.note(`Session: ${item.sessionId}\nSize: ${formatFileSize(item.fileSize)}\nBranch: ${item.branch || 'main'}`, 'Details');
      }
    } catch (error) {
      spinner.stop(`${progress}Failed to import ${item.projectName}`);
      if (verbose) {
        console.error(chalk.red(`Error: ${error}`));
      }
    }
  }
}
File diff suppressed because it is too large Load Diff
+198
View File
@@ -0,0 +1,198 @@
import { OptionValues } from 'commander';
import fs from 'fs';
import { join } from 'path';
import { PathDiscovery } from '../services/path-discovery.js';
import {
createCompletionMessage,
createContextualError,
createUserFriendlyError,
formatTimeAgo,
outputSessionStartContent
} from '../prompts/templates/context/ContextTemplates.js';
// A parsed entry from the claude-mem index.
// NOTE(review): not referenced anywhere in this file's visible code — confirm
// it is still needed before removing.
interface IndexEntry {
  summary: string;
  entity: string;
  keywords: string[];
}
// Aggregate counts for the contents of the trash directory (see getTrashStatus).
interface TrashStatus {
  folderCount: number; // directories directly inside the trash dir
  fileCount: number; // regular files directly inside the trash dir
  totalSize: number; // combined stat sizes in bytes (directories contribute their own stat size only)
  isEmpty: boolean; // true when the trash dir is missing or has no entries
}
/**
 * Formats a byte count as a human-readable size string, e.g. "1.5 KB".
 * Non-positive inputs render as "0 B"; sizes of 1 TB or more are clamped to
 * GB instead of indexing past the unit table (which previously produced
 * strings like "1 undefined").
 */
function formatSize(bytes: number): string {
  if (bytes <= 0) return '0 B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB'];
  // Clamp the unit index into [0, sizes.length - 1].
  const i = Math.min(Math.max(Math.floor(Math.log(bytes) / Math.log(k)), 0), sizes.length - 1);
  return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
}
/**
 * Summarizes the trash directory: counts of folders and files plus the sum
 * of their stat sizes. Returns an "empty" status when the directory is
 * missing or contains no entries.
 */
function getTrashStatus(): TrashStatus {
  const trashDir = PathDiscovery.getInstance().getTrashDirectory();
  if (!fs.existsSync(trashDir)) {
    return { folderCount: 0, fileCount: 0, totalSize: 0, isEmpty: true };
  }
  const entries = fs.readdirSync(trashDir);
  if (entries.length === 0) {
    return { folderCount: 0, fileCount: 0, totalSize: 0, isEmpty: true };
  }
  let folderCount = 0;
  let fileCount = 0;
  let totalSize = 0;
  for (const entry of entries) {
    const stats = fs.statSync(join(trashDir, entry));
    if (stats.isDirectory()) {
      folderCount++;
    } else {
      fileCount++;
    }
    // Directories contribute only their own stat size, not their contents.
    totalSize += stats.size;
  }
  return { folderCount, fileCount, totalSize, isEmpty: false };
}
/**
 * CLI "load-context" command: reads the claude-mem JSONL index and prints
 * recent memories and overviews in one of three formats:
 *  - 'session-start': formatted output for the SessionStart hook
 *  - 'json': a raw JSON array of recent entries
 *  - default: human-readable pipe-separated lines
 * Entries can be filtered to a single project via options.project.
 * Errors and empty-index cases never throw; they print a friendly message.
 */
export async function loadContext(options: OptionValues = {}): Promise<void> {
  const pathDiscovery = PathDiscovery.getInstance();
  const indexPath = pathDiscovery.getIndexPath();
  try {
    // Check if index file exists
    if (!fs.existsSync(indexPath)) {
      if (options.format === 'session-start') {
        console.log(createContextualError('NO_MEMORIES', options.project || 'this project'));
      }
      return;
    }
    const content = fs.readFileSync(indexPath, 'utf-8');
    const lines = content.trim().split('\n').filter(line => line.trim());
    if (lines.length === 0) {
      if (options.format === 'session-start') {
        console.log(createContextualError('NO_MEMORIES', options.project || 'this project'));
      }
      return;
    }
    // Parse JSONL format - each line is a JSON object
    const jsonObjects: any[] = [];
    for (const line of lines) {
      try {
        // Skip lines that don't look like JSON (could be legacy format)
        if (!line.trim().startsWith('{')) {
          continue;
        }
        const obj = JSON.parse(line);
        jsonObjects.push(obj);
      } catch (e) {
        // Skip malformed JSON lines
        continue;
      }
    }
    if (jsonObjects.length === 0) {
      if (options.format === 'session-start') {
        console.log(createContextualError('NO_MEMORIES', options.project || 'this project'));
      }
      return;
    }
    // Separate memories, overviews, and other types
    const memories = jsonObjects.filter(obj => obj.type === 'memory');
    const overviews = jsonObjects.filter(obj => obj.type === 'overview');
    // NOTE(review): 'sessions' is computed but never used below — confirm
    // whether session entries should be surfaced or this can be dropped.
    const sessions = jsonObjects.filter(obj => obj.type === 'session');
    // Filter each type by project if specified
    let filteredMemories = memories;
    let filteredOverviews = overviews;
    if (options.project) {
      filteredMemories = memories.filter(obj => obj.project === options.project);
      filteredOverviews = overviews.filter(obj => obj.project === options.project);
    }
    if (options.format === 'session-start') {
      // Get last 10 memories and last 5 overviews for session-start
      const recentMemories = filteredMemories.slice(-10);
      const recentOverviews = filteredOverviews.slice(-5);
      // Combine them for the display
      const recentObjects = [...recentMemories, ...recentOverviews];
      // Find most recent timestamp for last session info
      let lastSessionTime = 'recently';
      const timestamps = recentObjects
        .map(obj => {
          // Get timestamp from JSON object
          return obj.timestamp ? new Date(obj.timestamp) : null;
        })
        .filter(date => date !== null)
        .sort((a, b) => b.getTime() - a.getTime());
      if (timestamps.length > 0) {
        lastSessionTime = formatTimeAgo(timestamps[0]);
      }
      // Use dual-stream output for session start formatting
      outputSessionStartContent({
        projectName: options.project || 'your project',
        memoryCount: recentMemories.length,
        lastSessionTime,
        recentObjects
      });
    } else if (options.format === 'json') {
      // For JSON format, combine last 10 of each type
      // NOTE(review): comment says "last 10 of each" but overviews are
      // sliced to the last 3 — confirm which is intended.
      const recentMemories = filteredMemories.slice(-10);
      const recentOverviews = filteredOverviews.slice(-3);
      const recentObjects = [...recentMemories, ...recentOverviews];
      console.log(JSON.stringify(recentObjects));
    } else {
      // Default format - show last 10 memories and last 3 overviews
      const recentMemories = filteredMemories.slice(-10);
      const recentOverviews = filteredOverviews.slice(-3);
      const totalCount = recentMemories.length + recentOverviews.length;
      console.log(createCompletionMessage('Context loading', totalCount, 'recent entries found'));
      // Show memories first
      recentMemories.forEach((obj) => {
        console.log(`${obj.text} | ${obj.document_id} | ${obj.keywords}`);
      });
      // Then show overviews
      recentOverviews.forEach((obj) => {
        console.log(`**Overview:** ${obj.content}`);
      });
    }
    // Display trash status if not empty (except for JSON format to avoid breaking JSON parsing)
    if (options.format !== 'json') {
      const trashStatus = getTrashStatus();
      if (!trashStatus.isEmpty) {
        const formattedSize = formatSize(trashStatus.totalSize);
        console.log(`🗑️ Trash ${trashStatus.folderCount} folders | ${trashStatus.fileCount} files | ${formattedSize} use \`$ claude-mem restore\``);
        console.log('');
      }
    }
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    if (options.format === 'session-start') {
      console.log(createContextualError('CONNECTION_FAILED', errorMessage));
    } else {
      console.log(createUserFriendlyError('Context loading', errorMessage, 'Check file permissions and try again'));
    }
  }
}
+84
View File
@@ -0,0 +1,84 @@
import { OptionValues } from 'commander';
import { readFileSync, readdirSync, statSync } from 'fs';
import { join } from 'path';
import { PathDiscovery } from '../services/path-discovery.js';
/**
 * Prints the tail of a single log file to stdout.
 *
 * @param logPath - absolute path of the log file to read
 * @param logType - human-readable label for the log (e.g. "Most Recent")
 * @param tail - number of trailing non-blank lines to display
 */
async function showLog(logPath: string, logType: string, tail: number): Promise<void> {
  try {
    const allLines = readFileSync(logPath, 'utf8')
      .split('\n')
      .filter(line => line.trim());
    const recent = allLines.slice(-tail);
    console.log(`📋 ${logType} Logs (last ${tail} lines):`);
    console.log(` File: ${logPath}`);
    console.log('');
    if (recent.length === 0) {
      console.log(' No log entries found');
    } else {
      for (const entry of recent) {
        console.log(` ${entry}`);
      }
    }
    console.log('');
  } catch (error) {
    // Missing/unreadable file: report instead of throwing so callers continue.
    console.log(`❌ Could not read ${logType.toLowerCase()} log: ${logPath}`);
  }
}
/**
 * CLI "logs" command: prints the tail of the most recent claude-mem log file
 * from the logs directory.
 *
 * @param options.tail - number of trailing lines to show (default 20)
 * @param options.all - when set, also report how many log files exist
 * @param options.follow - placeholder follow mode (see NOTE below)
 */
export async function logs(options: OptionValues = {}): Promise<void> {
  const logsDir = PathDiscovery.getLogsDirectory();
  // Parse --tail with an explicit radix and fall back to 20 for missing,
  // non-numeric, or non-positive values. (A negative tail would otherwise
  // make slice(-tail) read from the FRONT of the file.)
  const parsedTail = Number.parseInt(options.tail, 10);
  const tail = Number.isFinite(parsedTail) && parsedTail > 0 ? parsedTail : 20;
  // Find most recent log file by modification time.
  try {
    const files = readdirSync(logsDir);
    const logFiles = files
      .filter(f => f.startsWith('claude-mem-') && f.endsWith('.log'))
      .map(f => ({
        name: f,
        path: join(logsDir, f),
        mtime: statSync(join(logsDir, f)).mtime
      }))
      .sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
    if (logFiles.length === 0) {
      console.log('❌ No log files found in ~/.claude-mem/logs/');
      return;
    }
    // Show most recent log
    await showLog(logFiles[0].path, 'Most Recent', tail);
    if (options.all && logFiles.length > 1) {
      console.log(`📚 Found ${logFiles.length} total log files`);
    }
  } catch (error) {
    console.log('❌ Could not read logs directory: ~/.claude-mem/logs/');
    console.log(' Run a compression first to generate logs');
  }
  if (options.follow) {
    console.log('Following logs... (Press Ctrl+C to stop)');
    // NOTE(review): placeholder — the interval body is empty, so --follow
    // only keeps the process alive without watching anything. Replace with
    // fs.watch/fs.watchFile when follow support is implemented.
    setInterval(() => {
      // This would need proper file watching implementation
    }, 1000);
  }
}
+122
View File
@@ -0,0 +1,122 @@
#!/usr/bin/env node
/**
* One-time migration script to convert claude-mem-index.md to claude-mem-index.jsonl
*/
import fs from 'fs';
import path from 'path';
import { PathDiscovery } from '../services/path-discovery.js';
/**
 * One-time migration: converts the legacy markdown index
 * (claude-mem-index.md) into the JSONL index format.
 * Recognizes three legacy line shapes:
 *  - "# Session: <id> [<timestamp>]" session headers
 *  - "**Overview:** <text>" overview lines
 *  - pipe-separated memory entries: text | document_id | keywords | timestamp | archive
 * No-ops when there is no markdown index or a JSONL index already exists.
 * On success the old index is renamed to "<name>.backup".
 */
export function migrateToJSONL(): void {
  const pathDiscovery = PathDiscovery.getInstance();
  const oldIndexPath = path.join(pathDiscovery.getDataDirectory(), 'claude-mem-index.md');
  const newIndexPath = pathDiscovery.getIndexPath();
  // Check if old index exists
  if (!fs.existsSync(oldIndexPath)) {
    console.log('No markdown index found to migrate');
    return;
  }
  // Check if new index already exists
  if (fs.existsSync(newIndexPath)) {
    console.log('JSONL index already exists, skipping migration');
    return;
  }
  console.log('Starting migration from MD to JSONL...');
  const content = fs.readFileSync(oldIndexPath, 'utf-8');
  const lines = content.split('\n').filter(line => line.trim());
  const jsonlLines: string[] = [];
  // Track the session header most recently seen so that overview and memory
  // lines below it inherit its session_id/timestamp.
  let currentSessionId = '';
  let currentSessionTimestamp = '';
  for (const line of lines) {
    // Parse session headers: # Session: <id> [<timestamp>]
    const sessionMatch = line.match(/^# Session:\s*([^\[]+)(?:\s*\[([^\]]+)\])?/);
    if (sessionMatch) {
      currentSessionId = sessionMatch[1].trim();
      // Headers without a bracketed timestamp get "now" as a stand-in.
      currentSessionTimestamp = sessionMatch[2]?.trim() || new Date().toISOString();
      // Extract project from session ID (assuming format like <project>_<uuid>)
      const projectMatch = currentSessionId.match(/^([^_]+)_/);
      const project = projectMatch ? projectMatch[1] : 'unknown';
      jsonlLines.push(JSON.stringify({
        type: 'session',
        session_id: currentSessionId,
        timestamp: currentSessionTimestamp,
        project
      }));
      continue;
    }
    // Parse overviews: **Overview:** <text>
    const overviewMatch = line.match(/^\*\*Overview:\*\*\s*(.+)/);
    if (overviewMatch) {
      const overviewText = overviewMatch[1].trim();
      // Extract project from current session ID
      const projectMatch = currentSessionId.match(/^([^_]+)_/);
      const project = projectMatch ? projectMatch[1] : 'unknown';
      jsonlLines.push(JSON.stringify({
        type: 'overview',
        content: overviewText,
        session_id: currentSessionId,
        project,
        timestamp: currentSessionTimestamp
      }));
      continue;
    }
    // Skip certain lines
    if (line.startsWith('# NO SUMMARIES EXTRACTED')) {
      continue;
    }
    // Parse memory entries (pipe-separated)
    if (line.includes(' | ')) {
      const parts = line.split(' | ').map(p => p.trim());
      // Require at least text | document_id | keywords; trailing fields
      // (timestamp, archive) are optional and defaulted below.
      if (parts.length >= 3) {
        const [text, document_id, keywords, timestamp, archive] = parts;
        // Extract project from document_id (format: <project>_<session>_<number>)
        const projectMatch = document_id?.match(/^([^_]+)_/);
        const project = projectMatch ? projectMatch[1] : 'unknown';
        jsonlLines.push(JSON.stringify({
          type: 'memory',
          text,
          document_id: document_id || `${currentSessionId}_${Date.now()}`,
          keywords: keywords || '',
          session_id: currentSessionId,
          project,
          timestamp: timestamp || currentSessionTimestamp,
          archive: archive || `${currentSessionId}.jsonl.archive`
        }));
      }
    }
  }
  // Write JSONL file
  fs.writeFileSync(newIndexPath, jsonlLines.join('\n') + '\n');
  // Backup old index
  const backupPath = oldIndexPath + '.backup';
  fs.renameSync(oldIndexPath, backupPath);
  console.log(`✅ Migration complete!`);
  console.log(` - Converted ${jsonlLines.length} entries`);
  console.log(` - New index: ${newIndexPath}`);
  console.log(` - Backup: ${backupPath}`);
}
// Run if called directly (rather than imported as a module).
// NOTE(review): this string comparison misses Windows paths (drive letters,
// backslashes) and symlinked invocations — confirm it is acceptable for the
// supported platforms, or compare via url.fileURLToPath.
if (import.meta.url === `file://${process.argv[1]}`) {
  migrateToJSONL();
}
+24
View File
@@ -0,0 +1,24 @@
import { readdirSync, renameSync } from 'fs';
import { join } from 'path';
import * as p from '@clack/prompts';
import { PathDiscovery } from '../services/path-discovery.js';
/**
 * CLI "restore" command: interactively moves one file out of the claude-mem
 * trash directory back into the current working directory.
 * A missing trash directory is treated the same as an empty one (previously
 * readdirSync would throw before the first run ever created the directory;
 * empty-trash already guards this case).
 */
export async function restore(): Promise<void> {
  const trashDir = PathDiscovery.getInstance().getTrashDirectory();
  let files: string[];
  try {
    files = readdirSync(trashDir);
  } catch {
    // Directory does not exist (or is unreadable) — nothing to restore.
    console.log('Trash is empty');
    return;
  }
  if (files.length === 0) {
    console.log('Trash is empty');
    return;
  }
  const file = await p.select({
    message: 'Select file to restore:',
    options: files.map(f => ({ value: f, label: f }))
  });
  if (p.isCancel(file)) return;
  // Move the selection into the current working directory under its
  // trashed name.
  renameSync(join(trashDir, file), join(process.cwd(), file));
  console.log(`Restored ${file}`);
}
+70
View File
@@ -0,0 +1,70 @@
import { OptionValues } from 'commander';
import { appendFileSync } from 'fs';
import { PathDiscovery } from '../services/path-discovery.js';
/**
 * Generates a descriptive session ID from the message content.
 * Takes the first few meaningful words (length > 2, punctuation stripped),
 * joins them with dashes, caps the prefix at 30 characters, and appends an
 * HHMMSS timestamp suffix for uniqueness.
 * Messages with no usable words (e.g. only punctuation or short words) fall
 * back to the prefix "session" instead of producing an empty prefix.
 */
function generateSessionId(message: string): string {
  // Remove punctuation and split into words
  const words = message
    .toLowerCase()
    .replace(/[^\w\s]/g, ' ')
    .split(/\s+/)
    .filter(word => word.length > 2); // Skip short words like 'a', 'is', 'to'
  // Take first 3-4 meaningful words, max 30 chars; fall back when the
  // message yields no usable words at all.
  const sessionWords = words.slice(0, 4).join('-') || 'session';
  const truncated = sessionWords.length > 30 ? sessionWords.substring(0, 27) + '...' : sessionWords;
  // Add timestamp suffix to ensure uniqueness
  const timestamp = new Date().toISOString().substring(11, 19).replace(/:/g, '');
  return `${truncated}-${timestamp}`;
}
/**
* Save command - stores a message to both Chroma collection and JSONL index
*/
export async function save(message: string, options: OptionValues = {}): Promise<void> {
if (!message || message.trim() === '') {
console.error('Error: Message is required');
process.exit(1);
}
const pathDiscovery = PathDiscovery.getInstance();
const timestamp = new Date().toISOString();
const projectName = PathDiscovery.getCurrentProjectName();
const sessionId = generateSessionId(message);
const documentId = `${projectName}_${sessionId}_overview`;
// 1. Save to Chroma collection (skip for now - MCP tools only available in Claude Code context)
// TODO: Add Chroma integration when called from Claude Code with MCP server running
// 2. Append to JSONL index file
const indexPath = pathDiscovery.getIndexPath();
const indexEntry = {
type: "overview",
content: message,
session_id: sessionId,
project: projectName,
timestamp: timestamp
};
// Ensure the directory exists
pathDiscovery.ensureDirectory(pathDiscovery.getDataDirectory());
// Append to JSONL file
appendFileSync(indexPath, JSON.stringify(indexEntry) + '\n', 'utf8');
// 3. Return JSON response for hook compatibility
console.log(JSON.stringify({
success: true,
document_id: documentId,
session_id: sessionId,
project: projectName,
timestamp: timestamp,
suppressOutput: true
}));
}
+176
View File
@@ -0,0 +1,176 @@
import { readFileSync, existsSync, readdirSync, statSync } from 'fs';
import { join, resolve, dirname } from 'path';
import { execSync } from 'child_process';
import { fileURLToPath } from 'url';
import { PathDiscovery } from '../services/path-discovery.js';
const __dirname = dirname(fileURLToPath(import.meta.url));
/**
 * CLI "status" command: prints a multi-section health report for the Claude
 * Memory System — installed hook scripts, settings configuration, compressed
 * transcript counts, runtime tool versions, storage backend info, and an
 * overall installed/not-installed summary. Output goes to stdout only.
 */
export async function status(): Promise<void> {
  console.log('🔍 Claude Memory System Status Check');
  console.log('=====================================\n');
  console.log('📂 Installed Hook Scripts:');
  const pathDiscovery = PathDiscovery.getInstance();
  const claudeMemHooksDir = pathDiscovery.getHooksDirectory();
  const preCompactScript = join(claudeMemHooksDir, 'pre-compact.js');
  const sessionStartScript = join(claudeMemHooksDir, 'session-start.js');
  const sessionEndScript = join(claudeMemHooksDir, 'session-end.js');
  // Report whether a given hook script file exists on disk.
  const checkScript = (path: string, name: string) => {
    if (existsSync(path)) {
      console.log(`${name}: Found at ${path}`);
    } else {
      console.log(`${name}: Not found at ${path}`);
    }
  };
  checkScript(preCompactScript, 'pre-compact.js');
  checkScript(sessionStartScript, 'session-start.js');
  checkScript(sessionEndScript, 'session-end.js');
  console.log('');
  console.log('⚙️ Settings Configuration:');
  // Inspect a Claude settings file and report which claude-mem hooks it wires up.
  const checkSettings = (name: string, path: string) => {
    if (!existsSync(path)) {
      console.log(` ⏭️ ${name}: No settings file`);
      return;
    }
    console.log(` 📋 ${name}: ${path}`);
    try {
      const settings = JSON.parse(readFileSync(path, 'utf8'));
      // A hook counts as installed if any matcher's command references the
      // hook script or mentions claude-mem at all.
      const hasPreCompact = settings.hooks?.PreCompact?.some((matcher: any) =>
        matcher.hooks?.some((hook: any) =>
          hook.command?.includes('pre-compact.js') || hook.command?.includes('claude-mem')
        )
      );
      const hasSessionStart = settings.hooks?.SessionStart?.some((matcher: any) =>
        matcher.hooks?.some((hook: any) =>
          hook.command?.includes('session-start.js') || hook.command?.includes('claude-mem')
        )
      );
      const hasSessionEnd = settings.hooks?.SessionEnd?.some((matcher: any) =>
        matcher.hooks?.some((hook: any) =>
          hook.command?.includes('session-end.js') || hook.command?.includes('claude-mem')
        )
      );
      console.log(` PreCompact: ${hasPreCompact ? '✅' : '❌'}`);
      console.log(` SessionStart: ${hasSessionStart ? '✅' : '❌'}`);
      console.log(` SessionEnd: ${hasSessionEnd ? '✅' : '❌'}`);
    } catch (error: any) {
      console.log(` ⚠️ Could not parse settings`);
    }
  };
  checkSettings('Global', pathDiscovery.getClaudeSettingsPath());
  checkSettings('Project', join(process.cwd(), '.claude', 'settings.json'));
  console.log('');
  console.log('📦 Compressed Transcripts:');
  const claudeProjectsDir = join(pathDiscovery.getClaudeConfigDirectory(), 'projects');
  if (existsSync(claudeProjectsDir)) {
    try {
      let compressedCount = 0;
      let archiveCount = 0;
      // Recursively count compressed/archive files, at most 3 levels deep,
      // skipping dot-directories.
      const searchDir = (dir: string, depth = 0) => {
        if (depth > 3) return;
        const files = readdirSync(dir);
        for (const file of files) {
          const fullPath = join(dir, file);
          const stats = statSync(fullPath);
          if (stats.isDirectory() && !file.startsWith('.')) {
            searchDir(fullPath, depth + 1);
          } else if (file.endsWith('.jsonl.compressed')) {
            compressedCount++;
          } else if (file.endsWith('.jsonl.archive')) {
            archiveCount++;
          }
        }
      };
      searchDir(claudeProjectsDir);
      console.log(` Compressed files: ${compressedCount}`);
      console.log(` Archive files: ${archiveCount}`);
    } catch (error) {
      console.log(` ⚠️ Could not scan projects directory`);
    }
  } else {
    console.log(` ️ No Claude projects directory found`);
  }
  console.log('');
  console.log('🔧 Runtime Environment:');
  // Probe for a CLI tool by running "<cmd> --version".
  const checkCommand = (cmd: string, name: string) => {
    try {
      const version = execSync(`${cmd} --version`, { encoding: 'utf8' }).trim();
      console.log(`${name}: ${version}`);
    } catch {
      console.log(`${name}: Not found`);
    }
  };
  checkCommand('node', 'Node.js');
  checkCommand('bun', 'Bun');
  console.log('');
  console.log('🧠 Chroma Storage Status:');
  console.log(' ✅ Storage backend: Chroma MCP');
  console.log(` 📍 Data location: ${pathDiscovery.getChromaDirectory()}`);
  console.log(' 🔍 Features: Vector search, semantic similarity, document storage');
  console.log('');
  console.log('📊 Summary:');
  const globalPath = pathDiscovery.getClaudeSettingsPath();
  const projectPath = join(process.cwd(), '.claude', 'settings.json');
  let isInstalled = false;
  let installLocation = 'Not installed';
  // Installed = any of the three hooks is configured globally or per-project.
  try {
    if (existsSync(globalPath)) {
      const settings = JSON.parse(readFileSync(globalPath, 'utf8'));
      if (settings.hooks?.PreCompact || settings.hooks?.SessionStart || settings.hooks?.SessionEnd) {
        isInstalled = true;
        installLocation = 'Global';
      }
    }
    if (existsSync(projectPath)) {
      const settings = JSON.parse(readFileSync(projectPath, 'utf8'));
      if (settings.hooks?.PreCompact || settings.hooks?.SessionStart || settings.hooks?.SessionEnd) {
        isInstalled = true;
        installLocation = installLocation === 'Global' ? 'Global + Project' : 'Project';
      }
    }
  } catch {} // unparseable settings — fall through with whatever was detected
  if (isInstalled) {
    console.log(` ✅ Claude Memory System is installed (${installLocation})`);
    console.log('');
    console.log('💡 To test: Use /compact in Claude Code');
  } else {
    console.log(` ❌ Claude Memory System is not installed`);
    console.log('');
    console.log('💡 To install: claude-mem install');
  }
}
+66
View File
@@ -0,0 +1,66 @@
import { rmSync, readdirSync, existsSync, statSync } from 'fs';
import { join } from 'path';
import * as p from '@clack/prompts';
import { PathDiscovery } from '../services/path-discovery.js';
/**
 * CLI command: permanently deletes everything in the claude-mem trash
 * directory, prompting for confirmation unless options.force is set.
 * Exits with code 1 when deletion fails.
 */
export async function emptyTrash(options: { force?: boolean } = {}): Promise<void> {
  const trashDir = PathDiscovery.getInstance().getTrashDirectory();
  // A missing directory means there is nothing to delete.
  if (!existsSync(trashDir)) {
    p.log.info('🗑️ Trash is already empty');
    return;
  }
  try {
    const entries = readdirSync(trashDir);
    if (entries.length === 0) {
      p.log.info('🗑️ Trash is already empty');
      return;
    }
    // Tally folders vs files for the confirmation message.
    let folderCount = 0;
    let fileCount = 0;
    for (const entry of entries) {
      if (statSync(join(trashDir, entry)).isDirectory()) {
        folderCount++;
      } else {
        fileCount++;
      }
    }
    // Confirm deletion unless --force was passed.
    if (!options.force) {
      const confirm = await p.confirm({
        message: `Permanently delete ${folderCount} folders and ${fileCount} files from trash?`,
        initialValue: false
      });
      if (p.isCancel(confirm) || !confirm) {
        p.log.info('Cancelled - trash not emptied');
        return;
      }
    }
    const spinner = p.spinner();
    spinner.start('Emptying trash...');
    for (const entry of entries) {
      rmSync(join(trashDir, entry), { recursive: true, force: true });
    }
    spinner.stop(`🗑️ Trash emptied - permanently deleted ${folderCount} folders and ${fileCount} files`);
  } catch (error) {
    p.log.error('Failed to empty trash');
    console.error(error);
    process.exit(1);
  }
}
+124
View File
@@ -0,0 +1,124 @@
import { readdirSync, statSync } from 'fs';
import { join, basename } from 'path';
import * as p from '@clack/prompts';
import { PathDiscovery } from '../services/path-discovery.js';
// Display record for one entry in the trash directory, reconstructed from
// the on-disk name (`<original>.<epoch-ms>`) and its stat() info.
interface TrashItem {
  originalName: string; // name before the timestamp suffix was appended
  trashedName: string; // actual on-disk name; shown to the user as the ID
  size: number; // bytes; recursive total when the entry is a directory
  trashedAt: Date; // derived from the epoch-ms suffix (epoch 0 if unparsable)
  isDirectory: boolean;
}
/**
 * Splits a trashed filename of the form `<original>.<epoch-ms>` back into the
 * original name and the millisecond timestamp suffix added by the `trash`
 * command.
 *
 * Falls back to `{ name: filename, timestamp: 0 }` when the filename has no
 * dot, or when the text after the last dot is not numeric (e.g. a plain
 * extension like `.txt`).
 */
function parseTrashName(filename: string): { name: string; timestamp: number } {
  const lastDotIndex = filename.lastIndexOf('.');
  if (lastDotIndex === -1) return { name: filename, timestamp: 0 };

  // Explicit radix 10 so parseInt never guesses the base of the suffix.
  const timestamp = parseInt(filename.substring(lastDotIndex + 1), 10);
  if (Number.isNaN(timestamp)) return { name: filename, timestamp: 0 };

  return {
    name: filename.substring(0, lastDotIndex),
    timestamp
  };
}
/**
 * Formats a byte count as a human-readable string using binary (1024-based)
 * units, e.g. 1536 -> "1.5 KB".
 *
 * The unit index is clamped to the available labels, so values at or above
 * 1 TB render as "... GB" instead of indexing past the end of the array
 * (the unclamped version produced "... undefined" there), and sub-byte
 * values stay in "B".
 */
function formatSize(bytes: number): string {
  if (bytes === 0) return '0 B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB'];
  // Clamp: log() goes negative below 1 byte and past the table at >= 1 TB.
  const raw = Math.floor(Math.log(bytes) / Math.log(k));
  const i = Math.min(Math.max(raw, 0), sizes.length - 1);
  return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}
/**
 * Returns the total size in bytes of a directory tree, summing regular file
 * sizes and recursing into subdirectories.
 *
 * NOTE(review): uses statSync, which follows symlinks — a symlink cycle
 * inside the trash would recurse forever; confirm trash entries can't
 * contain such links.
 */
function getDirectorySize(dirPath: string): number {
  return readdirSync(dirPath).reduce((total, entry) => {
    const entryPath = join(dirPath, entry);
    const info = statSync(entryPath);
    // Directories contribute their recursive total; files their own size.
    return total + (info.isDirectory() ? getDirectorySize(entryPath) : info.size);
  }, 0);
}
/**
 * Prints the contents of the claude-mem trash directory: one entry per item
 * with its size, trash date, and on-disk ID, followed by a totals summary.
 */
export async function viewTrash(): Promise<void> {
  const trashDir = PathDiscovery.getInstance().getTrashDirectory();

  try {
    const entries = readdirSync(trashDir);
    if (entries.length === 0) {
      p.log.info('🗑️ Trash is empty');
      return;
    }

    // Build a display record for each trashed entry.
    const items: TrashItem[] = entries.map(entry => {
      const entryPath = join(trashDir, entry);
      const info = statSync(entryPath);
      const parsed = parseTrashName(entry);
      return {
        originalName: parsed.name,
        trashedName: entry,
        size: info.isDirectory() ? getDirectorySize(entryPath) : info.size,
        trashedAt: new Date(parsed.timestamp),
        isDirectory: info.isDirectory()
      };
    });

    // Newest first.
    items.sort((first, second) => second.trashedAt.getTime() - first.trashedAt.getTime());

    const folderCount = items.filter(item => item.isDirectory).length;
    const fileCount = items.length - folderCount;
    const totalSize = items.reduce((sum, item) => sum + item.size, 0);

    console.log('\n🗑️ Trash Contents\n');
    console.log('─'.repeat(80));

    for (const item of items) {
      const icon = item.isDirectory ? '📁' : '📄';
      console.log(`${icon} ${item.originalName}`);
      console.log(`  Size: ${formatSize(item.size)} | Trashed: ${item.trashedAt.toLocaleString()}`);
      console.log(`  ID: ${item.trashedName}`);
      console.log();
    }

    console.log('─'.repeat(80));
    console.log(`Total: ${folderCount} folders, ${fileCount} files (${formatSize(totalSize)})`);
    console.log('\nTo restore files: claude-mem restore');
    console.log('To empty trash: claude-mem trash empty');
  } catch (error) {
    // A missing trash directory just means nothing was ever trashed.
    if ((error as any).code === 'ENOENT') {
      p.log.info('🗑️ Trash is empty');
    } else {
      p.log.error('Failed to read trash directory');
      console.error(error);
    }
  }
}
+60
View File
@@ -0,0 +1,60 @@
import { renameSync, existsSync, mkdirSync, statSync } from 'fs';
import { join, basename } from 'path';
import { glob } from 'glob';
import { PathDiscovery } from '../services/path-discovery.js';
// Flags mirroring rm's -f / -r switches for the `trash` command.
interface TrashOptions {
  force?: boolean; // suppress errors and silently skip missing paths (rm -f)
  recursive?: boolean; // required to move directories (rm -r)
}
/**
 * Moves one or more paths (globs supported) into the claude-mem trash
 * directory instead of deleting them. Mirrors rm's -f/-r semantics via
 * TrashOptions.
 */
export async function trash(filePaths: string | string[], options: TrashOptions = {}): Promise<void> {
  const trashDir = PathDiscovery.getInstance().getTrashDirectory();
  if (!existsSync(trashDir)) mkdirSync(trashDir, { recursive: true });

  // Normalize so callers can pass a single path or a list.
  const requested = Array.isArray(filePaths) ? filePaths : [filePaths];

  for (const pattern of requested) {
    // Expand glob patterns; fall back to the literal path when nothing matches.
    const matches = await glob(pattern);
    const targets = matches.length > 0 ? matches : [pattern];

    for (const target of targets) {
      try {
        if (!existsSync(target)) {
          // With -f, missing files are skipped silently (rm -f semantics).
          if (!options.force) {
            console.error(`trash: ${target}: No such file or directory`);
          }
          continue;
        }

        const info = statSync(target);
        if (info.isDirectory() && !options.recursive && !options.force) {
          // NOTE(review): with --force a directory is moved even without
          // --recursive — confirm this asymmetry with rm is intentional.
          console.error(`trash: ${target}: is a directory`);
          continue;
        }

        // Timestamp suffix keeps repeated names from colliding in the trash.
        const entryName = basename(target);
        const destination = join(trashDir, `${entryName}.${Date.now()}`);
        renameSync(target, destination);
        console.log(`Moved ${entryName} to trash`);
      } catch (error) {
        if (!options.force) {
          const message = error instanceof Error ? error.message : String(error);
          console.error(`trash: ${target}: ${message}`);
        }
      }
    }
  }
}
+133
View File
@@ -0,0 +1,133 @@
import { OptionValues } from 'commander';
import { readFileSync, writeFileSync, existsSync } from 'fs';
import { join } from 'path';
import { PathDiscovery } from '../services/path-discovery.js';
/** Hook event names that claude-mem registers in Claude settings files. */
type HookEvent = 'PreCompact' | 'SessionStart' | 'SessionEnd';

/**
 * Removes claude-mem matchers for one hook event from a parsed settings
 * object. A matcher is removed when any of its hooks points at the given
 * script path, mentions the script file name, or mentions 'claude-mem'.
 *
 * Mutates `settings` in place: the event key is set to `undefined` when
 * every matcher was removed (the caller deletes undefined keys afterwards).
 *
 * @returns true when at least one matcher was removed.
 */
function removeHookEvent(
  settings: any,
  event: HookEvent,
  scriptPath: string,
  scriptName: string,
  locationName: string
): boolean {
  const matchers = settings.hooks[event];
  if (!matchers) return false;

  const kept = matchers.filter((matcher: any) =>
    !matcher.hooks?.some((hook: any) =>
      hook.command === scriptPath ||
      hook.command?.includes(scriptName) ||
      hook.command?.includes('claude-mem')
    )
  );
  if (kept.length === matchers.length) return false;

  settings.hooks[event] = kept.length ? kept : undefined;
  console.log(`✅ Removed ${event} hook from ${locationName} settings`);
  return true;
}

/**
 * Removes the Claude Memory System hooks (PreCompact, SessionStart,
 * SessionEnd) from Claude settings files.
 *
 * Targets the user-level settings by default, the project settings with
 * `--project`, or both with `--all`. A timestamped backup of each modified
 * settings file is written next to it before rewriting. Transcripts and
 * archives on disk are untouched.
 */
export async function uninstall(options: OptionValues = {}): Promise<void> {
  console.log('🔄 Uninstalling Claude Memory System hooks...');

  const pathDiscovery = PathDiscovery.getInstance();
  const projectSettingsPath = join(process.cwd(), '.claude', 'settings.json');

  // Resolve which settings files to clean based on --all / --project.
  const locations: Array<{ name: string; path: string }> = [];
  if (options.all) {
    locations.push({ name: 'User', path: pathDiscovery.getClaudeSettingsPath() });
    locations.push({ name: 'Project', path: projectSettingsPath });
  } else {
    const isProject = options.project;
    locations.push({
      name: isProject ? 'Project' : 'User',
      path: isProject ? projectSettingsPath : pathDiscovery.getClaudeSettingsPath()
    });
  }

  const hooksDir = pathDiscovery.getHooksDirectory();
  // [event, absolute script path, script file name] for each managed hook.
  const hookSpecs: Array<[HookEvent, string, string]> = [
    ['PreCompact', join(hooksDir, 'pre-compact.js'), 'pre-compact.js'],
    ['SessionStart', join(hooksDir, 'session-start.js'), 'session-start.js'],
    ['SessionEnd', join(hooksDir, 'session-end.js'), 'session-end.js'],
  ];

  let removedCount = 0;
  for (const location of locations) {
    if (!existsSync(location.path)) {
      console.log(`⏭️ No settings found at ${location.name} location`);
      continue;
    }

    try {
      const content = readFileSync(location.path, 'utf8');
      const settings = JSON.parse(content);
      if (!settings.hooks) {
        console.log(`⏭️ No hooks configured in ${location.name} settings`);
        continue;
      }

      // Strip our matchers from each hook event; track whether anything changed.
      let modified = false;
      for (const [event, scriptPath, scriptName] of hookSpecs) {
        if (removeHookEvent(settings, event, scriptPath, scriptName, location.name)) {
          modified = true;
        }
      }

      // Drop keys emptied above, then the hooks object itself if nothing remains.
      for (const [event] of hookSpecs) {
        if (settings.hooks[event] === undefined) delete settings.hooks[event];
      }
      if (!Object.keys(settings.hooks).length) delete settings.hooks;

      if (modified) {
        // Back up the original file content before rewriting it.
        const backupPath = location.path + '.backup.' + Date.now();
        writeFileSync(backupPath, content);
        console.log(`📋 Created backup: ${backupPath}`);
        writeFileSync(location.path, JSON.stringify(settings, null, 2));
        removedCount++;
        console.log(`✅ Updated ${location.name} settings: ${location.path}`);
      } else {
        console.log(`️ No Claude Memory System hooks found in ${location.name} settings`);
      }
    } catch (error: any) {
      console.log(`⚠️ Could not process ${location.name} settings: ${error.message}`);
    }
  }

  console.log('');
  if (removedCount > 0) {
    console.log('✨ Uninstallation complete!');
    console.log('The Claude Memory System hooks have been removed from your settings.');
    console.log('');
    console.log('Note: Your compressed transcripts and archives are preserved.');
    console.log('To reinstall: claude-mem install');
  } else {
    console.log('️ No Claude Memory System hooks were found to remove.');
  }
}