Release v3.7.0

Published from npm package build
Source: https://github.com/thedotmack/claude-mem-source
This commit is contained in:
Alex Newman
2025-09-17 20:19:19 -04:00
parent 35b7aab174
commit b0032c1745
18 changed files with 2855 additions and 350 deletions
+396 -218
View File
File diff suppressed because one or more lines are too long
+1 -1
View File
@@ -1,6 +1,6 @@
{
"name": "claude-mem",
"version": "3.6.10",
"version": "3.7.0",
"description": "Memory compression system for Claude Code - persist context across sessions",
"keywords": [
"claude",
+11
View File
@@ -201,6 +201,17 @@ program
.option('-m, --multi', 'Enable multi-select mode (default is single-select)')
.action(importHistory);
// Migrate Index command
program
.command('migrate-index')
.description('Migrate JSONL index to SQLite database')
.option('--force', 'Force migration even if SQLite database already has data')
.option('--keep-jsonl', 'Keep original JSONL file (archive it by default)')
.action(async (options) => {
const { migrateIndex } = await import('../commands/migrate-index.js');
await migrateIndex(options);
});
// <Block> 1.11 ===================================
// Hook Commands
// Internal commands called by hook scripts
+15 -20
View File
@@ -6,6 +6,7 @@ import os from 'os';
import chalk from 'chalk';
import { TranscriptCompressor } from '../core/compression/TranscriptCompressor.js';
import { TitleGenerator, TitleGenerationRequest } from '../core/titles/TitleGenerator.js';
import { getStorageProvider, needsMigration } from '../shared/storage.js';
interface ConversationMetadata {
sessionId: string;
@@ -100,27 +101,21 @@ function extractFirstUserMessage(filePath: string): string {
}
async function loadImportedSessions(): Promise<Set<string>> {
const importedIds = new Set<string>();
const indexPath = path.join(os.homedir(), '.claude-mem', 'claude-mem-index.jsonl');
if (!fs.existsSync(indexPath)) return importedIds;
const content = fs.readFileSync(indexPath, 'utf-8');
const lines = content.trim().split('\n').filter(Boolean);
for (const line of lines) {
try {
const entry = JSON.parse(line);
// Check both session_id (from index) and sessionId (legacy)
if (entry.session_id) {
importedIds.add(entry.session_id);
} else if (entry.sessionId) {
importedIds.add(entry.sessionId);
}
} catch {}
try {
// Check if migration is needed and warn the user
if (await needsMigration()) {
console.warn('⚠️ JSONL to SQLite migration recommended. Run: claude-mem migrate-index');
}
const storage = await getStorageProvider();
// Use storage provider to get all session IDs efficiently
return await storage.getAllSessionIds();
} catch (error) {
console.warn('Failed to load imported sessions, proceeding with empty set:', error);
return new Set<string>();
}
return importedIds;
}
async function scanConversations(): Promise<{ conversations: ConversationItem[]; skippedCount: number }> {
+201 -73
View File
@@ -9,6 +9,8 @@ import {
formatTimeAgo,
outputSessionStartContent
} from '../prompts/templates/context/ContextTemplates.js';
import { getStorageProvider, needsMigration } from '../shared/storage.js';
import { MemoryRow, OverviewRow, SessionRow } from '../services/sqlite/types.js';
interface TrashStatus {
folderCount: number;
@@ -66,82 +68,84 @@ function getTrashStatus(): TrashStatus {
}
export async function loadContext(options: OptionValues = {}): Promise<void> {
const pathDiscovery = PathDiscovery.getInstance();
const indexPath = pathDiscovery.getIndexPath();
try {
// Check if index file exists
if (!fs.existsSync(indexPath)) {
// Check if migration is needed and warn the user
if (await needsMigration()) {
console.warn('⚠️ JSONL to SQLite migration recommended. Run: claude-mem migrate-index');
}
const storage = await getStorageProvider();
// If using JSONL fallback, use original implementation
if (storage.backend === 'jsonl') {
return await loadContextFromJSONL(options);
}
// SQLite implementation - fetch data using storage provider
let recentMemories: MemoryRow[] = [];
let recentOverviews: OverviewRow[] = [];
let recentSessions: SessionRow[] = [];
// Auto-detect current project for session-start format if no project specified
let projectToUse = options.project;
if (!projectToUse && options.format === 'session-start') {
projectToUse = PathDiscovery.getCurrentProjectName();
}
if (projectToUse) {
recentMemories = await storage.getRecentMemoriesForProject(projectToUse, 10);
recentOverviews = await storage.getRecentOverviewsForProject(projectToUse, options.format === 'session-start' ? 5 : 3);
recentSessions = await storage.getRecentSessionsForProject(projectToUse, 5);
} else {
recentMemories = await storage.getRecentMemories(10);
recentOverviews = await storage.getRecentOverviews(options.format === 'session-start' ? 5 : 3);
recentSessions = await storage.getRecentSessions(5);
}
// Convert SQLite rows to JSONL format for compatibility with existing output functions
const memoriesAsJSON = recentMemories.map(row => ({
type: 'memory',
text: row.text,
document_id: row.document_id,
keywords: row.keywords,
session_id: row.session_id,
project: row.project,
timestamp: row.created_at,
archive: row.archive_basename
}));
const overviewsAsJSON = recentOverviews.map(row => ({
type: 'overview',
content: row.content,
session_id: row.session_id,
project: row.project,
timestamp: row.created_at
}));
const sessionsAsJSON = recentSessions.map(row => ({
type: 'session',
session_id: row.session_id,
project: row.project,
timestamp: row.created_at
}));
// If no data found, show appropriate messages
if (memoriesAsJSON.length === 0 && overviewsAsJSON.length === 0 && sessionsAsJSON.length === 0) {
if (options.format === 'session-start') {
console.log(createContextualError('NO_MEMORIES', options.project || 'this project'));
console.log(createContextualError('NO_MEMORIES', projectToUse || 'this project'));
}
return;
}
const content = fs.readFileSync(indexPath, 'utf-8');
const lines = content.trim().split('\n').filter(line => line.trim());
if (lines.length === 0) {
if (options.format === 'session-start') {
console.log(createContextualError('NO_MEMORIES', options.project || 'this project'));
}
return;
}
// Parse JSONL format - each line is a JSON object
const jsonObjects: any[] = [];
for (const line of lines) {
try {
// Skip lines that don't look like JSON (could be legacy format)
if (!line.trim().startsWith('{')) {
continue;
}
const obj = JSON.parse(line);
jsonObjects.push(obj);
} catch (e) {
// Skip malformed JSON lines
continue;
}
}
if (jsonObjects.length === 0) {
if (options.format === 'session-start') {
console.log(createContextualError('NO_MEMORIES', options.project || 'this project'));
}
return;
}
// Separate memories, overviews, and other types
const memories = jsonObjects.filter(obj => obj.type === 'memory');
const overviews = jsonObjects.filter(obj => obj.type === 'overview');
const sessions = jsonObjects.filter(obj => obj.type === 'session');
// Filter each type by project if specified
// Handle both hyphen and underscore formats since index has mixed entries
let filteredMemories = memories;
let filteredOverviews = overviews;
let filteredSessions = sessions;
if (options.project) {
const matchesProject = buildProjectMatcher(options.project);
filteredMemories = memories.filter(obj => matchesProject(obj.project));
filteredOverviews = overviews.filter(obj => matchesProject(obj.project));
filteredSessions = sessions.filter(obj => matchesProject(obj.project));
}
// Use the same output logic as the original implementation
if (options.format === 'session-start') {
// Get last 10 memories and last 5 overviews for session-start
const recentMemories = filteredMemories.slice(-10);
const recentOverviews = filteredOverviews.slice(-5);
const recentSessions = filteredSessions.slice(-5);
// Combine them for the display
const recentObjects = [...recentSessions, ...recentMemories, ...recentOverviews];
const recentObjects = [...sessionsAsJSON, ...memoriesAsJSON, ...overviewsAsJSON];
// Find most recent timestamp for last session info
let lastSessionTime = 'recently';
const timestamps = recentObjects
.map(obj => {
// Get timestamp from JSON object
return obj.timestamp ? new Date(obj.timestamp) : null;
})
.filter(date => date !== null)
@@ -153,33 +157,29 @@ export async function loadContext(options: OptionValues = {}): Promise<void> {
// Use dual-stream output for session start formatting
outputSessionStartContent({
projectName: options.project || 'your project',
memoryCount: recentMemories.length,
projectName: projectToUse || 'your project',
memoryCount: memoriesAsJSON.length,
lastSessionTime,
recentObjects
});
} else if (options.format === 'json') {
// For JSON format, combine last 10 of each type
const recentMemories = filteredMemories.slice(-10);
const recentOverviews = filteredOverviews.slice(-3);
const recentObjects = [...recentMemories, ...recentOverviews];
const recentObjects = [...memoriesAsJSON, ...overviewsAsJSON];
console.log(JSON.stringify(recentObjects));
} else {
// Default format - show last 10 memories and last 3 overviews
const recentMemories = filteredMemories.slice(-10);
const recentOverviews = filteredOverviews.slice(-3);
const totalCount = recentMemories.length + recentOverviews.length;
const totalCount = memoriesAsJSON.length + overviewsAsJSON.length;
console.log(createCompletionMessage('Context loading', totalCount, 'recent entries found'));
// Show memories first
recentMemories.forEach((obj) => {
memoriesAsJSON.forEach((obj) => {
console.log(`${obj.text} | ${obj.document_id} | ${obj.keywords}`);
});
// Then show overviews
recentOverviews.forEach((obj) => {
overviewsAsJSON.forEach((obj) => {
console.log(`**Overview:** ${obj.content}`);
});
}
@@ -203,3 +203,131 @@ export async function loadContext(options: OptionValues = {}): Promise<void> {
}
}
}
/**
* Original JSONL-based implementation for fallback compatibility
*/
/**
 * Original JSONL-based implementation, retained as a fallback for installs
 * that have not yet migrated the index to SQLite.
 *
 * Reads the JSONL index line by line, keeps only parseable JSON records,
 * optionally filters them to a single project, and renders them in one of
 * three formats: 'session-start', 'json', or the default listing.
 *
 * @param options - Commander options; `project` and `format` are read here.
 */
async function loadContextFromJSONL(options: OptionValues = {}): Promise<void> {
  const pathDiscovery = PathDiscovery.getInstance();
  const indexPath = pathDiscovery.getIndexPath();

  // Auto-detect the current project for session-start format when no
  // project was specified explicitly.
  let projectToUse = options.project;
  if (!projectToUse && options.format === 'session-start') {
    projectToUse = PathDiscovery.getCurrentProjectName();
  }

  // No index file at all: only session-start surfaces a friendly message.
  if (!fs.existsSync(indexPath)) {
    if (options.format === 'session-start') {
      console.log(createContextualError('NO_MEMORIES', projectToUse || 'this project'));
    }
    return;
  }

  const content = fs.readFileSync(indexPath, 'utf-8');
  const lines = content.trim().split('\n').filter(line => line.trim());

  if (lines.length === 0) {
    if (options.format === 'session-start') {
      console.log(createContextualError('NO_MEMORIES', projectToUse || 'this project'));
    }
    return;
  }

  // Parse JSONL format - each line is an independent JSON object. Lines
  // that do not look like JSON (legacy format) or fail to parse are
  // skipped silently.
  const jsonObjects: any[] = [];
  for (const line of lines) {
    if (!line.trim().startsWith('{')) {
      continue;
    }
    try {
      jsonObjects.push(JSON.parse(line));
    } catch {
      // Skip malformed JSON lines
      continue;
    }
  }

  if (jsonObjects.length === 0) {
    if (options.format === 'session-start') {
      console.log(createContextualError('NO_MEMORIES', projectToUse || 'this project'));
    }
    return;
  }

  // Separate memories, overviews, and session records by type tag.
  const memories = jsonObjects.filter(obj => obj.type === 'memory');
  const overviews = jsonObjects.filter(obj => obj.type === 'overview');
  const sessions = jsonObjects.filter(obj => obj.type === 'session');

  // Filter each type by project if specified. The matcher handles both
  // hyphen and underscore name variants since the index has mixed entries.
  let filteredMemories = memories;
  let filteredOverviews = overviews;
  let filteredSessions = sessions;
  if (projectToUse) {
    const matchesProject = buildProjectMatcher(projectToUse);
    filteredMemories = memories.filter(obj => matchesProject(obj.project));
    filteredOverviews = overviews.filter(obj => matchesProject(obj.project));
    filteredSessions = sessions.filter(obj => matchesProject(obj.project));
  }

  if (options.format === 'session-start') {
    // Last 10 memories, last 5 overviews and last 5 sessions for display.
    const recentMemories = filteredMemories.slice(-10);
    const recentOverviews = filteredOverviews.slice(-5);
    const recentSessions = filteredSessions.slice(-5);
    const recentObjects = [...recentSessions, ...recentMemories, ...recentOverviews];

    // Find the most recent timestamp for the "last session" hint.
    // FIX: records with missing OR unparseable timestamps are excluded, so
    // an Invalid Date (NaN getTime()) cannot poison the sort or be handed
    // to formatTimeAgo.
    let lastSessionTime = 'recently';
    const timestamps = recentObjects
      .map(obj => (obj.timestamp ? new Date(obj.timestamp) : null))
      .filter((date): date is Date => date !== null && !Number.isNaN(date.getTime()))
      .sort((a, b) => b.getTime() - a.getTime());
    if (timestamps.length > 0) {
      lastSessionTime = formatTimeAgo(timestamps[0]);
    }

    // Use dual-stream output for session start formatting.
    outputSessionStartContent({
      projectName: projectToUse || 'your project',
      memoryCount: recentMemories.length,
      lastSessionTime,
      recentObjects
    });
  } else if (options.format === 'json') {
    // JSON format: last 10 memories plus last 3 overviews as one array.
    const recentMemories = filteredMemories.slice(-10);
    const recentOverviews = filteredOverviews.slice(-3);
    const recentObjects = [...recentMemories, ...recentOverviews];
    console.log(JSON.stringify(recentObjects));
  } else {
    // Default format - show last 10 memories and last 3 overviews.
    const recentMemories = filteredMemories.slice(-10);
    const recentOverviews = filteredOverviews.slice(-3);
    const totalCount = recentMemories.length + recentOverviews.length;
    console.log(createCompletionMessage('Context loading', totalCount, 'recent entries found'));

    // Show memories first...
    recentMemories.forEach((obj) => {
      console.log(`${obj.text} | ${obj.document_id} | ${obj.keywords}`);
    });
    // ...then overviews.
    recentOverviews.forEach((obj) => {
      console.log(`**Overview:** ${obj.content}`);
    });
  }
}
+300
View File
@@ -0,0 +1,300 @@
import { OptionValues } from 'commander';
import fs from 'fs';
import path from 'path';
import { PathDiscovery } from '../services/path-discovery.js';
import {
createStores,
SessionInput,
MemoryInput,
OverviewInput,
DiagnosticInput,
normalizeTimestamp
} from '../services/sqlite/index.js';
interface MigrationStats {
totalLines: number;
skippedLines: number;
invalidJson: number;
sessionsCreated: number;
memoriesCreated: number;
overviewsCreated: number;
diagnosticsCreated: number;
orphanedOverviews: number;
orphanedMemories: number;
}
/**
 * Migrate claude-mem index from JSONL to SQLite.
 *
 * Reads every line of the JSONL index, groups records by `type`
 * (session/memory/overview/diagnostic), writes them into the SQLite stores,
 * prints a summary, and finally archives (or keeps, with --keep-jsonl) the
 * original JSONL file. Exits the process with code 1 on failure, restoring
 * the backup if the original file was already moved.
 *
 * @param options - Commander options: `force` (migrate even if SQLite has
 *   data), `keepJsonl` (leave the JSONL file in place, skip the backup).
 */
export async function migrateIndex(options: OptionValues = {}): Promise<void> {
  const pathDiscovery = PathDiscovery.getInstance();
  const indexPath = pathDiscovery.getIndexPath();
  // Timestamped sibling path used both for the pre-migration backup and the
  // failure-recovery restore in the catch block below.
  const backupPath = `${indexPath}.backup-${Date.now()}`;
  console.log('🔄 Starting JSONL to SQLite migration...');
  console.log(`📁 Index file: ${indexPath}`);
  // Check if JSONL file exists
  if (!fs.existsSync(indexPath)) {
    // NOTE(review): the leading character of this message looks like a
    // mis-encoded emoji (bare variation selector) — verify intended glyph.
    console.log('️ No JSONL index file found - nothing to migrate');
    return;
  }
  try {
    // Initialize SQLite database and stores
    console.log('🏗️ Initializing SQLite database...');
    const stores = await createStores();
    // Refuse to run twice by accident: bail out if SQLite already has
    // sessions, unless the caller passed --force.
    const existingSessions = stores.sessions.count();
    if (existingSessions > 0 && !options.force) {
      console.log(`⚠️ SQLite database already contains ${existingSessions} sessions.`);
      console.log('   Use --force to migrate anyway (will skip duplicates)');
      return;
    }
    // Create backup of JSONL file.
    // NOTE(review): no backup is taken when --keep-jsonl is set; in that
    // case the original file is never moved, so the restore path in the
    // catch block has nothing to do — confirm this is intended.
    if (!options.keepJsonl) {
      console.log(`💾 Creating backup: ${path.basename(backupPath)}`);
      fs.copyFileSync(indexPath, backupPath);
    }
    // Read and parse JSONL file
    console.log('📖 Reading JSONL index file...');
    const content = fs.readFileSync(indexPath, 'utf-8');
    const lines = content.trim().split('\n').filter(line => line.trim());
    // Running counters for the summary printed at the end.
    const stats: MigrationStats = {
      totalLines: lines.length,
      skippedLines: 0,
      invalidJson: 0,
      sessionsCreated: 0,
      memoriesCreated: 0,
      overviewsCreated: 0,
      diagnosticsCreated: 0,
      orphanedOverviews: 0,
      orphanedMemories: 0
    };
    console.log(`📝 Processing ${stats.totalLines} lines...`);
    // Parse all lines first; each surviving record keeps its 1-based
    // source line number in `_lineNumber` for traceability.
    const records: any[] = [];
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];
      try {
        // Skip lines that don't look like JSON
        if (!line.trim().startsWith('{')) {
          stats.skippedLines++;
          continue;
        }
        const record = JSON.parse(line);
        if (record && typeof record === 'object') {
          records.push({ ...record, _lineNumber: i + 1 });
        } else {
          // Valid JSON but not an object (e.g. a bare number/string).
          stats.skippedLines++;
        }
      } catch (error) {
        stats.invalidJson++;
        console.warn(`⚠️ Invalid JSON at line ${i + 1}: ${line.substring(0, 50)}...`);
      }
    }
    console.log(`✅ Parsed ${records.length} valid records`);
    // Group records by type
    const sessions = records.filter(r => r.type === 'session');
    const memories = records.filter(r => r.type === 'memory');
    const overviews = records.filter(r => r.type === 'overview');
    const diagnostics = records.filter(r => r.type === 'diagnostic');
    const unknown = records.filter(r => !['session', 'memory', 'overview', 'diagnostic'].includes(r.type));
    if (unknown.length > 0) {
      console.log(`⚠️ Found ${unknown.length} records with unknown types - will skip`);
      // Unknown-type records are folded into the skipped-lines counter.
      stats.skippedLines += unknown.length;
    }
    // Track which session_ids exist in the source data, and which sessions
    // we synthesize for memories/overviews whose session record is missing.
    const sessionIds = new Set(sessions.map(s => s.session_id));
    const orphanedSessionIds = new Set();
    // Migrate sessions first so foreign-key parents exist before children.
    console.log('💾 Migrating sessions...');
    for (const sessionData of sessions) {
      try {
        const { isoString } = normalizeTimestamp(sessionData.timestamp);
        const sessionInput: SessionInput = {
          session_id: sessionData.session_id,
          project: sessionData.project || 'unknown',
          created_at: isoString,
          source: 'legacy-jsonl'
        };
        // Skip if session already exists (when using --force)
        if (!stores.sessions.has(sessionInput.session_id)) {
          stores.sessions.create(sessionInput);
          stats.sessionsCreated++;
        }
      } catch (error) {
        console.warn(`⚠️ Failed to migrate session ${sessionData.session_id}: ${error}`);
      }
    }
    // Migrate memories
    console.log('🧠 Migrating memories...');
    for (const memoryData of memories) {
      try {
        const { isoString } = normalizeTimestamp(memoryData.timestamp);
        // Check if session exists, create a synthesized (orphan) session if
        // needed so the memory has a parent row.
        if (!sessionIds.has(memoryData.session_id)) {
          if (!orphanedSessionIds.has(memoryData.session_id)) {
            orphanedSessionIds.add(memoryData.session_id);
            const orphanedSession: SessionInput = {
              session_id: memoryData.session_id,
              project: memoryData.project || 'unknown',
              created_at: isoString,
              source: 'legacy-jsonl'
            };
            if (!stores.sessions.has(orphanedSession.session_id)) {
              stores.sessions.create(orphanedSession);
              stats.sessionsCreated++;
              // NOTE(review): this counts synthesized sessions, not orphaned
              // memory records — several orphaned memories sharing one
              // session increment it only once. Confirm that is intended.
              stats.orphanedMemories++;
            }
          }
        }
        const memoryInput: MemoryInput = {
          session_id: memoryData.session_id,
          text: memoryData.text || '',
          document_id: memoryData.document_id,
          keywords: memoryData.keywords,
          created_at: isoString,
          project: memoryData.project || 'unknown',
          archive_basename: memoryData.archive,
          origin: 'transcript'
        };
        // Skip duplicate document_ids (memories without a document_id are
        // always inserted).
        if (!memoryInput.document_id || !stores.memories.hasDocumentId(memoryInput.document_id)) {
          stores.memories.create(memoryInput);
          stats.memoriesCreated++;
        }
      } catch (error) {
        console.warn(`⚠️ Failed to migrate memory ${memoryData.document_id}: ${error}`);
      }
    }
    // Migrate overviews
    console.log('📋 Migrating overviews...');
    for (const overviewData of overviews) {
      try {
        const { isoString } = normalizeTimestamp(overviewData.timestamp);
        // Check if session exists, create a synthesized session if needed
        // (same pattern as the memories loop above).
        if (!sessionIds.has(overviewData.session_id)) {
          if (!orphanedSessionIds.has(overviewData.session_id)) {
            orphanedSessionIds.add(overviewData.session_id);
            const orphanedSession: SessionInput = {
              session_id: overviewData.session_id,
              project: overviewData.project || 'unknown',
              created_at: isoString,
              source: 'legacy-jsonl'
            };
            if (!stores.sessions.has(orphanedSession.session_id)) {
              stores.sessions.create(orphanedSession);
              stats.sessionsCreated++;
              stats.orphanedOverviews++;
            }
          }
        }
        const overviewInput: OverviewInput = {
          session_id: overviewData.session_id,
          content: overviewData.content || '',
          created_at: isoString,
          project: overviewData.project || 'unknown',
          origin: 'claude'
        };
        // Upsert: one overview per session, last record wins.
        stores.overviews.upsert(overviewInput);
        stats.overviewsCreated++;
      } catch (error) {
        console.warn(`⚠️ Failed to migrate overview ${overviewData.session_id}: ${error}`);
      }
    }
    // Migrate diagnostics (no orphan-session handling here; diagnostics are
    // inserted regardless of whether their session record exists).
    console.log('🩺 Migrating diagnostics...');
    for (const diagnosticData of diagnostics) {
      try {
        const { isoString } = normalizeTimestamp(diagnosticData.timestamp);
        const diagnosticInput: DiagnosticInput = {
          session_id: diagnosticData.session_id,
          message: diagnosticData.message || '',
          severity: 'warn',
          created_at: isoString,
          project: diagnosticData.project || 'unknown',
          origin: 'compressor'
        };
        stores.diagnostics.create(diagnosticInput);
        stats.diagnosticsCreated++;
      } catch (error) {
        console.warn(`⚠️ Failed to migrate diagnostic: ${error}`);
      }
    }
    // Print migration summary
    console.log('\n✅ Migration completed successfully!');
    console.log('\n📊 Migration Summary:');
    console.log(`   Total lines processed: ${stats.totalLines}`);
    console.log(`   Skipped lines: ${stats.skippedLines}`);
    console.log(`   Invalid JSON lines: ${stats.invalidJson}`);
    console.log(`   Sessions created: ${stats.sessionsCreated}`);
    console.log(`   Memories created: ${stats.memoriesCreated}`);
    console.log(`   Overviews created: ${stats.overviewsCreated}`);
    console.log(`   Diagnostics created: ${stats.diagnosticsCreated}`);
    if (stats.orphanedOverviews > 0 || stats.orphanedMemories > 0) {
      console.log(`   Orphaned records (sessions synthesized): ${stats.orphanedOverviews + stats.orphanedMemories}`);
    }
    // Archive or keep JSONL file
    if (options.keepJsonl) {
      console.log(`\n💾 Original JSONL file preserved: ${indexPath}`);
      console.log(`   SQLite database is now the primary index`);
    } else {
      // Move the original into an archive directory; the timestamped backup
      // created earlier remains alongside the (now absent) index path.
      const archiveDir = path.join(pathDiscovery.getDataDirectory(), 'archive', 'legacy');
      fs.mkdirSync(archiveDir, { recursive: true });
      const archivedPath = path.join(archiveDir, `claude-mem-index-${Date.now()}.jsonl`);
      fs.renameSync(indexPath, archivedPath);
      console.log(`\n📦 Original JSONL file archived: ${path.basename(archivedPath)}`);
      console.log(`   Backup available at: ${path.basename(backupPath)}`);
    }
    console.log('\n🎉 Migration complete! You can now use claude-mem with SQLite backend.');
    console.log('   Run `claude-mem load-context` to verify the migration worked.');
  } catch (error) {
    console.error('\n❌ Migration failed:', error);
    // Restore backup if we created one and the original index was moved.
    if (fs.existsSync(backupPath) && !fs.existsSync(indexPath)) {
      console.log('🔄 Restoring backup...');
      fs.renameSync(backupPath, indexPath);
    }
    // Non-zero exit so calling scripts/hooks can detect the failure.
    process.exit(1);
  }
}
+43 -28
View File
@@ -1,6 +1,7 @@
import { OptionValues } from 'commander';
import { appendFileSync } from 'fs';
import { PathDiscovery } from '../services/path-discovery.js';
import { getStorageProvider, needsMigration } from '../shared/storage.js';
/**
* Generates a descriptive session ID from the message content
@@ -25,7 +26,7 @@ function generateSessionId(message: string): string {
}
/**
* Save command - stores a message to both Chroma collection and JSONL index
* Save command - stores a message using the configured storage provider
*/
export async function save(message: string, options: OptionValues = {}): Promise<void> {
// Debug: Log what we receive
@@ -38,38 +39,52 @@ export async function save(message: string, options: OptionValues = {}): Promise
process.exit(1);
}
const pathDiscovery = PathDiscovery.getInstance();
const timestamp = new Date().toISOString();
const projectName = PathDiscovery.getCurrentProjectName();
const sessionId = generateSessionId(message);
const documentId = `${projectName}_${sessionId}_overview`;
// 1. Save to Chroma collection (skip for now - MCP tools only available in Claude Code context)
// TODO: Add Chroma integration when called from Claude Code with MCP server running
try {
// Check if migration is needed
if (await needsMigration()) {
console.warn('⚠️ JSONL to SQLite migration recommended. Run: claude-mem migrate-index');
}
// 2. Append to JSONL index file
const indexPath = pathDiscovery.getIndexPath();
const indexEntry = {
type: "overview",
content: message,
session_id: sessionId,
project: projectName,
timestamp: timestamp
};
// Get storage provider (SQLite preferred, JSONL fallback)
const storage = await getStorageProvider();
// Ensure the directory exists
pathDiscovery.ensureDirectory(pathDiscovery.getDataDirectory());
// Append to JSONL file
appendFileSync(indexPath, JSON.stringify(indexEntry) + '\n', 'utf8');
// Ensure session exists or create it
if (!await storage.hasSession(sessionId)) {
await storage.createSession({
session_id: sessionId,
project: projectName,
created_at: timestamp,
source: 'save'
});
}
// 3. Return JSON response for hook compatibility
console.log(JSON.stringify({
success: true,
document_id: documentId,
session_id: sessionId,
project: projectName,
timestamp: timestamp,
suppressOutput: true
}));
}
// Upsert the overview
await storage.upsertOverview({
session_id: sessionId,
content: message,
created_at: timestamp,
project: projectName,
origin: 'manual'
});
// Return JSON response for hook compatibility
console.log(JSON.stringify({
success: true,
document_id: documentId,
session_id: sessionId,
project: projectName,
timestamp: timestamp,
backend: storage.backend,
suppressOutput: true
}));
} catch (error) {
console.error('Error saving message:', error);
process.exit(1);
}
}
+106 -10
View File
@@ -10,6 +10,8 @@ import { log } from '../../shared/logger.js';
import { CompressionError } from '../../shared/types.js';
import { getClaudePath } from '../../shared/settings.js';
import { ChunkManager, ChunkingOptions, ChunkMetadata } from './ChunkManager.js';
import { getStorageProvider, needsMigration } from '../../shared/storage.js';
import { SessionInput, MemoryInput, OverviewInput, DiagnosticInput } from '../../services/sqlite/types.js';
/**
* Interface for message objects in transcript
@@ -217,7 +219,7 @@ export class TranscriptCompressor {
// Check if we need to use chunked processing
const needsChunking = this.chunkManager.needsChunking(conversationText);
let summaries: string[] = [];
let summaries: any[] = [];
let overview: string | null = null;
if (needsChunking) {
@@ -277,6 +279,7 @@ export class TranscriptCompressor {
'mcp__claude-mem__chroma_delete_documents',
],
pathToClaudeCodeExecutable: getClaudePath(),
model: 'sonnet'
},
});
this.debugLog('✅ Claude SDK response received');
@@ -302,7 +305,7 @@ export class TranscriptCompressor {
this.debugLog(`📦 Archive created: ${archivePath}`);
// Write to index - same method for both chunked and non-chunked
this.appendToIndex(summaries, overview, projectPrefix, finalSessionId, messages, archivePath, timestamp);
await this.appendToIndex(summaries, overview, projectPrefix, finalSessionId, messages, archivePath, timestamp);
this.debugLog(`📥 Written ${summaries.length} summaries to index`);
log.debug(`✅ SUCCESS`);
@@ -551,10 +554,10 @@ export class TranscriptCompressor {
* Processes a transcript in chunks when it's too large for single processing
*/
private async compressInChunks(
messages: TranscriptMessage[],
messages: TranscriptMessage[],
sessionId: string,
projectPrefix: string
): Promise<{ summaries: string[]; overview: string | null }> {
): Promise<{ summaries: any[]; overview: string | null }> {
this.debugLog('📦 Large transcript detected, processing in chunks...');
// Create filtered output for chunking
@@ -571,9 +574,10 @@ export class TranscriptCompressor {
this.debugLog(this.chunkManager.getChunkingStats(chunks));
console.log(`\n📊 Processing ${chunks.length} chunks...`);
const allSummaries: string[] = [];
// Process each chunk (no longer collecting overviews from chunks)
const allSummaries: any[] = [];
const chunkOverviews: string[] = [];
// Process each chunk and collect overviews
for (let i = 0; i < chunks.length; i++) {
const chunk = chunks[i];
console.log(`\n🔄 Processing chunk ${i + 1}/${chunks.length}...`);
@@ -623,6 +627,7 @@ ${chunk.content}`;
'mcp__claude-mem__chroma_delete_documents',
],
pathToClaudeCodeExecutable: getClaudePath(),
model: 'sonnet'
},
});
@@ -685,6 +690,7 @@ Return ONLY the overview text, nothing else.`;
options: {
allowedTools: [], // No tools needed for overview generation
pathToClaudeCodeExecutable: getClaudePath(),
model: 'sonnet'
},
});
@@ -1157,10 +1163,100 @@ Return ONLY the overview text, nothing else.`;
// </Block> =======================================
/**
* Appends summaries in JSONL format to the index file
* Each line is a JSON object with type field for easy parsing
* Stores summaries using the configured storage provider (SQLite or JSONL fallback)
* Each record is stored with proper type information for easy querying
*/
private appendToIndex(summaries: any[], overview: string | null, projectPrefix: string, sessionId: string, messages: TranscriptMessage[], archivePath: string, timestamp: string): void {
/**
 * Stores compression results via the configured storage provider (SQLite
 * preferred, JSONL fallback): ensures a session row exists, upserts the
 * overview, then bulk-inserts memory records — or a diagnostic record when
 * no summaries were produced. If the provider throws at any point, the
 * whole payload is re-written through the legacy JSONL path instead.
 *
 * @param summaries - Summary objects extracted from Claude's JSON response.
 * @param overview - Session overview text, or null if none was generated.
 * @param projectPrefix - Project name the records belong to.
 * @param sessionId - Session identifier used as the parent key.
 * @param messages - Transcript messages; unused here, passed through only
 *   for the JSONL fallback's signature.
 * @param archivePath - Path of the archive file created for this session.
 * @param timestamp - ISO timestamp applied to all records by default.
 */
private async appendToIndex(summaries: any[], overview: string | null, projectPrefix: string, sessionId: string, messages: TranscriptMessage[], archivePath: string, timestamp: string): Promise<void> {
try {
// Check if migration is needed and log warning
if (await needsMigration()) {
this.debugLog('⚠️ JSONL to SQLite migration recommended. Run: claude-mem migrate-index');
}
const storage = await getStorageProvider();
this.debugLog(`💾 Using ${storage.backend} storage backend`);
// Create or ensure session exists
const sessionInput: SessionInput = {
session_id: sessionId,
project: projectPrefix,
created_at: timestamp,
// statSync throws if the archive is missing; that lands in the catch
// below and triggers the JSONL fallback.
source: 'compress',
archive_path: archivePath,
archive_bytes: fs.statSync(archivePath).size,
archived_at: new Date().toISOString()
};
// Check if session already exists (for duplicate prevention)
if (!await storage.hasSession(sessionId)) {
await storage.createSession(sessionInput);
this.debugLog(`📋 Created session record: ${sessionId}`);
} else {
this.debugLog(`📋 Session already exists: ${sessionId}`);
}
// Add overview if present (upsert: one overview per session)
if (overview) {
const overviewInput: OverviewInput = {
session_id: sessionId,
content: overview,
created_at: timestamp,
project: projectPrefix,
origin: 'claude'
};
await storage.upsertOverview(overviewInput);
this.debugLog(`📝 Stored overview for session: ${sessionId}`);
}
// If no summaries from Claude, write diagnostic info instead of memories
if (!summaries || summaries.length === 0) {
log.debug('📝 No summaries extracted from JSON response');
const diagnosticInput: DiagnosticInput = {
session_id: sessionId,
message: "NO SUMMARIES EXTRACTED - Check logs for valid JSON response",
severity: 'warn',
created_at: timestamp,
project: projectPrefix,
origin: 'compressor'
};
await storage.createDiagnostic(diagnosticInput);
this.debugLog(`⚠️ No summaries for session ${sessionId} - Check if Claude returned valid JSON in <JSONResponse> tags`);
} else {
// Prepare memory records for bulk insertion; each summary may carry its
// own timestamp, otherwise the session timestamp is used.
const memoryInputs: MemoryInput[] = summaries.map((summary) => ({
session_id: sessionId,
text: summary.text || '',
document_id: summary.document_id,
keywords: summary.keywords,
created_at: summary.timestamp || timestamp,
project: projectPrefix,
archive_basename: path.basename(archivePath),
origin: 'transcript'
}));
// Store memories using bulk operation
await storage.createMemories(memoryInputs);
log.debug(`📝 Stored ${summaries.length} summaries using ${storage.backend}`);
this.debugLog(`💾 Stored ${summaries.length} memories for session: ${sessionId}`);
}
} catch (error) {
// If storage fails, fall back to JSONL as emergency backup so the
// compression results are never silently lost.
this.debugLog(`❌ Storage failed, falling back to JSONL: ${error}`);
log.warn('Storage provider failed, falling back to JSONL', error);
// Emergency JSONL fallback
this.appendToIndexJSONL(summaries, overview, projectPrefix, sessionId, messages, archivePath, timestamp);
}
}
/**
* Emergency fallback method using original JSONL approach
*/
private appendToIndexJSONL(summaries: any[], overview: string | null, projectPrefix: string, sessionId: string, messages: TranscriptMessage[], archivePath: string, timestamp: string): void {
// Use PathResolver's getIndexPath() for consistency
const indexPath = this.paths.getIndexPath();
const indexDir = this.paths.getConfigDir();
+179
View File
@@ -0,0 +1,179 @@
import Database from 'better-sqlite3';
import path from 'path';
import fs from 'fs';
import { PathDiscovery } from '../path-discovery.js';
export interface Migration {
  /** Monotonically increasing schema version this migration produces. */
  version: number;
  /** Applies the schema change; DatabaseManager runs it inside a transaction. */
  up: (db: Database.Database) => void;
  /** Optional rollback; not invoked automatically by DatabaseManager. */
  down?: (db: Database.Database) => void;
}
// Module-level handle kept in sync by DatabaseManager.initialize()/close()
// so the standalone getDatabase() accessor below can hand it out.
let dbInstance: Database.Database | null = null;
/**
 * SQLite Database singleton with migration support and optimized settings.
 *
 * Responsibilities: open/close the single `claude-mem.db` connection, apply
 * performance pragmas, and run registered migrations exactly once each,
 * recording them in the `schema_versions` table.
 */
export class DatabaseManager {
  private static instance: DatabaseManager;
  private db: Database.Database | null = null;
  private migrations: Migration[] = [];

  static getInstance(): DatabaseManager {
    if (!DatabaseManager.instance) {
      DatabaseManager.instance = new DatabaseManager();
    }
    return DatabaseManager.instance;
  }

  /**
   * Register a migration to be run during initialization.
   *
   * Duplicate versions are ignored: the migrations module both auto-registers
   * on import and exports registerMigrations(), so without deduplication a
   * second call would register the same migration twice and the second apply
   * would violate the schema_versions UNIQUE constraint.
   */
  registerMigration(migration: Migration): void {
    if (this.migrations.some(m => m.version === migration.version)) {
      return;
    }
    this.migrations.push(migration);
    // Keep migrations sorted by version
    this.migrations.sort((a, b) => a.version - b.version);
  }

  /**
   * Initialize database connection with optimized settings.
   * Idempotent: subsequent calls return the already-open connection.
   */
  async initialize(): Promise<Database.Database> {
    if (this.db) {
      return this.db;
    }

    // Ensure the data directory exists
    const dataDir = PathDiscovery.getInstance().getDataDirectory();
    fs.mkdirSync(dataDir, { recursive: true });

    const dbPath = path.join(dataDir, 'claude-mem.db');
    this.db = new Database(dbPath);

    // Apply optimized SQLite settings
    this.db.pragma('journal_mode = WAL');
    this.db.pragma('synchronous = NORMAL');
    this.db.pragma('foreign_keys = ON');
    this.db.pragma('temp_store = memory');
    this.db.pragma('mmap_size = 268435456'); // 256MB
    this.db.pragma('cache_size = 10000');

    // Initialize schema_versions table
    this.initializeSchemaVersions();

    // Run migrations
    await this.runMigrations();

    dbInstance = this.db;
    return this.db;
  }

  /**
   * Get the current database connection
   */
  getConnection(): Database.Database {
    if (!this.db) {
      throw new Error('Database not initialized. Call initialize() first.');
    }
    return this.db;
  }

  /**
   * Execute a function within a transaction
   */
  withTransaction<T>(fn: (db: Database.Database) => T): T {
    const db = this.getConnection();
    const transaction = db.transaction(fn);
    return transaction(db);
  }

  /**
   * Close the database connection
   */
  close(): void {
    if (this.db) {
      this.db.close();
      this.db = null;
      dbInstance = null;
    }
  }

  /**
   * Initialize the schema_versions table
   */
  private initializeSchemaVersions(): void {
    if (!this.db) return;
    this.db.exec(`
      CREATE TABLE IF NOT EXISTS schema_versions (
        id INTEGER PRIMARY KEY,
        version INTEGER UNIQUE NOT NULL,
        applied_at TEXT NOT NULL
      )
    `);
  }

  /**
   * Run all pending migrations.
   *
   * Checks membership in the applied-version set rather than comparing
   * against the maximum applied version: the max-based check would silently
   * skip a lower-numbered migration that was registered after a higher one
   * had already been applied (e.g. a backported migration).
   */
  private async runMigrations(): Promise<void> {
    if (!this.db) return;

    const appliedVersions = new Set(
      this.db
        .prepare('SELECT version FROM schema_versions ORDER BY version')
        .all()
        .map((row: any) => row.version)
    );

    for (const migration of this.migrations) {
      if (!appliedVersions.has(migration.version)) {
        console.log(`Applying migration ${migration.version}...`);
        // Apply the migration and record it atomically.
        const transaction = this.db.transaction(() => {
          migration.up(this.db!);
          this.db!
            .prepare('INSERT INTO schema_versions (version, applied_at) VALUES (?, ?)')
            .run(migration.version, new Date().toISOString());
        });
        transaction();
        console.log(`Migration ${migration.version} applied successfully`);
      }
    }
  }

  /**
   * Get current schema version (0 when no migrations have been applied)
   */
  getCurrentVersion(): number {
    if (!this.db) return 0;
    const result = this.db
      .prepare('SELECT MAX(version) as version FROM schema_versions')
      .get() as { version: number } | undefined;
    // MAX() over an empty table yields NULL, hence the fallback
    return result?.version || 0;
  }
}
/**
 * Get the global database instance (for compatibility)
 */
export function getDatabase(): Database.Database {
  if (dbInstance) {
    return dbInstance;
  }
  throw new Error('Database not initialized. Call DatabaseManager.getInstance().initialize() first.');
}
/**
 * Initialize and get database manager
 */
export async function initializeDatabase(): Promise<Database.Database> {
  return DatabaseManager.getInstance().initialize();
}

export { Database };
+229
View File
@@ -0,0 +1,229 @@
import { Database } from 'better-sqlite3';
import { getDatabase } from './Database.js';
import { DiagnosticRow, DiagnosticInput, normalizeTimestamp } from './types.js';
/**
 * Data Access Object for diagnostic records.
 *
 * Wraps prepared statements over the `diagnostics` table. Timestamps are
 * stored twice: as an ISO string (`created_at`) and as epoch milliseconds
 * (`created_at_epoch`) for fast ordering.
 */
export class DiagnosticsStore {
  private db: Database.Database;

  constructor(db?: Database.Database) {
    this.db = db || getDatabase();
  }

  /**
   * Create a new diagnostic record
   */
  create(input: DiagnosticInput): DiagnosticRow {
    const { isoString, epoch } = normalizeTimestamp(input.created_at);
    const stmt = this.db.prepare(`
      INSERT INTO diagnostics (
        session_id, message, severity, created_at, created_at_epoch, project, origin
      ) VALUES (?, ?, ?, ?, ?, ?, ?)
    `);
    // ?? (not ||) so only null/undefined fall back to defaults; falsy-but-valid
    // values such as an empty-string session_id are preserved as given.
    const info = stmt.run(
      input.session_id ?? null,
      input.message,
      input.severity ?? 'warn',
      isoString,
      epoch,
      input.project,
      input.origin ?? 'compressor'
    );
    return this.getById(info.lastInsertRowid as number)!;
  }

  /**
   * Get diagnostic by primary key
   */
  getById(id: number): DiagnosticRow | null {
    const stmt = this.db.prepare('SELECT * FROM diagnostics WHERE id = ?');
    return stmt.get(id) as DiagnosticRow || null;
  }

  /**
   * Get diagnostics for a specific session
   */
  getBySessionId(sessionId: string): DiagnosticRow[] {
    const stmt = this.db.prepare(`
      SELECT * FROM diagnostics
      WHERE session_id = ?
      ORDER BY created_at_epoch DESC
    `);
    return stmt.all(sessionId) as DiagnosticRow[];
  }

  /**
   * Get recent diagnostics for a project
   */
  getRecentForProject(project: string, limit = 10): DiagnosticRow[] {
    const stmt = this.db.prepare(`
      SELECT * FROM diagnostics
      WHERE project = ?
      ORDER BY created_at_epoch DESC
      LIMIT ?
    `);
    return stmt.all(project, limit) as DiagnosticRow[];
  }

  /**
   * Get recent diagnostics across all projects
   */
  getRecent(limit = 10): DiagnosticRow[] {
    const stmt = this.db.prepare(`
      SELECT * FROM diagnostics
      ORDER BY created_at_epoch DESC
      LIMIT ?
    `);
    return stmt.all(limit) as DiagnosticRow[];
  }

  /**
   * Get diagnostics by severity level
   */
  getBySeverity(severity: 'info' | 'warn' | 'error', limit?: number): DiagnosticRow[] {
    const query = limit
      ? 'SELECT * FROM diagnostics WHERE severity = ? ORDER BY created_at_epoch DESC LIMIT ?'
      : 'SELECT * FROM diagnostics WHERE severity = ? ORDER BY created_at_epoch DESC';
    const stmt = this.db.prepare(query);
    const params = limit ? [severity, limit] : [severity];
    return stmt.all(...params) as DiagnosticRow[];
  }

  /**
   * Get diagnostics by origin
   */
  getByOrigin(origin: string, limit?: number): DiagnosticRow[] {
    const query = limit
      ? 'SELECT * FROM diagnostics WHERE origin = ? ORDER BY created_at_epoch DESC LIMIT ?'
      : 'SELECT * FROM diagnostics WHERE origin = ? ORDER BY created_at_epoch DESC';
    const stmt = this.db.prepare(query);
    const params = limit ? [origin, limit] : [origin];
    return stmt.all(...params) as DiagnosticRow[];
  }

  /**
   * Search diagnostics by message content (substring LIKE match)
   */
  searchByMessage(query: string, project?: string, limit = 20): DiagnosticRow[] {
    let sql = 'SELECT * FROM diagnostics WHERE message LIKE ?';
    const params: any[] = [`%${query}%`];
    if (project) {
      sql += ' AND project = ?';
      params.push(project);
    }
    sql += ' ORDER BY created_at_epoch DESC LIMIT ?';
    params.push(limit);
    const stmt = this.db.prepare(sql);
    return stmt.all(...params) as DiagnosticRow[];
  }

  /**
   * Count total diagnostics
   */
  count(): number {
    const stmt = this.db.prepare('SELECT COUNT(*) as count FROM diagnostics');
    const result = stmt.get() as { count: number };
    return result.count;
  }

  /**
   * Count diagnostics by project
   */
  countByProject(project: string): number {
    const stmt = this.db.prepare('SELECT COUNT(*) as count FROM diagnostics WHERE project = ?');
    const result = stmt.get(project) as { count: number };
    return result.count;
  }

  /**
   * Count diagnostics by severity
   */
  countBySeverity(severity: 'info' | 'warn' | 'error'): number {
    const stmt = this.db.prepare('SELECT COUNT(*) as count FROM diagnostics WHERE severity = ?');
    const result = stmt.get(severity) as { count: number };
    return result.count;
  }

  /**
   * Update a diagnostic record.
   *
   * Field fallbacks use ?? so that falsy-but-valid inputs (e.g. an explicitly
   * empty message) are applied rather than silently replaced by the existing
   * value. created_at keeps || because an empty string is not a usable
   * timestamp and should fall back to the stored one.
   */
  update(id: number, input: Partial<DiagnosticInput>): DiagnosticRow {
    const existing = this.getById(id);
    if (!existing) {
      throw new Error(`Diagnostic with id ${id} not found`);
    }
    const { isoString, epoch } = normalizeTimestamp(input.created_at || existing.created_at);
    const stmt = this.db.prepare(`
      UPDATE diagnostics SET
        message = ?, severity = ?, created_at = ?, created_at_epoch = ?, project = ?, origin = ?
      WHERE id = ?
    `);
    stmt.run(
      input.message ?? existing.message,
      input.severity ?? existing.severity,
      isoString,
      epoch,
      input.project || existing.project,
      input.origin ?? existing.origin,
      id
    );
    return this.getById(id)!;
  }

  /**
   * Delete a diagnostic by ID
   */
  deleteById(id: number): boolean {
    const stmt = this.db.prepare('DELETE FROM diagnostics WHERE id = ?');
    const info = stmt.run(id);
    return info.changes > 0;
  }

  /**
   * Delete diagnostics by session_id; returns number of rows removed
   */
  deleteBySessionId(sessionId: string): number {
    const stmt = this.db.prepare('DELETE FROM diagnostics WHERE session_id = ?');
    const info = stmt.run(sessionId);
    return info.changes;
  }

  /**
   * Get unique projects from diagnostics
   */
  getUniqueProjects(): string[] {
    const stmt = this.db.prepare('SELECT DISTINCT project FROM diagnostics ORDER BY project');
    const rows = stmt.all() as { project: string }[];
    return rows.map(row => row.project);
  }

  /**
   * Get diagnostic summary stats (total plus per-severity counts)
   */
  getStats(): { total: number; info: number; warn: number; error: number } {
    const stmt = this.db.prepare(`
      SELECT
        COUNT(*) as total,
        COUNT(CASE WHEN severity = 'info' THEN 1 END) as info,
        COUNT(CASE WHEN severity = 'warn' THEN 1 END) as warn,
        COUNT(CASE WHEN severity = 'error' THEN 1 END) as error
      FROM diagnostics
    `);
    return stmt.get() as { total: number; info: number; warn: number; error: number };
  }
}
+247
View File
@@ -0,0 +1,247 @@
import { Database } from 'better-sqlite3';
import { getDatabase } from './Database.js';
import { MemoryRow, MemoryInput, normalizeTimestamp } from './types.js';
/**
 * Data Access Object for memory records.
 *
 * Wraps prepared statements over the `memories` table. Timestamps are stored
 * both as ISO strings and as epoch milliseconds for fast ordering.
 */
export class MemoryStore {
  private db: Database.Database;

  constructor(db?: Database.Database) {
    this.db = db || getDatabase();
  }

  /**
   * Create a new memory record
   */
  create(input: MemoryInput): MemoryRow {
    const { isoString, epoch } = normalizeTimestamp(input.created_at);
    const stmt = this.db.prepare(`
      INSERT INTO memories (
        session_id, text, document_id, keywords, created_at, created_at_epoch,
        project, archive_basename, origin
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
    `);
    // ?? (not ||) so only null/undefined become NULL/defaults; falsy-but-valid
    // values (e.g. empty keywords string) are stored as given.
    const info = stmt.run(
      input.session_id,
      input.text,
      input.document_id ?? null,
      input.keywords ?? null,
      isoString,
      epoch,
      input.project,
      input.archive_basename ?? null,
      input.origin ?? 'transcript'
    );
    return this.getById(info.lastInsertRowid as number)!;
  }

  /**
   * Create multiple memory records in a single transaction
   */
  createMany(inputs: MemoryInput[]): MemoryRow[] {
    const transaction = this.db.transaction((memories: MemoryInput[]) => {
      const results: MemoryRow[] = [];
      for (const memory of memories) {
        results.push(this.create(memory));
      }
      return results;
    });
    return transaction(inputs);
  }

  /**
   * Get memory by primary key
   */
  getById(id: number): MemoryRow | null {
    const stmt = this.db.prepare('SELECT * FROM memories WHERE id = ?');
    return stmt.get(id) as MemoryRow || null;
  }

  /**
   * Get memory by document_id
   */
  getByDocumentId(documentId: string): MemoryRow | null {
    const stmt = this.db.prepare('SELECT * FROM memories WHERE document_id = ?');
    return stmt.get(documentId) as MemoryRow || null;
  }

  /**
   * Check if a document_id already exists
   */
  hasDocumentId(documentId: string): boolean {
    const stmt = this.db.prepare('SELECT 1 FROM memories WHERE document_id = ? LIMIT 1');
    return Boolean(stmt.get(documentId));
  }

  /**
   * Get memories for a specific session
   */
  getBySessionId(sessionId: string): MemoryRow[] {
    const stmt = this.db.prepare(`
      SELECT * FROM memories
      WHERE session_id = ?
      ORDER BY created_at_epoch DESC
    `);
    return stmt.all(sessionId) as MemoryRow[];
  }

  /**
   * Get recent memories for a project
   */
  getRecentForProject(project: string, limit = 10): MemoryRow[] {
    const stmt = this.db.prepare(`
      SELECT * FROM memories
      WHERE project = ?
      ORDER BY created_at_epoch DESC
      LIMIT ?
    `);
    return stmt.all(project, limit) as MemoryRow[];
  }

  /**
   * Get recent memories across all projects
   */
  getRecent(limit = 10): MemoryRow[] {
    const stmt = this.db.prepare(`
      SELECT * FROM memories
      ORDER BY created_at_epoch DESC
      LIMIT ?
    `);
    return stmt.all(limit) as MemoryRow[];
  }

  /**
   * Search memories by text content (substring LIKE match)
   */
  searchByText(query: string, project?: string, limit = 20): MemoryRow[] {
    let sql = 'SELECT * FROM memories WHERE text LIKE ?';
    const params: any[] = [`%${query}%`];
    if (project) {
      sql += ' AND project = ?';
      params.push(project);
    }
    sql += ' ORDER BY created_at_epoch DESC LIMIT ?';
    params.push(limit);
    const stmt = this.db.prepare(sql);
    return stmt.all(...params) as MemoryRow[];
  }

  /**
   * Search memories by keywords (substring LIKE match)
   */
  searchByKeywords(keywords: string, project?: string, limit = 20): MemoryRow[] {
    let sql = 'SELECT * FROM memories WHERE keywords LIKE ?';
    const params: any[] = [`%${keywords}%`];
    if (project) {
      sql += ' AND project = ?';
      params.push(project);
    }
    sql += ' ORDER BY created_at_epoch DESC LIMIT ?';
    params.push(limit);
    const stmt = this.db.prepare(sql);
    return stmt.all(...params) as MemoryRow[];
  }

  /**
   * Get memories by origin type
   */
  getByOrigin(origin: string, limit?: number): MemoryRow[] {
    const query = limit
      ? 'SELECT * FROM memories WHERE origin = ? ORDER BY created_at_epoch DESC LIMIT ?'
      : 'SELECT * FROM memories WHERE origin = ? ORDER BY created_at_epoch DESC';
    const stmt = this.db.prepare(query);
    const params = limit ? [origin, limit] : [origin];
    return stmt.all(...params) as MemoryRow[];
  }

  /**
   * Count total memories
   */
  count(): number {
    const stmt = this.db.prepare('SELECT COUNT(*) as count FROM memories');
    const result = stmt.get() as { count: number };
    return result.count;
  }

  /**
   * Count memories by project
   */
  countByProject(project: string): number {
    const stmt = this.db.prepare('SELECT COUNT(*) as count FROM memories WHERE project = ?');
    const result = stmt.get(project) as { count: number };
    return result.count;
  }

  /**
   * Update a memory record.
   *
   * Uses ?? for text/origin so that falsy-but-valid inputs (e.g. an explicit
   * empty text) are applied instead of being silently replaced by the
   * existing value. Optional nullable columns keep the explicit
   * `!== undefined` check so a caller can clear them by passing null.
   */
  update(id: number, input: Partial<MemoryInput>): MemoryRow {
    const existing = this.getById(id);
    if (!existing) {
      throw new Error(`Memory with id ${id} not found`);
    }
    const { isoString, epoch } = normalizeTimestamp(input.created_at || existing.created_at);
    const stmt = this.db.prepare(`
      UPDATE memories SET
        text = ?, document_id = ?, keywords = ?, created_at = ?, created_at_epoch = ?,
        project = ?, archive_basename = ?, origin = ?
      WHERE id = ?
    `);
    stmt.run(
      input.text ?? existing.text,
      input.document_id !== undefined ? input.document_id : existing.document_id,
      input.keywords !== undefined ? input.keywords : existing.keywords,
      isoString,
      epoch,
      input.project || existing.project,
      input.archive_basename !== undefined ? input.archive_basename : existing.archive_basename,
      input.origin ?? existing.origin,
      id
    );
    return this.getById(id)!;
  }

  /**
   * Delete a memory by ID
   */
  deleteById(id: number): boolean {
    const stmt = this.db.prepare('DELETE FROM memories WHERE id = ?');
    const info = stmt.run(id);
    return info.changes > 0;
  }

  /**
   * Delete memories by session_id; returns number of rows removed
   */
  deleteBySessionId(sessionId: string): number {
    const stmt = this.db.prepare('DELETE FROM memories WHERE session_id = ?');
    const info = stmt.run(sessionId);
    return info.changes;
  }

  /**
   * Get unique projects from memories
   */
  getUniqueProjects(): string[] {
    const stmt = this.db.prepare('SELECT DISTINCT project FROM memories ORDER BY project');
    const rows = stmt.all() as { project: string }[];
    return rows.map(row => row.project);
  }
}
+196
View File
@@ -0,0 +1,196 @@
import { Database } from 'better-sqlite3';
import { getDatabase } from './Database.js';
import { OverviewRow, OverviewInput, normalizeTimestamp } from './types.js';
/**
 * Data Access Object for overview records.
 *
 * Each session is expected to have at most one overview; use upsert() to
 * enforce that invariant.
 */
export class OverviewStore {
  private db: Database.Database;

  constructor(db?: Database.Database) {
    this.db = db || getDatabase();
  }

  /**
   * Create a new overview record
   */
  create(input: OverviewInput): OverviewRow {
    const { isoString, epoch } = normalizeTimestamp(input.created_at);
    const stmt = this.db.prepare(`
      INSERT INTO overviews (
        session_id, content, created_at, created_at_epoch, project, origin
      ) VALUES (?, ?, ?, ?, ?, ?)
    `);
    const info = stmt.run(
      input.session_id,
      input.content,
      isoString,
      epoch,
      input.project,
      // ?? so only null/undefined fall back to the default origin
      input.origin ?? 'claude'
    );
    return this.getById(info.lastInsertRowid as number)!;
  }

  /**
   * Create or replace an overview for a session (since one session should have one overview)
   */
  upsert(input: OverviewInput): OverviewRow {
    const existing = this.getBySessionId(input.session_id);
    if (existing) {
      return this.update(existing.id, input);
    }
    return this.create(input);
  }

  /**
   * Get overview by primary key
   */
  getById(id: number): OverviewRow | null {
    const stmt = this.db.prepare('SELECT * FROM overviews WHERE id = ?');
    return stmt.get(id) as OverviewRow || null;
  }

  /**
   * Get overview by session_id
   */
  getBySessionId(sessionId: string): OverviewRow | null {
    const stmt = this.db.prepare('SELECT * FROM overviews WHERE session_id = ?');
    return stmt.get(sessionId) as OverviewRow || null;
  }

  /**
   * Get recent overviews for a project
   */
  getRecentForProject(project: string, limit = 5): OverviewRow[] {
    const stmt = this.db.prepare(`
      SELECT * FROM overviews
      WHERE project = ?
      ORDER BY created_at_epoch DESC
      LIMIT ?
    `);
    return stmt.all(project, limit) as OverviewRow[];
  }

  /**
   * Get recent overviews across all projects
   */
  getRecent(limit = 5): OverviewRow[] {
    const stmt = this.db.prepare(`
      SELECT * FROM overviews
      ORDER BY created_at_epoch DESC
      LIMIT ?
    `);
    return stmt.all(limit) as OverviewRow[];
  }

  /**
   * Search overviews by content (substring LIKE match)
   */
  searchByContent(query: string, project?: string, limit = 10): OverviewRow[] {
    let sql = 'SELECT * FROM overviews WHERE content LIKE ?';
    const params: any[] = [`%${query}%`];
    if (project) {
      sql += ' AND project = ?';
      params.push(project);
    }
    sql += ' ORDER BY created_at_epoch DESC LIMIT ?';
    params.push(limit);
    const stmt = this.db.prepare(sql);
    return stmt.all(...params) as OverviewRow[];
  }

  /**
   * Get overviews by origin type
   */
  getByOrigin(origin: string, limit?: number): OverviewRow[] {
    const query = limit
      ? 'SELECT * FROM overviews WHERE origin = ? ORDER BY created_at_epoch DESC LIMIT ?'
      : 'SELECT * FROM overviews WHERE origin = ? ORDER BY created_at_epoch DESC';
    const stmt = this.db.prepare(query);
    const params = limit ? [origin, limit] : [origin];
    return stmt.all(...params) as OverviewRow[];
  }

  /**
   * Count total overviews
   */
  count(): number {
    const stmt = this.db.prepare('SELECT COUNT(*) as count FROM overviews');
    const result = stmt.get() as { count: number };
    return result.count;
  }

  /**
   * Count overviews by project
   */
  countByProject(project: string): number {
    const stmt = this.db.prepare('SELECT COUNT(*) as count FROM overviews WHERE project = ?');
    const result = stmt.get(project) as { count: number };
    return result.count;
  }

  /**
   * Update an overview record.
   *
   * content/origin use ?? so that falsy-but-valid inputs (e.g. an explicitly
   * empty content string) are applied rather than silently replaced by the
   * existing value.
   */
  update(id: number, input: Partial<OverviewInput>): OverviewRow {
    const existing = this.getById(id);
    if (!existing) {
      throw new Error(`Overview with id ${id} not found`);
    }
    const { isoString, epoch } = normalizeTimestamp(input.created_at || existing.created_at);
    const stmt = this.db.prepare(`
      UPDATE overviews SET
        content = ?, created_at = ?, created_at_epoch = ?, project = ?, origin = ?
      WHERE id = ?
    `);
    stmt.run(
      input.content ?? existing.content,
      isoString,
      epoch,
      input.project || existing.project,
      input.origin ?? existing.origin,
      id
    );
    return this.getById(id)!;
  }

  /**
   * Delete an overview by ID
   */
  deleteById(id: number): boolean {
    const stmt = this.db.prepare('DELETE FROM overviews WHERE id = ?');
    const info = stmt.run(id);
    return info.changes > 0;
  }

  /**
   * Delete overview by session_id
   */
  deleteBySessionId(sessionId: string): boolean {
    const stmt = this.db.prepare('DELETE FROM overviews WHERE session_id = ?');
    const info = stmt.run(sessionId);
    return info.changes > 0;
  }

  /**
   * Get unique projects from overviews
   */
  getUniqueProjects(): string[] {
    const stmt = this.db.prepare('SELECT DISTINCT project FROM overviews ORDER BY project');
    const rows = stmt.all() as { project: string }[];
    return rows.map(row => row.project);
  }
}
+195
View File
@@ -0,0 +1,195 @@
import { Database } from 'better-sqlite3';
import { getDatabase } from './Database.js';
import { SessionRow, SessionInput, normalizeTimestamp } from './types.js';
/**
 * Data Access Object for session records.
 *
 * Sessions are the root entity; related tables (memories, overviews,
 * diagnostics, archives, titles) reference sessions(session_id) with
 * cascading deletes defined in the schema.
 */
export class SessionStore {
  private db: Database.Database;

  constructor(db?: Database.Database) {
    this.db = db || getDatabase();
  }

  /**
   * Create a new session record.
   *
   * Uses ?? (not ||) for optional columns so that falsy-but-valid values
   * survive — in particular archive_bytes of 0 (an empty archive) must be
   * stored as 0, not coerced to NULL.
   */
  create(input: SessionInput): SessionRow {
    const { isoString, epoch } = normalizeTimestamp(input.created_at);
    const stmt = this.db.prepare(`
      INSERT INTO sessions (
        session_id, project, created_at, created_at_epoch, source,
        archive_path, archive_bytes, archive_checksum, archived_at, metadata_json
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `);
    const info = stmt.run(
      input.session_id,
      input.project,
      isoString,
      epoch,
      input.source ?? 'compress',
      input.archive_path ?? null,
      input.archive_bytes ?? null,
      input.archive_checksum ?? null,
      input.archived_at ?? null,
      input.metadata_json ?? null
    );
    return this.getById(info.lastInsertRowid as number)!;
  }

  /**
   * Upsert a session record (insert or update if session_id exists)
   */
  upsert(input: SessionInput): SessionRow {
    const existing = this.getBySessionId(input.session_id);
    if (existing) {
      return this.update(existing.id, input);
    }
    return this.create(input);
  }

  /**
   * Update an existing session record.
   *
   * Nullable columns keep the explicit `!== undefined` check so a caller can
   * clear them by passing null; source uses ?? so only a missing value falls
   * back to the stored one.
   */
  update(id: number, input: Partial<SessionInput>): SessionRow {
    const existing = this.getById(id);
    if (!existing) {
      throw new Error(`Session with id ${id} not found`);
    }
    const { isoString, epoch } = normalizeTimestamp(input.created_at || existing.created_at);
    const stmt = this.db.prepare(`
      UPDATE sessions SET
        project = ?, created_at = ?, created_at_epoch = ?, source = ?,
        archive_path = ?, archive_bytes = ?, archive_checksum = ?, archived_at = ?, metadata_json = ?
      WHERE id = ?
    `);
    stmt.run(
      input.project || existing.project,
      isoString,
      epoch,
      input.source ?? existing.source,
      input.archive_path !== undefined ? input.archive_path : existing.archive_path,
      input.archive_bytes !== undefined ? input.archive_bytes : existing.archive_bytes,
      input.archive_checksum !== undefined ? input.archive_checksum : existing.archive_checksum,
      input.archived_at !== undefined ? input.archived_at : existing.archived_at,
      input.metadata_json !== undefined ? input.metadata_json : existing.metadata_json,
      id
    );
    return this.getById(id)!;
  }

  /**
   * Get session by primary key
   */
  getById(id: number): SessionRow | null {
    const stmt = this.db.prepare('SELECT * FROM sessions WHERE id = ?');
    return stmt.get(id) as SessionRow || null;
  }

  /**
   * Get session by session_id
   */
  getBySessionId(sessionId: string): SessionRow | null {
    const stmt = this.db.prepare('SELECT * FROM sessions WHERE session_id = ?');
    return stmt.get(sessionId) as SessionRow || null;
  }

  /**
   * Check if a session exists by session_id
   */
  has(sessionId: string): boolean {
    const stmt = this.db.prepare('SELECT 1 FROM sessions WHERE session_id = ? LIMIT 1');
    return Boolean(stmt.get(sessionId));
  }

  /**
   * Get all session_ids as a Set (useful for import-history)
   */
  getAllSessionIds(): Set<string> {
    const stmt = this.db.prepare('SELECT session_id FROM sessions');
    const rows = stmt.all() as { session_id: string }[];
    return new Set(rows.map(row => row.session_id));
  }

  /**
   * Get recent sessions for a project
   */
  getRecentForProject(project: string, limit = 5): SessionRow[] {
    const stmt = this.db.prepare(`
      SELECT * FROM sessions
      WHERE project = ?
      ORDER BY created_at_epoch DESC
      LIMIT ?
    `);
    return stmt.all(project, limit) as SessionRow[];
  }

  /**
   * Get recent sessions across all projects
   */
  getRecent(limit = 5): SessionRow[] {
    const stmt = this.db.prepare(`
      SELECT * FROM sessions
      ORDER BY created_at_epoch DESC
      LIMIT ?
    `);
    return stmt.all(limit) as SessionRow[];
  }

  /**
   * Get sessions by source type
   */
  getBySource(source: 'compress' | 'save' | 'legacy-jsonl', limit?: number): SessionRow[] {
    const query = limit
      ? 'SELECT * FROM sessions WHERE source = ? ORDER BY created_at_epoch DESC LIMIT ?'
      : 'SELECT * FROM sessions WHERE source = ? ORDER BY created_at_epoch DESC';
    const stmt = this.db.prepare(query);
    const params = limit ? [source, limit] : [source];
    return stmt.all(...params) as SessionRow[];
  }

  /**
   * Count total sessions
   */
  count(): number {
    const stmt = this.db.prepare('SELECT COUNT(*) as count FROM sessions');
    const result = stmt.get() as { count: number };
    return result.count;
  }

  /**
   * Count sessions by project
   */
  countByProject(project: string): number {
    const stmt = this.db.prepare('SELECT COUNT(*) as count FROM sessions WHERE project = ?');
    const result = stmt.get(project) as { count: number };
    return result.count;
  }

  /**
   * Delete a session by ID (cascades to related records)
   */
  deleteById(id: number): boolean {
    const stmt = this.db.prepare('DELETE FROM sessions WHERE id = ?');
    const info = stmt.run(id);
    return info.changes > 0;
  }

  /**
   * Delete a session by session_id (cascades to related records)
   */
  deleteBySessionId(sessionId: string): boolean {
    const stmt = this.db.prepare('DELETE FROM sessions WHERE session_id = ?');
    const info = stmt.run(sessionId);
    return info.changes > 0;
  }
}
+32
View File
@@ -0,0 +1,32 @@
// Import migrations to register them
import './migrations/index.js';
// Export main components
export { DatabaseManager, getDatabase, initializeDatabase } from './Database.js';
// Export store classes
export { SessionStore } from './SessionStore.js';
export { MemoryStore } from './MemoryStore.js';
export { OverviewStore } from './OverviewStore.js';
export { DiagnosticsStore } from './DiagnosticsStore.js';
// Export types
export * from './types.js';
// Convenience function to get all stores
export async function createStores() {
  const { DatabaseManager } = await import('./Database.js');
  const connection = await DatabaseManager.getInstance().initialize();

  const [{ SessionStore }, { MemoryStore }, { OverviewStore }, { DiagnosticsStore }] =
    await Promise.all([
      import('./SessionStore.js'),
      import('./MemoryStore.js'),
      import('./OverviewStore.js'),
      import('./DiagnosticsStore.js'),
    ]);

  return {
    sessions: new SessionStore(connection),
    memories: new MemoryStore(connection),
    overviews: new OverviewStore(connection),
    diagnostics: new DiagnosticsStore(connection)
  };
}
@@ -0,0 +1,133 @@
import { Migration } from '../Database.js';
/**
 * Initial migration: Create all core tables for claude-mem SQLite index
 */
export const migration001: Migration = {
  version: 1,
  up: (db) => {
    // Create sessions table — the root entity; session_id is the natural key
    // that every other table references.
    db.exec(`
      CREATE TABLE sessions (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        session_id TEXT UNIQUE NOT NULL,
        project TEXT NOT NULL,
        created_at TEXT NOT NULL,
        created_at_epoch INTEGER NOT NULL,
        source TEXT DEFAULT 'compress',
        archive_path TEXT,
        archive_bytes INTEGER,
        archive_checksum TEXT,
        archived_at TEXT,
        metadata_json TEXT
      )
    `);

    // Create indexes for sessions (support "recent per project" and
    // "recent per source" queries, both ordered by created_at_epoch DESC)
    db.exec(`
      CREATE INDEX sessions_project_created_at ON sessions (project, created_at_epoch DESC)
    `);
    db.exec(`
      CREATE INDEX sessions_source_created ON sessions (source, created_at_epoch DESC)
    `);

    // Create overviews table
    // NOTE: the ON DELETE CASCADE / SET NULL actions below only fire when the
    // connection has PRAGMA foreign_keys = ON (DatabaseManager sets this).
    db.exec(`
      CREATE TABLE overviews (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        session_id TEXT NOT NULL REFERENCES sessions(session_id) ON DELETE CASCADE,
        content TEXT NOT NULL,
        created_at TEXT NOT NULL,
        created_at_epoch INTEGER NOT NULL,
        project TEXT NOT NULL,
        origin TEXT DEFAULT 'claude'
      )
    `);

    // Create index for overviews
    db.exec(`
      CREATE INDEX overviews_project_created_at ON overviews (project, created_at_epoch DESC)
    `);

    // Create memories table
    db.exec(`
      CREATE TABLE memories (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        session_id TEXT NOT NULL REFERENCES sessions(session_id) ON DELETE CASCADE,
        text TEXT NOT NULL,
        document_id TEXT,
        keywords TEXT,
        created_at TEXT NOT NULL,
        created_at_epoch INTEGER NOT NULL,
        project TEXT NOT NULL,
        archive_basename TEXT,
        origin TEXT DEFAULT 'transcript'
      )
    `);

    // Create indexes for memories.
    // The partial unique index enforces document_id uniqueness while still
    // allowing any number of rows with a NULL document_id.
    db.exec(`
      CREATE INDEX memories_project_created_at ON memories (project, created_at_epoch DESC)
    `);
    db.exec(`
      CREATE UNIQUE INDEX memories_document_id_unique ON memories (document_id) WHERE document_id IS NOT NULL
    `);

    // Create diagnostics table (session_id is nullable: ON DELETE SET NULL
    // keeps diagnostics around after their session is removed)
    db.exec(`
      CREATE TABLE diagnostics (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        session_id TEXT REFERENCES sessions(session_id) ON DELETE SET NULL,
        message TEXT NOT NULL,
        severity TEXT DEFAULT 'warn',
        created_at TEXT NOT NULL,
        created_at_epoch INTEGER NOT NULL,
        project TEXT NOT NULL,
        origin TEXT DEFAULT 'compressor'
      )
    `);

    // Create index for diagnostics
    db.exec(`
      CREATE INDEX diagnostics_project_created_at ON diagnostics (project, created_at_epoch DESC)
    `);

    // Create archives table (for future archival workflows)
    db.exec(`
      CREATE TABLE archives (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        session_id TEXT UNIQUE NOT NULL REFERENCES sessions(session_id) ON DELETE CASCADE,
        path TEXT NOT NULL,
        bytes INTEGER,
        checksum TEXT,
        stored_at TEXT NOT NULL,
        storage_status TEXT DEFAULT 'active'
      )
    `);

    // Create titles table (ready for conversation-titles.jsonl migration)
    db.exec(`
      CREATE TABLE titles (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        session_id TEXT UNIQUE NOT NULL REFERENCES sessions(session_id) ON DELETE CASCADE,
        title TEXT NOT NULL,
        created_at TEXT NOT NULL,
        project TEXT NOT NULL
      )
    `);

    console.log('✅ Created initial database schema with all tables and indexes');
  },

  down: (db) => {
    // Drop tables in reverse order to respect foreign key constraints
    const tables = ['titles', 'archives', 'diagnostics', 'memories', 'overviews', 'sessions'];
    for (const table of tables) {
      db.exec(`DROP TABLE IF EXISTS ${table}`);
    }
    console.log('🗑️ Dropped all tables from initial migration');
  }
};
+17
View File
@@ -0,0 +1,17 @@
import { DatabaseManager } from '../Database.js';
import { migration001 } from './001_initial.js';
// Guard so the auto-registration below plus any later manual call cannot
// register — and therefore attempt to apply — the same migrations twice.
let migrationsRegistered = false;

/**
 * Register all migrations with the database manager.
 * Idempotent: safe to call more than once (the module already calls it on
 * import, but it is also exported for explicit use).
 */
export function registerMigrations(): void {
  if (migrationsRegistered) {
    return;
  }
  migrationsRegistered = true;
  const manager = DatabaseManager.getInstance();
  // Register migrations in order
  manager.registerMigration(migration001);
  console.log('📋 Registered all database migrations');
}

// Auto-register migrations when this module is imported
registerMigrations();
+152
View File
@@ -0,0 +1,152 @@
/**
* Database entity types for SQLite storage
*/
export interface SessionRow {
id: number;
session_id: string;
project: string;
created_at: string;
created_at_epoch: number;
source: 'compress' | 'save' | 'legacy-jsonl';
archive_path?: string;
archive_bytes?: number;
archive_checksum?: string;
archived_at?: string;
metadata_json?: string;
}
export interface OverviewRow {
id: number;
session_id: string;
content: string;
created_at: string;
created_at_epoch: number;
project: string;
origin: string;
}
export interface MemoryRow {
id: number;
session_id: string;
text: string;
document_id?: string;
keywords?: string;
created_at: string;
created_at_epoch: number;
project: string;
archive_basename?: string;
origin: string;
}
export interface DiagnosticRow {
id: number;
session_id?: string;
message: string;
severity: 'info' | 'warn' | 'error';
created_at: string;
created_at_epoch: number;
project: string;
origin: string;
}
export interface ArchiveRow {
id: number;
session_id: string;
path: string;
bytes?: number;
checksum?: string;
stored_at: string;
storage_status: 'active' | 'archived' | 'deleted';
}
export interface TitleRow {
id: number;
session_id: string;
title: string;
created_at: string;
project: string;
}
/**
* Input types for creating new records (without id and auto-generated fields)
*/
export interface SessionInput {
session_id: string;
project: string;
created_at: string;
source?: 'compress' | 'save' | 'legacy-jsonl';
archive_path?: string;
archive_bytes?: number;
archive_checksum?: string;
archived_at?: string;
metadata_json?: string;
}
// Payload for creating an overview; `id` and epoch fields are generated by the store.
export interface OverviewInput {
  session_id: string;
  content: string;     // overview text
  created_at: string;  // any format accepted by normalizeTimestamp
  project: string;
  origin?: string;     // presumably defaulted by the store — TODO confirm
}
// Payload for creating a memory; `id` and epoch fields are generated by the store.
export interface MemoryInput {
  session_id: string;
  text: string;               // memory content
  document_id?: string;       // external document reference, used for dedupe lookups
  keywords?: string;          // search keywords
  created_at: string;         // any format accepted by normalizeTimestamp
  project: string;
  archive_basename?: string;  // basename of the related archive file
  origin?: string;            // presumably defaulted by the store — TODO confirm
}
// Payload for creating a diagnostic; `id` and epoch fields are generated by the store.
export interface DiagnosticInput {
  session_id?: string;                  // optional: diagnostics may be session-less
  message: string;                      // human-readable diagnostic text
  severity?: 'info' | 'warn' | 'error'; // presumably defaulted by the store — TODO confirm
  created_at: string;                   // any format accepted by normalizeTimestamp
  project: string;
  origin?: string;                      // presumably defaulted by the store — TODO confirm
}
/**
* Helper function to normalize timestamps from various formats
*/
/**
 * Normalize a timestamp from various formats into a canonical pair.
 *
 * Fixes over the previous version:
 * - `0` (the Unix epoch) and other falsy-but-valid numbers are no longer
 *   treated as "missing" (the old `!timestamp` truthiness check was wrong).
 * - Non-finite numbers (NaN, Infinity) and invalid Date instances no longer
 *   reach `toISOString()`, which would throw a RangeError.
 *
 * @param timestamp - Parseable date string, Date instance, epoch milliseconds,
 *   or undefined. Missing/unparseable values fall back to the current time.
 * @returns `isoString` (ISO-8601) and `epoch` (milliseconds) for the same instant.
 */
export function normalizeTimestamp(timestamp: string | Date | number | undefined): { isoString: string; epoch: number } {
  let date: Date;

  if (timestamp instanceof Date) {
    // Guard against invalid Date instances (e.g. new Date('garbage')),
    // whose toISOString() would throw below.
    date = Number.isNaN(timestamp.getTime()) ? new Date() : timestamp;
  } else if (typeof timestamp === 'number') {
    // Accept any finite epoch-ms value, including 0.
    date = Number.isFinite(timestamp) ? new Date(timestamp) : new Date();
  } else if (typeof timestamp === 'string' && timestamp.trim()) {
    date = new Date(timestamp);
    if (Number.isNaN(date.getTime())) {
      // Retry with whitespace collapsed into a single 'T' separator
      // (e.g. "2024-01-01 12:00:00" -> "2024-01-01T12:00:00").
      const cleaned = timestamp.replace(/\s+/g, 'T').replace(/T+/g, 'T');
      date = new Date(cleaned);
      if (Number.isNaN(date.getTime())) {
        // Still unparseable: fall back to the current time.
        date = new Date();
      }
    }
  } else {
    // undefined or an empty/blank string: default to the current time.
    date = new Date();
  }

  return {
    isoString: date.toISOString(),
    epoch: date.getTime()
  };
}
+402
View File
@@ -0,0 +1,402 @@
import fs from 'fs';
import { PathDiscovery } from '../services/path-discovery.js';
import {
createStores,
SessionStore,
MemoryStore,
OverviewStore,
DiagnosticsStore,
SessionInput,
MemoryInput,
OverviewInput,
DiagnosticInput,
SessionRow,
MemoryRow,
OverviewRow,
DiagnosticRow,
normalizeTimestamp
} from '../services/sqlite/index.js';
/**
* Storage backend types
*/
export type StorageBackend = 'sqlite' | 'jsonl';
/**
* Unified interface for storage operations
*/
export interface IStorageProvider {
  /** Which backend this provider writes to. */
  backend: StorageBackend;

  // Session operations
  // NOTE: create* methods resolve to the created row on SQLite and to void on
  // JSONL, hence the `| void` unions.
  createSession(session: SessionInput): Promise<SessionRow | void>;
  /** Look up a single session; JSONL mode always resolves to null. */
  getSession(sessionId: string): Promise<SessionRow | null>;
  hasSession(sessionId: string): Promise<boolean>;
  getAllSessionIds(): Promise<Set<string>>;
  /** Most recent sessions; JSONL mode returns an empty array. */
  getRecentSessions(limit?: number): Promise<SessionRow[]>;
  getRecentSessionsForProject(project: string, limit?: number): Promise<SessionRow[]>;

  // Memory operations
  createMemory(memory: MemoryInput): Promise<MemoryRow | void>;
  /** Bulk insert; no return value on either backend. */
  createMemories(memories: MemoryInput[]): Promise<void>;
  getRecentMemories(limit?: number): Promise<MemoryRow[]>;
  getRecentMemoriesForProject(project: string, limit?: number): Promise<MemoryRow[]>;
  /** True if a memory with this external document id already exists (dedupe check). */
  hasDocumentId(documentId: string): Promise<boolean>;

  // Overview operations
  createOverview(overview: OverviewInput): Promise<OverviewRow | void>;
  /** Insert-or-replace; JSONL mode degrades to a plain append. */
  upsertOverview(overview: OverviewInput): Promise<OverviewRow | void>;
  getRecentOverviews(limit?: number): Promise<OverviewRow[]>;
  getRecentOverviewsForProject(project: string, limit?: number): Promise<OverviewRow[]>;

  // Diagnostic operations
  createDiagnostic(diagnostic: DiagnosticInput): Promise<DiagnosticRow | void>;

  // Health check
  /** True when this backend can be used (e.g. database opens, directory writable). */
  isAvailable(): Promise<boolean>;
}
/**
* SQLite-based storage provider
*/
/**
 * SQLite-based storage provider.
 *
 * Thin async facade over the SQLite stores; every operation delegates to the
 * corresponding store after lazily opening the database on first use.
 */
export class SQLiteStorageProvider implements IStorageProvider {
  public readonly backend = 'sqlite';

  // Store handles are created on first access so that merely constructing
  // the provider never touches the database.
  private stores?: {
    sessions: SessionStore;
    memories: MemoryStore;
    overviews: OverviewStore;
    diagnostics: DiagnosticsStore;
  };

  /** Open the SQLite stores exactly once and return them. */
  private async getStores() {
    if (this.stores === undefined) {
      this.stores = await createStores();
    }
    return this.stores;
  }

  /** True when the SQLite database can be opened. */
  async isAvailable(): Promise<boolean> {
    try {
      await this.getStores();
      return true;
    } catch {
      return false;
    }
  }

  async createSession(session: SessionInput): Promise<SessionRow> {
    const { sessions } = await this.getStores();
    return sessions.create(session);
  }

  async getSession(sessionId: string): Promise<SessionRow | null> {
    const { sessions } = await this.getStores();
    return sessions.getBySessionId(sessionId);
  }

  async hasSession(sessionId: string): Promise<boolean> {
    const { sessions } = await this.getStores();
    return sessions.has(sessionId);
  }

  async getAllSessionIds(): Promise<Set<string>> {
    const { sessions } = await this.getStores();
    return sessions.getAllSessionIds();
  }

  async getRecentSessions(limit = 5): Promise<SessionRow[]> {
    const { sessions } = await this.getStores();
    return sessions.getRecent(limit);
  }

  async getRecentSessionsForProject(project: string, limit = 5): Promise<SessionRow[]> {
    const { sessions } = await this.getStores();
    return sessions.getRecentForProject(project, limit);
  }

  async createMemory(memory: MemoryInput): Promise<MemoryRow> {
    const { memories } = await this.getStores();
    return memories.create(memory);
  }

  async createMemories(memories: MemoryInput[]): Promise<void> {
    const stores = await this.getStores();
    stores.memories.createMany(memories);
  }

  async getRecentMemories(limit = 10): Promise<MemoryRow[]> {
    const { memories } = await this.getStores();
    return memories.getRecent(limit);
  }

  async getRecentMemoriesForProject(project: string, limit = 10): Promise<MemoryRow[]> {
    const { memories } = await this.getStores();
    return memories.getRecentForProject(project, limit);
  }

  async hasDocumentId(documentId: string): Promise<boolean> {
    const { memories } = await this.getStores();
    return memories.hasDocumentId(documentId);
  }

  async createOverview(overview: OverviewInput): Promise<OverviewRow> {
    const { overviews } = await this.getStores();
    return overviews.create(overview);
  }

  async upsertOverview(overview: OverviewInput): Promise<OverviewRow> {
    const { overviews } = await this.getStores();
    return overviews.upsert(overview);
  }

  async getRecentOverviews(limit = 5): Promise<OverviewRow[]> {
    const { overviews } = await this.getStores();
    return overviews.getRecent(limit);
  }

  async getRecentOverviewsForProject(project: string, limit = 5): Promise<OverviewRow[]> {
    const { overviews } = await this.getStores();
    return overviews.getRecentForProject(project, limit);
  }

  async createDiagnostic(diagnostic: DiagnosticInput): Promise<DiagnosticRow> {
    const { diagnostics } = await this.getStores();
    return diagnostics.create(diagnostic);
  }
}
/**
* JSONL-based storage provider (legacy fallback)
*/
/**
 * JSONL-based storage provider (legacy fallback)
 *
 * Persists records as one JSON object per line in the index file. Read-side
 * support is limited: only session-id and document-id lookups work; all
 * "recent" queries return empty arrays.
 */
export class JSONLStorageProvider implements IStorageProvider {
  public readonly backend = 'jsonl';
  private pathDiscovery = PathDiscovery.getInstance();

  /** True when the data directory exists or can be created. */
  async isAvailable(): Promise<boolean> {
    try {
      // Ensure data directory exists
      const dataDir = this.pathDiscovery.getDataDirectory();
      fs.mkdirSync(dataDir, { recursive: true });
      return true;
    } catch {
      return false;
    }
  }

  /** Append one record to the JSONL index as a single JSON line. */
  private appendToIndex(obj: any): void {
    const indexPath = this.pathDiscovery.getIndexPath();
    // BUG FIX: was '\\n' (a literal backslash followed by 'n'), which wrote
    // the two characters "\n" instead of a real newline and corrupted the
    // one-record-per-line JSONL framing.
    fs.appendFileSync(indexPath, JSON.stringify(obj) + '\n', 'utf8');
  }

  /**
   * Read every parseable JSON record from the index file.
   * Malformed lines are skipped; a missing file yields an empty array.
   */
  private readIndexRecords(): any[] {
    const indexPath = this.pathDiscovery.getIndexPath();
    if (!fs.existsSync(indexPath)) {
      return [];
    }
    const content = fs.readFileSync(indexPath, 'utf-8');
    // BUG FIX: was split('\\n'), which split on the literal two-character
    // sequence "\n" rather than on real newlines, so a multi-line file was
    // treated as one giant (unparseable) line.
    const lines = content.trim().split('\n').filter(line => line.trim());
    const records: any[] = [];
    for (const line of lines) {
      try {
        records.push(JSON.parse(line));
      } catch {
        // Skip malformed JSON
      }
    }
    return records;
  }

  async createSession(session: SessionInput): Promise<void> {
    this.appendToIndex({
      type: 'session',
      session_id: session.session_id,
      project: session.project,
      timestamp: session.created_at
    });
  }

  async getSession(): Promise<null> {
    // Single-session lookup is not supported in JSONL mode.
    return null;
  }

  async hasSession(sessionId: string): Promise<boolean> {
    const sessionIds = await this.getAllSessionIds();
    return sessionIds.has(sessionId);
  }

  /** Collect every session_id seen on any record type in the index. */
  async getAllSessionIds(): Promise<Set<string>> {
    const sessionIds = new Set<string>();
    for (const obj of this.readIndexRecords()) {
      if (obj.session_id) {
        sessionIds.add(obj.session_id);
      }
    }
    return sessionIds;
  }

  async getRecentSessions(): Promise<SessionRow[]> {
    // Not fully supported in JSONL mode - return empty array
    return [];
  }

  async getRecentSessionsForProject(): Promise<SessionRow[]> {
    // Not fully supported in JSONL mode - return empty array
    return [];
  }

  async createMemory(memory: MemoryInput): Promise<void> {
    this.appendToIndex({
      type: 'memory',
      text: memory.text,
      document_id: memory.document_id,
      keywords: memory.keywords,
      session_id: memory.session_id,
      project: memory.project,
      timestamp: memory.created_at,
      archive: memory.archive_basename
    });
  }

  async createMemories(memories: MemoryInput[]): Promise<void> {
    for (const memory of memories) {
      await this.createMemory(memory);
    }
  }

  async getRecentMemories(): Promise<MemoryRow[]> {
    // Not fully supported in JSONL mode - return empty array
    return [];
  }

  async getRecentMemoriesForProject(): Promise<MemoryRow[]> {
    // Not fully supported in JSONL mode - return empty array
    return [];
  }

  /** Scan the index for a memory record carrying this document id. */
  async hasDocumentId(documentId: string): Promise<boolean> {
    return this.readIndexRecords().some(
      obj => obj.type === 'memory' && obj.document_id === documentId
    );
  }

  async createOverview(overview: OverviewInput): Promise<void> {
    this.appendToIndex({
      type: 'overview',
      content: overview.content,
      session_id: overview.session_id,
      project: overview.project,
      timestamp: overview.created_at
    });
  }

  async upsertOverview(overview: OverviewInput): Promise<void> {
    // Just append in JSONL mode (no real upsert)
    await this.createOverview(overview);
  }

  async getRecentOverviews(): Promise<OverviewRow[]> {
    // Not fully supported in JSONL mode - return empty array
    return [];
  }

  async getRecentOverviewsForProject(): Promise<OverviewRow[]> {
    // Not fully supported in JSONL mode - return empty array
    return [];
  }

  async createDiagnostic(diagnostic: DiagnosticInput): Promise<void> {
    this.appendToIndex({
      type: 'diagnostic',
      message: diagnostic.message,
      session_id: diagnostic.session_id,
      project: diagnostic.project,
      timestamp: diagnostic.created_at
    });
  }
}
/**
* Storage provider factory and singleton
*/
let storageProvider: IStorageProvider | null = null;
/**
* Get the configured storage provider
*/
export async function getStorageProvider(): Promise<IStorageProvider> {
if (storageProvider) {
return storageProvider;
}
// Try SQLite first
const sqliteProvider = new SQLiteStorageProvider();
if (await sqliteProvider.isAvailable()) {
storageProvider = sqliteProvider;
return storageProvider;
}
// Fall back to JSONL
const jsonlProvider = new JSONLStorageProvider();
if (await jsonlProvider.isAvailable()) {
storageProvider = jsonlProvider;
return storageProvider;
}
throw new Error('No storage backend available');
}
/**
* Force a specific storage provider (useful for testing)
*/
export function setStorageProvider(provider: IStorageProvider): void {
  // Overrides the lazily-created singleton; subsequent getStorageProvider()
  // calls return this instance without probing backends.
  storageProvider = provider;
}
/**
* Check if SQLite migration is needed
*/
export async function needsMigration(): Promise<boolean> {
const pathDiscovery = PathDiscovery.getInstance();
const indexPath = pathDiscovery.getIndexPath();
// If JSONL exists but SQLite is not available, migration is needed
if (fs.existsSync(indexPath)) {
const sqliteProvider = new SQLiteStorageProvider();
const sqliteAvailable = await sqliteProvider.isAvailable();
if (!sqliteAvailable) {
return true;
}
// Check if SQLite has data
try {
const stores = await createStores();
const sessionCount = stores.sessions.count();
return sessionCount === 0; // Needs migration if SQLite is empty
} catch {
return true;
}
}
return false;
}