Release v3.9.9

Published from npm package build
Source: https://github.com/thedotmack/claude-mem-source
This commit is contained in:
Alex Newman
2025-10-03 18:20:47 -04:00
parent 4d5b307a74
commit 85ed7c3d2f
85 changed files with 11156 additions and 7458 deletions
-364
View File
@@ -1,364 +0,0 @@
import * as p from '@clack/prompts';
import { TranscriptParser } from './transcript-parser.js';
import path from 'path';
import fs from 'fs';
/**
 * Conversation item for selection UI
 *
 * One row per transcript file, combining parser-supplied metadata with
 * derived date/grouping fields used for sorting and display in the picker.
 */
export interface ConversationItem {
/** Absolute path to the transcript file on disk */
filePath: string;
/** Session identifier taken from the transcript metadata */
sessionId: string;
/** Raw timestamp string from the transcript (may be missing or unparseable) */
timestamp: string;
/** Number of messages in the conversation */
messageCount: number;
/** Git branch recorded for the session, when available */
gitBranch?: string;
/** Working directory recorded for the session */
cwd: string;
/** Transcript file size in bytes */
fileSize: number;
/** Human-readable label shown in the selection list */
displayName: string;
/** Project name derived from the file's parent directory */
projectName: string;
/** Timestamp parsed to a Date (falls back to file mtime, then "now") */
parsedDate: Date;
/** Relative time label, e.g. "2h ago" */
relativeDate: string;
/** Date bucket such as "Today" / "This Week", used for grouping */
dateGroup: string;
}
/**
 * Selection result
 *
 * Outcome of the interactive picker: the chosen transcript file paths, or
 * `cancelled: true` when the user aborted (in which case the list is empty).
 */
export interface SelectionResult {
/** Absolute paths of the transcripts chosen for import */
selectedFiles: string[];
/** True when the user cancelled at any step or nothing was importable */
cancelled: boolean;
}
/**
 * Interactive conversation selector service
 *
 * Wraps @clack/prompts to let the user scan, filter, select, and confirm
 * Claude transcript files for import. All cancellation paths return
 * `{ selectedFiles: [], cancelled: true }`.
 */
export class ConversationSelector {
  private parser: TranscriptParser;

  constructor() {
    this.parser = new TranscriptParser();
  }

  /**
   * Show interactive selection UI for conversations with improved flow.
   *
   * Scans for transcript files, gathers metadata per file, offers a filter
   * step when more than 100 conversations exist, then multi-selects and
   * confirms before returning the chosen file paths.
   */
  async selectConversations(): Promise<SelectionResult> {
    p.intro('🧠 Claude History Import');

    const s = p.spinner();
    s.start('Scanning for conversation files...');

    const conversationFiles = await this.parser.scanConversationFiles();
    if (conversationFiles.length === 0) {
      s.stop('❌ No conversation files found');
      p.outro('No conversation files found in Claude projects directory');
      return { selectedFiles: [], cancelled: true };
    }

    // Get metadata for each file; unreadable or corrupt files are skipped silently.
    const conversations: ConversationItem[] = [];
    for (const filePath of conversationFiles) {
      try {
        const metadata = await this.parser.getConversationMetadata(filePath);
        const projectName = this.extractProjectName(filePath);
        const parsedDate = this.parseTimestamp(metadata.timestamp, filePath);
        const relativeDate = this.formatRelativeDate(parsedDate);
        const dateGroup = this.getDateGroup(parsedDate);
        conversations.push({
          filePath,
          ...metadata,
          projectName,
          parsedDate,
          relativeDate,
          dateGroup,
          displayName: this.createDisplayName(filePath, metadata)
        });
      } catch {
        // Skip invalid files silently
      }
    }

    if (conversations.length === 0) {
      s.stop('❌ No valid conversation files found');
      p.outro('No valid conversation files found');
      return { selectedFiles: [], cancelled: true };
    }

    s.stop(`Found ${conversations.length} conversation files`);

    // Sort by timestamp (newest first)
    conversations.sort((a, b) => b.parsedDate.getTime() - a.parsedDate.getTime());

    // If there are too many conversations, offer filtering options first
    let filteredConversations = conversations;
    if (conversations.length > 100) {
      const filterChoice = await p.select({
        message: `Found ${conversations.length} conversations. How would you like to proceed?`,
        options: [
          { value: 'recent', label: 'Show recent (last 50)', hint: 'Most recent conversations' },
          { value: 'project', label: 'Filter by project', hint: 'Select specific project first' },
          { value: 'all', label: 'Show all', hint: `Display all ${conversations.length} conversations` }
        ]
      });

      if (p.isCancel(filterChoice)) {
        p.cancel('Selection cancelled');
        return { selectedFiles: [], cancelled: true };
      }

      if (filterChoice === 'recent') {
        filteredConversations = conversations.slice(0, 50);
      } else if (filterChoice === 'project') {
        const projectNames = [...new Set(conversations.map(c => c.projectName))].sort();
        const selectedProject = await p.select({
          message: 'Select project:',
          options: projectNames.map(project => {
            const count = conversations.filter(c => c.projectName === project).length;
            return {
              value: project,
              label: project,
              hint: `${count} conversation${count === 1 ? '' : 's'}`
            };
          })
        });

        if (p.isCancel(selectedProject)) {
          p.cancel('Selection cancelled');
          return { selectedFiles: [], cancelled: true };
        }

        filteredConversations = conversations.filter(c => c.projectName === selectedProject);
      }
    }

    // Conversation selection
    const selectedConversations = await this.selectConversationsFromList(filteredConversations);
    if (!selectedConversations || selectedConversations.length === 0) {
      p.cancel('No conversations selected');
      return { selectedFiles: [], cancelled: true };
    }

    // Confirmation
    const confirmed = await this.confirmSelection(selectedConversations);
    if (!confirmed) {
      p.cancel('Import cancelled');
      return { selectedFiles: [], cancelled: true };
    }

    p.outro(`Ready to import ${selectedConversations.length} conversations`);
    return { selectedFiles: selectedConversations.map(c => c.filePath), cancelled: false };
  }

  /**
   * Extract project name from file path (the file's parent directory name).
   */
  private extractProjectName(filePath: string): string {
    return path.basename(path.dirname(filePath));
  }

  /**
   * Safely parse timestamp with fallback to file modification time, and to
   * the current time if the file cannot be stat'ed.
   */
  private parseTimestamp(timestamp: string | undefined, filePath: string): Date {
    // Try parsing the provided timestamp
    if (timestamp) {
      const date = new Date(timestamp);
      if (!isNaN(date.getTime())) {
        return date;
      }
    }

    // Fallback to file modification time
    try {
      const stats = fs.statSync(filePath);
      return stats.mtime;
    } catch {
      // Last resort: current time
      return new Date();
    }
  }

  /**
   * Format date as relative time (e.g., "2d ago", "3w ago").
   * Months are approximated as 30 days, weeks as 7 days.
   */
  private formatRelativeDate(date: Date): string {
    const now = new Date();
    const diffMs = now.getTime() - date.getTime();
    const diffMinutes = Math.floor(diffMs / (1000 * 60));
    const diffHours = Math.floor(diffMs / (1000 * 60 * 60));
    const diffDays = Math.floor(diffMs / (1000 * 60 * 60 * 24));
    const diffWeeks = Math.floor(diffDays / 7);
    const diffMonths = Math.floor(diffDays / 30);

    if (diffMinutes < 1) return 'just now';
    if (diffMinutes < 60) return `${diffMinutes}m ago`;
    if (diffHours < 24) return `${diffHours}h ago`;
    if (diffDays < 7) return `${diffDays}d ago`;
    if (diffWeeks < 4) return `${diffWeeks}w ago`;
    if (diffMonths < 12) return `${diffMonths}mo ago`;
    const diffYears = Math.floor(diffMonths / 12);
    return `${diffYears}y ago`;
  }

  /**
   * Get date group for grouping conversations.
   * "This Week" starts on Sunday (computed from today.getDay()).
   */
  private getDateGroup(date: Date): string {
    const now = new Date();
    const today = new Date(now.getFullYear(), now.getMonth(), now.getDate());
    const yesterday = new Date(today.getTime() - 24 * 60 * 60 * 1000);
    const thisWeekStart = new Date(today.getTime() - today.getDay() * 24 * 60 * 60 * 1000);
    const lastWeekStart = new Date(thisWeekStart.getTime() - 7 * 24 * 60 * 60 * 1000);
    const thisMonthStart = new Date(now.getFullYear(), now.getMonth(), 1);
    const conversationDate = new Date(date.getFullYear(), date.getMonth(), date.getDate());

    if (conversationDate.getTime() >= today.getTime()) {
      return 'Today';
    } else if (conversationDate.getTime() >= yesterday.getTime()) {
      return 'Yesterday';
    } else if (conversationDate.getTime() >= thisWeekStart.getTime()) {
      return 'This Week';
    } else if (conversationDate.getTime() >= lastWeekStart.getTime()) {
      return 'Last Week';
    } else if (conversationDate.getTime() >= thisMonthStart.getTime()) {
      return 'This Month';
    } else {
      return 'Older';
    }
  }

  /**
   * Create display name for conversation, e.g. "2d ago • 15 msgs • 4KB • main".
   */
  private createDisplayName(filePath: string, metadata: any): string {
    const parsedDate = this.parseTimestamp(metadata.timestamp, filePath);
    const relativeDate = this.formatRelativeDate(parsedDate);
    const sizeKB = Math.round(metadata.fileSize / 1024);
    const branchInfo = metadata.gitBranch ?? '';
    // Bug fix: segments were previously concatenated without separators
    // (e.g. "2d ago15 msgs • 4KBmain"); join them with " • " delimiters.
    return `${relativeDate} • ${metadata.messageCount} msgs • ${sizeKB}KB${branchInfo ? ` • ${branchInfo}` : ''}`;
  }

  /**
   * Select specific conversations from list.
   * Returns null on cancel, [] when nothing was toggled.
   */
  private async selectConversationsFromList(
    conversations: ConversationItem[]
  ): Promise<ConversationItem[] | null> {
    // Group conversations by date for better organization
    const groupedConversations = this.groupConversationsByDate(conversations);
    const options = this.createGroupedOptions(groupedConversations, conversations);

    // Multi-select with select all/none shortcuts
    const selectedIndices = await p.multiselect({
      message: `Select conversations to import (${conversations.length} available, Space=toggle, Enter=confirm):`,
      options,
      required: false
    });

    if (p.isCancel(selectedIndices)) return null;

    // Selected values are indices into the full conversations array
    const selected = selectedIndices as number[];
    if (selected.length === 0) {
      return [];
    }
    return selected.map(i => conversations[i]);
  }

  /**
   * Confirm selection before processing. Returns false on cancel or decline.
   */
  private async confirmSelection(conversations: ConversationItem[]): Promise<boolean> {
    const totalSize = conversations.reduce((sum, c) => sum + c.fileSize, 0);
    const sizeKB = Math.round(totalSize / 1024);
    const projects = [...new Set(conversations.map(c => c.projectName))];

    const details = [
      `${conversations.length} conversation${conversations.length === 1 ? '' : 's'}`,
      `${projects.length} project${projects.length === 1 ? '' : 's'}: ${projects.join(', ')}`,
      `Total size: ${sizeKB}KB`
    ].join('\n');

    const confirmed = await p.confirm({
      message: `Ready to import:\n\n${details}\n\nContinue?`,
      initialValue: true
    });

    return !p.isCancel(confirmed) && confirmed;
  }

  /**
   * Group conversations by date sections (keyed by ConversationItem.dateGroup).
   */
  private groupConversationsByDate(conversations: ConversationItem[]): Map<string, ConversationItem[]> {
    const groups = new Map<string, ConversationItem[]>();
    for (const conv of conversations) {
      const group = conv.dateGroup;
      if (!groups.has(group)) {
        groups.set(group, []);
      }
      groups.get(group)!.push(conv);
    }
    return groups;
  }

  /**
   * Create options with date group headers. Headers and separators are
   * disabled entries; selectable entries carry the conversation's index
   * in `allConversations` as their value.
   */
  private createGroupedOptions(groupedConversations: Map<string, ConversationItem[]>, allConversations: ConversationItem[]) {
    const options: any[] = [];

    // Add hint at top about selecting all/none
    options.push({
      value: 'hint',
      label: '💡 Use Space to toggle, A to select all, I to invert',
      disabled: true
    });
    options.push({ value: 'separator-hint', label: '─'.repeat(60), disabled: true });

    // Define order of groups
    const groupOrder = ['Today', 'Yesterday', 'This Week', 'Last Week', 'This Month', 'Older'];

    for (const groupName of groupOrder) {
      const groupItems = groupedConversations.get(groupName);
      if (!groupItems || groupItems.length === 0) continue;

      // Add group header (disabled option for visual separation)
      if (options.length > 2) { // Account for hint and separator
        options.push({ value: `separator-${groupName}`, label: '─'.repeat(50), disabled: true });
      }
      options.push({
        value: `header-${groupName}`,
        label: `${groupName} (${groupItems.length})`,
        disabled: true
      });

      // Add conversations in this group
      for (const conv of groupItems) {
        const index = allConversations.indexOf(conv);
        const projectInfo = conv.projectName ? `[${conv.projectName}]` : '';
        const workingDir = conv.cwd && conv.cwd !== 'undefined' ? path.basename(conv.cwd) : '';
        const hint = `${projectInfo} ${workingDir}`.trim() || (conv.gitBranch ? `Branch: ${conv.gitBranch}` : '');
        options.push({
          value: index,
          label: `  ${conv.displayName}`,
          hint: hint
        });
      }
    }

    return options;
  }
}
+47 -88
View File
@@ -181,7 +181,9 @@ export class PathDiscovery {
const packageJsonPath = require.resolve('claude-mem/package.json');
this._packageRoot = dirname(packageJsonPath);
return this._packageRoot;
} catch {}
} catch {
// Continue to next method
}
// Method 2: Walk up from current module location
const currentFile = fileURLToPath(import.meta.url);
@@ -190,15 +192,13 @@ export class PathDiscovery {
for (let i = 0; i < 10; i++) {
const packageJsonPath = join(currentDir, 'package.json');
if (existsSync(packageJsonPath)) {
try {
const packageJson = require(packageJsonPath);
if (packageJson.name === 'claude-mem') {
this._packageRoot = currentDir;
return this._packageRoot;
}
} catch {}
const packageJson = require(packageJsonPath);
if (packageJson.name === 'claude-mem') {
this._packageRoot = currentDir;
return this._packageRoot;
}
}
const parentDir = dirname(currentDir);
if (parentDir === currentDir) break;
currentDir = parentDir;
@@ -206,36 +206,46 @@ export class PathDiscovery {
// Method 3: Try npm list command
try {
const npmOutput = execSync('npm list -g claude-mem --json 2>/dev/null || npm list claude-mem --json 2>/dev/null', {
encoding: 'utf8'
const npmOutput = execSync('npm list -g claude-mem --json 2>/dev/null || npm list claude-mem --json 2>/dev/null', {
encoding: 'utf8'
});
const npmData = JSON.parse(npmOutput);
if (npmData.dependencies?.['claude-mem']?.resolved) {
this._packageRoot = dirname(npmData.dependencies['claude-mem'].resolved);
return this._packageRoot;
}
} catch {}
} catch {
// Continue to error
}
throw new Error('Cannot locate claude-mem package root. Ensure claude-mem is properly installed.');
}
/**
* Find hooks directory in the installed package
* Find hook templates directory in the installed package
*
* This returns the SOURCE templates directory that gets copied during installation
* to the runtime hooks directory (~/.claude-mem/hooks/)
*/
findPackageHooksDirectory(): string {
findPackageHookTemplatesDirectory(): string {
const packageRoot = this.getPackageRoot();
const hooksDir = join(packageRoot, 'hooks');
// Verify it contains expected hook files
const requiredHooks = ['pre-compact.js', 'session-start.js'];
for (const hookFile of requiredHooks) {
if (!existsSync(join(hooksDir, hookFile))) {
throw new Error(`Package hooks directory missing required file: ${hookFile}`);
const hookTemplatesDir = join(packageRoot, 'hook-templates');
// Verify it contains expected hook template files
const requiredHookTemplates = [
'session-start.js',
'stop.js',
'user-prompt-submit.js',
'post-tool-use.js'
];
for (const hookTemplateFile of requiredHookTemplates) {
if (!existsSync(join(hookTemplatesDir, hookTemplateFile))) {
throw new Error(`Package hook-templates directory missing required template file: ${hookTemplateFile}`);
}
}
return hooksDir;
return hookTemplatesDir;
}
/**
@@ -325,9 +335,19 @@ export class PathDiscovery {
/**
* Get current project directory name
* Uses git repository root's basename if in a git repo, otherwise falls back to cwd basename
*/
static getCurrentProjectName(): string {
return require('path').basename(process.cwd());
try {
const gitRoot = execSync('git rev-parse --show-toplevel', {
cwd: process.cwd(),
encoding: 'utf8',
stdio: ['pipe', 'pipe', 'ignore']
}).trim();
return require('path').basename(gitRoot);
} catch {
return require('path').basename(process.cwd());
}
}
/**
@@ -347,68 +367,7 @@ export class PathDiscovery {
* Check if a path exists and is accessible
*/
static isPathAccessible(path: string): boolean {
try {
return existsSync(path) && statSync(path).isDirectory();
} catch {
return false;
}
return existsSync(path) && statSync(path).isDirectory();
}
// =============================================================================
// STATIC CONVENIENCE METHODS
// =============================================================================
/**
* Quick access to singleton instance methods
*/
static getDataDirectory(): string {
return PathDiscovery.getInstance().getDataDirectory();
}
static getArchivesDirectory(): string {
return PathDiscovery.getInstance().getArchivesDirectory();
}
static getHooksDirectory(): string {
return PathDiscovery.getInstance().getHooksDirectory();
}
static getLogsDirectory(): string {
return PathDiscovery.getInstance().getLogsDirectory();
}
static getClaudeSettingsPath(): string {
return PathDiscovery.getInstance().getClaudeSettingsPath();
}
static getClaudeMdPath(): string {
return PathDiscovery.getInstance().getClaudeMdPath();
}
static findPackageHooksDirectory(): string {
return PathDiscovery.getInstance().findPackageHooksDirectory();
}
static findPackageCommandsDirectory(): string {
return PathDiscovery.getInstance().findPackageCommandsDirectory();
}
}
// Export singleton instance for immediate use
export const pathDiscovery = PathDiscovery.getInstance();
// Export static methods for convenience
export const {
getDataDirectory,
getArchivesDirectory,
getHooksDirectory,
getLogsDirectory,
getClaudeSettingsPath,
getClaudeMdPath,
findPackageHooksDirectory,
findPackageCommandsDirectory,
extractProjectName,
getCurrentProjectName,
createBackupFilename,
isPathAccessible
} = PathDiscovery;
}
+49 -9
View File
@@ -17,12 +17,12 @@ export class MemoryStore {
*/
create(input: MemoryInput): MemoryRow {
const { isoString, epoch } = normalizeTimestamp(input.created_at);
const stmt = this.db.prepare(`
INSERT INTO memories (
session_id, text, document_id, keywords, created_at, created_at_epoch,
project, archive_basename, origin
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
session_id, text, document_id, keywords, created_at, created_at_epoch,
project, archive_basename, origin, title, subtitle, facts, concepts, files_touched
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
const info = stmt.run(
@@ -34,7 +34,12 @@ export class MemoryStore {
epoch,
input.project,
input.archive_basename || null,
input.origin || 'transcript'
input.origin || 'transcript',
input.title || null,
input.subtitle || null,
input.facts || null,
input.concepts || null,
input.files_touched || null
);
return this.getById(info.lastInsertRowid as number)!;
@@ -158,15 +163,44 @@ export class MemoryStore {
* Get memories by origin type
*/
getByOrigin(origin: string, limit?: number): MemoryRow[] {
const query = limit
const query = limit
? 'SELECT * FROM memories WHERE origin = ? ORDER BY created_at_epoch DESC LIMIT ?'
: 'SELECT * FROM memories WHERE origin = ? ORDER BY created_at_epoch DESC';
const stmt = this.db.prepare(query);
const params = limit ? [origin, limit] : [origin];
return stmt.all(...params) as MemoryRow[];
}
/**
* Get recent memories for a project filtered by origin
*/
getRecentForProjectByOrigin(project: string, origin: string, limit = 10): MemoryRow[] {
const stmt = this.db.prepare(`
SELECT * FROM memories
WHERE project = ? AND origin = ?
ORDER BY created_at_epoch DESC
LIMIT ?
`);
return stmt.all(project, origin, limit) as MemoryRow[];
}
/**
* Get last N memories for a project, sorted oldest to newest
*/
getLastNForProject(project: string, limit = 10): MemoryRow[] {
const stmt = this.db.prepare(`
SELECT * FROM (
SELECT * FROM memories
WHERE project = ?
ORDER BY created_at_epoch DESC
LIMIT ?
)
ORDER BY created_at_epoch ASC
`);
return stmt.all(project, limit) as MemoryRow[];
}
/**
* Count total memories
*/
@@ -195,11 +229,12 @@ export class MemoryStore {
}
const { isoString, epoch } = normalizeTimestamp(input.created_at || existing.created_at);
const stmt = this.db.prepare(`
UPDATE memories SET
text = ?, document_id = ?, keywords = ?, created_at = ?, created_at_epoch = ?,
project = ?, archive_basename = ?, origin = ?
project = ?, archive_basename = ?, origin = ?, title = ?, subtitle = ?, facts = ?,
concepts = ?, files_touched = ?
WHERE id = ?
`);
@@ -212,6 +247,11 @@ export class MemoryStore {
input.project || existing.project,
input.archive_basename !== undefined ? input.archive_basename : existing.archive_basename,
input.origin || existing.origin,
input.title !== undefined ? input.title : existing.title,
input.subtitle !== undefined ? input.subtitle : existing.subtitle,
input.facts !== undefined ? input.facts : existing.facts,
input.concepts !== undefined ? input.concepts : existing.concepts,
input.files_touched !== undefined ? input.files_touched : existing.files_touched,
id
);
+47 -2
View File
@@ -68,14 +68,26 @@ export class OverviewStore {
*/
getRecentForProject(project: string, limit = 5): OverviewRow[] {
const stmt = this.db.prepare(`
SELECT * FROM overviews
SELECT * FROM overviews
WHERE project = ?
ORDER BY created_at_epoch DESC
ORDER BY created_at_epoch DESC
LIMIT ?
`);
return stmt.all(project, limit) as OverviewRow[];
}
/**
* Get all overviews for a project (oldest to newest)
*/
getAllForProject(project: string): OverviewRow[] {
const stmt = this.db.prepare(`
SELECT * FROM overviews
WHERE project = ?
ORDER BY created_at_epoch ASC
`);
return stmt.all(project) as OverviewRow[];
}
/**
* Get recent overviews across all projects
*/
@@ -193,4 +205,37 @@ export class OverviewStore {
const rows = stmt.all() as { project: string }[];
return rows.map(row => row.project);
}
/**
* Get most recent overview for a specific project
*/
getByProject(project: string): OverviewRow | null {
const stmt = this.db.prepare(`
SELECT * FROM overviews
WHERE project = ?
ORDER BY created_at_epoch DESC
LIMIT 1
`);
return stmt.get(project) as OverviewRow || null;
}
/**
* Create or update overview for a project (keeps only most recent)
*/
upsertByProject(input: OverviewInput): OverviewRow {
const existing = this.getByProject(input.project);
if (existing) {
return this.update(existing.id, input);
}
return this.create(input);
}
/**
* Delete overview by project name
*/
deleteByProject(project: string): boolean {
const stmt = this.db.prepare('DELETE FROM overviews WHERE project = ?');
const info = stmt.run(project);
return info.changes > 0;
}
}
+107
View File
@@ -0,0 +1,107 @@
import { Database } from 'better-sqlite3';
import { getDatabase } from './Database.js';
import {
TranscriptEventInput,
TranscriptEventRow,
normalizeTimestamp
} from './types.js';
/**
* Data access for transcript_events table
*/
/**
 * Data access for transcript_events table
 *
 * Rows are keyed by (session_id, event_index); inserting a duplicate pair
 * overwrites the earlier row via ON CONFLICT ... DO UPDATE.
 */
export class TranscriptEventStore {
  private db: Database.Database;

  constructor(db?: Database.Database) {
    // Fall back to the shared connection when none is injected.
    this.db = db ?? getDatabase();
  }

  /**
   * Insert a transcript event, replacing any existing row with the same
   * (session_id, event_index), then return the stored row.
   */
  upsert(event: TranscriptEventInput): TranscriptEventRow {
    const { isoString, epoch } = normalizeTimestamp(event.captured_at);

    const statement = this.db.prepare(`
INSERT INTO transcript_events (
session_id,
project,
event_index,
event_type,
raw_json,
captured_at,
captured_at_epoch
) VALUES (?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(session_id, event_index) DO UPDATE SET
project = excluded.project,
event_type = excluded.event_type,
raw_json = excluded.raw_json,
captured_at = excluded.captured_at,
captured_at_epoch = excluded.captured_at_epoch
`);

    statement.run(
      event.session_id,
      event.project || null,
      event.event_index,
      event.event_type || null,
      event.raw_json,
      isoString,
      epoch
    );

    // Re-read so the caller gets the row exactly as persisted.
    return this.getBySessionAndIndex(event.session_id, event.event_index)!;
  }

  /**
   * Bulk upsert events atomically — all rows are written in one transaction.
   */
  upsertMany(events: TranscriptEventInput[]): TranscriptEventRow[] {
    const runAll = this.db.transaction((rows: TranscriptEventInput[]) =>
      rows.map((row) => this.upsert(row))
    );
    return runAll(events);
  }

  /**
   * Fetch a single event by its (session, index) key, or null when absent.
   */
  getBySessionAndIndex(sessionId: string, eventIndex: number): TranscriptEventRow | null {
    const statement = this.db.prepare(`
SELECT * FROM transcript_events
WHERE session_id = ? AND event_index = ?
`);
    return statement.get(sessionId, eventIndex) as TranscriptEventRow | null;
  }

  /**
   * Highest event_index stored for a session; -1 when the session has none.
   */
  getMaxEventIndex(sessionId: string): number {
    const statement = this.db.prepare(`
SELECT MAX(event_index) as max_event_index
FROM transcript_events
WHERE session_id = ?
`);
    const result = statement.get(sessionId) as { max_event_index: number | null } | undefined;
    return result?.max_event_index ?? -1;
  }

  /**
   * Page through a session's events in capture order (ascending event_index).
   */
  listBySession(sessionId: string, limit = 200, offset = 0): TranscriptEventRow[] {
    const statement = this.db.prepare(`
SELECT * FROM transcript_events
WHERE session_id = ?
ORDER BY event_index ASC
LIMIT ? OFFSET ?
`);
    return statement.all(sessionId, limit, offset) as TranscriptEventRow[];
  }
}
+17 -6
View File
@@ -1,6 +1,3 @@
// Import migrations to register them
import './migrations/index.js';
// Export main components
export { DatabaseManager, getDatabase, initializeDatabase } from './Database.js';
@@ -9,24 +6,38 @@ export { SessionStore } from './SessionStore.js';
export { MemoryStore } from './MemoryStore.js';
export { OverviewStore } from './OverviewStore.js';
export { DiagnosticsStore } from './DiagnosticsStore.js';
export { TranscriptEventStore } from './TranscriptEventStore.js';
// Export types
export * from './types.js';
// Export migrations
export { migrations } from './migrations.js';
// Convenience function to get all stores
export async function createStores() {
const { DatabaseManager } = await import('./Database.js');
const db = await DatabaseManager.getInstance().initialize();
const { migrations } = await import('./migrations.js');
// Register migrations before initialization
const manager = DatabaseManager.getInstance();
for (const migration of migrations) {
manager.registerMigration(migration);
}
const db = await manager.initialize();
const { SessionStore } = await import('./SessionStore.js');
const { MemoryStore } = await import('./MemoryStore.js');
const { OverviewStore } = await import('./OverviewStore.js');
const { DiagnosticsStore } = await import('./DiagnosticsStore.js');
const { TranscriptEventStore } = await import('./TranscriptEventStore.js');
return {
sessions: new SessionStore(db),
memories: new MemoryStore(db),
overviews: new OverviewStore(db),
diagnostics: new DiagnosticsStore(db)
diagnostics: new DiagnosticsStore(db),
transcriptEvents: new TranscriptEventStore(db)
};
}
+169
View File
@@ -0,0 +1,169 @@
import { Database } from 'better-sqlite3';
import { Migration } from './Database.js';
/**
 * Initial schema migration - creates all core tables
 *
 * Creates sessions, memories, overviews, diagnostics, and transcript_events
 * plus their indexes. All DDL uses IF NOT EXISTS, so re-running up() against
 * a database that already has the schema is a no-op rather than an error.
 * NOTE: shipped migrations should never be edited — add new versions instead.
 */
export const migration001: Migration = {
version: 1,
up: (db: Database.Database) => {
// Sessions table - core session tracking
db.exec(`
CREATE TABLE IF NOT EXISTS sessions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
session_id TEXT UNIQUE NOT NULL,
project TEXT NOT NULL,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
source TEXT NOT NULL DEFAULT 'compress',
archive_path TEXT,
archive_bytes INTEGER,
archive_checksum TEXT,
archived_at TEXT,
metadata_json TEXT
);
CREATE INDEX IF NOT EXISTS idx_sessions_project ON sessions(project);
CREATE INDEX IF NOT EXISTS idx_sessions_created_at ON sessions(created_at_epoch DESC);
CREATE INDEX IF NOT EXISTS idx_sessions_project_created ON sessions(project, created_at_epoch DESC);
`);
// Memories table - compressed memory chunks
// Child of sessions: rows are removed when their session is deleted.
db.exec(`
CREATE TABLE IF NOT EXISTS memories (
id INTEGER PRIMARY KEY AUTOINCREMENT,
session_id TEXT NOT NULL,
text TEXT NOT NULL,
document_id TEXT UNIQUE,
keywords TEXT,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
project TEXT NOT NULL,
archive_basename TEXT,
origin TEXT NOT NULL DEFAULT 'transcript',
FOREIGN KEY (session_id) REFERENCES sessions(session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_memories_session ON memories(session_id);
CREATE INDEX IF NOT EXISTS idx_memories_project ON memories(project);
CREATE INDEX IF NOT EXISTS idx_memories_created_at ON memories(created_at_epoch DESC);
CREATE INDEX IF NOT EXISTS idx_memories_project_created ON memories(project, created_at_epoch DESC);
CREATE INDEX IF NOT EXISTS idx_memories_document_id ON memories(document_id);
CREATE INDEX IF NOT EXISTS idx_memories_origin ON memories(origin);
`);
// Overviews table - session summaries (one per project)
// NOTE(review): the UNIQUE index below is on (project, created_at_epoch),
// which permits multiple rows per project at distinct epochs — it does NOT
// enforce "one per project". Confirm whether that is the intended invariant.
db.exec(`
CREATE TABLE IF NOT EXISTS overviews (
id INTEGER PRIMARY KEY AUTOINCREMENT,
session_id TEXT NOT NULL,
content TEXT NOT NULL,
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
project TEXT NOT NULL,
origin TEXT NOT NULL DEFAULT 'claude',
FOREIGN KEY (session_id) REFERENCES sessions(session_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_overviews_session ON overviews(session_id);
CREATE INDEX IF NOT EXISTS idx_overviews_project ON overviews(project);
CREATE INDEX IF NOT EXISTS idx_overviews_created_at ON overviews(created_at_epoch DESC);
CREATE INDEX IF NOT EXISTS idx_overviews_project_created ON overviews(project, created_at_epoch DESC);
CREATE UNIQUE INDEX IF NOT EXISTS idx_overviews_project_latest ON overviews(project, created_at_epoch DESC);
`);
// Diagnostics table - system health and debug info
// session_id is nullable; deleting a session orphans rows via SET NULL.
db.exec(`
CREATE TABLE IF NOT EXISTS diagnostics (
id INTEGER PRIMARY KEY AUTOINCREMENT,
session_id TEXT,
message TEXT NOT NULL,
severity TEXT NOT NULL DEFAULT 'info',
created_at TEXT NOT NULL,
created_at_epoch INTEGER NOT NULL,
project TEXT NOT NULL,
origin TEXT NOT NULL DEFAULT 'system',
FOREIGN KEY (session_id) REFERENCES sessions(session_id) ON DELETE SET NULL
);
CREATE INDEX IF NOT EXISTS idx_diagnostics_session ON diagnostics(session_id);
CREATE INDEX IF NOT EXISTS idx_diagnostics_project ON diagnostics(project);
CREATE INDEX IF NOT EXISTS idx_diagnostics_severity ON diagnostics(severity);
CREATE INDEX IF NOT EXISTS idx_diagnostics_created ON diagnostics(created_at_epoch DESC);
`);
// Transcript events table - raw conversation events
// (session_id, event_index) is the natural key used for upserts.
db.exec(`
CREATE TABLE IF NOT EXISTS transcript_events (
id INTEGER PRIMARY KEY AUTOINCREMENT,
session_id TEXT NOT NULL,
project TEXT,
event_index INTEGER NOT NULL,
event_type TEXT,
raw_json TEXT NOT NULL,
captured_at TEXT NOT NULL,
captured_at_epoch INTEGER NOT NULL,
FOREIGN KEY (session_id) REFERENCES sessions(session_id) ON DELETE CASCADE,
UNIQUE(session_id, event_index)
);
CREATE INDEX IF NOT EXISTS idx_transcript_events_session ON transcript_events(session_id, event_index);
CREATE INDEX IF NOT EXISTS idx_transcript_events_project ON transcript_events(project);
CREATE INDEX IF NOT EXISTS idx_transcript_events_type ON transcript_events(event_type);
CREATE INDEX IF NOT EXISTS idx_transcript_events_captured ON transcript_events(captured_at_epoch DESC);
`);
console.log('✅ Created all database tables successfully');
},
down: (db: Database.Database) => {
// Drop children before sessions to respect foreign-key dependencies.
db.exec(`
DROP TABLE IF EXISTS transcript_events;
DROP TABLE IF EXISTS diagnostics;
DROP TABLE IF EXISTS overviews;
DROP TABLE IF EXISTS memories;
DROP TABLE IF EXISTS sessions;
`);
}
};
/**
 * Migration 002 - Add hierarchical memory fields (v2 format)
 *
 * Adds title/subtitle/facts/concepts/files_touched columns to memories.
 * NOTE(review): ALTER TABLE ... ADD COLUMN has no IF NOT EXISTS form, so
 * up() will throw if any of these columns already exist — relies on the
 * migration runner applying each version exactly once; confirm.
 */
export const migration002: Migration = {
version: 2,
up: (db: Database.Database) => {
// Add new columns for hierarchical memory structure
db.exec(`
ALTER TABLE memories ADD COLUMN title TEXT;
ALTER TABLE memories ADD COLUMN subtitle TEXT;
ALTER TABLE memories ADD COLUMN facts TEXT;
ALTER TABLE memories ADD COLUMN concepts TEXT;
ALTER TABLE memories ADD COLUMN files_touched TEXT;
`);
// Create indexes for the new fields to improve search performance
db.exec(`
CREATE INDEX IF NOT EXISTS idx_memories_title ON memories(title);
CREATE INDEX IF NOT EXISTS idx_memories_concepts ON memories(concepts);
`);
console.log('✅ Added hierarchical memory fields to memories table');
},
down: (db: Database.Database) => {
// Note: SQLite doesn't support DROP COLUMN in all versions
// In production, we'd need to recreate the table without these columns
// For now, we'll just log a warning (rollback is intentionally a no-op).
console.log('⚠️ Warning: SQLite ALTER TABLE DROP COLUMN not fully supported');
console.log('⚠️ To rollback, manually recreate the memories table');
}
};
/**
 * All migrations in order
 *
 * Applied sequentially by version number. Append new migrations here;
 * never reorder or modify entries that have already shipped.
 */
export const migrations: Migration[] = [
migration001,
migration002
];
@@ -1,133 +0,0 @@
import { Migration } from '../Database.js';
/**
 * Initial migration: create all core tables for the claude-mem SQLite index.
 *
 * Tables are created in dependency order — `sessions` first, since every
 * other table carries a `session_id` foreign key referencing it — together
 * with the (project, created_at_epoch DESC) indexes used for recency queries.
 */
export const migration001: Migration = {
  version: 1,
  up: (db) => {
    // Create sessions table — one row per recorded session; archive_* columns
    // describe an optional on-disk archive of the session transcript.
    db.exec(`
      CREATE TABLE sessions (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        session_id TEXT UNIQUE NOT NULL,
        project TEXT NOT NULL,
        created_at TEXT NOT NULL,
        created_at_epoch INTEGER NOT NULL,
        source TEXT DEFAULT 'compress',
        archive_path TEXT,
        archive_bytes INTEGER,
        archive_checksum TEXT,
        archived_at TEXT,
        metadata_json TEXT
      )
    `);
    // Create indexes for sessions: "latest sessions per project" and
    // "latest sessions per source" lookups.
    db.exec(`
      CREATE INDEX sessions_project_created_at ON sessions (project, created_at_epoch DESC)
    `);
    db.exec(`
      CREATE INDEX sessions_source_created ON sessions (source, created_at_epoch DESC)
    `);
    // Create overviews table — per-session summary text; rows are removed
    // automatically when the owning session is deleted (ON DELETE CASCADE).
    db.exec(`
      CREATE TABLE overviews (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        session_id TEXT NOT NULL REFERENCES sessions(session_id) ON DELETE CASCADE,
        content TEXT NOT NULL,
        created_at TEXT NOT NULL,
        created_at_epoch INTEGER NOT NULL,
        project TEXT NOT NULL,
        origin TEXT DEFAULT 'claude'
      )
    `);
    // Create index for overviews (project + recency).
    db.exec(`
      CREATE INDEX overviews_project_created_at ON overviews (project, created_at_epoch DESC)
    `);
    // Create memories table — individual memory entries extracted from a
    // session transcript; cascades with its session like overviews.
    db.exec(`
      CREATE TABLE memories (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        session_id TEXT NOT NULL REFERENCES sessions(session_id) ON DELETE CASCADE,
        text TEXT NOT NULL,
        document_id TEXT,
        keywords TEXT,
        created_at TEXT NOT NULL,
        created_at_epoch INTEGER NOT NULL,
        project TEXT NOT NULL,
        archive_basename TEXT,
        origin TEXT DEFAULT 'transcript'
      )
    `);
    // Create indexes for memories. The second is a partial unique index:
    // document_id must be unique only among rows that actually have one,
    // so multiple NULL document_ids remain allowed.
    db.exec(`
      CREATE INDEX memories_project_created_at ON memories (project, created_at_epoch DESC)
    `);
    db.exec(`
      CREATE UNIQUE INDEX memories_document_id_unique ON memories (document_id) WHERE document_id IS NOT NULL
    `);
    // Create diagnostics table — warnings/errors emitted during compression;
    // session_id is nullable and merely nulled out (not cascaded) on delete.
    db.exec(`
      CREATE TABLE diagnostics (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        session_id TEXT REFERENCES sessions(session_id) ON DELETE SET NULL,
        message TEXT NOT NULL,
        severity TEXT DEFAULT 'warn',
        created_at TEXT NOT NULL,
        created_at_epoch INTEGER NOT NULL,
        project TEXT NOT NULL,
        origin TEXT DEFAULT 'compressor'
      )
    `);
    // Create index for diagnostics (project + recency).
    db.exec(`
      CREATE INDEX diagnostics_project_created_at ON diagnostics (project, created_at_epoch DESC)
    `);
    // Create archives table (for future archival workflows) — at most one
    // archive record per session (session_id is UNIQUE).
    db.exec(`
      CREATE TABLE archives (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        session_id TEXT UNIQUE NOT NULL REFERENCES sessions(session_id) ON DELETE CASCADE,
        path TEXT NOT NULL,
        bytes INTEGER,
        checksum TEXT,
        stored_at TEXT NOT NULL,
        storage_status TEXT DEFAULT 'active'
      )
    `);
    // Create titles table (ready for conversation-titles.jsonl migration) —
    // one human-readable title per session.
    db.exec(`
      CREATE TABLE titles (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        session_id TEXT UNIQUE NOT NULL REFERENCES sessions(session_id) ON DELETE CASCADE,
        title TEXT NOT NULL,
        created_at TEXT NOT NULL,
        project TEXT NOT NULL
      )
    `);
    console.log('✅ Created initial database schema with all tables and indexes');
  },
  down: (db) => {
    // Drop tables in reverse creation order so tables referencing
    // sessions(session_id) go away before sessions itself.
    const tables = ['titles', 'archives', 'diagnostics', 'memories', 'overviews', 'sessions'];
    for (const table of tables) {
      db.exec(`DROP TABLE IF EXISTS ${table}`);
    }
    console.log('🗑️ Dropped all tables from initial migration');
  }
};
-15
View File
@@ -1,15 +0,0 @@
import { DatabaseManager } from '../Database.js';
import { migration001 } from './001_initial.js';
/**
 * Register every known migration with the shared database manager.
 * Migrations are supplied lowest version first.
 */
export function registerMigrations(): void {
  // Chain directly on the singleton — no local handle is needed.
  DatabaseManager.getInstance().registerMigration(migration001);
}
// Auto-register migrations when this module is imported.
// Deliberate import-time side effect: merely importing this module is enough
// to wire all migrations into the database manager.
registerMigrations();
+33 -1
View File
@@ -37,6 +37,12 @@ export interface MemoryRow {
project: string;
archive_basename?: string;
origin: string;
// Hierarchical memory fields (v2)
title?: string;
subtitle?: string;
facts?: string; // JSON array of fact strings
concepts?: string; // JSON array of concept strings
files_touched?: string; // JSON array of file paths
}
export interface DiagnosticRow {
@@ -50,6 +56,17 @@ export interface DiagnosticRow {
origin: string;
}
/**
 * Row shape for stored raw transcript events — presumably one row per JSONL
 * event of a session; confirm against the table's write path.
 */
export interface TranscriptEventRow {
  id: number;                 // autoincrement primary key
  session_id: string;         // owning session identifier
  project?: string;           // project name, when known
  event_index: number;        // position of the event within its session — presumably 0-based; confirm at write site
  event_type?: string;        // event discriminator — TODO confirm value set
  raw_json: string;           // the original event serialized as JSON
  captured_at: string;        // capture time as an ISO string
  captured_at_epoch: number;  // capture time in epoch milliseconds
}
export interface ArchiveRow {
id: number;
session_id: string;
@@ -100,6 +117,12 @@ export interface MemoryInput {
project: string;
archive_basename?: string;
origin?: string;
// Hierarchical memory fields (v2)
title?: string;
subtitle?: string;
facts?: string; // JSON array of fact strings
concepts?: string; // JSON array of concept strings
files_touched?: string; // JSON array of file paths
}
export interface DiagnosticInput {
@@ -111,6 +134,15 @@ export interface DiagnosticInput {
origin?: string;
}
/**
 * Input payload for inserting a transcript event. `captured_at` accepts any
 * form handled by normalizeTimestamp (ISO string, Date, or epoch number);
 * behavior when omitted is defined by the insert path — TODO confirm default.
 */
export interface TranscriptEventInput {
  session_id: string;                    // owning session identifier
  project?: string;                      // project name, when known
  event_index: number;                   // position of the event within its session
  event_type?: string;                   // event discriminator
  raw_json: string;                      // raw event JSON to persist verbatim
  captured_at?: string | Date | number;  // optional capture time; normalized on insert
}
/**
* Helper function to normalize timestamps from various formats
*/
@@ -149,4 +181,4 @@ export function normalizeTimestamp(timestamp: string | Date | number | undefined
isoString: date.toISOString(),
epoch: date.getTime()
};
}
}
-218
View File
@@ -1,218 +0,0 @@
import fs from 'fs';
import path from 'path';
import { log } from '../shared/logger.js';
import { PathDiscovery } from './path-discovery.js';
/**
 * Shape of one JSONL entry in a Claude Code conversation transcript.
 * Fields actually read by TranscriptParser are documented from that usage;
 * the remaining fields are carried through untouched.
 */
export interface ClaudeCodeMessage {
  sessionId: string;   // session identifier (parser reads it from the first entry)
  timestamp: string;   // entry timestamp — presumably ISO-8601; confirm against the writer
  gitBranch?: string;  // git branch recorded with the entry, if any
  cwd: string;         // working directory of the session
  type: 'user' | 'assistant' | 'system' | 'result';
  message: {
    role: string;
    // Content is either a plain string or an array of typed parts
    // (text and/or thinking blocks).
    content: Array<{
      type: string;
      text?: string;
      thinking?: string;
    }> | string;
  };
  uuid: string;        // unique id of this entry
  version?: string;
  isSidechain?: boolean;
  userType?: string;
  parentUuid?: string; // uuid of the parent entry, when threaded
  subtype?: string;
  model?: string;
  stop_reason?: string;
  usage?: any;         // token-usage payload — shape not constrained here
}
/**
 * Message shape matching TranscriptCompressor's expected input format.
 * Produced from ClaudeCodeMessage by TranscriptParser.convertMessage, which
 * normalizes array content into { text, content } items.
 */
export interface TranscriptMessage {
  type: string;
  message?: {
    // Either raw string content or normalized content parts.
    content?: string | Array<{
      text?: string;
      content?: string;
    }>;
    role?: string;
    timestamp?: string;
    created_at?: string;
  };
  // Top-level content/role mirrors — optional alternates to `message.*`;
  // not populated by convertMessage in this file.
  content?: string | Array<{
    text?: string;
    content?: string;
  }>;
  role?: string;
  uuid?: string;
  session_id?: string;
  timestamp?: string;
  created_at?: string;
  subtype?: string;
}
/**
 * A fully parsed conversation together with its session-level metadata.
 */
export interface ParsedConversation {
  sessionId: string;             // taken from the first valid entry of the file
  filePath: string;              // source .jsonl path
  messageCount: number;          // number of successfully parsed entries
  timestamp: string;             // timestamp of the first valid entry
  gitBranch?: string;            // git branch of the first valid entry, if any
  cwd: string;                   // working directory of the first valid entry
  messages: TranscriptMessage[]; // entries converted for TranscriptCompressor
}
/**
 * Service for parsing Claude Code JSONL conversation files.
 *
 * Each conversation lives in a .jsonl file under the Claude projects
 * directory; every line is one JSON-encoded ClaudeCodeMessage.
 */
export class TranscriptParser {
  /**
   * Parse a single JSONL conversation file into TranscriptMessage form.
   *
   * Malformed lines are skipped (and counted) rather than failing the whole
   * file; session-level metadata is taken from the first valid entry.
   *
   * @param filePath Absolute path to a .jsonl conversation file.
   * @returns Parsed conversation with converted messages and metadata.
   * @throws Error when the file contains no parseable JSON lines.
   */
  async parseConversation(filePath: string): Promise<ParsedConversation> {
    const content = fs.readFileSync(filePath, 'utf-8');
    const lines = content.trim().split('\n').filter(line => line.trim());

    const claudeMessages: ClaudeCodeMessage[] = [];
    let parseErrors = 0;

    for (let i = 0; i < lines.length; i++) {
      try {
        claudeMessages.push(JSON.parse(lines[i]));
      } catch (e) {
        // Tolerate individual corrupt lines; record them for debugging only.
        parseErrors++;
        log.debug(`Parse error on line ${i + 1}: ${(e as Error).message}`);
      }
    }

    if (claudeMessages.length === 0) {
      throw new Error(`No valid messages found in ${filePath}`);
    }

    // Session-level metadata comes from the first valid entry.
    // NOTE(review): assumes the first entry carries sessionId/timestamp/cwd —
    // confirm this holds for any summary-style leading entries.
    const firstMessage = claudeMessages[0];
    const sessionId = firstMessage.sessionId;
    const timestamp = firstMessage.timestamp;
    const gitBranch = firstMessage.gitBranch;
    const cwd = firstMessage.cwd;

    // Convert every entry into the compressor-facing shape.
    const messages = claudeMessages.map(msg => this.convertMessage(msg));

    log.debug(`Parsed ${filePath}: ${messages.length} messages, ${parseErrors} errors`);

    return {
      sessionId,
      filePath,
      messageCount: messages.length,
      timestamp,
      gitBranch,
      cwd,
      messages
    };
  }

  /**
   * Convert a raw ClaudeCodeMessage into the TranscriptMessage shape
   * expected by TranscriptCompressor.
   */
  private convertMessage(claudeMsg: ClaudeCodeMessage): TranscriptMessage {
    const converted: TranscriptMessage = {
      type: claudeMsg.type,
      uuid: claudeMsg.uuid,
      session_id: claudeMsg.sessionId,
      timestamp: claudeMsg.timestamp,
      subtype: claudeMsg.subtype
    };

    // Handle message content
    if (claudeMsg.message) {
      converted.message = {
        role: claudeMsg.message.role,
        timestamp: claudeMsg.timestamp
      };

      if (Array.isArray(claudeMsg.message.content)) {
        // Flatten text and thinking blocks into {text, content}; parts with
        // neither (e.g. tool use) become empty strings.
        converted.message.content = claudeMsg.message.content.map(item => ({
          text: item.text || item.thinking || '',
          content: item.text || item.thinking || ''
        }));
      } else if (typeof claudeMsg.message.content === 'string') {
        converted.message.content = claudeMsg.message.content;
      }
    }

    return converted;
  }

  /**
   * Scan the Claude projects directory for .jsonl conversation files.
   *
   * @returns Absolute paths of every conversation file found (empty array
   *          when the projects directory does not exist).
   */
  async scanConversationFiles(): Promise<string[]> {
    const pathDiscovery = PathDiscovery.getInstance();
    const claudeDir = path.join(pathDiscovery.getClaudeConfigDirectory(), 'projects');

    if (!fs.existsSync(claudeDir)) {
      return [];
    }

    const conversationFiles: string[] = [];

    // withFileTypes avoids a separate statSync() per entry; the previous
    // per-entry stat could throw on a broken symlink or an entry removed
    // mid-scan and abort the whole walk.
    const entries = fs.readdirSync(claudeDir, { withFileTypes: true });
    for (const entry of entries) {
      if (!entry.isDirectory()) continue;

      const projectPath = path.join(claudeDir, entry.name);
      for (const file of fs.readdirSync(projectPath)) {
        if (file.endsWith('.jsonl')) {
          conversationFiles.push(path.join(projectPath, file));
        }
      }
    }

    return conversationFiles;
  }

  /**
   * Read lightweight metadata for a conversation without converting messages.
   *
   * @param filePath Absolute path to a .jsonl conversation file.
   * @throws Error when the file is empty or its first line is not valid JSON.
   */
  async getConversationMetadata(filePath: string): Promise<{
    sessionId: string;
    timestamp: string;
    messageCount: number;
    gitBranch?: string;
    cwd: string;
    fileSize: number;
  }> {
    const stats = fs.statSync(filePath);
    const content = fs.readFileSync(filePath, 'utf-8');
    const lines = content.trim().split('\n').filter(line => line.trim());

    // Guard the empty/whitespace-only file explicitly: previously lines[0]
    // was undefined here and JSON.parse produced a confusing failure path.
    if (lines.length === 0) {
      throw new Error(`Invalid JSONL format in ${filePath}`);
    }

    let firstMessage;
    try {
      firstMessage = JSON.parse(lines[0]);
    } catch {
      throw new Error(`Invalid JSONL format in ${filePath}`);
    }

    return {
      sessionId: firstMessage.sessionId,
      timestamp: firstMessage.timestamp,
      messageCount: lines.length,
      gitBranch: firstMessage.gitBranch,
      cwd: firstMessage.cwd,
      fileSize: stats.size
    };
  }
}