chore: remove all better-sqlite3 references from codebase (#357)

* fix: export/import scripts now use API instead of direct DB access

Export script fix:
- Add format=json parameter to SearchManager for raw data output
- Add getSdkSessionsBySessionIds method to SessionStore
- Add POST /api/sdk-sessions/batch endpoint to DataRoutes
- Refactor export-memories.ts to use HTTP API

Import script fix:
- Add import methods to SessionStore with duplicate detection
- Add POST /api/import endpoint to DataRoutes
- Refactor import-memories.ts to use HTTP API

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

* fix: update analyze-transformations-smart.js to use bun:sqlite

Replace the better-sqlite3 import with bun:sqlite to align with the v7.1.0 migration.

* chore: remove all better-sqlite3 references from codebase

- Updated scripts/analyze-transformations-smart.js to use bun:sqlite
- Merged PR #332: Refactored import/export scripts to use worker API instead of direct DB access
- Updated PM2-to-Bun migration documentation

All better-sqlite3 references have been removed from source code.
Documentation references remain as appropriate historical context.

* build: update plugin artifacts with merged changes

Include built artifacts from PR #332 merge and analyze-transformations-smart.js update.

---------

Co-authored-by: lee <loyalpartner@163.com>
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
This commit (db3794762f, parent 78cb5c38dc) was authored by Alex Newman on 2025-12-16 17:57:40 -05:00 and committed by GitHub.
20 changed files with 988 additions and 508 deletions
+1 -1
View File
@@ -1,7 +1,7 @@
#!/usr/bin/env node
import fs from 'fs';
import Database from 'better-sqlite3';
import { Database } from 'bun:sqlite';
import readline from 'readline';
import path from 'path';
import { homedir } from 'os';
+12 -27
View File
@@ -5,8 +5,7 @@
* Example: npx tsx scripts/export-memories.ts "windows" windows-memories.json --project=claude-mem
*/
import Database from 'better-sqlite3';
import { existsSync, writeFileSync } from 'fs';
import { writeFileSync } from 'fs';
import { homedir } from 'os';
import { join } from 'path';
import { SettingsDefaultsManager } from '../src/shared/SettingsDefaultsManager';
@@ -127,33 +126,19 @@ async function exportMemories(query: string, outputFile: string, project?: strin
if (s.sdk_session_id) sdkSessionIds.add(s.sdk_session_id);
});
// Get SDK sessions metadata from database
// (We need this because the API doesn't expose sdk_sessions table directly)
// Get SDK sessions metadata via API
console.log('📡 Fetching SDK sessions metadata...');
const sessions: SdkSessionRecord[] = [];
let sessions: SdkSessionRecord[] = [];
if (sdkSessionIds.size > 0) {
// Read directly from database for sdk_sessions table
const Database = (await import('better-sqlite3')).default;
const dbPath = join(homedir(), '.claude-mem', 'claude-mem.db');
if (!existsSync(dbPath)) {
console.error(`❌ Database not found at: ${dbPath}`);
console.error('💡 Has claude-mem been initialized? Try running a session first.');
process.exit(1);
}
const db = new Database(dbPath, { readonly: true });
try {
const placeholders = Array.from(sdkSessionIds).map(() => '?').join(',');
const sessionQuery = `
SELECT * FROM sdk_sessions
WHERE sdk_session_id IN (${placeholders})
ORDER BY started_at_epoch DESC
`;
sessions.push(...db.prepare(sessionQuery).all(...Array.from(sdkSessionIds)));
} finally {
db.close();
const sessionsResponse = await fetch(`${baseUrl}/api/sdk-sessions/batch`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ sdkSessionIds: Array.from(sdkSessionIds) })
});
if (sessionsResponse.ok) {
sessions = await sessionsResponse.json();
} else {
console.warn(`⚠️ Failed to fetch SDK sessions: ${sessionsResponse.status}`);
}
}
console.log(`✅ Found ${sessions.length} SDK sessions`);
+47 -203
View File
@@ -3,37 +3,21 @@
* Import memories from a JSON export file with duplicate prevention
* Usage: npx tsx scripts/import-memories.ts <input-file>
* Example: npx tsx scripts/import-memories.ts windows-memories.json
*
* This script uses the worker API instead of direct database access.
*/
import Database from 'better-sqlite3';
import { existsSync, readFileSync } from 'fs';
import { homedir } from 'os';
import { join } from 'path';
interface ImportStats {
sessionsImported: number;
sessionsSkipped: number;
summariesImported: number;
summariesSkipped: number;
observationsImported: number;
observationsSkipped: number;
promptsImported: number;
promptsSkipped: number;
}
const WORKER_PORT = process.env.CLAUDE_MEM_WORKER_PORT || 37777;
const WORKER_URL = `http://127.0.0.1:${WORKER_PORT}`;
function importMemories(inputFile: string) {
async function importMemories(inputFile: string) {
if (!existsSync(inputFile)) {
console.error(`❌ Input file not found: ${inputFile}`);
process.exit(1);
}
const dbPath = join(homedir(), '.claude-mem', 'claude-mem.db');
if (!existsSync(dbPath)) {
console.error(`❌ Database not found at: ${dbPath}`);
process.exit(1);
}
// Read and parse export file
const exportData = JSON.parse(readFileSync(inputFile, 'utf-8'));
@@ -47,190 +31,50 @@ function importMemories(inputFile: string) {
console.log(`${exportData.totalPrompts} prompts`);
console.log('');
const db = new Database(dbPath);
const stats: ImportStats = {
sessionsImported: 0,
sessionsSkipped: 0,
summariesImported: 0,
summariesSkipped: 0,
observationsImported: 0,
observationsSkipped: 0,
promptsImported: 0,
promptsSkipped: 0
};
// Check if worker is running
try {
// Prepare statements for duplicate checking
const checkSession = db.prepare('SELECT id FROM sdk_sessions WHERE claude_session_id = ?');
const checkSummary = db.prepare('SELECT id FROM session_summaries WHERE sdk_session_id = ?');
const checkObservation = db.prepare(`
SELECT id FROM observations
WHERE sdk_session_id = ?
AND title = ?
AND created_at_epoch = ?
`);
const checkPrompt = db.prepare(`
SELECT id FROM user_prompts
WHERE claude_session_id = ?
AND prompt_number = ?
`);
// Prepare insert statements
const insertSession = db.prepare(`
INSERT INTO sdk_sessions (
claude_session_id, sdk_session_id, project, user_prompt,
started_at, started_at_epoch, completed_at, completed_at_epoch,
status
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
const insertSummary = db.prepare(`
INSERT INTO session_summaries (
sdk_session_id, project, request, investigated, learned,
completed, next_steps, files_read, files_edited, notes,
prompt_number, discovery_tokens, created_at, created_at_epoch
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
const insertObservation = db.prepare(`
INSERT INTO observations (
sdk_session_id, project, text, type, title, subtitle,
facts, narrative, concepts, files_read, files_modified,
prompt_number, discovery_tokens, created_at, created_at_epoch
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
const insertPrompt = db.prepare(`
INSERT INTO user_prompts (
claude_session_id, prompt_number, prompt_text,
created_at, created_at_epoch
) VALUES (?, ?, ?, ?, ?)
`);
// Import in transaction
db.transaction(() => {
// 1. Import sessions first (dependency for everything else)
console.log('🔄 Importing sessions...');
for (const session of exportData.sessions) {
const exists = checkSession.get(session.claude_session_id);
if (exists) {
stats.sessionsSkipped++;
continue;
}
insertSession.run(
session.claude_session_id,
session.sdk_session_id,
session.project,
session.user_prompt,
session.started_at,
session.started_at_epoch,
session.completed_at,
session.completed_at_epoch,
session.status
);
stats.sessionsImported++;
}
console.log(` ✅ Imported: ${stats.sessionsImported}, Skipped: ${stats.sessionsSkipped}`);
// 2. Import summaries (depends on sessions)
console.log('🔄 Importing summaries...');
for (const summary of exportData.summaries) {
const exists = checkSummary.get(summary.sdk_session_id);
if (exists) {
stats.summariesSkipped++;
continue;
}
insertSummary.run(
summary.sdk_session_id,
summary.project,
summary.request,
summary.investigated,
summary.learned,
summary.completed,
summary.next_steps,
summary.files_read,
summary.files_edited,
summary.notes,
summary.prompt_number,
summary.discovery_tokens || 0,
summary.created_at,
summary.created_at_epoch
);
stats.summariesImported++;
}
console.log(` ✅ Imported: ${stats.summariesImported}, Skipped: ${stats.summariesSkipped}`);
// 3. Import observations (depends on sessions)
console.log('🔄 Importing observations...');
for (const obs of exportData.observations) {
const exists = checkObservation.get(
obs.sdk_session_id,
obs.title,
obs.created_at_epoch
);
if (exists) {
stats.observationsSkipped++;
continue;
}
insertObservation.run(
obs.sdk_session_id,
obs.project,
obs.text,
obs.type,
obs.title,
obs.subtitle,
obs.facts,
obs.narrative,
obs.concepts,
obs.files_read,
obs.files_modified,
obs.prompt_number,
obs.discovery_tokens || 0,
obs.created_at,
obs.created_at_epoch
);
stats.observationsImported++;
}
console.log(` ✅ Imported: ${stats.observationsImported}, Skipped: ${stats.observationsSkipped}`);
// 4. Import prompts (depends on sessions)
console.log('🔄 Importing prompts...');
for (const prompt of exportData.prompts) {
const exists = checkPrompt.get(
prompt.claude_session_id,
prompt.prompt_number
);
if (exists) {
stats.promptsSkipped++;
continue;
}
insertPrompt.run(
prompt.claude_session_id,
prompt.prompt_number,
prompt.prompt_text,
prompt.created_at,
prompt.created_at_epoch
);
stats.promptsImported++;
}
console.log(` ✅ Imported: ${stats.promptsImported}, Skipped: ${stats.promptsSkipped}`);
})();
console.log('\n✅ Import complete!');
console.log('📊 Summary:');
console.log(` Sessions: ${stats.sessionsImported} imported, ${stats.sessionsSkipped} skipped`);
console.log(` Summaries: ${stats.summariesImported} imported, ${stats.summariesSkipped} skipped`);
console.log(` Observations: ${stats.observationsImported} imported, ${stats.observationsSkipped} skipped`);
console.log(` Prompts: ${stats.promptsImported} imported, ${stats.promptsSkipped} skipped`);
} finally {
db.close();
const healthCheck = await fetch(`${WORKER_URL}/api/stats`);
if (!healthCheck.ok) {
throw new Error('Worker not responding');
}
} catch (error) {
console.error(`❌ Worker not running at ${WORKER_URL}`);
console.error(' Please ensure the claude-mem worker is running.');
process.exit(1);
}
console.log('🔄 Importing via worker API...');
// Send import request to worker
const response = await fetch(`${WORKER_URL}/api/import`, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
sessions: exportData.sessions || [],
summaries: exportData.summaries || [],
observations: exportData.observations || [],
prompts: exportData.prompts || []
})
});
if (!response.ok) {
const errorText = await response.text();
console.error(`❌ Import failed: ${response.status} ${response.statusText}`);
console.error(` ${errorText}`);
process.exit(1);
}
const result = await response.json();
const stats = result.stats;
console.log('\n✅ Import complete!');
console.log('📊 Summary:');
console.log(` Sessions: ${stats.sessionsImported} imported, ${stats.sessionsSkipped} skipped`);
console.log(` Summaries: ${stats.summariesImported} imported, ${stats.summariesSkipped} skipped`);
console.log(` Observations: ${stats.observationsImported} imported, ${stats.observationsSkipped} skipped`);
console.log(` Prompts: ${stats.promptsImported} imported, ${stats.promptsSkipped} skipped`);
}
// CLI interface