Release v3.7.0
Published from npm package build Source: https://github.com/thedotmack/claude-mem-source
This commit is contained in:
@@ -0,0 +1,179 @@
|
||||
import Database from 'better-sqlite3';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { PathDiscovery } from '../path-discovery.js';
|
||||
|
||||
/**
 * A single schema migration. `up` applies the change, `down` (optional)
 * reverts it. Versions must be unique integers; they are applied in
 * ascending order by DatabaseManager.runMigrations().
 */
export interface Migration {
  // Unique, monotonically meaningful schema version number.
  version: number;
  // Applies the migration against an open connection.
  up: (db: Database.Database) => void;
  // Optional rollback; not invoked by runMigrations() as written.
  down?: (db: Database.Database) => void;
}

// Module-level handle mirroring DatabaseManager's open connection,
// exposed to legacy callers through getDatabase() below.
let dbInstance: Database.Database | null = null;
||||
/**
|
||||
* SQLite Database singleton with migration support and optimized settings
|
||||
*/
|
||||
export class DatabaseManager {
|
||||
private static instance: DatabaseManager;
|
||||
private db: Database.Database | null = null;
|
||||
private migrations: Migration[] = [];
|
||||
|
||||
static getInstance(): DatabaseManager {
|
||||
if (!DatabaseManager.instance) {
|
||||
DatabaseManager.instance = new DatabaseManager();
|
||||
}
|
||||
return DatabaseManager.instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a migration to be run during initialization
|
||||
*/
|
||||
registerMigration(migration: Migration): void {
|
||||
this.migrations.push(migration);
|
||||
// Keep migrations sorted by version
|
||||
this.migrations.sort((a, b) => a.version - b.version);
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize database connection with optimized settings
|
||||
*/
|
||||
async initialize(): Promise<Database.Database> {
|
||||
if (this.db) {
|
||||
return this.db;
|
||||
}
|
||||
|
||||
// Ensure the data directory exists
|
||||
const dataDir = PathDiscovery.getInstance().getDataDirectory();
|
||||
fs.mkdirSync(dataDir, { recursive: true });
|
||||
|
||||
const dbPath = path.join(dataDir, 'claude-mem.db');
|
||||
this.db = new Database(dbPath);
|
||||
|
||||
// Apply optimized SQLite settings
|
||||
this.db.pragma('journal_mode = WAL');
|
||||
this.db.pragma('synchronous = NORMAL');
|
||||
this.db.pragma('foreign_keys = ON');
|
||||
this.db.pragma('temp_store = memory');
|
||||
this.db.pragma('mmap_size = 268435456'); // 256MB
|
||||
this.db.pragma('cache_size = 10000');
|
||||
|
||||
// Initialize schema_versions table
|
||||
this.initializeSchemaVersions();
|
||||
|
||||
// Run migrations
|
||||
await this.runMigrations();
|
||||
|
||||
dbInstance = this.db;
|
||||
return this.db;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current database connection
|
||||
*/
|
||||
getConnection(): Database.Database {
|
||||
if (!this.db) {
|
||||
throw new Error('Database not initialized. Call initialize() first.');
|
||||
}
|
||||
return this.db;
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a function within a transaction
|
||||
*/
|
||||
withTransaction<T>(fn: (db: Database.Database) => T): T {
|
||||
const db = this.getConnection();
|
||||
const transaction = db.transaction(fn);
|
||||
return transaction(db);
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the database connection
|
||||
*/
|
||||
close(): void {
|
||||
if (this.db) {
|
||||
this.db.close();
|
||||
this.db = null;
|
||||
dbInstance = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the schema_versions table
|
||||
*/
|
||||
private initializeSchemaVersions(): void {
|
||||
if (!this.db) return;
|
||||
|
||||
this.db.exec(`
|
||||
CREATE TABLE IF NOT EXISTS schema_versions (
|
||||
id INTEGER PRIMARY KEY,
|
||||
version INTEGER UNIQUE NOT NULL,
|
||||
applied_at TEXT NOT NULL
|
||||
)
|
||||
`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Run all pending migrations
|
||||
*/
|
||||
private async runMigrations(): Promise<void> {
|
||||
if (!this.db) return;
|
||||
|
||||
const appliedVersions = this.db
|
||||
.prepare('SELECT version FROM schema_versions ORDER BY version')
|
||||
.all()
|
||||
.map((row: any) => row.version);
|
||||
|
||||
const maxApplied = appliedVersions.length > 0 ? Math.max(...appliedVersions) : 0;
|
||||
|
||||
for (const migration of this.migrations) {
|
||||
if (migration.version > maxApplied) {
|
||||
console.log(`Applying migration ${migration.version}...`);
|
||||
|
||||
const transaction = this.db.transaction(() => {
|
||||
migration.up(this.db!);
|
||||
|
||||
this.db!
|
||||
.prepare('INSERT INTO schema_versions (version, applied_at) VALUES (?, ?)')
|
||||
.run(migration.version, new Date().toISOString());
|
||||
});
|
||||
|
||||
transaction();
|
||||
console.log(`Migration ${migration.version} applied successfully`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current schema version
|
||||
*/
|
||||
getCurrentVersion(): number {
|
||||
if (!this.db) return 0;
|
||||
|
||||
const result = this.db
|
||||
.prepare('SELECT MAX(version) as version FROM schema_versions')
|
||||
.get() as { version: number } | undefined;
|
||||
|
||||
return result?.version || 0;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the global database instance (for compatibility)
|
||||
*/
|
||||
export function getDatabase(): Database.Database {
|
||||
if (!dbInstance) {
|
||||
throw new Error('Database not initialized. Call DatabaseManager.getInstance().initialize() first.');
|
||||
}
|
||||
return dbInstance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize and get database manager
|
||||
*/
|
||||
export async function initializeDatabase(): Promise<Database.Database> {
|
||||
const manager = DatabaseManager.getInstance();
|
||||
return await manager.initialize();
|
||||
}
|
||||
|
||||
export { Database };
|
||||
@@ -0,0 +1,229 @@
|
||||
import { Database } from 'better-sqlite3';
|
||||
import { getDatabase } from './Database.js';
|
||||
import { DiagnosticRow, DiagnosticInput, normalizeTimestamp } from './types.js';
|
||||
|
||||
/**
|
||||
* Data Access Object for diagnostic records
|
||||
*/
|
||||
export class DiagnosticsStore {
|
||||
private db: Database.Database;
|
||||
|
||||
constructor(db?: Database.Database) {
|
||||
this.db = db || getDatabase();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new diagnostic record
|
||||
*/
|
||||
create(input: DiagnosticInput): DiagnosticRow {
|
||||
const { isoString, epoch } = normalizeTimestamp(input.created_at);
|
||||
|
||||
const stmt = this.db.prepare(`
|
||||
INSERT INTO diagnostics (
|
||||
session_id, message, severity, created_at, created_at_epoch, project, origin
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
`);
|
||||
|
||||
const info = stmt.run(
|
||||
input.session_id || null,
|
||||
input.message,
|
||||
input.severity || 'warn',
|
||||
isoString,
|
||||
epoch,
|
||||
input.project,
|
||||
input.origin || 'compressor'
|
||||
);
|
||||
|
||||
return this.getById(info.lastInsertRowid as number)!;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get diagnostic by primary key
|
||||
*/
|
||||
getById(id: number): DiagnosticRow | null {
|
||||
const stmt = this.db.prepare('SELECT * FROM diagnostics WHERE id = ?');
|
||||
return stmt.get(id) as DiagnosticRow || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get diagnostics for a specific session
|
||||
*/
|
||||
getBySessionId(sessionId: string): DiagnosticRow[] {
|
||||
const stmt = this.db.prepare(`
|
||||
SELECT * FROM diagnostics
|
||||
WHERE session_id = ?
|
||||
ORDER BY created_at_epoch DESC
|
||||
`);
|
||||
return stmt.all(sessionId) as DiagnosticRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get recent diagnostics for a project
|
||||
*/
|
||||
getRecentForProject(project: string, limit = 10): DiagnosticRow[] {
|
||||
const stmt = this.db.prepare(`
|
||||
SELECT * FROM diagnostics
|
||||
WHERE project = ?
|
||||
ORDER BY created_at_epoch DESC
|
||||
LIMIT ?
|
||||
`);
|
||||
return stmt.all(project, limit) as DiagnosticRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get recent diagnostics across all projects
|
||||
*/
|
||||
getRecent(limit = 10): DiagnosticRow[] {
|
||||
const stmt = this.db.prepare(`
|
||||
SELECT * FROM diagnostics
|
||||
ORDER BY created_at_epoch DESC
|
||||
LIMIT ?
|
||||
`);
|
||||
return stmt.all(limit) as DiagnosticRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get diagnostics by severity level
|
||||
*/
|
||||
getBySeverity(severity: 'info' | 'warn' | 'error', limit?: number): DiagnosticRow[] {
|
||||
const query = limit
|
||||
? 'SELECT * FROM diagnostics WHERE severity = ? ORDER BY created_at_epoch DESC LIMIT ?'
|
||||
: 'SELECT * FROM diagnostics WHERE severity = ? ORDER BY created_at_epoch DESC';
|
||||
|
||||
const stmt = this.db.prepare(query);
|
||||
const params = limit ? [severity, limit] : [severity];
|
||||
return stmt.all(...params) as DiagnosticRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get diagnostics by origin
|
||||
*/
|
||||
getByOrigin(origin: string, limit?: number): DiagnosticRow[] {
|
||||
const query = limit
|
||||
? 'SELECT * FROM diagnostics WHERE origin = ? ORDER BY created_at_epoch DESC LIMIT ?'
|
||||
: 'SELECT * FROM diagnostics WHERE origin = ? ORDER BY created_at_epoch DESC';
|
||||
|
||||
const stmt = this.db.prepare(query);
|
||||
const params = limit ? [origin, limit] : [origin];
|
||||
return stmt.all(...params) as DiagnosticRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Search diagnostics by message content
|
||||
*/
|
||||
searchByMessage(query: string, project?: string, limit = 20): DiagnosticRow[] {
|
||||
let sql = 'SELECT * FROM diagnostics WHERE message LIKE ?';
|
||||
const params: any[] = [`%${query}%`];
|
||||
|
||||
if (project) {
|
||||
sql += ' AND project = ?';
|
||||
params.push(project);
|
||||
}
|
||||
|
||||
sql += ' ORDER BY created_at_epoch DESC LIMIT ?';
|
||||
params.push(limit);
|
||||
|
||||
const stmt = this.db.prepare(sql);
|
||||
return stmt.all(...params) as DiagnosticRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Count total diagnostics
|
||||
*/
|
||||
count(): number {
|
||||
const stmt = this.db.prepare('SELECT COUNT(*) as count FROM diagnostics');
|
||||
const result = stmt.get() as { count: number };
|
||||
return result.count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Count diagnostics by project
|
||||
*/
|
||||
countByProject(project: string): number {
|
||||
const stmt = this.db.prepare('SELECT COUNT(*) as count FROM diagnostics WHERE project = ?');
|
||||
const result = stmt.get(project) as { count: number };
|
||||
return result.count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Count diagnostics by severity
|
||||
*/
|
||||
countBySeverity(severity: 'info' | 'warn' | 'error'): number {
|
||||
const stmt = this.db.prepare('SELECT COUNT(*) as count FROM diagnostics WHERE severity = ?');
|
||||
const result = stmt.get(severity) as { count: number };
|
||||
return result.count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a diagnostic record
|
||||
*/
|
||||
update(id: number, input: Partial<DiagnosticInput>): DiagnosticRow {
|
||||
const existing = this.getById(id);
|
||||
if (!existing) {
|
||||
throw new Error(`Diagnostic with id ${id} not found`);
|
||||
}
|
||||
|
||||
const { isoString, epoch } = normalizeTimestamp(input.created_at || existing.created_at);
|
||||
|
||||
const stmt = this.db.prepare(`
|
||||
UPDATE diagnostics SET
|
||||
message = ?, severity = ?, created_at = ?, created_at_epoch = ?, project = ?, origin = ?
|
||||
WHERE id = ?
|
||||
`);
|
||||
|
||||
stmt.run(
|
||||
input.message || existing.message,
|
||||
input.severity || existing.severity,
|
||||
isoString,
|
||||
epoch,
|
||||
input.project || existing.project,
|
||||
input.origin || existing.origin,
|
||||
id
|
||||
);
|
||||
|
||||
return this.getById(id)!;
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a diagnostic by ID
|
||||
*/
|
||||
deleteById(id: number): boolean {
|
||||
const stmt = this.db.prepare('DELETE FROM diagnostics WHERE id = ?');
|
||||
const info = stmt.run(id);
|
||||
return info.changes > 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete diagnostics by session_id
|
||||
*/
|
||||
deleteBySessionId(sessionId: string): number {
|
||||
const stmt = this.db.prepare('DELETE FROM diagnostics WHERE session_id = ?');
|
||||
const info = stmt.run(sessionId);
|
||||
return info.changes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get unique projects from diagnostics
|
||||
*/
|
||||
getUniqueProjects(): string[] {
|
||||
const stmt = this.db.prepare('SELECT DISTINCT project FROM diagnostics ORDER BY project');
|
||||
const rows = stmt.all() as { project: string }[];
|
||||
return rows.map(row => row.project);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get diagnostic summary stats
|
||||
*/
|
||||
getStats(): { total: number; info: number; warn: number; error: number } {
|
||||
const stmt = this.db.prepare(`
|
||||
SELECT
|
||||
COUNT(*) as total,
|
||||
COUNT(CASE WHEN severity = 'info' THEN 1 END) as info,
|
||||
COUNT(CASE WHEN severity = 'warn' THEN 1 END) as warn,
|
||||
COUNT(CASE WHEN severity = 'error' THEN 1 END) as error
|
||||
FROM diagnostics
|
||||
`);
|
||||
|
||||
return stmt.get() as { total: number; info: number; warn: number; error: number };
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,247 @@
|
||||
import { Database } from 'better-sqlite3';
|
||||
import { getDatabase } from './Database.js';
|
||||
import { MemoryRow, MemoryInput, normalizeTimestamp } from './types.js';
|
||||
|
||||
/**
|
||||
* Data Access Object for memory records
|
||||
*/
|
||||
export class MemoryStore {
|
||||
private db: Database.Database;
|
||||
|
||||
constructor(db?: Database.Database) {
|
||||
this.db = db || getDatabase();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new memory record
|
||||
*/
|
||||
create(input: MemoryInput): MemoryRow {
|
||||
const { isoString, epoch } = normalizeTimestamp(input.created_at);
|
||||
|
||||
const stmt = this.db.prepare(`
|
||||
INSERT INTO memories (
|
||||
session_id, text, document_id, keywords, created_at, created_at_epoch,
|
||||
project, archive_basename, origin
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`);
|
||||
|
||||
const info = stmt.run(
|
||||
input.session_id,
|
||||
input.text,
|
||||
input.document_id || null,
|
||||
input.keywords || null,
|
||||
isoString,
|
||||
epoch,
|
||||
input.project,
|
||||
input.archive_basename || null,
|
||||
input.origin || 'transcript'
|
||||
);
|
||||
|
||||
return this.getById(info.lastInsertRowid as number)!;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create multiple memory records in a transaction
|
||||
*/
|
||||
createMany(inputs: MemoryInput[]): MemoryRow[] {
|
||||
const transaction = this.db.transaction((memories: MemoryInput[]) => {
|
||||
const results: MemoryRow[] = [];
|
||||
for (const memory of memories) {
|
||||
results.push(this.create(memory));
|
||||
}
|
||||
return results;
|
||||
});
|
||||
|
||||
return transaction(inputs);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get memory by primary key
|
||||
*/
|
||||
getById(id: number): MemoryRow | null {
|
||||
const stmt = this.db.prepare('SELECT * FROM memories WHERE id = ?');
|
||||
return stmt.get(id) as MemoryRow || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get memory by document_id
|
||||
*/
|
||||
getByDocumentId(documentId: string): MemoryRow | null {
|
||||
const stmt = this.db.prepare('SELECT * FROM memories WHERE document_id = ?');
|
||||
return stmt.get(documentId) as MemoryRow || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a document_id already exists
|
||||
*/
|
||||
hasDocumentId(documentId: string): boolean {
|
||||
const stmt = this.db.prepare('SELECT 1 FROM memories WHERE document_id = ? LIMIT 1');
|
||||
return Boolean(stmt.get(documentId));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get memories for a specific session
|
||||
*/
|
||||
getBySessionId(sessionId: string): MemoryRow[] {
|
||||
const stmt = this.db.prepare(`
|
||||
SELECT * FROM memories
|
||||
WHERE session_id = ?
|
||||
ORDER BY created_at_epoch DESC
|
||||
`);
|
||||
return stmt.all(sessionId) as MemoryRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get recent memories for a project
|
||||
*/
|
||||
getRecentForProject(project: string, limit = 10): MemoryRow[] {
|
||||
const stmt = this.db.prepare(`
|
||||
SELECT * FROM memories
|
||||
WHERE project = ?
|
||||
ORDER BY created_at_epoch DESC
|
||||
LIMIT ?
|
||||
`);
|
||||
return stmt.all(project, limit) as MemoryRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get recent memories across all projects
|
||||
*/
|
||||
getRecent(limit = 10): MemoryRow[] {
|
||||
const stmt = this.db.prepare(`
|
||||
SELECT * FROM memories
|
||||
ORDER BY created_at_epoch DESC
|
||||
LIMIT ?
|
||||
`);
|
||||
return stmt.all(limit) as MemoryRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Search memories by text content
|
||||
*/
|
||||
searchByText(query: string, project?: string, limit = 20): MemoryRow[] {
|
||||
let sql = 'SELECT * FROM memories WHERE text LIKE ?';
|
||||
const params: any[] = [`%${query}%`];
|
||||
|
||||
if (project) {
|
||||
sql += ' AND project = ?';
|
||||
params.push(project);
|
||||
}
|
||||
|
||||
sql += ' ORDER BY created_at_epoch DESC LIMIT ?';
|
||||
params.push(limit);
|
||||
|
||||
const stmt = this.db.prepare(sql);
|
||||
return stmt.all(...params) as MemoryRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Search memories by keywords
|
||||
*/
|
||||
searchByKeywords(keywords: string, project?: string, limit = 20): MemoryRow[] {
|
||||
let sql = 'SELECT * FROM memories WHERE keywords LIKE ?';
|
||||
const params: any[] = [`%${keywords}%`];
|
||||
|
||||
if (project) {
|
||||
sql += ' AND project = ?';
|
||||
params.push(project);
|
||||
}
|
||||
|
||||
sql += ' ORDER BY created_at_epoch DESC LIMIT ?';
|
||||
params.push(limit);
|
||||
|
||||
const stmt = this.db.prepare(sql);
|
||||
return stmt.all(...params) as MemoryRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get memories by origin type
|
||||
*/
|
||||
getByOrigin(origin: string, limit?: number): MemoryRow[] {
|
||||
const query = limit
|
||||
? 'SELECT * FROM memories WHERE origin = ? ORDER BY created_at_epoch DESC LIMIT ?'
|
||||
: 'SELECT * FROM memories WHERE origin = ? ORDER BY created_at_epoch DESC';
|
||||
|
||||
const stmt = this.db.prepare(query);
|
||||
const params = limit ? [origin, limit] : [origin];
|
||||
return stmt.all(...params) as MemoryRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Count total memories
|
||||
*/
|
||||
count(): number {
|
||||
const stmt = this.db.prepare('SELECT COUNT(*) as count FROM memories');
|
||||
const result = stmt.get() as { count: number };
|
||||
return result.count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Count memories by project
|
||||
*/
|
||||
countByProject(project: string): number {
|
||||
const stmt = this.db.prepare('SELECT COUNT(*) as count FROM memories WHERE project = ?');
|
||||
const result = stmt.get(project) as { count: number };
|
||||
return result.count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a memory record
|
||||
*/
|
||||
update(id: number, input: Partial<MemoryInput>): MemoryRow {
|
||||
const existing = this.getById(id);
|
||||
if (!existing) {
|
||||
throw new Error(`Memory with id ${id} not found`);
|
||||
}
|
||||
|
||||
const { isoString, epoch } = normalizeTimestamp(input.created_at || existing.created_at);
|
||||
|
||||
const stmt = this.db.prepare(`
|
||||
UPDATE memories SET
|
||||
text = ?, document_id = ?, keywords = ?, created_at = ?, created_at_epoch = ?,
|
||||
project = ?, archive_basename = ?, origin = ?
|
||||
WHERE id = ?
|
||||
`);
|
||||
|
||||
stmt.run(
|
||||
input.text || existing.text,
|
||||
input.document_id !== undefined ? input.document_id : existing.document_id,
|
||||
input.keywords !== undefined ? input.keywords : existing.keywords,
|
||||
isoString,
|
||||
epoch,
|
||||
input.project || existing.project,
|
||||
input.archive_basename !== undefined ? input.archive_basename : existing.archive_basename,
|
||||
input.origin || existing.origin,
|
||||
id
|
||||
);
|
||||
|
||||
return this.getById(id)!;
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a memory by ID
|
||||
*/
|
||||
deleteById(id: number): boolean {
|
||||
const stmt = this.db.prepare('DELETE FROM memories WHERE id = ?');
|
||||
const info = stmt.run(id);
|
||||
return info.changes > 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete memories by session_id
|
||||
*/
|
||||
deleteBySessionId(sessionId: string): number {
|
||||
const stmt = this.db.prepare('DELETE FROM memories WHERE session_id = ?');
|
||||
const info = stmt.run(sessionId);
|
||||
return info.changes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get unique projects from memories
|
||||
*/
|
||||
getUniqueProjects(): string[] {
|
||||
const stmt = this.db.prepare('SELECT DISTINCT project FROM memories ORDER BY project');
|
||||
const rows = stmt.all() as { project: string }[];
|
||||
return rows.map(row => row.project);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,196 @@
|
||||
import { Database } from 'better-sqlite3';
|
||||
import { getDatabase } from './Database.js';
|
||||
import { OverviewRow, OverviewInput, normalizeTimestamp } from './types.js';
|
||||
|
||||
/**
|
||||
* Data Access Object for overview records
|
||||
*/
|
||||
export class OverviewStore {
|
||||
private db: Database.Database;
|
||||
|
||||
constructor(db?: Database.Database) {
|
||||
this.db = db || getDatabase();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new overview record
|
||||
*/
|
||||
create(input: OverviewInput): OverviewRow {
|
||||
const { isoString, epoch } = normalizeTimestamp(input.created_at);
|
||||
|
||||
const stmt = this.db.prepare(`
|
||||
INSERT INTO overviews (
|
||||
session_id, content, created_at, created_at_epoch, project, origin
|
||||
) VALUES (?, ?, ?, ?, ?, ?)
|
||||
`);
|
||||
|
||||
const info = stmt.run(
|
||||
input.session_id,
|
||||
input.content,
|
||||
isoString,
|
||||
epoch,
|
||||
input.project,
|
||||
input.origin || 'claude'
|
||||
);
|
||||
|
||||
return this.getById(info.lastInsertRowid as number)!;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create or replace an overview for a session (since one session should have one overview)
|
||||
*/
|
||||
upsert(input: OverviewInput): OverviewRow {
|
||||
const existing = this.getBySessionId(input.session_id);
|
||||
if (existing) {
|
||||
return this.update(existing.id, input);
|
||||
}
|
||||
return this.create(input);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get overview by primary key
|
||||
*/
|
||||
getById(id: number): OverviewRow | null {
|
||||
const stmt = this.db.prepare('SELECT * FROM overviews WHERE id = ?');
|
||||
return stmt.get(id) as OverviewRow || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get overview by session_id
|
||||
*/
|
||||
getBySessionId(sessionId: string): OverviewRow | null {
|
||||
const stmt = this.db.prepare('SELECT * FROM overviews WHERE session_id = ?');
|
||||
return stmt.get(sessionId) as OverviewRow || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get recent overviews for a project
|
||||
*/
|
||||
getRecentForProject(project: string, limit = 5): OverviewRow[] {
|
||||
const stmt = this.db.prepare(`
|
||||
SELECT * FROM overviews
|
||||
WHERE project = ?
|
||||
ORDER BY created_at_epoch DESC
|
||||
LIMIT ?
|
||||
`);
|
||||
return stmt.all(project, limit) as OverviewRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get recent overviews across all projects
|
||||
*/
|
||||
getRecent(limit = 5): OverviewRow[] {
|
||||
const stmt = this.db.prepare(`
|
||||
SELECT * FROM overviews
|
||||
ORDER BY created_at_epoch DESC
|
||||
LIMIT ?
|
||||
`);
|
||||
return stmt.all(limit) as OverviewRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Search overviews by content
|
||||
*/
|
||||
searchByContent(query: string, project?: string, limit = 10): OverviewRow[] {
|
||||
let sql = 'SELECT * FROM overviews WHERE content LIKE ?';
|
||||
const params: any[] = [`%${query}%`];
|
||||
|
||||
if (project) {
|
||||
sql += ' AND project = ?';
|
||||
params.push(project);
|
||||
}
|
||||
|
||||
sql += ' ORDER BY created_at_epoch DESC LIMIT ?';
|
||||
params.push(limit);
|
||||
|
||||
const stmt = this.db.prepare(sql);
|
||||
return stmt.all(...params) as OverviewRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get overviews by origin type
|
||||
*/
|
||||
getByOrigin(origin: string, limit?: number): OverviewRow[] {
|
||||
const query = limit
|
||||
? 'SELECT * FROM overviews WHERE origin = ? ORDER BY created_at_epoch DESC LIMIT ?'
|
||||
: 'SELECT * FROM overviews WHERE origin = ? ORDER BY created_at_epoch DESC';
|
||||
|
||||
const stmt = this.db.prepare(query);
|
||||
const params = limit ? [origin, limit] : [origin];
|
||||
return stmt.all(...params) as OverviewRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Count total overviews
|
||||
*/
|
||||
count(): number {
|
||||
const stmt = this.db.prepare('SELECT COUNT(*) as count FROM overviews');
|
||||
const result = stmt.get() as { count: number };
|
||||
return result.count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Count overviews by project
|
||||
*/
|
||||
countByProject(project: string): number {
|
||||
const stmt = this.db.prepare('SELECT COUNT(*) as count FROM overviews WHERE project = ?');
|
||||
const result = stmt.get(project) as { count: number };
|
||||
return result.count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update an overview record
|
||||
*/
|
||||
update(id: number, input: Partial<OverviewInput>): OverviewRow {
|
||||
const existing = this.getById(id);
|
||||
if (!existing) {
|
||||
throw new Error(`Overview with id ${id} not found`);
|
||||
}
|
||||
|
||||
const { isoString, epoch } = normalizeTimestamp(input.created_at || existing.created_at);
|
||||
|
||||
const stmt = this.db.prepare(`
|
||||
UPDATE overviews SET
|
||||
content = ?, created_at = ?, created_at_epoch = ?, project = ?, origin = ?
|
||||
WHERE id = ?
|
||||
`);
|
||||
|
||||
stmt.run(
|
||||
input.content || existing.content,
|
||||
isoString,
|
||||
epoch,
|
||||
input.project || existing.project,
|
||||
input.origin || existing.origin,
|
||||
id
|
||||
);
|
||||
|
||||
return this.getById(id)!;
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete an overview by ID
|
||||
*/
|
||||
deleteById(id: number): boolean {
|
||||
const stmt = this.db.prepare('DELETE FROM overviews WHERE id = ?');
|
||||
const info = stmt.run(id);
|
||||
return info.changes > 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete overview by session_id
|
||||
*/
|
||||
deleteBySessionId(sessionId: string): boolean {
|
||||
const stmt = this.db.prepare('DELETE FROM overviews WHERE session_id = ?');
|
||||
const info = stmt.run(sessionId);
|
||||
return info.changes > 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get unique projects from overviews
|
||||
*/
|
||||
getUniqueProjects(): string[] {
|
||||
const stmt = this.db.prepare('SELECT DISTINCT project FROM overviews ORDER BY project');
|
||||
const rows = stmt.all() as { project: string }[];
|
||||
return rows.map(row => row.project);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,195 @@
|
||||
import { Database } from 'better-sqlite3';
|
||||
import { getDatabase } from './Database.js';
|
||||
import { SessionRow, SessionInput, normalizeTimestamp } from './types.js';
|
||||
|
||||
/**
|
||||
* Data Access Object for session records
|
||||
*/
|
||||
export class SessionStore {
|
||||
private db: Database.Database;
|
||||
|
||||
constructor(db?: Database.Database) {
|
||||
this.db = db || getDatabase();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new session record
|
||||
*/
|
||||
create(input: SessionInput): SessionRow {
|
||||
const { isoString, epoch } = normalizeTimestamp(input.created_at);
|
||||
|
||||
const stmt = this.db.prepare(`
|
||||
INSERT INTO sessions (
|
||||
session_id, project, created_at, created_at_epoch, source,
|
||||
archive_path, archive_bytes, archive_checksum, archived_at, metadata_json
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`);
|
||||
|
||||
const info = stmt.run(
|
||||
input.session_id,
|
||||
input.project,
|
||||
isoString,
|
||||
epoch,
|
||||
input.source || 'compress',
|
||||
input.archive_path || null,
|
||||
input.archive_bytes || null,
|
||||
input.archive_checksum || null,
|
||||
input.archived_at || null,
|
||||
input.metadata_json || null
|
||||
);
|
||||
|
||||
return this.getById(info.lastInsertRowid as number)!;
|
||||
}
|
||||
|
||||
/**
|
||||
* Upsert a session record (insert or update if session_id exists)
|
||||
*/
|
||||
upsert(input: SessionInput): SessionRow {
|
||||
const existing = this.getBySessionId(input.session_id);
|
||||
if (existing) {
|
||||
return this.update(existing.id, input);
|
||||
}
|
||||
return this.create(input);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update an existing session record
|
||||
*/
|
||||
update(id: number, input: Partial<SessionInput>): SessionRow {
|
||||
const existing = this.getById(id);
|
||||
if (!existing) {
|
||||
throw new Error(`Session with id ${id} not found`);
|
||||
}
|
||||
|
||||
const { isoString, epoch } = normalizeTimestamp(input.created_at || existing.created_at);
|
||||
|
||||
const stmt = this.db.prepare(`
|
||||
UPDATE sessions SET
|
||||
project = ?, created_at = ?, created_at_epoch = ?, source = ?,
|
||||
archive_path = ?, archive_bytes = ?, archive_checksum = ?, archived_at = ?, metadata_json = ?
|
||||
WHERE id = ?
|
||||
`);
|
||||
|
||||
stmt.run(
|
||||
input.project || existing.project,
|
||||
isoString,
|
||||
epoch,
|
||||
input.source || existing.source,
|
||||
input.archive_path !== undefined ? input.archive_path : existing.archive_path,
|
||||
input.archive_bytes !== undefined ? input.archive_bytes : existing.archive_bytes,
|
||||
input.archive_checksum !== undefined ? input.archive_checksum : existing.archive_checksum,
|
||||
input.archived_at !== undefined ? input.archived_at : existing.archived_at,
|
||||
input.metadata_json !== undefined ? input.metadata_json : existing.metadata_json,
|
||||
id
|
||||
);
|
||||
|
||||
return this.getById(id)!;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get session by primary key
|
||||
*/
|
||||
getById(id: number): SessionRow | null {
|
||||
const stmt = this.db.prepare('SELECT * FROM sessions WHERE id = ?');
|
||||
return stmt.get(id) as SessionRow || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get session by session_id
|
||||
*/
|
||||
getBySessionId(sessionId: string): SessionRow | null {
|
||||
const stmt = this.db.prepare('SELECT * FROM sessions WHERE session_id = ?');
|
||||
return stmt.get(sessionId) as SessionRow || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a session exists by session_id
|
||||
*/
|
||||
has(sessionId: string): boolean {
|
||||
const stmt = this.db.prepare('SELECT 1 FROM sessions WHERE session_id = ? LIMIT 1');
|
||||
return Boolean(stmt.get(sessionId));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all session_ids as a Set (useful for import-history)
|
||||
*/
|
||||
getAllSessionIds(): Set<string> {
|
||||
const stmt = this.db.prepare('SELECT session_id FROM sessions');
|
||||
const rows = stmt.all() as { session_id: string }[];
|
||||
return new Set(rows.map(row => row.session_id));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get recent sessions for a project
|
||||
*/
|
||||
getRecentForProject(project: string, limit = 5): SessionRow[] {
|
||||
const stmt = this.db.prepare(`
|
||||
SELECT * FROM sessions
|
||||
WHERE project = ?
|
||||
ORDER BY created_at_epoch DESC
|
||||
LIMIT ?
|
||||
`);
|
||||
return stmt.all(project, limit) as SessionRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get recent sessions across all projects
|
||||
*/
|
||||
getRecent(limit = 5): SessionRow[] {
|
||||
const stmt = this.db.prepare(`
|
||||
SELECT * FROM sessions
|
||||
ORDER BY created_at_epoch DESC
|
||||
LIMIT ?
|
||||
`);
|
||||
return stmt.all(limit) as SessionRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get sessions by source type
|
||||
*/
|
||||
getBySource(source: 'compress' | 'save' | 'legacy-jsonl', limit?: number): SessionRow[] {
|
||||
const query = limit
|
||||
? 'SELECT * FROM sessions WHERE source = ? ORDER BY created_at_epoch DESC LIMIT ?'
|
||||
: 'SELECT * FROM sessions WHERE source = ? ORDER BY created_at_epoch DESC';
|
||||
|
||||
const stmt = this.db.prepare(query);
|
||||
const params = limit ? [source, limit] : [source];
|
||||
return stmt.all(...params) as SessionRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Count total sessions
|
||||
*/
|
||||
count(): number {
|
||||
const stmt = this.db.prepare('SELECT COUNT(*) as count FROM sessions');
|
||||
const result = stmt.get() as { count: number };
|
||||
return result.count;
|
||||
}
|
||||
|
||||
/**
 * Number of sessions recorded for a single project.
 *
 * @param project - Project name to count rows for.
 */
countByProject(project: string): number {
  const row = this.db
    .prepare('SELECT COUNT(*) as count FROM sessions WHERE project = ?')
    .get(project) as { count: number };
  return row.count;
}
|
||||
|
||||
/**
 * Delete a session by its numeric primary key.
 * Related records are removed via the schema's ON DELETE CASCADE constraints.
 *
 * @returns true when a row was actually deleted.
 */
deleteById(id: number): boolean {
  const result = this.db.prepare('DELETE FROM sessions WHERE id = ?').run(id);
  return result.changes > 0;
}
|
||||
|
||||
/**
 * Delete a session by its external session_id.
 * Related records are removed via the schema's ON DELETE CASCADE constraints.
 *
 * @returns true when a row was actually deleted.
 */
deleteBySessionId(sessionId: string): boolean {
  const result = this.db.prepare('DELETE FROM sessions WHERE session_id = ?').run(sessionId);
  return result.changes > 0;
}
|
||||
}
|
||||
@@ -0,0 +1,32 @@
|
||||
// Import migrations to register them
|
||||
import './migrations/index.js';
|
||||
|
||||
// Export main components
|
||||
export { DatabaseManager, getDatabase, initializeDatabase } from './Database.js';
|
||||
|
||||
// Export store classes
|
||||
export { SessionStore } from './SessionStore.js';
|
||||
export { MemoryStore } from './MemoryStore.js';
|
||||
export { OverviewStore } from './OverviewStore.js';
|
||||
export { DiagnosticsStore } from './DiagnosticsStore.js';
|
||||
|
||||
// Export types
|
||||
export * from './types.js';
|
||||
|
||||
// Convenience function to get all stores
|
||||
export async function createStores() {
|
||||
const { DatabaseManager } = await import('./Database.js');
|
||||
const db = await DatabaseManager.getInstance().initialize();
|
||||
|
||||
const { SessionStore } = await import('./SessionStore.js');
|
||||
const { MemoryStore } = await import('./MemoryStore.js');
|
||||
const { OverviewStore } = await import('./OverviewStore.js');
|
||||
const { DiagnosticsStore } = await import('./DiagnosticsStore.js');
|
||||
|
||||
return {
|
||||
sessions: new SessionStore(db),
|
||||
memories: new MemoryStore(db),
|
||||
overviews: new OverviewStore(db),
|
||||
diagnostics: new DiagnosticsStore(db)
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,133 @@
|
||||
import { Migration } from '../Database.js';
|
||||
|
||||
/**
|
||||
* Initial migration: Create all core tables for claude-mem SQLite index
|
||||
*/
|
||||
export const migration001: Migration = {
|
||||
version: 1,
|
||||
|
||||
up: (db) => {
|
||||
// Create sessions table
|
||||
db.exec(`
|
||||
CREATE TABLE sessions (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
session_id TEXT UNIQUE NOT NULL,
|
||||
project TEXT NOT NULL,
|
||||
created_at TEXT NOT NULL,
|
||||
created_at_epoch INTEGER NOT NULL,
|
||||
source TEXT DEFAULT 'compress',
|
||||
archive_path TEXT,
|
||||
archive_bytes INTEGER,
|
||||
archive_checksum TEXT,
|
||||
archived_at TEXT,
|
||||
metadata_json TEXT
|
||||
)
|
||||
`);
|
||||
|
||||
// Create indexes for sessions
|
||||
db.exec(`
|
||||
CREATE INDEX sessions_project_created_at ON sessions (project, created_at_epoch DESC)
|
||||
`);
|
||||
db.exec(`
|
||||
CREATE INDEX sessions_source_created ON sessions (source, created_at_epoch DESC)
|
||||
`);
|
||||
|
||||
// Create overviews table
|
||||
db.exec(`
|
||||
CREATE TABLE overviews (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
session_id TEXT NOT NULL REFERENCES sessions(session_id) ON DELETE CASCADE,
|
||||
content TEXT NOT NULL,
|
||||
created_at TEXT NOT NULL,
|
||||
created_at_epoch INTEGER NOT NULL,
|
||||
project TEXT NOT NULL,
|
||||
origin TEXT DEFAULT 'claude'
|
||||
)
|
||||
`);
|
||||
|
||||
// Create index for overviews
|
||||
db.exec(`
|
||||
CREATE INDEX overviews_project_created_at ON overviews (project, created_at_epoch DESC)
|
||||
`);
|
||||
|
||||
// Create memories table
|
||||
db.exec(`
|
||||
CREATE TABLE memories (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
session_id TEXT NOT NULL REFERENCES sessions(session_id) ON DELETE CASCADE,
|
||||
text TEXT NOT NULL,
|
||||
document_id TEXT,
|
||||
keywords TEXT,
|
||||
created_at TEXT NOT NULL,
|
||||
created_at_epoch INTEGER NOT NULL,
|
||||
project TEXT NOT NULL,
|
||||
archive_basename TEXT,
|
||||
origin TEXT DEFAULT 'transcript'
|
||||
)
|
||||
`);
|
||||
|
||||
// Create indexes for memories
|
||||
db.exec(`
|
||||
CREATE INDEX memories_project_created_at ON memories (project, created_at_epoch DESC)
|
||||
`);
|
||||
db.exec(`
|
||||
CREATE UNIQUE INDEX memories_document_id_unique ON memories (document_id) WHERE document_id IS NOT NULL
|
||||
`);
|
||||
|
||||
// Create diagnostics table
|
||||
db.exec(`
|
||||
CREATE TABLE diagnostics (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
session_id TEXT REFERENCES sessions(session_id) ON DELETE SET NULL,
|
||||
message TEXT NOT NULL,
|
||||
severity TEXT DEFAULT 'warn',
|
||||
created_at TEXT NOT NULL,
|
||||
created_at_epoch INTEGER NOT NULL,
|
||||
project TEXT NOT NULL,
|
||||
origin TEXT DEFAULT 'compressor'
|
||||
)
|
||||
`);
|
||||
|
||||
// Create index for diagnostics
|
||||
db.exec(`
|
||||
CREATE INDEX diagnostics_project_created_at ON diagnostics (project, created_at_epoch DESC)
|
||||
`);
|
||||
|
||||
// Create archives table (for future archival workflows)
|
||||
db.exec(`
|
||||
CREATE TABLE archives (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
session_id TEXT UNIQUE NOT NULL REFERENCES sessions(session_id) ON DELETE CASCADE,
|
||||
path TEXT NOT NULL,
|
||||
bytes INTEGER,
|
||||
checksum TEXT,
|
||||
stored_at TEXT NOT NULL,
|
||||
storage_status TEXT DEFAULT 'active'
|
||||
)
|
||||
`);
|
||||
|
||||
// Create titles table (ready for conversation-titles.jsonl migration)
|
||||
db.exec(`
|
||||
CREATE TABLE titles (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
session_id TEXT UNIQUE NOT NULL REFERENCES sessions(session_id) ON DELETE CASCADE,
|
||||
title TEXT NOT NULL,
|
||||
created_at TEXT NOT NULL,
|
||||
project TEXT NOT NULL
|
||||
)
|
||||
`);
|
||||
|
||||
console.log('✅ Created initial database schema with all tables and indexes');
|
||||
},
|
||||
|
||||
down: (db) => {
|
||||
// Drop tables in reverse order to respect foreign key constraints
|
||||
const tables = ['titles', 'archives', 'diagnostics', 'memories', 'overviews', 'sessions'];
|
||||
|
||||
for (const table of tables) {
|
||||
db.exec(`DROP TABLE IF EXISTS ${table}`);
|
||||
}
|
||||
|
||||
console.log('🗑️ Dropped all tables from initial migration');
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,17 @@
|
||||
import { DatabaseManager } from '../Database.js';
|
||||
import { migration001 } from './001_initial.js';
|
||||
|
||||
/**
|
||||
* Register all migrations with the database manager
|
||||
*/
|
||||
export function registerMigrations(): void {
|
||||
const manager = DatabaseManager.getInstance();
|
||||
|
||||
// Register migrations in order
|
||||
manager.registerMigration(migration001);
|
||||
|
||||
console.log('📋 Registered all database migrations');
|
||||
}
|
||||
|
||||
// Auto-register migrations when this module is imported
|
||||
registerMigrations();
|
||||
@@ -0,0 +1,152 @@
|
||||
/**
 * Database entity types for SQLite storage.
 * Row types mirror the tables created in migration 001.
 */

/**
 * Row shape for the `sessions` table — one row per indexed session.
 */
export interface SessionRow {
  id: number;                                      // AUTOINCREMENT primary key
  session_id: string;                              // unique external session identifier
  project: string;                                 // project the session belongs to
  created_at: string;                              // ISO-8601 timestamp
  created_at_epoch: number;                        // same instant as epoch milliseconds
  source: 'compress' | 'save' | 'legacy-jsonl';    // how the row was created (schema default: 'compress')
  archive_path?: string;                           // set once the transcript has been archived
  archive_bytes?: number;
  archive_checksum?: string;
  archived_at?: string;
  metadata_json?: string;                          // free-form JSON blob
}
|
||||
|
||||
/**
 * Row shape for the `overviews` table (session-level summaries).
 * session_id references sessions(session_id) with ON DELETE CASCADE.
 */
export interface OverviewRow {
  id: number;                  // AUTOINCREMENT primary key
  session_id: string;          // owning session
  content: string;             // overview text
  created_at: string;          // ISO-8601 timestamp
  created_at_epoch: number;    // epoch milliseconds
  project: string;
  origin: string;              // schema default: 'claude'
}
|
||||
|
||||
/**
 * Row shape for the `memories` table (individual memory records).
 * session_id references sessions(session_id) with ON DELETE CASCADE;
 * document_id is unique when present (partial unique index in the schema).
 */
export interface MemoryRow {
  id: number;                  // AUTOINCREMENT primary key
  session_id: string;          // owning session
  text: string;                // memory content
  document_id?: string;        // unique when set (partial unique index)
  keywords?: string;
  created_at: string;          // ISO-8601 timestamp
  created_at_epoch: number;    // epoch milliseconds
  project: string;
  archive_basename?: string;
  origin: string;              // schema default: 'transcript'
}
|
||||
|
||||
/**
 * Row shape for the `diagnostics` table.
 * session_id is optional: the schema uses ON DELETE SET NULL, so diagnostics
 * outlive their session.
 */
export interface DiagnosticRow {
  id: number;                           // AUTOINCREMENT primary key
  session_id?: string;                  // may be null after session deletion
  message: string;
  severity: 'info' | 'warn' | 'error';  // schema default: 'warn'
  created_at: string;                   // ISO-8601 timestamp
  created_at_epoch: number;             // epoch milliseconds
  project: string;
  origin: string;                       // schema default: 'compressor'
}
|
||||
|
||||
/**
 * Row shape for the `archives` table (one archive record per session;
 * session_id is UNIQUE and cascades on session deletion).
 */
export interface ArchiveRow {
  id: number;                                        // AUTOINCREMENT primary key
  session_id: string;                                // unique — at most one archive per session
  path: string;                                      // filesystem location of the archive
  bytes?: number;
  checksum?: string;
  stored_at: string;
  storage_status: 'active' | 'archived' | 'deleted'; // schema default: 'active'
}
|
||||
|
||||
/**
 * Row shape for the `titles` table (one conversation title per session;
 * session_id is UNIQUE and cascades on session deletion).
 */
export interface TitleRow {
  id: number;           // AUTOINCREMENT primary key
  session_id: string;   // unique — at most one title per session
  title: string;
  created_at: string;   // ISO-8601 timestamp
  project: string;
}
|
||||
|
||||
/**
 * Input types for creating new records (without id and auto-generated fields).
 */

/**
 * Payload for inserting a sessions row. `id` and `created_at_epoch` are
 * absent — presumably derived at insert time (e.g. via normalizeTimestamp);
 * confirm against the store that consumes this type.
 */
export interface SessionInput {
  session_id: string;
  project: string;
  created_at: string;                             // timestamp in any format normalizeTimestamp accepts
  source?: 'compress' | 'save' | 'legacy-jsonl';  // schema default: 'compress'
  archive_path?: string;
  archive_bytes?: number;
  archive_checksum?: string;
  archived_at?: string;
  metadata_json?: string;
}
|
||||
|
||||
/**
 * Payload for inserting an overviews row; `origin` falls back to the
 * schema default ('claude') when omitted.
 */
export interface OverviewInput {
  session_id: string;   // must reference an existing session
  content: string;
  created_at: string;
  project: string;
  origin?: string;      // schema default: 'claude'
}
|
||||
|
||||
/**
 * Payload for inserting a memories row; `origin` falls back to the
 * schema default ('transcript') when omitted.
 */
export interface MemoryInput {
  session_id: string;        // must reference an existing session
  text: string;
  document_id?: string;      // must be unique when provided (partial unique index)
  keywords?: string;
  created_at: string;
  project: string;
  archive_basename?: string;
  origin?: string;           // schema default: 'transcript'
}
|
||||
|
||||
/**
 * Payload for inserting a diagnostics row; `severity` and `origin` fall
 * back to the schema defaults ('warn' / 'compressor') when omitted.
 */
export interface DiagnosticInput {
  session_id?: string;                   // optional — diagnostics may be session-less
  message: string;
  severity?: 'info' | 'warn' | 'error';  // schema default: 'warn'
  created_at: string;
  project: string;
  origin?: string;                       // schema default: 'compressor'
}
|
||||
|
||||
/**
|
||||
* Helper function to normalize timestamps from various formats
|
||||
*/
|
||||
export function normalizeTimestamp(timestamp: string | Date | number | undefined): { isoString: string; epoch: number } {
|
||||
let date: Date;
|
||||
|
||||
if (!timestamp) {
|
||||
date = new Date();
|
||||
} else if (timestamp instanceof Date) {
|
||||
date = timestamp;
|
||||
} else if (typeof timestamp === 'number') {
|
||||
date = new Date(timestamp);
|
||||
} else if (typeof timestamp === 'string') {
|
||||
// Handle empty strings
|
||||
if (!timestamp.trim()) {
|
||||
date = new Date();
|
||||
} else {
|
||||
date = new Date(timestamp);
|
||||
// If invalid date, try to parse it differently
|
||||
if (isNaN(date.getTime())) {
|
||||
// Try common formats
|
||||
const cleaned = timestamp.replace(/\s+/g, 'T').replace(/T+/g, 'T');
|
||||
date = new Date(cleaned);
|
||||
|
||||
// Still invalid? Use current time
|
||||
if (isNaN(date.getTime())) {
|
||||
date = new Date();
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
date = new Date();
|
||||
}
|
||||
|
||||
return {
|
||||
isoString: date.toISOString(),
|
||||
epoch: date.getTime()
|
||||
};
|
||||
}
|
||||
Reference in New Issue
Block a user