diff --git a/src/services/sqlite/SessionStore.ts b/src/services/sqlite/SessionStore.ts
index aaaf8754..110990f7 100644
--- a/src/services/sqlite/SessionStore.ts
+++ b/src/services/sqlite/SessionStore.ts
@@ -14,6 +14,7 @@ import {
 } from '../../types/database.js';
 import type { PendingMessageStore } from './PendingMessageStore.js';
 import { computeObservationContentHash, findDuplicateObservation } from './observations/store.js';
+import { parseFileList } from './observations/files.js';
 
 /**
  * Session data store for SDK sessions, observations, and summaries
@@ -1309,20 +1310,10 @@ export class SessionStore {
 
     for (const row of rows) {
       // Parse files_read
-      if (row.files_read) {
-        const files = JSON.parse(row.files_read);
-        if (Array.isArray(files)) {
-          files.forEach(f => filesReadSet.add(f));
-        }
-      }
+      parseFileList(row.files_read).forEach(f => filesReadSet.add(f));
 
       // Parse files_modified
-      if (row.files_modified) {
-        const files = JSON.parse(row.files_modified);
-        if (Array.isArray(files)) {
-          files.forEach(f => filesModifiedSet.add(f));
-        }
-      }
+      parseFileList(row.files_modified).forEach(f => filesModifiedSet.add(f));
     }
 
     return {
diff --git a/src/services/sqlite/observations/files.ts b/src/services/sqlite/observations/files.ts
index e9bcba28..ec122fd3 100644
--- a/src/services/sqlite/observations/files.ts
+++ b/src/services/sqlite/observations/files.ts
@@ -7,6 +7,21 @@
 import { Database } from 'bun:sqlite';
 import { logger } from '../../../utils/logger.js';
 import type { SessionFilesResult } from './types.js';
+
+/**
+ * Safely parse a JSON array string from the DB.
+ * Handles legacy bare-path strings (e.g. "/foo/bar.ts") by wrapping them
+ * in an array instead of crashing with a SyntaxError (fix for #1359).
+ */
+export function parseFileList(value: string | null | undefined): string[] {
+  if (!value) return [];
+  try {
+    const parsed = JSON.parse(value);
+    return Array.isArray(parsed) ? parsed : [String(parsed)];
+  } catch {
+    return [value];
+  }
+}
 
 /**
  * Get aggregated files from all observations for a session
@@ -30,20 +45,10 @@ export function getFilesForSession(
 
     for (const row of rows) {
       // Parse files_read
-      if (row.files_read) {
-        const files = JSON.parse(row.files_read);
-        if (Array.isArray(files)) {
-          files.forEach(f => filesReadSet.add(f));
-        }
-      }
+      parseFileList(row.files_read).forEach(f => filesReadSet.add(f));
 
       // Parse files_modified
-      if (row.files_modified) {
-        const files = JSON.parse(row.files_modified);
-        if (Array.isArray(files)) {
-          files.forEach(f => filesModifiedSet.add(f));
-        }
-      }
+      parseFileList(row.files_modified).forEach(f => filesModifiedSet.add(f));
     }
 
     return {
diff --git a/src/services/sync/ChromaSync.ts b/src/services/sync/ChromaSync.ts
index 9f3e01d1..ecf208e1 100644
--- a/src/services/sync/ChromaSync.ts
+++ b/src/services/sync/ChromaSync.ts
@@ -16,6 +16,7 @@ import { ChromaMcpManager } from './ChromaMcpManager.js';
 import { ParsedObservation, ParsedSummary } from '../../sdk/parser.js';
 import { SessionStore } from '../sqlite/SessionStore.js';
 import { logger } from '../../utils/logger.js';
+import { parseFileList } from '../sqlite/observations/files.js';
 
 interface ChromaDocument {
   id: string;
@@ -125,8 +126,8 @@
     // Parse JSON fields
     const facts = obs.facts ? JSON.parse(obs.facts) : [];
     const concepts = obs.concepts ? JSON.parse(obs.concepts) : [];
-    const files_read = obs.files_read ? JSON.parse(obs.files_read) : [];
-    const files_modified = obs.files_modified ? JSON.parse(obs.files_modified) : [];
+    const files_read = parseFileList(obs.files_read);
+    const files_modified = parseFileList(obs.files_modified);
 
     const baseMetadata: Record = {
       sqlite_id: obs.id,
diff --git a/tests/services/sqlite/parse-file-list.test.ts b/tests/services/sqlite/parse-file-list.test.ts
new file mode 100644
index 00000000..e4ee55fb
--- /dev/null
+++ b/tests/services/sqlite/parse-file-list.test.ts
@@ -0,0 +1,46 @@
+/**
+ * Tests for parseFileList (fix for #1359)
+ *
+ * Validates safe JSON array parsing for files_read/files_modified DB columns
+ * that may contain legacy bare path strings instead of JSON arrays.
+ */
+import { describe, it, expect } from 'bun:test';
+import { parseFileList } from '../../../src/services/sqlite/observations/files.js';
+
+describe('parseFileList', () => {
+  it('returns [] for null', () => {
+    expect(parseFileList(null)).toEqual([]);
+  });
+
+  it('returns [] for undefined', () => {
+    expect(parseFileList(undefined)).toEqual([]);
+  });
+
+  it('returns [] for empty string', () => {
+    expect(parseFileList('')).toEqual([]);
+  });
+
+  it('returns [] for empty JSON array', () => {
+    expect(parseFileList('[]')).toEqual([]);
+  });
+
+  it('parses a normal JSON array', () => {
+    expect(parseFileList('["/a/b.ts","/c/d.ts"]')).toEqual(['/a/b.ts', '/c/d.ts']);
+  });
+
+  it('wraps a bare path in an array instead of crashing', () => {
+    expect(parseFileList('/Users/foo/bar.go')).toEqual(['/Users/foo/bar.go']);
+  });
+
+  it('wraps a Windows bare path in an array', () => {
+    expect(parseFileList('C:\\Users\\foo\\bar.ts')).toEqual(['C:\\Users\\foo\\bar.ts']);
+  });
+
+  it('handles invalid JSON by treating value as single element', () => {
+    expect(parseFileList('not valid json {')).toEqual(['not valid json {']);
+  });
+
+  it('wraps a JSON scalar string in an array', () => {
+    expect(parseFileList('"single-file.ts"')).toEqual(['single-file.ts']);
+  });
+});