Merge pull request #1641 from thedotmack/integration/validation-batch

fix: worker startup crash, missing migration, and merge artifacts
This commit is contained in:
Alex Newman
2026-04-07 14:20:46 -07:00
committed by GitHub
116 changed files with 6915 additions and 1300 deletions
+14 -7
View File
@@ -1,17 +1,24 @@
{
"name": "claude-mem",
"version": "10.4.1",
"description": "Persistent memory system for Claude Code - seamlessly preserve context across sessions",
"version": "11.0.1",
"description": "Memory compression system for Claude Code - persist context across sessions",
"author": {
"name": "Alex Newman"
},
"repository": "https://github.com/thedotmack/claude-mem",
"license": "AGPL-3.0",
"keywords": [
"claude",
"claude-code",
"claude-agent-sdk",
"mcp",
"plugin",
"memory",
"context",
"persistence",
"hooks",
"mcp"
]
"compression",
"knowledge-graph",
"transcript",
"typescript",
"nodejs"
],
"homepage": "https://github.com/thedotmack/claude-mem#readme"
}
+43
View File
@@ -0,0 +1,43 @@
{
"name": "claude-mem",
"version": "11.0.1",
"description": "Memory compression system for Claude Code - persist context across sessions",
"author": {
"name": "Alex Newman",
"url": "https://github.com/thedotmack"
},
"homepage": "https://github.com/thedotmack/claude-mem#readme",
"repository": "https://github.com/thedotmack/claude-mem",
"license": "AGPL-3.0",
"keywords": [
"claude",
"claude-code",
"claude-agent-sdk",
"mcp",
"plugin",
"memory",
"compression",
"knowledge-graph",
"transcript",
"typescript",
"nodejs"
],
"interface": {
"displayName": "claude-mem",
"shortDescription": "Persistent memory and context compression across coding sessions.",
"longDescription": "claude-mem captures coding-session activity, compresses it into reusable observations, and injects relevant context back into future Claude Code and Codex-compatible sessions.",
"developerName": "Alex Newman",
"category": "Productivity",
"capabilities": [
"Interactive",
"Write"
],
"websiteURL": "https://github.com/thedotmack/claude-mem",
"defaultPrompt": [
"Find what I already learned about this codebase before I start a new task.",
"Show recent observations related to the files I am editing right now.",
"Summarize the last session and inject the most relevant context into this one."
],
"brandColor": "#1F6FEB"
}
}
+1 -1
View File
@@ -27,7 +27,7 @@ jobs:
- name: Comment with AI summary
run: |
gh issue comment $ISSUE_NUMBER --body '${{ steps.inference.outputs.response }}'
gh issue comment "$ISSUE_NUMBER" --body "$RESPONSE"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
ISSUE_NUMBER: ${{ github.event.issue.number }}
RESPONSE: ${{ steps.inference.outputs.response }}
+1
View File
@@ -0,0 +1 @@
legacy-peer-deps=true
+44 -32
View File
@@ -23,14 +23,14 @@ Claude-mem uses **two distinct session IDs** to track conversations and memory:
┌─────────────────────────────────────────────────────────────┐
│ 2. SDKAgent starts, checks hasRealMemorySessionId │
│ const hasReal = memorySessionId !== null
│ const hasReal = !!memorySessionId
│ → FALSE (it's NULL) │
│ → Resume NOT used (fresh SDK session) │
└─────────────────────────────────────────────────────────────┘
┌─────────────────────────────────────────────────────────────┐
│ 3. First SDK message arrives with session_id │
updateMemorySessionId(sessionDbId, "sdk-gen-abc123")
ensureMemorySessionIdRegistered(sessionDbId, "sdk-gen-abc123") │
│ │
│ Database state: │
│ ├─ content_session_id: "user-session-123" │
@@ -38,45 +38,43 @@ Claude-mem uses **two distinct session IDs** to track conversations and memory:
└─────────────────────────────────────────────────────────────┘
┌─────────────────────────────────────────────────────────────┐
│ 4. Subsequent prompts use resume
│ const hasReal = memorySessionId !== null
→ TRUE (it's not NULL)
│ 4. Subsequent prompts may use resume │
│ const shouldResume =
!!memorySessionId && lastPromptNumber > 1 && !forceInit
│ → TRUE only for continuation prompts in the same runtime │
│ → Resume parameter: { resume: "sdk-gen-abc123" } │
└─────────────────────────────────────────────────────────────┘
```
### Observation Storage
**CRITICAL**: Observations are stored with `contentSessionId`, NOT the captured SDK `memorySessionId`.
**CRITICAL**: Observations are stored with the real `memorySessionId`, NOT `contentSessionId`.
```typescript
// SDKAgent.ts line 332-333
this.dbManager.getSessionStore().storeObservation(
session.contentSessionId, // ← contentSessionId, not memorySessionId!
session.project,
obs,
// ...
);
// SessionStore.ts
storeObservation(memorySessionId, project, observation, ...);
```
Even though the parameter is named `memorySessionId`, it receives `contentSessionId`. This means:
This means:
- Database column: `observations.memory_session_id`
- Stored value: `contentSessionId` (the user's session ID)
- Stored value: the captured or synthesized `memorySessionId`
- Foreign key: References `sdk_sessions.memory_session_id`
The observations are linked to the session via `contentSessionId`, which remains constant throughout the session lifecycle.
Observation storage is blocked until a real `memorySessionId` is registered in `sdk_sessions`.
This is why `SDKAgent` persists the SDK-returned `session_id` immediately through
`ensureMemorySessionIdRegistered(...)` before any observation insert can succeed.
## Key Invariants
### 1. NULL-Based Detection
```typescript
const hasRealMemorySessionId = session.memorySessionId !== null;
const hasRealMemorySessionId = !!session.memorySessionId;
```
- When `memorySessionId === null` → Not yet captured
- When `memorySessionId !== null` → Real SDK session captured
- When `memorySessionId` is falsy → Not yet captured
- When `memorySessionId` is truthy → Real SDK session captured
### 2. Resume Safety
@@ -86,12 +84,20 @@ const hasRealMemorySessionId = session.memorySessionId !== null;
// ❌ FORBIDDEN - Would resume user's session instead of memory session!
query({ resume: contentSessionId })
// ✅ CORRECT - Only resume when we have real memory session ID
// ✅ CORRECT - Only resume for a continuation prompt in a valid runtime
query({
...(hasRealMemorySessionId && { resume: memorySessionId })
...(
!!memorySessionId &&
lastPromptNumber > 1 &&
!forceInit &&
{ resume: memorySessionId }
)
})
```
`memorySessionId` is necessary but not sufficient.
Worker restart and crash-recovery paths may still carry a persisted ID while forcing a fresh INIT run.
### 3. Session Isolation
- Each `contentSessionId` maps to exactly one database session
@@ -103,7 +109,8 @@ query({
- Observations reference `sdk_sessions.memory_session_id`
- Initially, `sdk_sessions.memory_session_id` is NULL (no observations can be stored yet)
- When SDK session ID is captured, `sdk_sessions.memory_session_id` is set to the real value
- Observations are stored using `contentSessionId` and remain retrievable via `contentSessionId`
- Observations are stored using that real `memory_session_id`
- Queries can still find the session from `content_session_id`, but observation rows themselves stay keyed by `memory_session_id`
## Testing Strategy
@@ -116,8 +123,8 @@ The test suite validates all critical invariants:
### Test Categories
1. **NULL-Based Detection** - Validates `hasRealMemorySessionId` logic
2. **Observation Storage** - Confirms observations use `contentSessionId`
3. **Resume Safety** - Prevents `contentSessionId` from being used for resume
2. **Observation Storage** - Confirms observations use real `memorySessionId` values after registration
3. **Resume Safety** - Prevents `contentSessionId` and stale INIT sessions from being used for resume
4. **Cross-Contamination Prevention** - Ensures session isolation
5. **Foreign Key Integrity** - Validates cascade behavior
6. **Session Lifecycle** - Tests create → capture → resume flow
@@ -141,14 +148,14 @@ bun test --verbose
### ❌ Storing observations before a memorySessionId exists
```typescript
// WRONG - Don't use the captured SDK session ID
storeObservation(session.memorySessionId, ...)
// WRONG - Don't store observations before memorySessionId is available
storeObservation(session.contentSessionId, ...)
```
### ❌ Resuming on memorySessionId alone
```typescript
// WRONG - memorySessionId could be NULL!
// WRONG - memorySessionId alone is not enough
if (session.memorySessionId) {
query({ resume: session.memorySessionId })
}
@@ -166,14 +173,14 @@ const resumeId = session.memorySessionId
### ✅ Storing observations
```typescript
// Always use contentSessionId
storeObservation(session.contentSessionId, project, obs, ...)
// Only store after a real memorySessionId has been captured or synthesized
storeObservation(session.memorySessionId, project, obs, ...)
```
### ✅ Checking for real memory session ID
```typescript
const hasRealMemorySessionId = session.memorySessionId !== null;
const hasRealMemorySessionId = !!session.memorySessionId;
```
### ✅ Using resume parameter
@@ -182,7 +189,12 @@ const hasRealMemorySessionId = session.memorySessionId !== null;
query({
prompt: messageGenerator,
options: {
...(hasRealMemorySessionId && { resume: session.memorySessionId }),
...(
hasRealMemorySessionId &&
session.lastPromptNumber > 1 &&
!session.forceInit &&
{ resume: session.memorySessionId }
),
// ... other options
}
})
@@ -234,6 +246,6 @@ WHERE s.content_session_id = 'your-session-id';
## References
- **Implementation**: `src/services/worker/SDKAgent.ts` (lines 72-94)
- **Database Schema**: `src/services/sqlite/SessionStore.ts` (line 95-104)
- **Session Store**: `src/services/sqlite/SessionStore.ts`
- **Tests**: `tests/session_id_usage_validation.test.ts`
- **Related Tests**: `tests/session_id_refactor.test.ts`
+12 -12
View File
@@ -32,7 +32,7 @@ For simple single-turn queries where you don't need to maintain a session, use `
import { unstable_v2_prompt } from '@anthropic-ai/claude-agent-sdk'
const result = await unstable_v2_prompt('What is 2 + 2?', {
model: 'claude-sonnet-4-5-20250929'
model: 'claude-sonnet-4-6-20250929'
})
console.log(result.result)
```
@@ -45,7 +45,7 @@ import { query } from '@anthropic-ai/claude-agent-sdk'
const q = query({
prompt: 'What is 2 + 2?',
options: { model: 'claude-sonnet-4-5-20250929' }
options: { model: 'claude-sonnet-4-6-20250929' }
})
for await (const msg of q) {
@@ -71,7 +71,7 @@ The example below creates a session, sends "Hello!" to Claude, and prints the te
import { unstable_v2_createSession } from '@anthropic-ai/claude-agent-sdk'
await using session = unstable_v2_createSession({
model: 'claude-sonnet-4-5-20250929'
model: 'claude-sonnet-4-6-20250929'
})
await session.send('Hello!')
@@ -97,7 +97,7 @@ import { query } from '@anthropic-ai/claude-agent-sdk'
const q = query({
prompt: 'Hello!',
options: { model: 'claude-sonnet-4-5-20250929' }
options: { model: 'claude-sonnet-4-6-20250929' }
})
for await (const msg of q) {
@@ -123,7 +123,7 @@ This example asks a math question, then asks a follow-up that references the pre
import { unstable_v2_createSession } from '@anthropic-ai/claude-agent-sdk'
await using session = unstable_v2_createSession({
model: 'claude-sonnet-4-5-20250929'
model: 'claude-sonnet-4-6-20250929'
})
// Turn 1
@@ -177,7 +177,7 @@ async function* createInputStream() {
const q = query({
prompt: createInputStream(),
options: { model: 'claude-sonnet-4-5-20250929' }
options: { model: 'claude-sonnet-4-6-20250929' }
})
for await (const msg of q) {
@@ -217,7 +217,7 @@ function getAssistantText(msg: SDKMessage): string | null {
// Create initial session and have a conversation
const session = unstable_v2_createSession({
model: 'claude-sonnet-4-5-20250929'
model: 'claude-sonnet-4-6-20250929'
})
await session.send('Remember this number: 42')
@@ -235,7 +235,7 @@ session.close()
// Later: resume the session using the stored ID
await using resumedSession = unstable_v2_resumeSession(sessionId!, {
model: 'claude-sonnet-4-5-20250929'
model: 'claude-sonnet-4-6-20250929'
})
await resumedSession.send('What number did I ask you to remember?')
@@ -254,7 +254,7 @@ import { query } from '@anthropic-ai/claude-agent-sdk'
// Create initial session
const initialQuery = query({
prompt: 'Remember this number: 42',
options: { model: 'claude-sonnet-4-5-20250929' }
options: { model: 'claude-sonnet-4-6-20250929' }
})
// Get session ID from any message
@@ -276,7 +276,7 @@ console.log('Session ID:', sessionId)
const resumedQuery = query({
prompt: 'What number did I ask you to remember?',
options: {
model: 'claude-sonnet-4-5-20250929',
model: 'claude-sonnet-4-6-20250929',
resume: sessionId
}
})
@@ -304,7 +304,7 @@ Sessions can be closed manually or automatically using [`await using`](https://w
import { unstable_v2_createSession } from '@anthropic-ai/claude-agent-sdk'
await using session = unstable_v2_createSession({
model: 'claude-sonnet-4-5-20250929'
model: 'claude-sonnet-4-6-20250929'
})
// Session closes automatically when the block exits
```
@@ -315,7 +315,7 @@ await using session = unstable_v2_createSession({
import { unstable_v2_createSession } from '@anthropic-ai/claude-agent-sdk'
const session = unstable_v2_createSession({
model: 'claude-sonnet-4-5-20250929'
model: 'claude-sonnet-4-6-20250929'
})
// ... use the session ...
session.close()
+1 -1
View File
@@ -860,7 +860,7 @@ async startSession(session: ActiveSession, worker?: any) {
const queryResult = query({
prompt: messageGenerator,
options: {
model: 'claude-sonnet-4-5',
model: 'claude-sonnet-4-6',
disallowedTools: ['Bash', 'Read', 'Write', ...], // Observer-only
abortController: session.abortController
}
+1
View File
@@ -70,6 +70,7 @@
"pages": [
"context-engineering",
"progressive-disclosure",
"file-read-gate",
"smart-explore-benchmark"
]
},
+180
View File
@@ -0,0 +1,180 @@
---
title: "File Read Gate"
description: "How claude-mem intercepts file reads to save tokens using observation history"
---
# File Read Gate
## What It Is
The File Read Gate is a **PreToolUse hook** that intercepts Claude's `Read` tool calls. When Claude tries to read a file that has prior observations in the database, the gate blocks the read and instead shows a compact timeline of past work on that file. Claude then decides the cheapest path to get the context it needs.
This is a concrete implementation of [progressive disclosure](/progressive-disclosure) -- show what exists first, let the agent decide what to fetch.
---
## How It Works
```
Claude calls Read("src/services/worker-service.ts")
PreToolUse hook fires
File size < 1,500 bytes? ──→ Allow read (timeline costs more than file)
↓ No
Project excluded? ──→ Allow read
↓ No
Query worker: GET /api/observations/by-file
No observations found? ──→ Allow read
↓ Has observations
Deduplicate (1 per session)
Rank by specificity
Limit to 15
DENY read with timeline
```
When the gate fires, Claude sees a message like this:
```
Current: 2026-04-07 3:25pm PDT
Read blocked: This file has prior observations. Choose the cheapest path:
- Already know enough? The timeline below may be all you need (semantic priming).
- Need details? get_observations([IDs]) -- ~300 tokens each.
- Need current code? smart_outline("path") for structure (~1-2k tokens),
smart_unfold("path", "<symbol>") for a specific function (~400-2k tokens).
- Need to edit? Use smart tools for line numbers, then sed via Bash.
### Apr 5, 2026
42301 2:15pm Fixed database connection pooling
42298 1:50pm Refactored worker startup sequence
### Mar 28, 2026
41890 4:30pm Added health check endpoint
```
---
## The Decision Tree
Claude has four options after seeing the timeline, ordered from cheapest to most expensive:
| Option | Token Cost | When to Use |
|--------|-----------|-------------|
| **Semantic priming** | 0 extra | Timeline titles tell Claude enough to proceed |
| **get_observations([IDs])** | ~300 each | Need specific details from past work |
| **smart_outline / smart_unfold** | ~1-2k | Need current code structure or a specific function |
| **Full file read** | 5k-50k | File has changed significantly since observations |
In practice, most file reads resolve at the semantic priming or get_observations level, saving thousands of tokens per interaction.
---
## Current Date/Time for Temporal Reasoning
The timeline includes the current date and time as its first line:
```
Current: 2026-04-07 3:25pm PDT
```
This lets Claude reason about how recent the observations are relative to now. For example:
- **Observations from today** -- likely still accurate, semantic priming is safe
- **Observations from last week** -- probably accurate, get_observations for details
- **Observations from months ago** -- file may have changed, consider smart_outline or full read
The timestamp format matches the session start context header (`YYYY-MM-DD time timezone`), so Claude sees consistent temporal markers throughout its session.
---
## Token Economics
A typical source file costs **5,000-50,000 tokens** to read in full. The File Read Gate replaces that with:
| Component | Tokens |
|-----------|--------|
| Timeline header + instructions | ~120 |
| 15 observation entries | ~250 |
| **Total timeline** | **~370** |
If Claude needs more detail, it fetches individual observations at ~300 tokens each. Even fetching 3 observations totals ~1,270 tokens -- still a **75-97% savings** over reading the full file.
### Real-World Example
Without the gate (reading `worker-service.ts`):
```
Read: 18,000 tokens
```
With the gate:
```
Timeline: 370 tokens
+ 2 observations: 600 tokens
Total: 970 tokens (95% savings)
```
---
## Specificity Ranking
Not all observations about a file are equally relevant. The gate scores each observation by how specifically it relates to the target file:
| Signal | Score Bonus |
|--------|------------|
| File was **modified** (not just read) | +2 |
| Observation covers **3 or fewer** total files | +2 |
| Observation covers **4-8** total files | +1 |
| Observation covers **9+** files (survey-like) | +0 |
Higher-scoring observations appear first in the timeline. An observation where the file was the primary modification target ranks above one where the file was incidentally read alongside 20 others.
---
## Configuration
### Small File Bypass
Files smaller than **1,500 bytes** always pass through the gate without interception. At that size, the timeline (~370 tokens) would cost more than reading the file directly. This threshold is hardcoded in `src/cli/handlers/file-context.ts`.
### Project Exclusions
Projects matching patterns in `CLAUDE_MEM_EXCLUDED_PROJECTS` skip the gate entirely. Configure this in `~/.claude-mem/settings.json`:
```json
{
"CLAUDE_MEM_EXCLUDED_PROJECTS": "/tmp/*,/scratch/*"
}
```
### How to Disable the Gate
The File Read Gate is implemented as a PreToolUse hook on the `Read` tool matcher. To disable it, remove the `Read` matcher entry from the hooks configuration:
1. Open your Claude Code settings:
```
~/.claude/settings.json
```
2. Find the claude-mem hooks section under `hooks.PreToolUse` and remove the entry with the `Read` matcher.
Alternatively, if you want to keep the gate installed but bypass it for a specific read, Claude can ask you to allow the read -- the gate's deny decision is presented to the user, who can override it.
<Note>
Disabling the gate means Claude will read full files every time, which increases token usage but ensures it always sees the latest code. This is a reasonable choice for small projects or when observations are sparse.
</Note>
---
## How It Fits Together
The File Read Gate is one piece of claude-mem's layered context strategy:
1. **Session Start**: Inject timeline of recent observations (layer 1 -- metadata)
2. **File Read Gate**: Intercept reads with observation history (layer 1 -- metadata)
3. **get_observations**: Fetch specific observation details on demand (layer 2 -- details)
4. **smart_outline / smart_unfold**: Read current code structure efficiently (layer 3 -- source)
5. **Full file read**: Last resort when everything else is insufficient
Each layer is progressively more expensive. The gate ensures Claude starts at the cheapest layer and escalates only when needed.
+1 -1
View File
@@ -46,7 +46,7 @@ GET /api/context/recent?project=my-project&limit=3
### Environment Variables
```bash
CLAUDE_MEM_MODEL=claude-sonnet-4-5 # Model for observations/summaries
CLAUDE_MEM_MODEL=claude-sonnet-4-6 # Model for observations/summaries
CLAUDE_MEM_CONTEXT_OBSERVATIONS=50 # Observations injected at SessionStart
CLAUDE_MEM_WORKER_PORT=37777 # Worker service port
CLAUDE_MEM_PYTHON_VERSION=3.13 # Python version for chroma-mcp
+58 -9
View File
@@ -80,17 +80,18 @@ setup_tty() {
if [[ -t 0 ]]; then
# stdin IS a terminal — use it directly
TTY_FD=0
elif [[ -e /dev/tty ]]; then
# stdin is piped (curl | bash) but /dev/tty is available
elif [[ "$NON_INTERACTIVE" == "true" ]]; then
# In non-interactive mode, do not require /dev/tty
TTY_FD=0
elif [[ -r /dev/tty ]]; then
# stdin is piped (curl | bash) but /dev/tty is available and readable
exec 3</dev/tty
TTY_FD=3
else
# No terminal available at all
if [[ "$NON_INTERACTIVE" != "true" ]]; then
echo "Error: No terminal available for interactive prompts." >&2
echo "Use --non-interactive or run directly: bash install.sh" >&2
exit 1
fi
echo "Error: No terminal available for interactive prompts." >&2
echo "Use --non-interactive or run directly: bash install.sh" >&2
exit 1
fi
}
@@ -787,11 +788,16 @@ install_plugin() {
const configPath = process.env.INSTALLER_CONFIG_FILE;
const config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
const entry = config?.plugins?.entries?.['claude-mem'];
if (entry || config?.plugins?.slots?.memory === 'claude-mem') {
const allowHasClaudeMem = Array.isArray(config?.plugins?.allow) && config.plugins.allow.includes('claude-mem');
if (entry || config?.plugins?.slots?.memory === 'claude-mem' || allowHasClaudeMem) {
// Save the config block so we can restore it after install
process.stdout.write(JSON.stringify(entry?.config || {}));
// Remove the stale entry so OpenClaw CLI can run
if (entry) delete config.plugins.entries['claude-mem'];
// Also remove stale allowlist reference — this alone can block ALL CLI commands
if (Array.isArray(config?.plugins?.allow)) {
config.plugins.allow = config.plugins.allow.filter((x) => x !== 'claude-mem');
}
// Also remove the slot reference — if the slot points to a plugin
// that isn't in entries, OpenClaw's config validator rejects ALL commands
if (config?.plugins?.slots?.memory === 'claude-mem') {
@@ -818,6 +824,49 @@ install_plugin() {
exit 1
fi
# Ensure claude-mem is present in plugins.allow after successful install+enable.
# Some OpenClaw environments require explicit allowlisting for local plugins.
# If the config file does not yet exist, the else branch below materializes it first; the write can still fail and is warned about rather than guaranteed.
if [[ -f "$oc_config" ]]; then
if ! INSTALLER_CONFIG_FILE="$oc_config" node -e "
const fs = require('fs');
const configPath = process.env.INSTALLER_CONFIG_FILE;
const config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
if (!config.plugins) config.plugins = {};
if (!Array.isArray(config.plugins.allow)) config.plugins.allow = [];
if (!config.plugins.allow.includes('claude-mem')) {
config.plugins.allow.push('claude-mem');
fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
console.log('Added claude-mem to plugins.allow');
} else {
console.log('claude-mem already in plugins.allow');
}
" 2>&1; then
warn "Failed to write plugins.allow — claude-mem may need manual allowlisting"
fi
else
# Config doesn't exist yet; configure_memory_slot() will create it with plugins.allow
# We'll add claude-mem to the allowlist in a follow-up step after config is materialized
info "OpenClaw config not yet materialized; will ensure allowlist in post-install"
# Force config materialization by running a harmless OpenClaw command
if run_openclaw status --json >/dev/null 2>&1 && [[ -f "$oc_config" ]]; then
if ! INSTALLER_CONFIG_FILE="$oc_config" node -e "
const fs = require('fs');
const configPath = process.env.INSTALLER_CONFIG_FILE;
const config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
if (!config.plugins) config.plugins = {};
if (!Array.isArray(config.plugins.allow)) config.plugins.allow = [];
if (!config.plugins.allow.includes('claude-mem')) {
config.plugins.allow.push('claude-mem');
fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
console.log('Added claude-mem to plugins.allow (post-materialization)');
}
" 2>&1; then
warn "Failed to write plugins.allow after materialization — configure manually"
fi
fi
fi
# Restore saved plugin config (workerPort, syncMemoryFile, observationFeed, etc.)
# from any pre-existing installation that was temporarily removed above.
if [[ -n "$saved_plugin_config" && "$saved_plugin_config" != "{}" ]]; then
@@ -1101,7 +1150,7 @@ write_settings() {
// All defaults from SettingsDefaultsManager.ts
const defaults = {
CLAUDE_MEM_MODEL: 'claude-sonnet-4-5',
CLAUDE_MEM_MODEL: 'claude-sonnet-4-6',
CLAUDE_MEM_CONTEXT_OBSERVATIONS: '50',
CLAUDE_MEM_WORKER_PORT: '37777',
CLAUDE_MEM_WORKER_HOST: '127.0.0.1',
+5
View File
@@ -27,6 +27,11 @@
"default": 37777,
"description": "Port for Claude-Mem worker service"
},
"workerHost": {
"type": "string",
"default": "127.0.0.1",
"description": "Hostname for Claude-Mem worker service. Set to host.docker.internal when the gateway runs in Docker and the worker runs on the host."
},
"project": {
"type": "string",
"default": "openclaw",
+142 -43
View File
@@ -183,6 +183,7 @@ interface ClaudeMemPluginConfig {
syncMemoryFileExclude?: string[];
project?: string;
workerPort?: number;
workerHost?: string;
observationFeed?: {
enabled?: boolean;
channel?: string;
@@ -198,6 +199,7 @@ interface ClaudeMemPluginConfig {
const MAX_SSE_BUFFER_SIZE = 1024 * 1024; // 1MB
const DEFAULT_WORKER_PORT = 37777;
const DEFAULT_WORKER_HOST = "127.0.0.1";
// Emoji pool for deterministic auto-assignment to unknown agents.
// Uses a hash of the agentId to pick a consistent emoji — no persistent state needed.
@@ -256,8 +258,10 @@ function buildGetSourceLabel(
// Worker HTTP Client
// ============================================================================
let _workerHost = DEFAULT_WORKER_HOST;
function workerBaseUrl(port: number): string {
return `http://127.0.0.1:${port}`;
return `http://${_workerHost}:${port}`;
}
async function workerPost(
@@ -533,6 +537,7 @@ async function connectToSSEStream(
export default function claudeMemPlugin(api: OpenClawPluginApi): void {
const userConfig = (api.pluginConfig || {}) as ClaudeMemPluginConfig;
const workerPort = userConfig.workerPort || DEFAULT_WORKER_PORT;
_workerHost = userConfig.workerHost || DEFAULT_WORKER_HOST;
const baseProjectName = userConfig.project || "openclaw";
const getSourceLabel = buildGetSourceLabel(userConfig.observationFeed?.emojis);
@@ -547,6 +552,14 @@ export default function claudeMemPlugin(api: OpenClawPluginApi): void {
// Session tracking for observation I/O
// ------------------------------------------------------------------
const sessionIds = new Map<string, string>();
const canonicalSessionKeys = new Map<string, string>();
const sessionAliasesByCanonicalKey = new Map<string, Set<string>>();
const pendingCompletionTimers = new Map<string, ReturnType<typeof setTimeout>>();
const recentPromptInits = new Map<string, number>();
const completionDelayMs = (() => {
const val = Number((userConfig as Record<string, unknown>).completionDelayMs);
return Number.isFinite(val) ? Math.max(0, val) : 5000;
})();
const syncMemoryFile = userConfig.syncMemoryFile !== false; // default true
const syncMemoryFileExclude = new Set(userConfig.syncMemoryFileExclude || []);
@@ -565,6 +578,83 @@ export default function claudeMemPlugin(api: OpenClawPluginApi): void {
return true;
}
type SessionTrackingContext = {
sessionKey?: string;
workspaceDir?: string;
channelId?: string;
conversationId?: string;
};
/**
 * Collect every usable session identifier from the tracking context.
 *
 * Candidates are taken in priority order (sessionKey, conversationId,
 * channelId); non-string or blank values are dropped, duplicates collapse
 * while preserving first-seen order, and "default" is returned when no
 * usable identifier exists at all.
 */
function getSessionAliases(ctx: SessionTrackingContext): string[] {
  const candidates = [ctx.sessionKey, ctx.conversationId, ctx.channelId]
    .map((value) => (typeof value === "string" ? value.trim() : ""))
    .filter((value) => value.length > 0);
  const unique = [...new Set(candidates)];
  return unique.length > 0 ? unique : ["default"];
}
/**
 * Register the context's aliases under a single canonical session key and
 * return that key plus the shared content session ID.
 *
 * If any alias has been seen before, its existing canonical key is reused so
 * the same logical session keeps one contentSessionId even when events carry
 * different alias combinations (sessionKey vs conversationId vs channelId).
 * Otherwise the first alias becomes the new canonical key.
 */
function rememberSessionContext(ctx: SessionTrackingContext): { canonicalKey: string; contentSessionId: string } {
  const aliases = getSessionAliases(ctx);

  // Reuse an existing canonical key when any alias is already mapped.
  const knownAlias = aliases.find((alias) => canonicalSessionKeys.has(alias));
  const canonicalKey = knownAlias !== undefined
    ? canonicalSessionKeys.get(knownAlias)!
    : aliases[0];

  // Merge the incoming aliases into the canonical key's membership set.
  let members = sessionAliasesByCanonicalKey.get(canonicalKey);
  if (members === undefined) {
    members = new Set([canonicalKey]);
    sessionAliasesByCanonicalKey.set(canonicalKey, members);
  }
  for (const alias of aliases) {
    members.add(alias);
    canonicalSessionKeys.set(alias, canonicalKey);
  }

  // Every known alias resolves to the same content session ID.
  const contentSessionId = getContentSessionId(canonicalKey);
  for (const member of members) {
    sessionIds.set(member, contentSessionId);
  }

  return { canonicalKey, contentSessionId };
}
/**
 * Return true when an identical prompt init (same session, project, and
 * prompt text) was recorded within the dedup window, so the caller can skip
 * sending a duplicate /api/sessions/init to the worker.
 *
 * Expired entries are pruned on every call to keep the cache bounded.
 */
function shouldSkipDuplicatePromptInit(contentSessionId: string, project: string, prompt: string): boolean {
  // Single source of truth for the dedup window — previously the 2000ms
  // value was duplicated in the prune and the hit check, risking drift.
  const DEDUP_WINDOW_MS = 2000;
  const now = Date.now();
  for (const [key, timestamp] of recentPromptInits) {
    if (now - timestamp > DEDUP_WINDOW_MS) recentPromptInits.delete(key);
  }
  const cacheKey = `${contentSessionId}::${project}::${prompt}`;
  const lastSeenAt = recentPromptInits.get(cacheKey);
  // Note: cache is set unconditionally before return. If workerPost fails
  // after this check, a retry within the window would be incorrectly skipped.
  // Acceptable because before_agent_start is not retried by the runtime.
  recentPromptInits.set(cacheKey, now);
  return typeof lastSeenAt === "number" && now - lastSeenAt <= DEDUP_WINDOW_MS;
}
/**
 * Tear down all tracking state for the session identified by the context.
 *
 * Resolves the canonical key from any known alias (falling back to the first
 * alias), then removes every member alias from the canonical-key and
 * session-ID maps before dropping the membership set itself.
 */
function clearSessionContext(ctx: SessionTrackingContext): void {
  const aliases = getSessionAliases(ctx);

  // Find the canonical key via the first alias that maps to one.
  let canonicalKey: string | undefined;
  for (const alias of aliases) {
    const mapped = canonicalSessionKeys.get(alias);
    if (mapped) {
      canonicalKey = mapped;
      break;
    }
  }
  if (!canonicalKey) canonicalKey = aliases[0];

  // Fall back to a synthetic membership set when none was recorded.
  const members = sessionAliasesByCanonicalKey.get(canonicalKey)
    ?? new Set([canonicalKey, ...aliases]);

  for (const member of members) {
    canonicalSessionKeys.delete(member);
    sessionIds.delete(member);
  }
  sessionAliasesByCanonicalKey.delete(canonicalKey);
  sessionIds.delete(canonicalKey);
}
/**
 * Debounce a session-complete notification to the worker.
 *
 * Any timer already pending for this contentSessionId is cancelled and
 * replaced, so the completion POST fires only after completionDelayMs of
 * quiet. The timer removes itself from the pending map before posting.
 */
function scheduleSessionComplete(contentSessionId: string): void {
  const priorTimer = pendingCompletionTimers.get(contentSessionId);
  if (priorTimer !== undefined) clearTimeout(priorTimer);

  const replacement = setTimeout(() => {
    pendingCompletionTimers.delete(contentSessionId);
    workerPostFireAndForget(
      workerPort,
      "/api/sessions/complete",
      { contentSessionId },
      api.logger,
    );
  }, completionDelayMs);

  pendingCompletionTimers.set(contentSessionId, replacement);
}
// TTL cache for context injection to avoid re-fetching on every LLM turn.
// before_prompt_build fires on every turn; caching for 60s keeps the worker
// load manageable while still picking up new observations reasonably quickly.
@@ -600,61 +690,54 @@ export default function claudeMemPlugin(api: OpenClawPluginApi): void {
}
// ------------------------------------------------------------------
// Event: session_start — init claude-mem session (fires on /new, /reset)
// Event: session_start — track session (fires on /new, /reset)
// Init is deferred to before_agent_start to avoid duplicate prompt records.
// ------------------------------------------------------------------
api.on("session_start", async (_event, ctx) => {
const contentSessionId = getContentSessionId(ctx.sessionKey);
await workerPost(workerPort, "/api/sessions/init", {
contentSessionId,
project: getProjectName(ctx),
prompt: "",
}, api.logger);
api.logger.info(`[claude-mem] Session initialized: ${contentSessionId}`);
const { contentSessionId } = rememberSessionContext(ctx);
api.logger.info(`[claude-mem] Session tracking initialized: ${contentSessionId}`);
});
// ------------------------------------------------------------------
// Event: message_received — capture inbound user prompts from channels
// Event: message_received — alias tracking only; init deferred to before_agent_start
// ------------------------------------------------------------------
api.on("message_received", async (event, ctx) => {
const sessionKey = ctx.conversationId || ctx.channelId || "default";
const contentSessionId = getContentSessionId(sessionKey);
await workerPost(workerPort, "/api/sessions/init", {
contentSessionId,
project: baseProjectName,
prompt: event.content || "[media prompt]",
}, api.logger);
const { canonicalKey, contentSessionId } = rememberSessionContext(ctx);
api.logger.info(`[claude-mem] Message received — prompt capture deferred to before_agent_start: session=${canonicalKey} contentSessionId=${contentSessionId} hasContent=${Boolean(event.content)}`);
});
// ------------------------------------------------------------------
// Event: after_compaction — re-init session after context compaction
// Event: after_compaction — preserve session tracking after context compaction.
// Re-init is intentionally NOT called here; the worker retains session state
// independently and re-initializing would create duplicate prompt records.
// ------------------------------------------------------------------
api.on("after_compaction", async (_event, ctx) => {
const contentSessionId = getContentSessionId(ctx.sessionKey);
await workerPost(workerPort, "/api/sessions/init", {
contentSessionId,
project: getProjectName(ctx),
prompt: "",
}, api.logger);
api.logger.info(`[claude-mem] Session re-initialized after compaction: ${contentSessionId}`);
const { contentSessionId } = rememberSessionContext(ctx);
api.logger.info(`[claude-mem] Session preserved after compaction: ${contentSessionId}`);
});
// ------------------------------------------------------------------
// Event: before_agent_start — init session
// Event: before_agent_start — single init point with dedup guard
// ------------------------------------------------------------------
api.on("before_agent_start", async (event, ctx) => {
const { contentSessionId } = rememberSessionContext(ctx);
const projectName = getProjectName(ctx);
const promptText = event.prompt || "agent run";
if (shouldSkipDuplicatePromptInit(contentSessionId, projectName, promptText)) {
api.logger.info(`[claude-mem] Skipping duplicate prompt init: contentSessionId=${contentSessionId} project=${projectName}`);
return;
}
// Initialize session in the worker so observations are not skipped
// (the privacy check requires a stored user prompt to exist)
const contentSessionId = getContentSessionId(ctx.sessionKey);
await workerPost(workerPort, "/api/sessions/init", {
contentSessionId,
project: getProjectName(ctx),
prompt: event.prompt || "agent run",
project: projectName,
prompt: promptText,
}, api.logger);
api.logger.info(`[claude-mem] Session initialized via before_agent_start: contentSessionId=${contentSessionId} project=${projectName}`);
});
// ------------------------------------------------------------------
@@ -686,7 +769,7 @@ export default function claudeMemPlugin(api: OpenClawPluginApi): void {
// Skip memory_ tools to prevent recursive observation loops
if (toolName.startsWith("memory_")) return;
const contentSessionId = getContentSessionId(ctx.sessionKey);
const { canonicalKey, contentSessionId } = rememberSessionContext(ctx);
// Extract result text from all content blocks
let toolResponseText = "";
@@ -704,13 +787,23 @@ export default function claudeMemPlugin(api: OpenClawPluginApi): void {
toolResponseText = toolResponseText.slice(0, MAX_TOOL_RESPONSE_LENGTH);
}
// Resolve workspaceDir with fallback chain.
// Empty cwd causes worker-side observation queueing failures,
// so we drop the observation rather than sending cwd: "".
const workspaceDir = ctx.workspaceDir;
if (!workspaceDir) {
api.logger.warn(`[claude-mem] Skipping observation persist because workspaceDir is unavailable: session=${canonicalKey} tool=${toolName}`);
return;
}
// Fire-and-forget: send observation to worker
workerPostFireAndForget(workerPort, "/api/sessions/observations", {
contentSessionId,
tool_name: toolName,
tool_input: event.params || {},
tool_response: toolResponseText,
cwd: "",
cwd: workspaceDir,
}, api.logger);
});
@@ -718,7 +811,7 @@ export default function claudeMemPlugin(api: OpenClawPluginApi): void {
// Event: agent_end — summarize and complete session
// ------------------------------------------------------------------
api.on("agent_end", async (event, ctx) => {
const contentSessionId = getContentSessionId(ctx.sessionKey);
const { contentSessionId } = rememberSessionContext(ctx);
// Extract last assistant message for summarization
let lastAssistantMessage = "";
@@ -747,17 +840,16 @@ export default function claudeMemPlugin(api: OpenClawPluginApi): void {
last_assistant_message: lastAssistantMessage,
}, api.logger);
workerPostFireAndForget(workerPort, "/api/sessions/complete", {
contentSessionId,
}, api.logger);
api.logger.info(`[claude-mem] Scheduling session complete in ${completionDelayMs}ms: ${contentSessionId}`);
scheduleSessionComplete(contentSessionId);
});
// ------------------------------------------------------------------
// Event: session_end — clean up session tracking to prevent unbounded growth
// ------------------------------------------------------------------
api.on("session_end", async (_event, ctx) => {
const key = ctx.sessionKey || "default";
sessionIds.delete(key);
clearSessionContext(ctx);
api.logger.info(`[claude-mem] Session tracking cleaned up`);
});
// ------------------------------------------------------------------
@@ -766,6 +858,13 @@ export default function claudeMemPlugin(api: OpenClawPluginApi): void {
api.on("gateway_start", async () => {
sessionIds.clear();
contextCache.clear();
recentPromptInits.clear();
canonicalSessionKeys.clear();
sessionAliasesByCanonicalKey.clear();
for (const timer of pendingCompletionTimers.values()) {
clearTimeout(timer);
}
pendingCompletionTimers.clear();
api.logger.info("[claude-mem] Gateway started — session tracking reset");
});
@@ -1047,5 +1146,5 @@ export default function claudeMemPlugin(api: OpenClawPluginApi): void {
},
});
api.logger.info(`[claude-mem] OpenClaw plugin loaded — v1.0.0 (worker: 127.0.0.1:${workerPort})`);
api.logger.info(`[claude-mem] OpenClaw plugin loaded — v1.0.0 (worker: ${_workerHost}:${workerPort})`);
}
+1 -1
View File
@@ -643,7 +643,7 @@ test_write_settings_new_file() {
local model
model="$(node -e "const s = JSON.parse(require('fs').readFileSync('${settings_file}','utf8')); console.log(s.CLAUDE_MEM_MODEL);")"
assert_eq "claude-sonnet-4-5" "$model" "CLAUDE_MEM_MODEL defaults to claude-sonnet-4-5"
assert_eq "claude-sonnet-4-6" "$model" "CLAUDE_MEM_MODEL defaults to claude-sonnet-4-6"
HOME="$ORIGINAL_HOME"
rm -rf "$fake_home"
+16 -1
View File
@@ -60,7 +60,7 @@
},
"scripts": {
"dev": "npm run build-and-sync",
"build": "node scripts/build-hooks.js",
"build": "node scripts/sync-plugin-manifests.js && node scripts/build-hooks.js",
"build-and-sync": "npm run build && npm run sync-marketplace && sleep 1 && cd ~/.claude/plugins/marketplaces/thedotmack && npm run worker:restart",
"sync-marketplace": "node scripts/sync-marketplace.cjs",
"sync-marketplace:force": "node scripts/sync-marketplace.cjs --force",
@@ -124,6 +124,12 @@
"zod-to-json-schema": "^3.24.6"
},
"devDependencies": {
"@derekstride/tree-sitter-sql": "^0.3.11",
"@tree-sitter-grammars/tree-sitter-lua": "^0.4.1",
"@tree-sitter-grammars/tree-sitter-markdown": "^0.3.2",
"@tree-sitter-grammars/tree-sitter-toml": "^0.7.0",
"@tree-sitter-grammars/tree-sitter-yaml": "^0.7.1",
"@tree-sitter-grammars/tree-sitter-zig": "^1.1.2",
"@types/cors": "^2.8.19",
"@types/dompurify": "^3.0.5",
"@types/express": "^4.17.21",
@@ -132,15 +138,24 @@
"@types/react-dom": "^18.3.0",
"esbuild": "^0.27.2",
"np": "^11.0.2",
"tree-sitter-bash": "^0.25.1",
"tree-sitter-c": "^0.24.1",
"tree-sitter-cli": "^0.26.5",
"tree-sitter-cpp": "^0.23.4",
"tree-sitter-css": "^0.25.0",
"tree-sitter-elixir": "^0.3.5",
"tree-sitter-go": "^0.25.0",
"tree-sitter-haskell": "^0.23.1",
"tree-sitter-java": "^0.23.5",
"tree-sitter-javascript": "^0.25.0",
"tree-sitter-kotlin": "^0.3.8",
"tree-sitter-php": "^0.24.2",
"tree-sitter-python": "^0.25.0",
"tree-sitter-ruby": "^0.23.1",
"tree-sitter-rust": "^0.24.0",
"tree-sitter-scala": "^0.24.0",
"tree-sitter-scss": "^1.0.0",
"tree-sitter-swift": "^0.7.1",
"tree-sitter-typescript": "^0.23.2",
"tsx": "^4.20.6",
"typescript": "^5.3.0"
+21 -9
View File
@@ -7,7 +7,7 @@
"hooks": [
{
"type": "command",
"command": "_R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; \"$_R/scripts/setup.sh\"",
"command": "_R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=$(ls -dt $HOME/.claude/plugins/cache/thedotmack/claude-mem/[0-9]*/ 2>/dev/null | head -1); _R=\"${_R%/}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; \"$_R/scripts/setup.sh\"",
"timeout": 300
}
]
@@ -19,17 +19,17 @@
"hooks": [
{
"type": "command",
"command": "_R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; node \"$_R/scripts/smart-install.js\"",
"command": "export PATH=\"$HOME/.nvm/versions/node/v$(ls \\\"$HOME/.nvm/versions/node\\\" 2>/dev/null | sed 's/^v//' | sort -t. -k1,1n -k2,2n -k3,3n | tail -1)/bin:$HOME/.local/bin:/usr/local/bin:/opt/homebrew/bin:$PATH\"; _R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=$(ls -dt $HOME/.claude/plugins/cache/thedotmack/claude-mem/[0-9]*/ 2>/dev/null | head -1); _R=\"${_R%/}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; node \"$_R/scripts/smart-install.js\"",
"timeout": 300
},
{
"type": "command",
"command": "_R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; node \"$_R/scripts/bun-runner.js\" \"$_R/scripts/worker-service.cjs\" start",
"command": "export PATH=\"$HOME/.nvm/versions/node/v$(ls \\\"$HOME/.nvm/versions/node\\\" 2>/dev/null | sed 's/^v//' | sort -t. -k1,1n -k2,2n -k3,3n | tail -1)/bin:$HOME/.local/bin:/usr/local/bin:/opt/homebrew/bin:$PATH\"; _R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=$(ls -dt $HOME/.claude/plugins/cache/thedotmack/claude-mem/[0-9]*/ 2>/dev/null | head -1); _R=\"${_R%/}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; node \"$_R/scripts/bun-runner.js\" \"$_R/scripts/worker-service.cjs\" start; for i in 1 2 3 4 5 6 7 8; do curl -sf http://localhost:37777/health >/dev/null 2>&1 && break; sleep 1; done; curl -sf http://localhost:37777/health >/dev/null 2>&1 || exit 1; echo '{\"continue\":true,\"suppressOutput\":true}'",
"timeout": 60
},
{
"type": "command",
"command": "_R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; node \"$_R/scripts/bun-runner.js\" \"$_R/scripts/worker-service.cjs\" hook claude-code context",
"command": "export PATH=\"$HOME/.nvm/versions/node/v$(ls \\\"$HOME/.nvm/versions/node\\\" 2>/dev/null | sed 's/^v//' | sort -t. -k1,1n -k2,2n -k3,3n | tail -1)/bin:$HOME/.local/bin:/usr/local/bin:/opt/homebrew/bin:$PATH\"; _R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=$(ls -dt $HOME/.claude/plugins/cache/thedotmack/claude-mem/[0-9]*/ 2>/dev/null | head -1); _R=\"${_R%/}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; for i in 1 2 3 4 5 6 7 8; do curl -sf http://localhost:37777/health >/dev/null 2>&1 && break; sleep 1; done; node \"$_R/scripts/bun-runner.js\" \"$_R/scripts/worker-service.cjs\" hook claude-code context",
"timeout": 60
}
]
@@ -40,7 +40,7 @@
"hooks": [
{
"type": "command",
"command": "_R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; node \"$_R/scripts/bun-runner.js\" \"$_R/scripts/worker-service.cjs\" hook claude-code session-init",
"command": "export PATH=\"$HOME/.nvm/versions/node/v$(ls \\\"$HOME/.nvm/versions/node\\\" 2>/dev/null | sed 's/^v//' | sort -t. -k1,1n -k2,2n -k3,3n | tail -1)/bin:$HOME/.local/bin:/usr/local/bin:/opt/homebrew/bin:$PATH\"; _R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=$(ls -dt $HOME/.claude/plugins/cache/thedotmack/claude-mem/[0-9]*/ 2>/dev/null | head -1); _R=\"${_R%/}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; node \"$_R/scripts/bun-runner.js\" \"$_R/scripts/worker-service.cjs\" hook claude-code session-init",
"timeout": 60
}
]
@@ -52,18 +52,30 @@
"hooks": [
{
"type": "command",
"command": "_R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; node \"$_R/scripts/bun-runner.js\" \"$_R/scripts/worker-service.cjs\" hook claude-code observation",
"command": "export PATH=\"$HOME/.nvm/versions/node/v$(ls \\\"$HOME/.nvm/versions/node\\\" 2>/dev/null | sed 's/^v//' | sort -t. -k1,1n -k2,2n -k3,3n | tail -1)/bin:$HOME/.local/bin:/usr/local/bin:/opt/homebrew/bin:$PATH\"; _R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=$(ls -dt $HOME/.claude/plugins/cache/thedotmack/claude-mem/[0-9]*/ 2>/dev/null | head -1); _R=\"${_R%/}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; node \"$_R/scripts/bun-runner.js\" \"$_R/scripts/worker-service.cjs\" hook claude-code observation",
"timeout": 120
}
]
}
],
"PreToolUse": [
{
"matcher": "Read",
"hooks": [
{
"type": "command",
"command": "_R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; node \"$_R/scripts/bun-runner.js\" \"$_R/scripts/worker-service.cjs\" hook claude-code file-context",
"timeout": 2000
}
]
}
],
"Stop": [
{
"hooks": [
{
"type": "command",
"command": "_R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; node \"$_R/scripts/bun-runner.js\" \"$_R/scripts/worker-service.cjs\" hook claude-code summarize",
"command": "export PATH=\"$HOME/.nvm/versions/node/v$(ls \\\"$HOME/.nvm/versions/node\\\" 2>/dev/null | sed 's/^v//' | sort -t. -k1,1n -k2,2n -k3,3n | tail -1)/bin:$HOME/.local/bin:/usr/local/bin:/opt/homebrew/bin:$PATH\"; _R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=$(ls -dt $HOME/.claude/plugins/cache/thedotmack/claude-mem/[0-9]*/ 2>/dev/null | head -1); _R=\"${_R%/}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; node \"$_R/scripts/bun-runner.js\" \"$_R/scripts/worker-service.cjs\" hook claude-code summarize",
"timeout": 120
}
]
@@ -74,8 +86,8 @@
"hooks": [
{
"type": "command",
"command": "node -e \"let d='';process.stdin.on('data',c=>d+=c);process.stdin.on('end',()=>{try{const{sessionId:s}=JSON.parse(d);if(!s){process.exit(0)}const r=require('http').request({hostname:'127.0.0.1',port:37777,path:'/api/sessions/complete',method:'POST',headers:{'Content-Type':'application/json'}});r.on('error',()=>{});r.end(JSON.stringify({contentSessionId:s}));process.exit(0)}catch{process.exit(0)}})\"",
"timeout": 2
"command": "export PATH=\"$HOME/.nvm/versions/node/v$(ls \\\"$HOME/.nvm/versions/node\\\" 2>/dev/null | sed 's/^v//' | sort -t. -k1,1n -k2,2n -k3,3n | tail -1)/bin:$HOME/.local/bin:/usr/local/bin:/opt/homebrew/bin:$PATH\"; _R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=$(ls -dt $HOME/.claude/plugins/cache/thedotmack/claude-mem/[0-9]*/ 2>/dev/null | head -1); _R=\"${_R%/}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; node \"$_R/scripts/bun-runner.js\" \"$_R/scripts/worker-service.cjs\" hook claude-code session-complete",
"timeout": 30
}
]
}
+16 -1
View File
@@ -14,7 +14,22 @@
"tree-sitter-python": "^0.25.0",
"tree-sitter-ruby": "^0.23.1",
"tree-sitter-rust": "^0.24.0",
"tree-sitter-typescript": "^0.23.2"
"tree-sitter-typescript": "^0.23.2",
"tree-sitter-kotlin": "^0.3.8",
"tree-sitter-swift": "^0.7.1",
"tree-sitter-php": "^0.24.2",
"tree-sitter-elixir": "^0.3.5",
"@tree-sitter-grammars/tree-sitter-lua": "^0.4.1",
"tree-sitter-scala": "^0.24.0",
"tree-sitter-bash": "^0.25.1",
"tree-sitter-haskell": "^0.23.1",
"@tree-sitter-grammars/tree-sitter-zig": "^1.1.2",
"tree-sitter-css": "^0.25.0",
"tree-sitter-scss": "^1.0.0",
"@tree-sitter-grammars/tree-sitter-toml": "^0.7.0",
"@tree-sitter-grammars/tree-sitter-yaml": "^0.7.1",
"@derekstride/tree-sitter-sql": "^0.3.11",
"@tree-sitter-grammars/tree-sitter-markdown": "^0.3.2"
},
"engines": {
"node": ">=18.0.0",
+35 -8
View File
@@ -55,6 +55,13 @@ function findBun() {
});
if (pathCheck.status === 0 && pathCheck.stdout.trim()) {
// On Windows, prefer bun.cmd over bun (bun is a shell script, bun.cmd is the Windows batch file)
if (IS_WINDOWS) {
const bunCmdPath = pathCheck.stdout.split('\n').find(line => line.trim().endsWith('bun.cmd'));
if (bunCmdPath) {
return bunCmdPath.trim();
}
}
return 'bun'; // Found in PATH
}
@@ -152,17 +159,31 @@ const stdinData = await collectStdin();
// Spawn Bun with the provided script and args
// Use spawn (not spawnSync) to properly handle stdio
// Note: Don't use shell mode on Windows - it breaks paths with spaces in usernames
// On Windows, use cmd.exe to execute bun.cmd since npm-installed bun is a batch file
// Use windowsHide to prevent a visible console window from spawning on Windows
const child = spawn(bunPath, args, {
stdio: [stdinData ? 'pipe' : 'ignore', 'inherit', 'inherit'],
const spawnOptions = {
stdio: ['pipe', 'inherit', 'inherit'],
windowsHide: true,
env: process.env
});
};
// Write buffered stdin to child's pipe, then close it so the child sees EOF
if (stdinData && child.stdin) {
child.stdin.write(stdinData);
let spawnCmd = bunPath;
let spawnArgs = args;
if (IS_WINDOWS) {
// On Windows, bun.cmd must be executed via cmd /c
spawnCmd = 'cmd';
spawnArgs = ['/c', bunPath, ...args];
}
const child = spawn(spawnCmd, spawnArgs, spawnOptions);
// Write buffered stdin to child's pipe, then close it so the child sees EOF.
// Fall back to '{}' when no stdin data is available so worker-service.cjs
// always receives valid JSON input even when Claude Code doesn't pipe stdin
// (e.g. during SessionStart on some platforms). Fixes #1560.
if (child.stdin) {
child.stdin.write(stdinData || '{}');
child.stdin.end();
}
@@ -171,6 +192,12 @@ child.on('error', (err) => {
process.exit(1);
});
child.on('close', (code) => {
child.on('close', (code, signal) => {
// Fix #1505: When the "start" subcommand forks a daemon, the parent bun
// process may be killed by signal (e.g. SIGKILL, exit code 137). The daemon
// is running fine — treat signal-based exits for "start" as success.
if ((signal || code > 128) && args.includes('start')) {
process.exit(0);
}
process.exit(code || 0);
});
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
+2 -2
View File
@@ -449,7 +449,7 @@ function installDeps() {
console.error('⚠️ Bun install failed, falling back to npm...');
console.error(' (This can happen with npm alias packages like *-cjs)');
try {
execSync('npm install', { cwd: ROOT, stdio: installStdio, shell: IS_WINDOWS });
execSync('npm install --legacy-peer-deps', { cwd: ROOT, stdio: installStdio, shell: IS_WINDOWS });
} catch (npmError) {
throw new Error('Both bun and npm install failed: ' + npmError.message);
}
@@ -546,7 +546,7 @@ try {
if (!verifyCriticalModules()) {
console.error('⚠️ Retrying install with npm...');
try {
execSync('npm install --production', { cwd: ROOT, stdio: ['pipe', 'pipe', 'inherit'], shell: IS_WINDOWS });
execSync('npm install --production --legacy-peer-deps', { cwd: ROOT, stdio: ['pipe', 'pipe', 'inherit'], shell: IS_WINDOWS });
} catch {
// npm also failed
}
File diff suppressed because one or more lines are too long
+48
View File
@@ -125,3 +125,51 @@ get_observations(ids=[11131, 10942, 10855], orderBy="date_desc")
- **Full observation:** ~500-1000 tokens each
- **Batch fetch:** 1 HTTP request vs N individual requests
- **10x token savings** by filtering before fetching
## Smart-Explore Language Support
Smart-explore tools (`smart_search`, `smart_outline`, `smart_unfold`) use tree-sitter AST parsing. The following languages are supported out of the box.
### 24 Bundled Languages
JS, TS, Python, Go, Rust, Ruby, Java, C, C++, Kotlin, Swift, PHP, Elixir, Lua, Scala, Bash, Haskell, Zig, CSS, SCSS, TOML, YAML, SQL, Markdown
### Markdown Special Support
Markdown files get structure-aware parsing beyond generic tree-sitter:
- **Heading hierarchy** -- `#`/`##`/`###` headings are extracted as nested symbols (sections contain subsections)
- **Code block detection** -- fenced code blocks are surfaced as `code` symbols with language annotation
- **Section-aware unfold** -- `smart_unfold` on a heading returns the full section content (heading through all subsections until the next heading of equal or higher level)
### User-Installable Grammars via `.claude-mem.json`
Add custom tree-sitter grammars for languages not in the bundled set. Place `.claude-mem.json` in the project root:
```json
{
"grammars": {
"gleam": {
"package": "tree-sitter-gleam",
"extensions": [".gleam"]
},
"protobuf": {
"package": "tree-sitter-proto",
"extensions": [".proto"],
"query": ".claude-mem/queries/proto.scm"
}
}
}
```
**Fields:**
- `package` (string, required) -- npm package name for the tree-sitter grammar
- `extensions` (array of strings, required) -- file extensions to associate with this language
- `query` (string, optional) -- path to a custom `.scm` query file for symbol extraction. If omitted, a generic query is used.
**Rules:**
- User grammars do NOT override bundled languages. If a language is already bundled, the entry is ignored.
- The npm package must be installed in the project (`npm install tree-sitter-gleam`).
- Config is cached per project root. Changes to `.claude-mem.json` take effect on next worker restart.
File diff suppressed because one or more lines are too long
+124 -1
View File
@@ -355,6 +355,14 @@
box-shadow: 0 1px 3px rgba(0, 0, 0, 0.03);
}
.header-main {
display: flex;
align-items: center;
gap: 18px;
min-width: 0;
flex-wrap: wrap;
}
.sidebar-header {
padding: 14px 18px;
border-bottom: 1px solid var(--color-border-primary);
@@ -549,6 +557,42 @@
font-size: 13px;
}
.source-tabs {
display: inline-flex;
align-items: center;
gap: 6px;
flex-wrap: wrap;
}
.source-tab {
background: transparent;
border: 1px solid var(--color-border-primary);
color: var(--color-text-secondary);
border-radius: 999px;
padding: 6px 12px;
font-size: 12px;
line-height: 1;
font-weight: 600;
letter-spacing: 0.01em;
cursor: pointer;
transition: all 0.2s cubic-bezier(0.4, 0, 0.2, 1);
white-space: nowrap;
}
.source-tab:hover {
background: var(--color-bg-card-hover);
border-color: var(--color-border-focus);
color: var(--color-text-primary);
transform: translateY(-1px);
}
.source-tab.active {
background: linear-gradient(135deg, var(--color-bg-button) 0%, var(--color-accent-primary) 100%);
border-color: var(--color-bg-button);
color: var(--color-text-button);
box-shadow: 0 2px 8px rgba(9, 105, 218, 0.18);
}
.settings-btn,
.theme-toggle-btn {
background: var(--color-bg-card);
@@ -887,6 +931,49 @@
letter-spacing: 0.5px;
}
.card-source {
padding: 2px 8px;
border-radius: 999px;
font-weight: 600;
font-size: 10px;
letter-spacing: 0.04em;
text-transform: uppercase;
border: 1px solid transparent;
}
.source-claude {
background: rgba(255, 138, 61, 0.12);
color: #c25a00;
border-color: rgba(255, 138, 61, 0.22);
}
.source-codex {
background: rgba(33, 150, 243, 0.12);
color: #0f5ba7;
border-color: rgba(33, 150, 243, 0.24);
}
.source-cursor {
background: rgba(124, 58, 237, 0.12);
color: #6d28d9;
border-color: rgba(124, 58, 237, 0.24);
}
[data-theme="dark"] .source-claude {
color: #ffb067;
border-color: rgba(255, 176, 103, 0.2);
}
[data-theme="dark"] .source-codex {
color: #8fc7ff;
border-color: rgba(143, 199, 255, 0.2);
}
[data-theme="dark"] .source-cursor {
color: #c4b5fd;
border-color: rgba(196, 181, 253, 0.2);
}
.card-title {
font-size: 17px;
margin-bottom: 14px;
@@ -1483,6 +1570,10 @@
padding: 14px 20px;
}
.header-main {
gap: 12px;
}
.status {
gap: 6px;
}
@@ -1491,6 +1582,11 @@
max-width: 160px;
}
.source-tab {
padding: 6px 10px;
font-size: 11px;
}
/* Hide icon links (docs, github, twitter) on tablet */
.icon-link {
display: none;
@@ -1544,6 +1640,28 @@
gap: 8px;
}
.header-main {
gap: 10px;
}
.source-tabs {
width: 100%;
flex-wrap: nowrap;
overflow-x: auto;
padding-bottom: 2px;
scrollbar-width: none;
}
.source-tabs::-webkit-scrollbar {
display: none;
}
.source-tab {
flex-shrink: 0;
padding: 5px 10px;
font-size: 11px;
}
.logomark {
height: 28px;
}
@@ -1732,6 +1850,11 @@
white-space: nowrap;
}
.preview-selector select:disabled {
opacity: 0.6;
cursor: not-allowed;
}
.preview-selector select {
background: var(--color-bg-card);
border: 1px solid var(--color-border-primary);
@@ -2873,4 +2996,4 @@
<script src="viewer-bundle.js"></script>
</body>
</html>
</html>
+81
View File
@@ -27,6 +27,48 @@ const CONTEXT_GENERATOR = {
source: 'src/services/context-generator.ts'
};
/**
 * Strip hardcoded __dirname/__filename assignments from bundled CJS output.
 *
 * esbuild inlines __dirname and __filename as string literals captured from
 * the BUILD machine's paths when converting ESM source to CJS. Those
 * `var __dirname = "/build/machine/path"` declarations shadow the runtime
 * globals supplied by Bun/Node's CJS module wrapper, so path resolution
 * breaks on end-user machines. This post-build pass deletes the inlined
 * declarations so the native runtime globals win.
 *
 * Rewrites the file in place only when something was actually removed.
 *
 * See: https://github.com/thedotmack/claude-mem/issues/1410
 */
function stripHardcodedDirname(filePath) {
  const original = fs.readFileSync(filePath, 'utf-8');
  // Matches a single double- or single-quoted string literal. esbuild emits
  // double quotes today; single quotes are handled defensively in case a
  // future version changes quoting style.
  const stringLiteral = `(?:"[^"]*"|'[^']*')`;
  let content = original;
  for (const identifier of ['__dirname', '__filename']) {
    const rewrites = [
      // `var <id> = "...", rest` → `var rest`
      [new RegExp(`\\bvar ${identifier}\\s*=\\s*${stringLiteral},\\s*`, 'g'), 'var '],
      // standalone `var <id> = "...";`
      [new RegExp(`\\bvar ${identifier}\\s*=\\s*${stringLiteral};\\s*`, 'g'), ''],
      // `, <id> = "..."` in the middle or at the end of a declarator list
      [new RegExp(`,\\s*${identifier}\\s*=\\s*${stringLiteral}`, 'g'), ''],
    ];
    for (const [pattern, replacement] of rewrites) {
      content = content.replace(pattern, replacement);
    }
  }
  // Drop a dangling `var ;` left behind when the id was the sole declarator.
  content = content.replace(/\bvar\s*;/g, '');
  const removed = original.length - content.length;
  if (removed > 0) {
    fs.writeFileSync(filePath, content);
    console.log(`  ✓ Stripped hardcoded __dirname/__filename paths (${removed} bytes)`);
  }
}
async function buildHooks() {
console.log('🔨 Building claude-mem hooks and worker service...\n');
@@ -69,6 +111,21 @@ async function buildHooks() {
'tree-sitter-ruby': '^0.23.1',
'tree-sitter-rust': '^0.24.0',
'tree-sitter-typescript': '^0.23.2',
'tree-sitter-kotlin': '^0.3.8',
'tree-sitter-swift': '^0.7.1',
'tree-sitter-php': '^0.24.2',
'tree-sitter-elixir': '^0.3.5',
'@tree-sitter-grammars/tree-sitter-lua': '^0.4.1',
'tree-sitter-scala': '^0.24.0',
'tree-sitter-bash': '^0.25.1',
'tree-sitter-haskell': '^0.23.1',
'@tree-sitter-grammars/tree-sitter-zig': '^1.1.2',
'tree-sitter-css': '^0.25.0',
'tree-sitter-scss': '^1.0.0',
'@tree-sitter-grammars/tree-sitter-toml': '^0.7.0',
'@tree-sitter-grammars/tree-sitter-yaml': '^0.7.1',
'@derekstride/tree-sitter-sql': '^0.3.11',
'@tree-sitter-grammars/tree-sitter-markdown': '^0.3.2',
},
engines: {
node: '>=18.0.0',
@@ -124,6 +181,9 @@ async function buildHooks() {
}
});
// Fix hardcoded __dirname/__filename in bundled output (#1410)
stripHardcodedDirname(`${hooksDir}/${WORKER_SERVICE.name}.cjs`);
// Make worker service executable
fs.chmodSync(`${hooksDir}/${WORKER_SERVICE.name}.cjs`, 0o755);
const workerStats = fs.statSync(`${hooksDir}/${WORKER_SERVICE.name}.cjs`);
@@ -152,6 +212,21 @@ async function buildHooks() {
'tree-sitter-java',
'tree-sitter-c',
'tree-sitter-cpp',
'tree-sitter-kotlin',
'tree-sitter-swift',
'tree-sitter-php',
'tree-sitter-elixir',
'@tree-sitter-grammars/tree-sitter-lua',
'tree-sitter-scala',
'tree-sitter-bash',
'tree-sitter-haskell',
'@tree-sitter-grammars/tree-sitter-zig',
'tree-sitter-css',
'tree-sitter-scss',
'@tree-sitter-grammars/tree-sitter-toml',
'@tree-sitter-grammars/tree-sitter-yaml',
'@derekstride/tree-sitter-sql',
'@tree-sitter-grammars/tree-sitter-markdown',
],
define: {
'__DEFAULT_PACKAGE_VERSION__': `"${version}"`
@@ -161,6 +236,9 @@ async function buildHooks() {
}
});
// Fix hardcoded __dirname/__filename in bundled output (#1410)
stripHardcodedDirname(`${hooksDir}/${MCP_SERVER.name}.cjs`);
// Make MCP server executable
fs.chmodSync(`${hooksDir}/${MCP_SERVER.name}.cjs`, 0o755);
const mcpServerStats = fs.statSync(`${hooksDir}/${MCP_SERVER.name}.cjs`);
@@ -184,6 +262,9 @@ async function buildHooks() {
// No banner needed: CJS files under Node.js have __dirname/__filename natively
});
// Fix hardcoded __dirname/__filename in bundled output (#1410)
stripHardcodedDirname(`${hooksDir}/${CONTEXT_GENERATOR.name}.cjs`);
const contextGenStats = fs.statSync(`${hooksDir}/${CONTEXT_GENERATOR.name}.cjs`);
console.log(`✓ context-generator built (${(contextGenStats.size / 1024).toFixed(2)} KB)`);
+35 -14
View File
@@ -94,9 +94,12 @@ function getTrackedFolders(workingDir: string): Set<string> {
const absPath = path.join(workingDir, file);
let dir = path.dirname(absPath);
// Add all parent directories up to (but not including) the working dir
while (dir.length > workingDir.length && dir.startsWith(workingDir)) {
// Add all parent directories up to and including the working dir itself.
// The working dir is included so that root-level files (stored in the DB
// as bare filenames with no directory component) can be matched. Fixes #1514.
while (dir.length >= workingDir.length && dir.startsWith(workingDir)) {
folders.add(dir);
if (dir === workingDir) break;
dir = path.dirname(dir);
}
}
@@ -164,19 +167,37 @@ function findObservationsByFolder(db: Database, relativeFolderPath: string, proj
// Query more results than needed since we'll filter some out
const queryLimit = limit * 3;
const sql = `
SELECT o.*, o.discovery_tokens
FROM observations o
WHERE o.project = ?
AND (o.files_modified LIKE ? OR o.files_read LIKE ?)
ORDER BY o.created_at_epoch DESC
LIMIT ?
`;
// For the root folder (empty relativeFolderPath), observations may have bare
// filenames stored without any directory component (e.g. ["dashboard.html"]).
// In that case the LIKE pattern below would never match, so we fetch all
// observations for the project and let isDirectChild filter to root-level files.
// Fixes #1514.
let allMatches: ObservationRow[];
// Files in DB are stored as relative paths like "src/services/foo.ts"
// Match any file that starts with this folder path (we'll filter to direct children below)
const likePattern = `%"${relativeFolderPath}/%`;
const allMatches = db.prepare(sql).all(project, likePattern, likePattern, queryLimit) as ObservationRow[];
if (relativeFolderPath === '' || relativeFolderPath === '.') {
const sql = `
SELECT o.*, o.discovery_tokens
FROM observations o
WHERE o.project = ?
AND (o.files_modified IS NOT NULL OR o.files_read IS NOT NULL)
ORDER BY o.created_at_epoch DESC
LIMIT ?
`;
allMatches = db.prepare(sql).all(project, queryLimit) as ObservationRow[];
} else {
const sql = `
SELECT o.*, o.discovery_tokens
FROM observations o
WHERE o.project = ?
AND (o.files_modified LIKE ? OR o.files_read LIKE ?)
ORDER BY o.created_at_epoch DESC
LIMIT ?
`;
// Files in DB are stored as relative paths like "src/services/foo.ts"
// Match any file that starts with this folder path (we'll filter to direct children below)
const likePattern = `%"${relativeFolderPath}/%`;
allMatches = db.prepare(sql).all(project, likePattern, likePattern, queryLimit) as ObservationRow[];
}
// Filter to only observations with direct child files (not in subfolders)
return allMatches.filter(obs => hasDirectChildFile(obs, relativeFolderPath)).slice(0, limit);
+95
View File
@@ -0,0 +1,95 @@
#!/usr/bin/env node
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
// Resolve the repository root relative to this script file (ESM has no
// built-in __dirname, so derive it from import.meta.url).
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const rootDir = path.resolve(__dirname, '..');
// package.json is the canonical metadata source; the two plugin manifests
// below are kept in sync with it by this script.
const packageJsonPath = path.join(rootDir, 'package.json');
const codexPluginPath = path.join(rootDir, '.codex-plugin', 'plugin.json');
const claudePluginPath = path.join(rootDir, '.claude-plugin', 'plugin.json');
/**
 * Read a file from disk and parse its contents as JSON.
 * @param {string} filePath - Path to the JSON file.
 * @returns {*} The parsed JSON value.
 */
function readJson(filePath) {
  const raw = fs.readFileSync(filePath, 'utf8');
  return JSON.parse(raw);
}
/**
 * Serialize a value as pretty-printed JSON (2-space indent, trailing
 * newline) and write it to disk.
 * @param {string} filePath - Destination path.
 * @param {*} value - Any JSON-serializable value.
 */
function writeJson(filePath, value) {
  const serialized = `${JSON.stringify(value, null, 2)}\n`;
  fs.writeFileSync(filePath, serialized);
}
/**
 * Produce an updated Codex plugin manifest with metadata copied from
 * package.json. Existing manifest fields not sourced from package.json
 * (including extra `author` and `interface` keys) are preserved.
 * @param {object} plugin - Current .codex-plugin/plugin.json contents.
 * @param {object} pkg - Parsed package.json.
 * @returns {object} New manifest object (input is not mutated).
 */
function syncCodexPlugin(plugin, pkg) {
  const existingAuthor =
    plugin.author && typeof plugin.author === 'object' ? plugin.author : {};
  const authorName = normalizeAuthorName(pkg.author);
  const repoUrl = normalizeRepositoryUrl(pkg.repository);
  return {
    ...plugin,
    name: pkg.name,
    version: pkg.version,
    description: pkg.description,
    homepage: pkg.homepage,
    repository: repoUrl,
    license: pkg.license,
    keywords: pkg.keywords,
    author: { ...existingAuthor, name: authorName },
    interface: {
      ...plugin.interface,
      developerName: authorName,
      websiteURL: repoUrl,
    },
  };
}
/**
 * Produce an updated Claude plugin manifest with metadata copied from
 * package.json. Unlike the Codex manifest, there is no `interface` section.
 * @param {object} plugin - Current .claude-plugin/plugin.json contents.
 * @param {object} pkg - Parsed package.json.
 * @returns {object} New manifest object (input is not mutated).
 */
function syncClaudePlugin(plugin, pkg) {
  const existingAuthor =
    plugin.author && typeof plugin.author === 'object' ? plugin.author : {};
  return {
    ...plugin,
    name: pkg.name,
    version: pkg.version,
    description: pkg.description,
    homepage: pkg.homepage,
    repository: normalizeRepositoryUrl(pkg.repository),
    license: pkg.license,
    keywords: pkg.keywords,
    author: {
      ...existingAuthor,
      name: normalizeAuthorName(pkg.author),
    },
  };
}
/**
 * Extract an author name from either package.json `author` form.
 * @param {string|{name?: string}|*} author - String or object author field.
 * @returns {string} The author name, or '' when it cannot be determined.
 */
function normalizeAuthorName(author) {
  if (typeof author === 'string') {
    return author;
  }
  const hasName =
    author != null && typeof author === 'object' && typeof author.name === 'string';
  return hasName ? author.name : '';
}
/**
 * Extract a repository URL from either package.json `repository` form,
 * stripping a trailing `.git` suffix.
 * @param {string|{url?: string}|*} repository - String or object form.
 * @returns {string} Normalized URL, or '' when it cannot be determined.
 */
function normalizeRepositoryUrl(repository) {
  const strip = (url) => url.replace(/\.git$/, '');
  if (typeof repository === 'string') {
    return strip(repository);
  }
  const hasUrl =
    repository != null && typeof repository === 'object' && typeof repository.url === 'string';
  return hasUrl ? strip(repository.url) : '';
}
/**
 * Entry point: verify all three manifests exist, then propagate
 * package.json metadata into both plugin manifests on disk.
 * Exits with status 1 if any required file is missing.
 */
function main() {
  // Fail fast with a clear message so CI logs pinpoint the missing file.
  for (const filePath of [packageJsonPath, codexPluginPath, claudePluginPath]) {
    if (!fs.existsSync(filePath)) {
      console.error(`Missing required file: ${filePath}`);
      process.exit(1);
    }
  }
  const pkg = readJson(packageJsonPath);
  const codexPlugin = readJson(codexPluginPath);
  const claudePlugin = readJson(claudePluginPath);
  // Rewrite both manifests with metadata merged from package.json.
  writeJson(codexPluginPath, syncCodexPlugin(codexPlugin, pkg));
  writeJson(claudePluginPath, syncClaudePlugin(claudePlugin, pkg));
  console.log('✓ Synced plugin manifests from package.json');
}
main();
+3 -1
View File
@@ -12,6 +12,7 @@ import { HOOK_EXIT_CODES } from '../../shared/hook-constants.js';
import { logger } from '../../utils/logger.js';
import { SettingsDefaultsManager } from '../../shared/SettingsDefaultsManager.js';
import { USER_SETTINGS_PATH } from '../../shared/paths.js';
import { normalizePlatformSource } from '../../shared/platform-source.js';
export const contextHandler: EventHandler = {
async execute(input: NormalizedHookInput): Promise<HookResult> {
@@ -31,6 +32,7 @@ export const contextHandler: EventHandler = {
const cwd = input.cwd ?? process.cwd();
const context = getProjectContext(cwd);
const port = getWorkerPort();
const platformSource = normalizePlatformSource(input.platform);
// Check if terminal output should be shown (load settings early)
const settings = SettingsDefaultsManager.loadFromFile(USER_SETTINGS_PATH);
@@ -38,7 +40,7 @@ export const contextHandler: EventHandler = {
// Pass all projects (parent + worktree if applicable) for unified timeline
const projectsParam = context.allProjects.join(',');
const apiPath = `/api/context/inject?projects=${encodeURIComponent(projectsParam)}`;
const apiPath = `/api/context/inject?projects=${encodeURIComponent(projectsParam)}&platformSource=${encodeURIComponent(platformSource)}`;
const colorApiPath = input.platform === 'claude-code' ? `${apiPath}&colors=true` : apiPath;
// Note: Removed AbortSignal.timeout due to Windows Bun cleanup issue (libuv assertion)
+258
View File
@@ -0,0 +1,258 @@
/**
* File Context Handler - PreToolUse
*
* Injects relevant observation history when Claude reads/edits a file,
* so it can avoid duplicating past work.
*/
import type { EventHandler, NormalizedHookInput, HookResult } from '../types.js';
import { ensureWorkerRunning, workerHttpRequest } from '../../shared/worker-utils.js';
import { logger } from '../../utils/logger.js';
import { parseJsonArray } from '../../shared/timeline-formatting.js';
import { statSync } from 'fs';
import path from 'path';
import { isProjectExcluded } from '../../utils/project-filter.js';
import { SettingsDefaultsManager } from '../../shared/SettingsDefaultsManager.js';
import { USER_SETTINGS_PATH } from '../../shared/paths.js';
import { getProjectContext } from '../../utils/project-name.js';
/** Skip the gate for files smaller than this — timeline overhead exceeds file read cost. */
const FILE_READ_GATE_MIN_BYTES = 1_500;
/** Fetch more candidates than the display limit so dedup still fills 15 slots. */
const FETCH_LOOKAHEAD_LIMIT = 40;
/** Maximum observations to show in the timeline. */
const DISPLAY_LIMIT = 15;
/** Emoji icon per observation type (escape sequences keep the file ASCII-safe). */
const TYPE_ICONS: Record<string, string> = {
  decision: '\u2696\uFE0F',  // ⚖️
  bugfix: '\uD83D\uDD34',    // 🔴
  feature: '\uD83D\uDFE3',   // 🟣
  refactor: '\uD83D\uDD04',  // 🔄
  discovery: '\uD83D\uDD35', // 🔵
  change: '\u2705',          // ✅
};
/** Compress a 12-hour time string: "3:05 PM" -> "3:05p". */
function compactTime(timeStr: string): string {
  const lowered = timeStr.toLowerCase();
  return lowered.replace(' am', 'a').replace(' pm', 'p');
}
/** Format an epoch-ms timestamp as a local 12-hour time, e.g. "3:05 PM". */
function formatTime(epoch: number): string {
  const opts: Intl.DateTimeFormatOptions = { hour: 'numeric', minute: '2-digit', hour12: true };
  return new Date(epoch).toLocaleString('en-US', opts);
}
/** Format an epoch-ms timestamp as a local date, e.g. "Apr 7, 2026". */
function formatDate(epoch: number): string {
  const opts: Intl.DateTimeFormatOptions = { month: 'short', day: 'numeric', year: 'numeric' };
  return new Date(epoch).toLocaleString('en-US', opts);
}
/** Subset of observation table columns used by the file-context gate. */
interface ObservationRow {
  id: number;
  // Session that produced this observation; used for per-session dedup.
  memory_session_id: string;
  title: string | null;
  // Observation category (decision/bugfix/feature/...); keys into TYPE_ICONS.
  type: string;
  created_at_epoch: number;
  // JSON-encoded string arrays of file paths, nullable in the DB.
  files_read: string | null;
  files_modified: string | null;
}
/**
 * Deduplicate and rank observations for the timeline display.
 *
 * 1. Same-session dedup: keep only the most recent observation per session
 *    (input is already sorted newest-first by SQL).
 * 2. Specificity scoring: rank by how specifically the observation is about
 *    the target file (modified > read-only, fewer total files > many).
 * 3. Truncate to displayLimit.
 */
function deduplicateObservations(
  observations: ObservationRow[],
  targetPath: string,
  displayLimit: number
): ObservationRow[] {
  // Phase 1: the first (newest) observation per session wins.
  const seen = new Set<string>();
  const latestPerSession: ObservationRow[] = [];
  for (const obs of observations) {
    const key = obs.memory_session_id ?? `no-session-${obs.id}`;
    if (seen.has(key)) continue;
    seen.add(key);
    latestPerSession.push(obs);
  }

  // Phase 2: score each survivor by how specific it is to the target file.
  const normalizedTarget = targetPath.replace(/\\/g, '/');
  const scoreOf = (obs: ObservationRow): number => {
    const filesRead = parseJsonArray(obs.files_read);
    const filesModified = parseJsonArray(obs.files_modified);
    const totalFiles = filesRead.length + filesModified.length;
    const touchesTarget = filesModified.some(f => f.replace(/\\/g, '/') === normalizedTarget);
    let score = touchesTarget ? 2 : 0;
    if (totalFiles <= 3) score += 2;
    else if (totalFiles <= 8) score += 1;
    // totalFiles > 8: survey-like observation, no bonus.
    return score;
  };

  // Array.prototype.sort is stable, so chronological order is preserved
  // within equal scores.
  return latestPerSession
    .map(obs => ({ obs, score: scoreOf(obs) }))
    .sort((a, b) => b.score - a.score)
    .slice(0, displayLimit)
    .map(entry => entry.obs);
}
/**
 * Render the observation timeline injected as additional context: a header
 * with the current time and recovery hints, followed by observations grouped
 * by day, chronologically within each day.
 */
function formatFileTimeline(observations: ObservationRow[], filePath: string): string {
  // Escape filePath for safe interpolation into recovery hints (quotes, backslashes, newlines)
  const safePath = filePath
    .replace(/\\/g, '\\\\')
    .replace(/"/g, '\\"')
    .replace(/\n/g, '\\n');

  // Bucket observations by calendar day.
  const byDay = new Map<string, ObservationRow[]>();
  for (const obs of observations) {
    const day = formatDate(obs.created_at_epoch);
    const bucket = byDay.get(day);
    if (bucket) {
      bucket.push(obs);
    } else {
      byDay.set(day, [obs]);
    }
  }

  // Order days by their earliest observation — input order is specificity-sorted
  // (from deduplication), not chronological, so the first entry cannot be used.
  const earliestOf = (rows: ObservationRow[]): number =>
    Math.min(...rows.map(o => o.created_at_epoch));
  const sortedDays = [...byDay.entries()].sort((a, b) => earliestOf(a[1]) - earliestOf(b[1]));

  // Include current date/time so the model can judge recency of observations.
  const now = new Date();
  const currentDate = now.toLocaleDateString('en-CA'); // YYYY-MM-DD
  const currentTime = now
    .toLocaleTimeString('en-US', { hour: 'numeric', minute: '2-digit', hour12: true })
    .toLowerCase()
    .replace(' ', '');
  const currentTimezone = now.toLocaleTimeString('en-US', { timeZoneName: 'short' }).split(' ').pop();

  const lines: string[] = [
    `Current: ${currentDate} ${currentTime} ${currentTimezone}`,
    `This file has prior observations. Only line 1 was read to save tokens.`,
    `- **Already know enough?** The timeline below may be all you need (semantic priming).`,
    `- **Need details?** get_observations([IDs]) — ~300 tokens each.`,
    `- **Need full file?** Read again with offset/limit for the section you need.`,
    `- **Need to edit?** Edit works — the file is registered as read. Use smart_outline("${safePath}") for line numbers.`,
  ];

  for (const [day, dayObservations] of sortedDays) {
    lines.push(`### ${day}`);
    // Re-sort chronologically within the day (dedup reordered by specificity).
    const chronological = [...dayObservations].sort((a, b) => a.created_at_epoch - b.created_at_epoch);
    for (const obs of chronological) {
      const title = (obs.title || 'Untitled')
        .replace(/[\r\n\t]+/g, ' ')
        .replace(/\s+/g, ' ')
        .trim()
        .slice(0, 160);
      const icon = TYPE_ICONS[obs.type] || '\u2753';
      const time = compactTime(formatTime(obs.created_at_epoch));
      lines.push(`${obs.id} ${time} ${icon} ${title}`);
    }
  }
  return lines.join('\n');
}
export const fileContextHandler: EventHandler = {
  /**
   * PreToolUse gate: when a file has prior observations, rewrite the Read to
   * fetch only line 1 and inject an observation timeline as additional
   * context, so Claude gets history instead of re-reading the whole file.
   * Every failure path degrades gracefully to "allow the read unchanged".
   */
  async execute(input: NormalizedHookInput): Promise<HookResult> {
    // Extract file_path from toolInput
    const toolInput = input.toolInput as Record<string, unknown> | undefined;
    const filePath = toolInput?.file_path as string | undefined;
    if (!filePath) {
      return { continue: true, suppressOutput: true };
    }

    // Skip gate for files below the token-economics threshold — timeline (~370 tokens)
    // costs more than reading small files directly.
    try {
      const statPath = path.isAbsolute(filePath)
        ? filePath
        : path.resolve(input.cwd || process.cwd(), filePath);
      const stat = statSync(statPath);
      if (stat.size < FILE_READ_GATE_MIN_BYTES) {
        return { continue: true, suppressOutput: true };
      }
    } catch (err) {
      // Narrow the unknown catch value instead of typing it `any`:
      // only the errno code is inspected here.
      if ((err as { code?: string } | null)?.code === 'ENOENT') {
        // Missing file — let the Read tool surface its own error unmodified.
        return { continue: true, suppressOutput: true };
      }
      // Other errors (symlink, permission denied) — fall through and let gate proceed
    }

    // Check if project is excluded from tracking
    const settings = SettingsDefaultsManager.loadFromFile(USER_SETTINGS_PATH);
    if (input.cwd && isProjectExcluded(input.cwd, settings.CLAUDE_MEM_EXCLUDED_PROJECTS)) {
      logger.debug('HOOK', 'Project excluded from tracking, skipping file context', { cwd: input.cwd });
      return { continue: true, suppressOutput: true };
    }

    // Ensure worker is running
    const workerReady = await ensureWorkerRunning();
    if (!workerReady) {
      return { continue: true, suppressOutput: true };
    }

    // Query worker for observations related to this file
    try {
      const context = getProjectContext(input.cwd);

      // Observations store relative paths — convert absolute to relative using cwd
      const cwd = input.cwd || process.cwd();
      const absolutePath = path.isAbsolute(filePath) ? filePath : path.resolve(cwd, filePath);
      const relativePath = path.relative(cwd, absolutePath).split(path.sep).join("/");

      const queryParams = new URLSearchParams({ path: relativePath });
      // Pass all project names (parent + worktree) for unified lookup
      if (context.allProjects.length > 0) {
        queryParams.set('projects', context.allProjects.join(','));
      }
      queryParams.set('limit', String(FETCH_LOOKAHEAD_LIMIT));

      const response = await workerHttpRequest(`/api/observations/by-file?${queryParams.toString()}`, {
        method: 'GET',
      });

      if (!response.ok) {
        logger.warn('HOOK', 'File context query failed, skipping', { status: response.status, filePath });
        return { continue: true, suppressOutput: true };
      }

      const data = await response.json() as { observations: ObservationRow[]; count: number };
      if (!data.observations || data.observations.length === 0) {
        return { continue: true, suppressOutput: true };
      }

      // Deduplicate: one per session, ranked by specificity to this file
      const dedupedObservations = deduplicateObservations(data.observations, relativePath, DISPLAY_LIMIT);
      if (dedupedObservations.length === 0) {
        return { continue: true, suppressOutput: true };
      }

      // Allow the read with limit: 1 line — just enough for Edit's "file must be read"
      // check to pass, while keeping token cost near zero. The observation timeline
      // gives Claude full context about prior work on this file.
      const timeline = formatFileTimeline(dedupedObservations, filePath);
      return {
        hookSpecificOutput: {
          hookEventName: 'PreToolUse',
          additionalContext: timeline,
          permissionDecision: 'allow',
          updatedInput: {
            file_path: filePath,
            limit: 1,
          },
        },
      };
    } catch (error) {
      logger.warn('HOOK', 'File context fetch error, skipping', {
        error: error instanceof Error ? error.message : String(error),
      });
      return { continue: true, suppressOutput: true };
    }
  },
};
+3
View File
@@ -9,6 +9,7 @@ import type { EventHandler, NormalizedHookInput, HookResult } from '../types.js'
import { ensureWorkerRunning, workerHttpRequest } from '../../shared/worker-utils.js';
import { logger } from '../../utils/logger.js';
import { HOOK_EXIT_CODES } from '../../shared/hook-constants.js';
import { normalizePlatformSource } from '../../shared/platform-source.js';
export const fileEditHandler: EventHandler = {
async execute(input: NormalizedHookInput): Promise<HookResult> {
@@ -20,6 +21,7 @@ export const fileEditHandler: EventHandler = {
}
const { sessionId, cwd, filePath, edits } = input;
const platformSource = normalizePlatformSource(input.platform);
if (!filePath) {
throw new Error('fileEditHandler requires filePath');
@@ -42,6 +44,7 @@ export const fileEditHandler: EventHandler = {
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
contentSessionId: sessionId,
platformSource,
tool_name: 'write_file',
tool_input: { filePath, edits },
tool_response: { success: true },
+6 -2
View File
@@ -13,6 +13,7 @@ import { observationHandler } from './observation.js';
import { summarizeHandler } from './summarize.js';
import { userMessageHandler } from './user-message.js';
import { fileEditHandler } from './file-edit.js';
import { fileContextHandler } from './file-context.js';
import { sessionCompleteHandler } from './session-complete.js';
export type EventType =
@@ -22,7 +23,8 @@ export type EventType =
| 'summarize' // Stop - generate summary (phase 1)
| 'session-complete' // Stop - complete session (phase 2) - fixes #842
| 'user-message' // SessionStart (parallel) - display to user
| 'file-edit'; // Cursor afterFileEdit
| 'file-edit' // Cursor afterFileEdit
| 'file-context'; // PreToolUse - inject file observation history
const handlers: Record<EventType, EventHandler> = {
'context': contextHandler,
@@ -31,7 +33,8 @@ const handlers: Record<EventType, EventHandler> = {
'summarize': summarizeHandler,
'session-complete': sessionCompleteHandler,
'user-message': userMessageHandler,
'file-edit': fileEditHandler
'file-edit': fileEditHandler,
'file-context': fileContextHandler
};
/**
@@ -64,4 +67,5 @@ export { observationHandler } from './observation.js';
export { summarizeHandler } from './summarize.js';
export { userMessageHandler } from './user-message.js';
export { fileEditHandler } from './file-edit.js';
export { fileContextHandler } from './file-context.js';
export { sessionCompleteHandler } from './session-complete.js';
+3
View File
@@ -11,6 +11,7 @@ import { HOOK_EXIT_CODES } from '../../shared/hook-constants.js';
import { isProjectExcluded } from '../../utils/project-filter.js';
import { SettingsDefaultsManager } from '../../shared/SettingsDefaultsManager.js';
import { USER_SETTINGS_PATH } from '../../shared/paths.js';
import { normalizePlatformSource } from '../../shared/platform-source.js';
export const observationHandler: EventHandler = {
async execute(input: NormalizedHookInput): Promise<HookResult> {
@@ -22,6 +23,7 @@ export const observationHandler: EventHandler = {
}
const { sessionId, cwd, toolName, toolInput, toolResponse } = input;
const platformSource = normalizePlatformSource(input.platform);
if (!toolName) {
// No tool name provided - skip observation gracefully
@@ -51,6 +53,7 @@ export const observationHandler: EventHandler = {
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
contentSessionId: sessionId,
platformSource,
tool_name: toolName,
tool_input: toolInput,
tool_response: toolResponse,
+4 -1
View File
@@ -12,6 +12,7 @@
import type { EventHandler, NormalizedHookInput, HookResult } from '../types.js';
import { ensureWorkerRunning, workerHttpRequest } from '../../shared/worker-utils.js';
import { logger } from '../../utils/logger.js';
import { normalizePlatformSource } from '../../shared/platform-source.js';
export const sessionCompleteHandler: EventHandler = {
async execute(input: NormalizedHookInput): Promise<HookResult> {
@@ -23,6 +24,7 @@ export const sessionCompleteHandler: EventHandler = {
}
const { sessionId } = input;
const platformSource = normalizePlatformSource(input.platform);
if (!sessionId) {
logger.warn('HOOK', 'session-complete: Missing sessionId, skipping');
@@ -39,7 +41,8 @@ export const sessionCompleteHandler: EventHandler = {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
contentSessionId: sessionId
contentSessionId: sessionId,
platformSource
})
});
+4 -1
View File
@@ -12,6 +12,7 @@ import { HOOK_EXIT_CODES } from '../../shared/hook-constants.js';
import { isProjectExcluded } from '../../utils/project-filter.js';
import { SettingsDefaultsManager } from '../../shared/SettingsDefaultsManager.js';
import { USER_SETTINGS_PATH } from '../../shared/paths.js';
import { normalizePlatformSource } from '../../shared/platform-source.js';
export const sessionInitHandler: EventHandler = {
async execute(input: NormalizedHookInput): Promise<HookResult> {
@@ -42,6 +43,7 @@ export const sessionInitHandler: EventHandler = {
const prompt = (!rawPrompt || !rawPrompt.trim()) ? '[media prompt]' : rawPrompt;
const project = getProjectName(cwd);
const platformSource = normalizePlatformSource(input.platform);
logger.debug('HOOK', 'session-init: Calling /api/sessions/init', { contentSessionId: sessionId, project });
@@ -52,7 +54,8 @@ export const sessionInitHandler: EventHandler = {
body: JSON.stringify({
contentSessionId: sessionId,
project,
prompt
prompt,
platformSource
})
});
+7 -1
View File
@@ -17,7 +17,13 @@ export interface NormalizedHookInput {
export interface HookResult {
continue?: boolean;
suppressOutput?: boolean;
hookSpecificOutput?: { hookEventName: string; additionalContext: string };
hookSpecificOutput?: {
hookEventName: string;
additionalContext: string;
permissionDecision?: 'allow' | 'deny';
permissionDecisionReason?: string;
updatedInput?: Record<string, unknown>;
};
systemMessage?: string;
exitCode?: number;
}
+10 -1
View File
@@ -138,7 +138,7 @@ export function parseSummary(text: string, sessionId?: number): ParsedSummary |
const next_steps = extractField(summaryContent, 'next_steps');
const notes = extractField(summaryContent, 'notes'); // Optional
// NOTE FROM THEDOTMACK: 100% of the time we must SAVE the summary, even if fields are missing. 10/24/2025
// NOTE FROM THEDOTMACK: 100% of the time we must SAVE the summary, even if fields are missing. 10/24/2025
// NEVER DO THIS NONSENSE AGAIN.
// Validate required fields are present (notes is optional)
@@ -154,6 +154,15 @@ export function parseSummary(text: string, sessionId?: number): ParsedSummary |
// return null;
// }
// Guard: if NO sub-tags matched at all, this is a false positive —
// <summary> accidentally appeared inside an <observation> response with no structured content.
// This is NOT the same as missing some fields (which we intentionally allow above).
// Fix for #1360.
if (!request && !investigated && !learned && !completed && !next_steps) {
logger.warn('PARSER', 'Summary match has no sub-tags — skipping false positive', { sessionId });
return null;
}
return {
request,
investigated,
+54 -4
View File
@@ -27,7 +27,8 @@ import {
CallToolRequestSchema,
ListToolsRequestSchema,
} from '@modelcontextprotocol/sdk/types.js';
import { workerHttpRequest } from '../shared/worker-utils.js';
import { getWorkerPort, workerHttpRequest } from '../shared/worker-utils.js';
import { ensureWorkerStarted } from '../services/worker-service.js';
import { searchCodebase, formatSearchResults } from '../services/smart-file-read/search.js';
import { parseFile, formatFoldedView, unfoldSymbol } from '../services/smart-file-read/parser.js';
import { readFile } from 'node:fs/promises';
@@ -144,6 +145,26 @@ async function verifyWorkerConnection(): Promise<boolean> {
}
}
/**
* Ensure Worker is available for Codex and other MCP-only clients.
* Claude hooks already start the worker; this path makes Codex turnkey.
*/
async function ensureWorkerConnection(): Promise<boolean> {
if (await verifyWorkerConnection()) {
return true;
}
logger.warn('SYSTEM', 'Worker not available, attempting auto-start for MCP client');
try {
const port = getWorkerPort();
return await ensureWorkerStarted(port);
} catch (error) {
logger.error('SYSTEM', 'Worker auto-start failed', undefined, error as Error);
return false;
}
}
/**
* Tool definitions with HTTP-based handlers
* Minimal descriptions - use help() tool with operation parameter for detailed docs
@@ -392,6 +413,30 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
// Prevents orphaned MCP server processes when Claude Code exits unexpectedly
const HEARTBEAT_INTERVAL_MS = 30_000;
let heartbeatTimer: ReturnType<typeof setInterval> | null = null;
let isCleaningUp = false;
function handleStdioClosed() {
cleanup('stdio-closed');
}
function handleStdioError(error: Error) {
logger.warn('SYSTEM', 'MCP stdio stream errored, shutting down', {
message: error.message
});
cleanup('stdio-error');
}
function attachStdioLifecycle() {
process.stdin.on('end', handleStdioClosed);
process.stdin.on('close', handleStdioClosed);
process.stdin.on('error', handleStdioError);
}
function detachStdioLifecycle() {
process.stdin.off('end', handleStdioClosed);
process.stdin.off('close', handleStdioClosed);
process.stdin.off('error', handleStdioError);
}
function startParentHeartbeat() {
// ppid-based orphan detection only works on Unix
@@ -414,9 +459,13 @@ function startParentHeartbeat() {
// Cleanup function — synchronous to ensure consistent behavior whether called
// from signal handlers, heartbeat interval, or awaited in async context
function cleanup() {
function cleanup(reason: string = 'shutdown') {
if (isCleaningUp) return;
isCleaningUp = true;
if (heartbeatTimer) clearInterval(heartbeatTimer);
logger.info('SYSTEM', 'MCP server shutting down');
detachStdioLifecycle();
logger.info('SYSTEM', 'MCP server shutting down', { reason });
process.exit(0);
}
@@ -428,6 +477,7 @@ process.on('SIGINT', cleanup);
async function main() {
// Start the MCP server
const transport = new StdioServerTransport();
attachStdioLifecycle();
await server.connect(transport);
logger.info('SYSTEM', 'Claude-mem search server started');
@@ -436,7 +486,7 @@ async function main() {
// Check Worker availability in background
setTimeout(async () => {
const workerAvailable = await verifyWorkerConnection();
const workerAvailable = await ensureWorkerConnection();
if (!workerAvailable) {
logger.error('SYSTEM', 'Worker not available', undefined, {});
logger.error('SYSTEM', 'Tools will fail until Worker is started');
+18 -17
View File
@@ -29,8 +29,8 @@ import { renderHeader } from './sections/HeaderRenderer.js';
import { renderTimeline } from './sections/TimelineRenderer.js';
import { shouldShowSummary, renderSummaryFields } from './sections/SummaryRenderer.js';
import { renderPreviouslySection, renderFooter } from './sections/FooterRenderer.js';
import { renderMarkdownEmptyState } from './formatters/MarkdownFormatter.js';
import { renderColorEmptyState } from './formatters/ColorFormatter.js';
import { renderAgentEmptyState } from './formatters/AgentFormatter.js';
import { renderHumanEmptyState } from './formatters/HumanFormatter.js';
// Version marker path for native module error handling
const VERSION_MARKER_PATH = path.join(
@@ -66,8 +66,8 @@ function initializeDatabase(): SessionStore | null {
/**
* Render empty state when no data exists
*/
function renderEmptyState(project: string, useColors: boolean): string {
return useColors ? renderColorEmptyState(project) : renderMarkdownEmptyState(project);
function renderEmptyState(project: string, forHuman: boolean): string {
return forHuman ? renderHumanEmptyState(project) : renderAgentEmptyState(project);
}
/**
@@ -80,7 +80,7 @@ function buildContextOutput(
config: ContextConfig,
cwd: string,
sessionId: string | undefined,
useColors: boolean
forHuman: boolean
): string {
const output: string[] = [];
@@ -88,7 +88,7 @@ function buildContextOutput(
const economics = calculateTokenEconomics(observations);
// Render header section
output.push(...renderHeader(project, economics, config, useColors));
output.push(...renderHeader(project, economics, config, forHuman));
// Prepare timeline data
const displaySummaries = summaries.slice(0, config.sessionCount);
@@ -97,22 +97,22 @@ function buildContextOutput(
const fullObservationIds = getFullObservationIds(observations, config.fullObservationCount);
// Render timeline
output.push(...renderTimeline(timeline, fullObservationIds, config, cwd, useColors));
output.push(...renderTimeline(timeline, fullObservationIds, config, cwd, forHuman));
// Render most recent summary if applicable
const mostRecentSummary = summaries[0];
const mostRecentObservation = observations[0];
if (shouldShowSummary(config, mostRecentSummary, mostRecentObservation)) {
output.push(...renderSummaryFields(mostRecentSummary, useColors));
output.push(...renderSummaryFields(mostRecentSummary, forHuman));
}
// Render previously section (prior assistant message)
const priorMessages = getPriorSessionMessages(observations, config, sessionId, cwd);
output.push(...renderPreviouslySection(priorMessages, useColors));
output.push(...renderPreviouslySection(priorMessages, forHuman));
// Render footer
output.push(...renderFooter(economics, config, useColors));
output.push(...renderFooter(economics, config, forHuman));
return output.join('\n').trimEnd();
}
@@ -125,11 +125,12 @@ function buildContextOutput(
*/
export async function generateContext(
input?: ContextInput,
useColors: boolean = false
forHuman: boolean = false
): Promise<string> {
const config = loadContextConfig();
const cwd = input?.cwd ?? process.cwd();
const project = getProjectName(cwd);
const platformSource = input?.platform_source;
// Use provided projects array (for worktree support) or fall back to single project
const projects = input?.projects || [project];
@@ -149,15 +150,15 @@ export async function generateContext(
try {
// Query data for all projects (supports worktree: parent + worktree combined)
const observations = projects.length > 1
? queryObservationsMulti(db, projects, config)
: queryObservations(db, project, config);
? queryObservationsMulti(db, projects, config, platformSource)
: queryObservations(db, project, config, platformSource);
const summaries = projects.length > 1
? querySummariesMulti(db, projects, config)
: querySummaries(db, project, config);
? querySummariesMulti(db, projects, config, platformSource)
: querySummaries(db, project, config, platformSource);
// Handle empty state
if (observations.length === 0 && summaries.length === 0) {
return renderEmptyState(project, useColors);
return renderEmptyState(project, forHuman);
}
// Build and return context
@@ -168,7 +169,7 @@ export async function generateContext(
config,
cwd,
input?.session_id,
useColors
forHuman
);
return output;
+102 -31
View File
@@ -8,6 +8,7 @@ import path from 'path';
import { existsSync, readFileSync } from 'fs';
import { SessionStore } from '../sqlite/SessionStore.js';
import { logger } from '../../utils/logger.js';
import { SYSTEM_REMINDER_REGEX } from '../../utils/tag-stripping.js';
import { CLAUDE_CONFIG_DIR } from '../../shared/paths.js';
import type {
ContextConfig,
@@ -25,7 +26,8 @@ import { SUMMARY_LOOKAHEAD } from './types.js';
export function queryObservations(
db: SessionStore,
project: string,
config: ContextConfig
config: ContextConfig,
platformSource?: string
): Observation[] {
const typeArray = Array.from(config.observationTypes);
const typePlaceholders = typeArray.map(() => '?').join(',');
@@ -34,19 +36,38 @@ export function queryObservations(
return db.db.prepare(`
SELECT
id, memory_session_id, type, title, subtitle, narrative,
facts, concepts, files_read, files_modified, discovery_tokens,
created_at, created_at_epoch
FROM observations
WHERE project = ?
o.id,
o.memory_session_id,
COALESCE(s.platform_source, 'claude') as platform_source,
o.type,
o.title,
o.subtitle,
o.narrative,
o.facts,
o.concepts,
o.files_read,
o.files_modified,
o.discovery_tokens,
o.created_at,
o.created_at_epoch
FROM observations o
LEFT JOIN sdk_sessions s ON o.memory_session_id = s.memory_session_id
WHERE o.project = ?
AND type IN (${typePlaceholders})
AND EXISTS (
SELECT 1 FROM json_each(concepts)
SELECT 1 FROM json_each(o.concepts)
WHERE value IN (${conceptPlaceholders})
)
ORDER BY created_at_epoch DESC
${platformSource ? "AND COALESCE(s.platform_source, 'claude') = ?" : ''}
ORDER BY o.created_at_epoch DESC
LIMIT ?
`).all(project, ...typeArray, ...conceptArray, config.totalObservationCount) as Observation[];
`).all(
project,
...typeArray,
...conceptArray,
...(platformSource ? [platformSource] : []),
config.totalObservationCount
) as Observation[];
}
/**
@@ -55,15 +76,30 @@ export function queryObservations(
export function querySummaries(
db: SessionStore,
project: string,
config: ContextConfig
config: ContextConfig,
platformSource?: string
): SessionSummary[] {
return db.db.prepare(`
SELECT id, memory_session_id, request, investigated, learned, completed, next_steps, created_at, created_at_epoch
FROM session_summaries
WHERE project = ?
ORDER BY created_at_epoch DESC
SELECT
ss.id,
ss.memory_session_id,
COALESCE(s.platform_source, 'claude') as platform_source,
ss.request,
ss.investigated,
ss.learned,
ss.completed,
ss.next_steps,
ss.created_at,
ss.created_at_epoch
FROM session_summaries ss
LEFT JOIN sdk_sessions s ON ss.memory_session_id = s.memory_session_id
WHERE ss.project = ?
${platformSource ? "AND COALESCE(s.platform_source, 'claude') = ?" : ''}
ORDER BY ss.created_at_epoch DESC
LIMIT ?
`).all(project, config.sessionCount + SUMMARY_LOOKAHEAD) as SessionSummary[];
`).all(
...[project, ...(platformSource ? [platformSource] : []), config.sessionCount + SUMMARY_LOOKAHEAD]
) as SessionSummary[];
}
/**
@@ -75,7 +111,8 @@ export function querySummaries(
export function queryObservationsMulti(
db: SessionStore,
projects: string[],
config: ContextConfig
config: ContextConfig,
platformSource?: string
): Observation[] {
const typeArray = Array.from(config.observationTypes);
const typePlaceholders = typeArray.map(() => '?').join(',');
@@ -87,19 +124,39 @@ export function queryObservationsMulti(
return db.db.prepare(`
SELECT
id, memory_session_id, type, title, subtitle, narrative,
facts, concepts, files_read, files_modified, discovery_tokens,
created_at, created_at_epoch, project
FROM observations
WHERE project IN (${projectPlaceholders})
o.id,
o.memory_session_id,
COALESCE(s.platform_source, 'claude') as platform_source,
o.type,
o.title,
o.subtitle,
o.narrative,
o.facts,
o.concepts,
o.files_read,
o.files_modified,
o.discovery_tokens,
o.created_at,
o.created_at_epoch,
o.project
FROM observations o
LEFT JOIN sdk_sessions s ON o.memory_session_id = s.memory_session_id
WHERE o.project IN (${projectPlaceholders})
AND type IN (${typePlaceholders})
AND EXISTS (
SELECT 1 FROM json_each(concepts)
SELECT 1 FROM json_each(o.concepts)
WHERE value IN (${conceptPlaceholders})
)
ORDER BY created_at_epoch DESC
${platformSource ? "AND COALESCE(s.platform_source, 'claude') = ?" : ''}
ORDER BY o.created_at_epoch DESC
LIMIT ?
`).all(...projects, ...typeArray, ...conceptArray, config.totalObservationCount) as Observation[];
`).all(
...projects,
...typeArray,
...conceptArray,
...(platformSource ? [platformSource] : []),
config.totalObservationCount
) as Observation[];
}
/**
@@ -111,18 +168,32 @@ export function queryObservationsMulti(
export function querySummariesMulti(
db: SessionStore,
projects: string[],
config: ContextConfig
config: ContextConfig,
platformSource?: string
): SessionSummary[] {
// Build IN clause for projects
const projectPlaceholders = projects.map(() => '?').join(',');
return db.db.prepare(`
SELECT id, memory_session_id, request, investigated, learned, completed, next_steps, created_at, created_at_epoch, project
FROM session_summaries
WHERE project IN (${projectPlaceholders})
ORDER BY created_at_epoch DESC
SELECT
ss.id,
ss.memory_session_id,
COALESCE(s.platform_source, 'claude') as platform_source,
ss.request,
ss.investigated,
ss.learned,
ss.completed,
ss.next_steps,
ss.created_at,
ss.created_at_epoch,
ss.project
FROM session_summaries ss
LEFT JOIN sdk_sessions s ON ss.memory_session_id = s.memory_session_id
WHERE ss.project IN (${projectPlaceholders})
${platformSource ? "AND COALESCE(s.platform_source, 'claude') = ?" : ''}
ORDER BY ss.created_at_epoch DESC
LIMIT ?
`).all(...projects, config.sessionCount + SUMMARY_LOOKAHEAD) as SessionSummary[];
`).all(...projects, ...(platformSource ? [platformSource] : []), config.sessionCount + SUMMARY_LOOKAHEAD) as SessionSummary[];
}
/**
@@ -164,7 +235,7 @@ export function extractPriorMessages(transcriptPath: string): PriorMessages {
text += block.text;
}
}
text = text.replace(/<system-reminder>[\s\S]*?<\/system-reminder>/g, '').trim();
text = text.replace(SYSTEM_REMINDER_REGEX, '').trim();
if (text) {
lastAssistantMessage = text;
break;
@@ -1,8 +1,8 @@
/**
 * AgentFormatter - Formats context output as compact markdown for LLM injection
 *
 * Optimized for token efficiency: flat lines instead of tables, no repeated headers.
 * The human-readable terminal formatter (HumanFormatter.ts) handles human-readable display separately.
 */
import type {
@@ -31,9 +31,9 @@ function formatHeaderDateTime(): string {
}
/**
* Render markdown header
* Render agent header
*/
export function renderMarkdownHeader(project: string): string[] {
export function renderAgentHeader(project: string): string[] {
return [
`# $CMEM ${project} ${formatHeaderDateTime()}`,
''
@@ -41,9 +41,9 @@ export function renderMarkdownHeader(project: string): string[] {
}
/**
* Render markdown legend
* Render agent legend
*/
export function renderMarkdownLegend(): string[] {
export function renderAgentLegend(): string[] {
const mode = ModeManager.getInstance().getActiveMode();
const typeLegendItems = mode.observation_types.map(t => `${t.emoji}${t.id}`).join(' ');
@@ -56,23 +56,23 @@ export function renderMarkdownLegend(): string[] {
}
/**
* Render markdown column key - no longer needed in compact format
* Render agent column key - no longer needed in compact format
*/
export function renderMarkdownColumnKey(): string[] {
export function renderAgentColumnKey(): string[] {
return [];
}
/**
* Render markdown context index instructions - folded into legend
* Render agent context index instructions - folded into legend
*/
export function renderMarkdownContextIndex(): string[] {
export function renderAgentContextIndex(): string[] {
return [];
}
/**
* Render markdown context economics
* Render agent context economics
*/
export function renderMarkdownContextEconomics(
export function renderAgentContextEconomics(
economics: TokenEconomics,
config: ContextConfig
): string[] {
@@ -98,18 +98,18 @@ export function renderMarkdownContextEconomics(
}
/**
* Render markdown day header
* Render agent day header
*/
export function renderMarkdownDayHeader(day: string): string[] {
export function renderAgentDayHeader(day: string): string[] {
return [
`### ${day}`,
];
}
/**
* Render markdown file header - no longer renders table headers in compact format
* Render agent file header - no longer renders table headers in compact format
*/
export function renderMarkdownFileHeader(_file: string): string[] {
export function renderAgentFileHeader(_file: string): string[] {
// File grouping eliminated in compact format - file context is in observation titles
return [];
}
@@ -124,7 +124,7 @@ function compactTime(time: string): string {
/**
* Render compact flat line for observation (replaces table row)
*/
export function renderMarkdownTableRow(
export function renderAgentTableRow(
obs: Observation,
timeDisplay: string,
_config: ContextConfig
@@ -137,9 +137,9 @@ export function renderMarkdownTableRow(
}
/**
* Render markdown full observation
* Render agent full observation
*/
export function renderMarkdownFullObservation(
export function renderAgentFullObservation(
obs: Observation,
timeDisplay: string,
detailField: string | null,
@@ -172,9 +172,9 @@ export function renderMarkdownFullObservation(
}
/**
* Render markdown summary item in timeline
* Render agent summary item in timeline
*/
export function renderMarkdownSummaryItem(
export function renderAgentSummaryItem(
summary: { id: number; request: string | null },
formattedTime: string
): string[] {
@@ -184,17 +184,17 @@ export function renderMarkdownSummaryItem(
}
/**
* Render markdown summary field
* Render agent summary field
*/
export function renderMarkdownSummaryField(label: string, value: string | null): string[] {
export function renderAgentSummaryField(label: string, value: string | null): string[] {
if (!value) return [];
return [`**${label}**: ${value}`, ''];
}
/**
* Render markdown previously section
* Render agent previously section
*/
export function renderMarkdownPreviouslySection(priorMessages: PriorMessages): string[] {
export function renderAgentPreviouslySection(priorMessages: PriorMessages): string[] {
if (!priorMessages.assistantMessage) return [];
return [
@@ -209,9 +209,9 @@ export function renderMarkdownPreviouslySection(priorMessages: PriorMessages): s
}
/**
* Render markdown footer
* Render agent footer
*/
export function renderMarkdownFooter(totalDiscoveryTokens: number, totalReadTokens: number): string[] {
export function renderAgentFooter(totalDiscoveryTokens: number, totalReadTokens: number): string[] {
const workTokensK = Math.round(totalDiscoveryTokens / 1000);
return [
'',
@@ -220,8 +220,8 @@ export function renderMarkdownFooter(totalDiscoveryTokens: number, totalReadToke
}
/**
* Render markdown empty state
* Render agent empty state
*/
export function renderMarkdownEmptyState(project: string): string {
export function renderAgentEmptyState(project: string): string {
return `# $CMEM ${project} ${formatHeaderDateTime()}\n\nNo previous sessions found.`;
}
@@ -1,5 +1,5 @@
/**
 * HumanFormatter - Formats context output with ANSI colors for terminal
 *
 * Handles all colored formatting for context injection (terminal display).
 */
@@ -30,9 +30,9 @@ function formatHeaderDateTime(): string {
}
/**
* Render colored header
* Render human-readable header
*/
export function renderColorHeader(project: string): string[] {
export function renderHumanHeader(project: string): string[] {
return [
'',
`${colors.bright}${colors.cyan}[${project}] recent context, ${formatHeaderDateTime()}${colors.reset}`,
@@ -42,9 +42,9 @@ export function renderColorHeader(project: string): string[] {
}
/**
* Render colored legend
* Render human-readable legend
*/
export function renderColorLegend(): string[] {
export function renderHumanLegend(): string[] {
const mode = ModeManager.getInstance().getActiveMode();
const typeLegendItems = mode.observation_types.map(t => `${t.emoji} ${t.id}`).join(' | ');
@@ -55,9 +55,9 @@ export function renderColorLegend(): string[] {
}
/**
* Render colored column key
* Render human-readable column key
*/
export function renderColorColumnKey(): string[] {
export function renderHumanColumnKey(): string[] {
return [
`${colors.bright}Column Key${colors.reset}`,
`${colors.dim} Read: Tokens to read this observation (cost to learn it now)${colors.reset}`,
@@ -67,9 +67,9 @@ export function renderColorColumnKey(): string[] {
}
/**
* Render colored context index instructions
* Render human-readable context index instructions
*/
export function renderColorContextIndex(): string[] {
export function renderHumanContextIndex(): string[] {
return [
`${colors.dim}Context Index: This semantic index (titles, types, files, tokens) is usually sufficient to understand past work.${colors.reset}`,
'',
@@ -82,9 +82,9 @@ export function renderColorContextIndex(): string[] {
}
/**
* Render colored context economics
* Render human-readable context economics
*/
export function renderColorContextEconomics(
export function renderHumanContextEconomics(
economics: TokenEconomics,
config: ContextConfig
): string[] {
@@ -111,9 +111,9 @@ export function renderColorContextEconomics(
}
/**
* Render colored day header
* Render human-readable day header
*/
export function renderColorDayHeader(day: string): string[] {
export function renderHumanDayHeader(day: string): string[] {
return [
`${colors.bright}${colors.cyan}${day}${colors.reset}`,
''
@@ -121,18 +121,18 @@ export function renderColorDayHeader(day: string): string[] {
}
/**
* Render colored file header
* Render human-readable file header
*/
export function renderColorFileHeader(file: string): string[] {
export function renderHumanFileHeader(file: string): string[] {
return [
`${colors.dim}${file}${colors.reset}`
];
}
/**
* Render colored table row for observation
* Render human-readable table row for observation
*/
export function renderColorTableRow(
export function renderHumanTableRow(
obs: Observation,
time: string,
showTime: boolean,
@@ -150,9 +150,9 @@ export function renderColorTableRow(
}
/**
* Render colored full observation
* Render human-readable full observation
*/
export function renderColorFullObservation(
export function renderHumanFullObservation(
obs: Observation,
time: string,
showTime: boolean,
@@ -181,9 +181,9 @@ export function renderColorFullObservation(
}
/**
* Render colored summary item in timeline
* Render human-readable summary item in timeline
*/
export function renderColorSummaryItem(
export function renderHumanSummaryItem(
summary: { id: number; request: string | null },
formattedTime: string
): string[] {
@@ -195,17 +195,17 @@ export function renderColorSummaryItem(
}
/**
* Render colored summary field
* Render human-readable summary field
*/
export function renderColorSummaryField(label: string, value: string | null, color: string): string[] {
export function renderHumanSummaryField(label: string, value: string | null, color: string): string[] {
if (!value) return [];
return [`${color}${label}:${colors.reset} ${value}`, ''];
}
/**
* Render colored previously section
* Render human-readable previously section
*/
export function renderColorPreviouslySection(priorMessages: PriorMessages): string[] {
export function renderHumanPreviouslySection(priorMessages: PriorMessages): string[] {
if (!priorMessages.assistantMessage) return [];
return [
@@ -220,9 +220,9 @@ export function renderColorPreviouslySection(priorMessages: PriorMessages): stri
}
/**
* Render colored footer
* Render human-readable footer
*/
export function renderColorFooter(totalDiscoveryTokens: number, totalReadTokens: number): string[] {
export function renderHumanFooter(totalDiscoveryTokens: number, totalReadTokens: number): string[] {
const workTokensK = Math.round(totalDiscoveryTokens / 1000);
return [
'',
@@ -231,8 +231,8 @@ export function renderColorFooter(totalDiscoveryTokens: number, totalReadTokens:
}
/**
* Render colored empty state
* Render human-readable empty state
*/
export function renderColorEmptyState(project: string): string {
export function renderHumanEmptyState(project: string): string {
return `\n${colors.bright}${colors.cyan}[${project}] recent context, ${formatHeaderDateTime()}${colors.reset}\n${colors.gray}${'─'.repeat(60)}${colors.reset}\n\n${colors.dim}No previous sessions found for this project yet.${colors.reset}\n`;
}
+10 -10
View File
@@ -6,20 +6,20 @@
import type { ContextConfig, TokenEconomics, PriorMessages } from '../types.js';
import { shouldShowContextEconomics } from '../TokenCalculator.js';
import * as Markdown from '../formatters/MarkdownFormatter.js';
import * as Color from '../formatters/ColorFormatter.js';
import * as Agent from '../formatters/AgentFormatter.js';
import * as Human from '../formatters/HumanFormatter.js';
/**
* Render the previously section (prior assistant message)
*/
export function renderPreviouslySection(
priorMessages: PriorMessages,
useColors: boolean
forHuman: boolean
): string[] {
if (useColors) {
return Color.renderColorPreviouslySection(priorMessages);
if (forHuman) {
return Human.renderHumanPreviouslySection(priorMessages);
}
return Markdown.renderMarkdownPreviouslySection(priorMessages);
return Agent.renderAgentPreviouslySection(priorMessages);
}
/**
@@ -28,15 +28,15 @@ export function renderPreviouslySection(
export function renderFooter(
economics: TokenEconomics,
config: ContextConfig,
useColors: boolean
forHuman: boolean
): string[] {
// Only show footer if we have savings to display
if (!shouldShowContextEconomics(config) || economics.totalDiscoveryTokens <= 0 || economics.savings <= 0) {
return [];
}
if (useColors) {
return Color.renderColorFooter(economics.totalDiscoveryTokens, economics.totalReadTokens);
if (forHuman) {
return Human.renderHumanFooter(economics.totalDiscoveryTokens, economics.totalReadTokens);
}
return Markdown.renderMarkdownFooter(economics.totalDiscoveryTokens, economics.totalReadTokens);
return Agent.renderAgentFooter(economics.totalDiscoveryTokens, economics.totalReadTokens);
}
+18 -18
View File
@@ -6,8 +6,8 @@
import type { ContextConfig, TokenEconomics } from '../types.js';
import { shouldShowContextEconomics } from '../TokenCalculator.js';
import * as Markdown from '../formatters/MarkdownFormatter.js';
import * as Color from '../formatters/ColorFormatter.js';
import * as Agent from '../formatters/AgentFormatter.js';
import * as Human from '../formatters/HumanFormatter.js';
/**
* Render the complete header section
@@ -16,44 +16,44 @@ export function renderHeader(
project: string,
economics: TokenEconomics,
config: ContextConfig,
useColors: boolean
forHuman: boolean
): string[] {
const output: string[] = [];
// Main header
if (useColors) {
output.push(...Color.renderColorHeader(project));
if (forHuman) {
output.push(...Human.renderHumanHeader(project));
} else {
output.push(...Markdown.renderMarkdownHeader(project));
output.push(...Agent.renderAgentHeader(project));
}
// Legend
if (useColors) {
output.push(...Color.renderColorLegend());
if (forHuman) {
output.push(...Human.renderHumanLegend());
} else {
output.push(...Markdown.renderMarkdownLegend());
output.push(...Agent.renderAgentLegend());
}
// Column key
if (useColors) {
output.push(...Color.renderColorColumnKey());
if (forHuman) {
output.push(...Human.renderHumanColumnKey());
} else {
output.push(...Markdown.renderMarkdownColumnKey());
output.push(...Agent.renderAgentColumnKey());
}
// Context index instructions
if (useColors) {
output.push(...Color.renderColorContextIndex());
if (forHuman) {
output.push(...Human.renderHumanContextIndex());
} else {
output.push(...Markdown.renderMarkdownContextIndex());
output.push(...Agent.renderAgentContextIndex());
}
// Context economics
if (shouldShowContextEconomics(config)) {
if (useColors) {
output.push(...Color.renderColorContextEconomics(economics, config));
if (forHuman) {
output.push(...Human.renderHumanContextEconomics(economics, config));
} else {
output.push(...Markdown.renderMarkdownContextEconomics(economics, config));
output.push(...Agent.renderAgentContextEconomics(economics, config));
}
}
@@ -6,8 +6,8 @@
import type { ContextConfig, Observation, SessionSummary } from '../types.js';
import { colors } from '../types.js';
import * as Markdown from '../formatters/MarkdownFormatter.js';
import * as Color from '../formatters/ColorFormatter.js';
import * as Agent from '../formatters/AgentFormatter.js';
import * as Human from '../formatters/HumanFormatter.js';
/**
* Check if summary should be displayed
@@ -45,20 +45,20 @@ export function shouldShowSummary(
*/
export function renderSummaryFields(
summary: SessionSummary,
useColors: boolean
forHuman: boolean
): string[] {
const output: string[] = [];
if (useColors) {
output.push(...Color.renderColorSummaryField('Investigated', summary.investigated, colors.blue));
output.push(...Color.renderColorSummaryField('Learned', summary.learned, colors.yellow));
output.push(...Color.renderColorSummaryField('Completed', summary.completed, colors.green));
output.push(...Color.renderColorSummaryField('Next Steps', summary.next_steps, colors.magenta));
if (forHuman) {
output.push(...Human.renderHumanSummaryField('Investigated', summary.investigated, colors.blue));
output.push(...Human.renderHumanSummaryField('Learned', summary.learned, colors.yellow));
output.push(...Human.renderHumanSummaryField('Completed', summary.completed, colors.green));
output.push(...Human.renderHumanSummaryField('Next Steps', summary.next_steps, colors.magenta));
} else {
output.push(...Markdown.renderMarkdownSummaryField('Investigated', summary.investigated));
output.push(...Markdown.renderMarkdownSummaryField('Learned', summary.learned));
output.push(...Markdown.renderMarkdownSummaryField('Completed', summary.completed));
output.push(...Markdown.renderMarkdownSummaryField('Next Steps', summary.next_steps));
output.push(...Agent.renderAgentSummaryField('Investigated', summary.investigated));
output.push(...Agent.renderAgentSummaryField('Learned', summary.learned));
output.push(...Agent.renderAgentSummaryField('Completed', summary.completed));
output.push(...Agent.renderAgentSummaryField('Next Steps', summary.next_steps));
}
return output;
@@ -1,8 +1,8 @@
/**
 * TimelineRenderer - Renders the chronological timeline of observations and summaries
 *
 * Handles day grouping and rendering. In agent (LLM) mode, uses flat compact lines.
 * In human (terminal) mode, uses file grouping with visual formatting.
 */
import type {
@@ -12,8 +12,8 @@ import type {
SummaryTimelineItem,
} from '../types.js';
import { formatTime, formatDate, formatDateTime, extractFirstFile, parseJsonArray } from '../../../shared/timeline-formatting.js';
import * as Markdown from '../formatters/MarkdownFormatter.js';
import * as Color from '../formatters/ColorFormatter.js';
import * as Agent from '../formatters/AgentFormatter.js';
import * as Human from '../formatters/HumanFormatter.js';
/**
* Group timeline items by day
@@ -51,9 +51,9 @@ function getDetailField(obs: Observation, config: ContextConfig): string | null
}
/**
* Render a single day's timeline items (markdown/LLM mode - flat compact lines)
* Render a single day's timeline items (agent/LLM mode - flat compact lines)
*/
function renderDayTimelineMarkdown(
function renderDayTimelineAgent(
day: string,
dayItems: TimelineItem[],
fullObservationIds: Set<number>,
@@ -61,17 +61,15 @@ function renderDayTimelineMarkdown(
): string[] {
const output: string[] = [];
output.push(...Markdown.renderMarkdownDayHeader(day));
output.push(...Agent.renderAgentDayHeader(day));
let lastTime = '';
for (const item of dayItems) {
if (item.type === 'summary') {
lastTime = '';
const summary = item.data as SummaryTimelineItem;
const formattedTime = formatDateTime(summary.displayTime);
output.push(...Markdown.renderMarkdownSummaryItem(summary, formattedTime));
output.push(...Agent.renderAgentSummaryItem(summary, formattedTime));
} else {
const obs = item.data as Observation;
const time = formatTime(obs.created_at);
@@ -83,9 +81,9 @@ function renderDayTimelineMarkdown(
if (shouldShowFull) {
const detailField = getDetailField(obs, config);
output.push(...Markdown.renderMarkdownFullObservation(obs, timeDisplay, detailField, config));
output.push(...Agent.renderAgentFullObservation(obs, timeDisplay, detailField, config));
} else {
output.push(Markdown.renderMarkdownTableRow(obs, timeDisplay, config));
output.push(Agent.renderAgentTableRow(obs, timeDisplay, config));
}
}
}
@@ -94,9 +92,9 @@ function renderDayTimelineMarkdown(
}
/**
* Render a single day's timeline items (color/terminal mode - file grouped with tables)
* Render a single day's timeline items (human/terminal mode - file grouped with tables)
*/
function renderDayTimelineColor(
function renderDayTimelineHuman(
day: string,
dayItems: TimelineItem[],
fullObservationIds: Set<number>,
@@ -105,7 +103,7 @@ function renderDayTimelineColor(
): string[] {
const output: string[] = [];
output.push(...Color.renderColorDayHeader(day));
output.push(...Human.renderHumanDayHeader(day));
let currentFile: string | null = null;
let lastTime = '';
@@ -117,7 +115,7 @@ function renderDayTimelineColor(
const summary = item.data as SummaryTimelineItem;
const formattedTime = formatDateTime(summary.displayTime);
output.push(...Color.renderColorSummaryItem(summary, formattedTime));
output.push(...Human.renderHumanSummaryItem(summary, formattedTime));
} else {
const obs = item.data as Observation;
const file = extractFirstFile(obs.files_modified, cwd, obs.files_read);
@@ -129,15 +127,15 @@ function renderDayTimelineColor(
// Check if we need a new file section
if (file !== currentFile) {
output.push(...Color.renderColorFileHeader(file));
output.push(...Human.renderHumanFileHeader(file));
currentFile = file;
}
if (shouldShowFull) {
const detailField = getDetailField(obs, config);
output.push(...Color.renderColorFullObservation(obs, time, showTime, detailField, config));
output.push(...Human.renderHumanFullObservation(obs, time, showTime, detailField, config));
} else {
output.push(Color.renderColorTableRow(obs, time, showTime, config));
output.push(Human.renderHumanTableRow(obs, time, showTime, config));
}
}
}
@@ -156,12 +154,12 @@ export function renderDayTimeline(
fullObservationIds: Set<number>,
config: ContextConfig,
cwd: string,
useColors: boolean
forHuman: boolean
): string[] {
if (useColors) {
return renderDayTimelineColor(day, dayItems, fullObservationIds, config, cwd);
if (forHuman) {
return renderDayTimelineHuman(day, dayItems, fullObservationIds, config, cwd);
}
return renderDayTimelineMarkdown(day, dayItems, fullObservationIds, config);
return renderDayTimelineAgent(day, dayItems, fullObservationIds, config);
}
/**
@@ -172,13 +170,13 @@ export function renderTimeline(
fullObservationIds: Set<number>,
config: ContextConfig,
cwd: string,
useColors: boolean
forHuman: boolean
): string[] {
const output: string[] = [];
const itemsByDay = groupTimelineByDay(timeline);
for (const [day, dayItems] of itemsByDay) {
output.push(...renderDayTimeline(day, dayItems, fullObservationIds, config, cwd, useColors));
output.push(...renderDayTimeline(day, dayItems, fullObservationIds, config, cwd, forHuman));
}
return output;
+3
View File
@@ -15,6 +15,7 @@ export interface ContextInput {
projects?: string[];
/** When true, return ALL observations with no limit */
full?: boolean;
platform_source?: string;
[key: string]: any;
}
@@ -49,6 +50,7 @@ export interface ContextConfig {
export interface Observation {
id: number;
memory_session_id: string;
platform_source?: string;
type: string;
title: string | null;
subtitle: string | null;
@@ -70,6 +72,7 @@ export interface Observation {
export interface SessionSummary {
id: number;
memory_session_id: string;
platform_source?: string;
request: string | null;
investigated: string | null;
learned: string | null;
+15 -2
View File
@@ -453,6 +453,19 @@ export async function aggressiveStartupCleanup(): Promise<void> {
const pidsToKill: number[] = [];
const allPatterns = [...AGGRESSIVE_CLEANUP_PATTERNS, ...AGE_GATED_CLEANUP_PATTERNS];
// Protect parent process (the hook that spawned us) from being killed.
// Without this, a new daemon kills its own parent hook process (#1426).
//
// Note: readPidFile() is not used here because start() writes the new PID
// before initializeBackground() calls this function, so readPidFile() would
// just return process.pid (already protected). If a pre-existing worker needs
// protection, ensureWorkerStarted() handles that by returning early when a
// healthy worker is detected — we never reach this code in that case.
const protectedPids = new Set<number>([currentPid]);
if (process.ppid && process.ppid > 0) {
protectedPids.add(process.ppid);
}
try {
if (isWindows) {
// Use WQL -Filter for server-side filtering (no $_ pipeline syntax).
@@ -475,7 +488,7 @@ export async function aggressiveStartupCleanup(): Promise<void> {
for (const proc of processList) {
const pid = proc.ProcessId;
if (!Number.isInteger(pid) || pid <= 0 || pid === currentPid) continue;
if (!Number.isInteger(pid) || pid <= 0 || protectedPids.has(pid)) continue;
const commandLine = proc.CommandLine || '';
const isAggressive = AGGRESSIVE_CLEANUP_PATTERNS.some(p => commandLine.includes(p));
@@ -518,7 +531,7 @@ export async function aggressiveStartupCleanup(): Promise<void> {
const etime = match[2];
const command = match[3];
if (!Number.isInteger(pid) || pid <= 0 || pid === currentPid) continue;
if (!Number.isInteger(pid) || pid <= 0 || protectedPids.has(pid)) continue;
const isAggressive = AGGRESSIVE_CLEANUP_PATTERNS.some(p => command.includes(p));
+497 -15
View File
@@ -3,13 +3,15 @@
*
* No native bindings. No WASM. Just the CLI binary + query patterns.
*
 * Supported: JS, TS, Python, Go, Rust, Ruby, Java, C, C++,
 * Kotlin, Swift, PHP, Elixir, Lua, Scala, Bash, Haskell, Zig,
 * CSS, SCSS, TOML, YAML, SQL, Markdown
*
* by Copter Labs
*/
import { execFileSync } from "node:child_process";
import { writeFileSync, readFileSync, mkdtempSync, rmSync, existsSync } from "node:fs";
import { createRequire } from "node:module";
import { tmpdir } from "node:os";
import { join, dirname } from "node:path";
@@ -25,7 +27,7 @@ const _require = typeof __filename !== 'undefined'
export interface CodeSymbol {
name: string;
kind: "function" | "class" | "method" | "interface" | "type" | "const" | "variable" | "export" | "struct" | "enum" | "trait" | "impl" | "property" | "getter" | "setter";
kind: "function" | "class" | "method" | "interface" | "type" | "const" | "variable" | "export" | "struct" | "enum" | "trait" | "impl" | "property" | "getter" | "setter" | "mixin" | "section" | "code" | "metadata" | "reference";
signature: string;
jsdoc?: string;
lineStart: number;
@@ -66,6 +68,28 @@ const LANG_MAP: Record<string, string> = {
".cxx": "cpp",
".hpp": "cpp",
".hh": "cpp",
".kt": "kotlin",
".kts": "kotlin",
".swift": "swift",
".php": "php",
".ex": "elixir",
".exs": "elixir",
".lua": "lua",
".scala": "scala",
".sc": "scala",
".sh": "bash",
".bash": "bash",
".zsh": "bash",
".hs": "haskell",
".zig": "zig",
".css": "css",
".scss": "scss",
".toml": "toml",
".yml": "yaml",
".yaml": "yaml",
".sql": "sql",
".md": "markdown",
".mdx": "markdown",
};
export function detectLanguage(filePath: string): string {
@@ -73,6 +97,135 @@ export function detectLanguage(filePath: string): string {
return LANG_MAP[ext] || "unknown";
}
/**
 * Resolve a file's language from its extension, consulting user-configured
 * grammar extensions when the bundled table has no entry. Bundled LANG_MAP
 * mappings always take priority over user-defined ones.
 */
function detectLanguageWithUserGrammars(filePath: string, userConfig: UserGrammarConfig): string {
  const dotIndex = filePath.lastIndexOf(".");
  const ext = filePath.slice(dotIndex);

  const bundled = LANG_MAP[ext];
  if (bundled) {
    return bundled;
  }

  const userDefined = userConfig.extensionToLanguage[ext];
  return userDefined ? userDefined : "unknown";
}
/**
 * Pick the tree-sitter query key for a language. A key registered in the
 * user's grammar config wins; otherwise defer to the bundled mapping via
 * getQueryKey().
 */
function getUserAwareQueryKey(language: string, userConfig: UserGrammarConfig): string {
  const customKey = userConfig.languageToQueryKey[language];
  return customKey ? customKey : getQueryKey(language);
}
// --- User-installable grammars via .claude-mem.json ---
export interface UserGrammarEntry {
package: string;
extensions: string[];
query?: string;
}
export interface UserGrammarConfig {
/** language name → grammar entry */
grammars: Record<string, UserGrammarEntry>;
/** file extension → language name (for user-defined extensions only) */
extensionToLanguage: Record<string, string>;
/** language name → query content (custom .scm file content or "generic") */
languageToQueryKey: Record<string, string>;
}
const userGrammarCache = new Map<string, UserGrammarConfig>();
const EMPTY_USER_GRAMMAR_CONFIG: UserGrammarConfig = {
grammars: {},
extensionToLanguage: {},
languageToQueryKey: {},
};
/**
* Load user grammar configuration from .claude-mem.json in a project root.
* Cached per project root. Returns empty config if file doesn't exist or is invalid.
* User entries do NOT override bundled grammars.
*/
export function loadUserGrammars(projectRoot: string): UserGrammarConfig {
  // Memoized per project root so the config and query files are parsed at most once.
  if (userGrammarCache.has(projectRoot)) return userGrammarCache.get(projectRoot)!;

  const configPath = join(projectRoot, ".claude-mem.json");

  let rawConfig: Record<string, unknown>;
  try {
    const content = readFileSync(configPath, "utf-8");
    rawConfig = JSON.parse(content);
  } catch {
    // Missing or unparsable config file: cache the shared empty config so we
    // don't re-read the filesystem on every call.
    userGrammarCache.set(projectRoot, EMPTY_USER_GRAMMAR_CONFIG);
    return EMPTY_USER_GRAMMAR_CONFIG;
  }

  const grammarsRaw = rawConfig.grammars;
  if (!grammarsRaw || typeof grammarsRaw !== "object" || Array.isArray(grammarsRaw)) {
    // "grammars" key absent or not a plain object — treat as no user grammars.
    userGrammarCache.set(projectRoot, EMPTY_USER_GRAMMAR_CONFIG);
    return EMPTY_USER_GRAMMAR_CONFIG;
  }

  const config: UserGrammarConfig = {
    grammars: {},
    extensionToLanguage: {},
    languageToQueryKey: {},
  };

  for (const [language, entry] of Object.entries(grammarsRaw as Record<string, unknown>)) {
    // Skip if this language is already bundled
    if (GRAMMAR_PACKAGES[language]) continue;

    // Each entry must be a plain object; arrays/null/primitives are ignored.
    if (!entry || typeof entry !== "object" || Array.isArray(entry)) continue;
    const typedEntry = entry as Record<string, unknown>;

    const pkg = typedEntry.package;
    const extensions = typedEntry.extensions;
    const queryPath = typedEntry.query;

    // Validate required fields
    if (typeof pkg !== "string" || !Array.isArray(extensions)) continue;
    if (!extensions.every((e: unknown) => typeof e === "string")) continue;

    config.grammars[language] = {
      package: pkg,
      extensions: extensions as string[],
      query: typeof queryPath === "string" ? queryPath : undefined,
    };

    // Map extensions to language (skip extensions already handled by bundled LANG_MAP)
    for (const ext of extensions as string[]) {
      if (!LANG_MAP[ext]) {
        config.extensionToLanguage[ext] = language;
      }
    }

    // Resolve query content
    if (typeof queryPath === "string") {
      // queryPath is resolved relative to the project root.
      const fullQueryPath = join(projectRoot, queryPath);
      try {
        const queryContent = readFileSync(fullQueryPath, "utf-8");
        // Store with a unique key to avoid collisions with built-in queries
        // NOTE: this mutates the module-level QUERIES registry as a side effect.
        const queryKey = `user_${language}`;
        QUERIES[queryKey] = queryContent;
        config.languageToQueryKey[language] = queryKey;
      } catch {
        console.error(`[smart-file-read] Custom query file not found: ${fullQueryPath}, falling back to generic`);
        config.languageToQueryKey[language] = "generic";
      }
    } else {
      // No custom query supplied — fall back to the generic query.
      config.languageToQueryKey[language] = "generic";
    }
  }

  userGrammarCache.set(projectRoot, config);
  return config;
}
// --- Grammar path resolution ---
const GRAMMAR_PACKAGES: Record<string, string> = {
@@ -86,11 +239,45 @@ const GRAMMAR_PACKAGES: Record<string, string> = {
java: "tree-sitter-java",
c: "tree-sitter-c",
cpp: "tree-sitter-cpp",
kotlin: "tree-sitter-kotlin",
swift: "tree-sitter-swift",
php: "tree-sitter-php/php",
elixir: "tree-sitter-elixir",
lua: "@tree-sitter-grammars/tree-sitter-lua",
scala: "tree-sitter-scala",
bash: "tree-sitter-bash",
haskell: "tree-sitter-haskell",
zig: "@tree-sitter-grammars/tree-sitter-zig",
css: "tree-sitter-css",
scss: "tree-sitter-scss",
toml: "@tree-sitter-grammars/tree-sitter-toml",
yaml: "@tree-sitter-grammars/tree-sitter-yaml",
sql: "@derekstride/tree-sitter-sql",
markdown: "@tree-sitter-grammars/tree-sitter-markdown",
};
// Grammars where the parser source lives in a subdirectory of the npm package root,
// AND that subdirectory lacks its own package.json (so require.resolve won't find it).
// Maps language → subdirectory name under the package root.
// Consumed by resolveGrammarPath(): it resolves the package root and appends this subdir.
const GRAMMAR_SUBDIR: Record<string, string> = {
  // @tree-sitter-grammars/tree-sitter-markdown keeps the block grammar under
  // <pkg>/tree-sitter-markdown with no nested package.json of its own.
  markdown: "tree-sitter-markdown",
};
function resolveGrammarPath(language: string): string | null {
const pkg = GRAMMAR_PACKAGES[language];
if (!pkg) return null;
const subdir = GRAMMAR_SUBDIR[language];
if (subdir) {
// Package root has no sub-package.json — resolve root then append subdir
try {
const rootPkgPath = _require.resolve(pkg + "/package.json");
const resolved = join(dirname(rootPkgPath), subdir);
if (existsSync(join(resolved, "src"))) return resolved;
} catch { /* fall through */ }
return null;
}
try {
const packageJsonPath = _require.resolve(pkg + "/package.json");
return dirname(packageJsonPath);
@@ -99,6 +286,37 @@ function resolveGrammarPath(language: string): string | null {
}
}
/**
 * Locate the tree-sitter grammar directory for a language.
 *
 * Bundled grammars (GRAMMAR_PACKAGES) take priority. When none is bundled
 * and a projectRoot is given, the project's own node_modules is consulted
 * via the user grammar config; the package must contain a src/ directory
 * (required by the tree-sitter CLI) to be accepted.
 *
 * @returns absolute grammar directory, or null when nothing usable is found.
 */
export function resolveGrammarPathWithFallback(language: string, projectRoot?: string): string | null {
  const bundled = resolveGrammarPath(language);
  if (bundled) return bundled;
  if (!projectRoot) return null;

  const entry = loadUserGrammars(projectRoot).grammars[language];
  if (!entry) return null;

  try {
    const pkgJsonPath = join(projectRoot, "node_modules", entry.package, "package.json");
    if (existsSync(pkgJsonPath)) {
      const grammarDir = dirname(pkgJsonPath);
      // tree-sitter CLI needs the generated parser sources under src/
      if (existsSync(join(grammarDir, "src"))) return grammarDir;
    }
  } catch {
    // Treat any filesystem failure the same as "package not installed".
  }
  console.error(`[smart-file-read] Grammar package not found for "${language}": ${entry.package} (install it in your project's node_modules)`);
  return null;
}
// --- Query patterns (declarative symbol extraction) ---
const QUERIES: Record<string, string> = {
@@ -150,6 +368,104 @@ const QUERIES: Record<string, string> = {
(interface_declaration name: (identifier) @name) @iface
(enum_declaration name: (identifier) @name) @enm
(import_declaration) @imp
`,
kotlin: `
(function_declaration (simple_identifier) @name) @func
(class_declaration (type_identifier) @name) @cls
(object_declaration (type_identifier) @name) @cls
(import_header) @imp
`,
swift: `
(function_declaration name: (simple_identifier) @name) @func
(class_declaration name: (type_identifier) @name) @cls
(protocol_declaration name: (type_identifier) @name) @iface
(import_declaration) @imp
`,
php: `
(function_definition name: (name) @name) @func
(class_declaration name: (name) @name) @cls
(interface_declaration name: (name) @name) @iface
(trait_declaration name: (name) @name) @trait_def
(method_declaration name: (name) @name) @method
(namespace_use_declaration) @imp
`,
lua: `
(function_declaration name: (identifier) @name) @func
(function_declaration name: (dot_index_expression) @name) @func
(function_declaration name: (method_index_expression) @name) @func
`,
scala: `
(function_definition name: (identifier) @name) @func
(class_definition name: (identifier) @name) @cls
(object_definition name: (identifier) @name) @cls
(trait_definition name: (identifier) @name) @trait_def
(import_declaration) @imp
`,
bash: `
(function_definition name: (word) @name) @func
`,
haskell: `
(function name: (variable) @name) @func
(type_synomym name: (name) @name) @tdef
(newtype name: (name) @name) @tdef
(data_type name: (name) @name) @tdef
(class name: (name) @name) @cls
(import) @imp
`,
zig: `
(function_declaration name: (identifier) @name) @func
(test_declaration) @func
`,
css: `
(rule_set (selectors) @name) @func
(media_statement) @cls
(keyframes_statement (keyframes_name) @name) @cls
(import_statement) @imp
`,
scss: `
(rule_set (selectors) @name) @func
(media_statement) @cls
(keyframes_statement (keyframes_name) @name) @cls
(import_statement) @imp
(mixin_statement name: (identifier) @name) @mixin_def
(function_statement name: (identifier) @name) @func
(include_statement) @imp
`,
toml: `
(table (bare_key) @name) @cls
(table (dotted_key) @name) @cls
(table_array_element (bare_key) @name) @cls
(table_array_element (dotted_key) @name) @cls
`,
yaml: `
(block_mapping_pair key: (flow_node) @name) @func
`,
sql: `
(create_table (object_reference) @name) @cls
(create_function (object_reference) @name) @func
(create_view (object_reference) @name) @cls
`,
markdown: `
(atx_heading heading_content: (inline) @name) @heading
(setext_heading heading_content: (paragraph) @name) @heading
(fenced_code_block (info_string (language) @name)) @code_block
(fenced_code_block) @code_block
(minus_metadata) @frontmatter
(link_reference_definition (link_label) @name) @ref
`,
generic: `
@@ -159,6 +475,15 @@ const QUERIES: Record<string, string> = {
(class_definition name: (identifier) @name) @cls
(import_statement) @imp
(import_declaration) @imp
`,
php: `
(function_definition name: (name) @name) @func
(method_declaration name: (name) @name) @method
(class_declaration name: (name) @name) @cls
(interface_declaration name: (name) @name) @iface
(trait_declaration name: (name) @name) @trait_def
(namespace_use_declaration) @imp
`,
};
@@ -173,6 +498,21 @@ function getQueryKey(language: string): string {
case "rust": return "rust";
case "ruby": return "ruby";
case "java": return "java";
case "kotlin": return "kotlin";
case "swift": return "swift";
case "php": return "php";
case "elixir": return "generic";
case "lua": return "lua";
case "scala": return "scala";
case "bash": return "bash";
case "haskell": return "haskell";
case "zig": return "zig";
case "css": return "css";
case "scss": return "scss";
case "toml": return "toml";
case "yaml": return "yaml";
case "sql": return "sql";
case "markdown": return "markdown";
default: return "generic";
}
}
@@ -308,6 +648,11 @@ const KIND_MAP: Record<string, CodeSymbol["kind"]> = {
struct_def: "struct",
trait_def: "trait",
impl_def: "impl",
mixin_def: "mixin",
heading: "section",
code_block: "code",
frontmatter: "metadata",
ref: "reference",
};
// Symbol kinds that can own nested members; used by the method-nesting pass
// in buildSymbols ("Nest methods inside containers").
const CONTAINER_KINDS = new Set(["class", "struct", "impl", "trait"]);
@@ -407,18 +752,36 @@ function buildSymbols(matches: RawMatch[], lines: string[], language: string): {
const nameCapture = match.captures.find(c => c.tag === "name");
if (!kindCapture) continue;
const name = nameCapture?.text || "anonymous";
const startRow = kindCapture.startRow;
const endRow = kindCapture.endRow;
const kind = KIND_MAP[kindCapture.tag];
const name = nameCapture?.text || "anonymous";
const comment = findCommentAbove(lines, startRow);
// Markdown-specific: extract heading level and build signature
let signature: string;
if (language === "markdown" && kind === "section") {
const headingLine = lines[startRow] || "";
const hashMatch = headingLine.match(/^(#{1,6})\s/);
const level = hashMatch ? hashMatch[1].length : 1;
signature = `${"#".repeat(level)} ${name}`;
} else if (language === "markdown" && kind === "code") {
const langTag = name !== "anonymous" ? name : "";
signature = langTag ? "```" + langTag : "```";
} else if (language === "markdown" && kind === "metadata") {
signature = "---frontmatter---";
} else if (language === "markdown" && kind === "reference") {
signature = lines[startRow]?.trim() || name;
} else {
signature = extractSignatureFromLines(lines, startRow, endRow);
}
const comment = language === "markdown" ? undefined : findCommentAbove(lines, startRow);
const docstring = language === "python" ? findPythonDocstringFromLines(lines, startRow, endRow) : undefined;
const sym: CodeSymbol = {
name,
kind,
signature: extractSignatureFromLines(lines, startRow, endRow),
signature,
jsdoc: comment || docstring,
lineStart: startRow,
lineEnd: endRow,
@@ -433,6 +796,34 @@ function buildSymbols(matches: RawMatch[], lines: string[], language: string): {
symbols.push(sym);
}
// Markdown: deduplicate code_block matches. The catch-all `(fenced_code_block) @code_block`
// pattern and the language-specific pattern both match the same block. Keep the named one.
if (language === "markdown") {
const codeBlocksByRange = new Map<string, CodeSymbol>();
const duplicateCodeBlocks = new Set<CodeSymbol>();
for (const sym of symbols) {
if (sym.kind !== "code") continue;
const rangeKey = `${sym.lineStart}:${sym.lineEnd}`;
const existing = codeBlocksByRange.get(rangeKey);
if (existing) {
// Prefer the named version (has actual language tag vs "anonymous")
if (sym.name !== "anonymous") {
duplicateCodeBlocks.add(existing);
codeBlocksByRange.set(rangeKey, sym);
} else {
duplicateCodeBlocks.add(sym);
}
} else {
codeBlocksByRange.set(rangeKey, sym);
}
}
if (duplicateCodeBlocks.size > 0) {
const filtered = symbols.filter(s => !duplicateCodeBlocks.has(s));
symbols.length = 0;
symbols.push(...filtered);
}
}
// Nest methods inside containers
const nested = new Set<CodeSymbol>();
for (const container of containers) {
@@ -451,11 +842,12 @@ function buildSymbols(matches: RawMatch[], lines: string[], language: string): {
// --- Main parse functions ---
export function parseFile(content: string, filePath: string): FoldedFile {
const language = detectLanguage(filePath);
export function parseFile(content: string, filePath: string, projectRoot?: string): FoldedFile {
const userConfig = projectRoot ? loadUserGrammars(projectRoot) : EMPTY_USER_GRAMMAR_CONFIG;
const language = detectLanguageWithUserGrammars(filePath, userConfig);
const lines = content.split("\n");
const grammarPath = resolveGrammarPath(language);
const grammarPath = resolveGrammarPathWithFallback(language, projectRoot);
if (!grammarPath) {
return {
filePath, language, symbols: [], imports: [],
@@ -463,7 +855,7 @@ export function parseFile(content: string, filePath: string): FoldedFile {
};
}
const queryKey = getQueryKey(language);
const queryKey = getUserAwareQueryKey(language, userConfig);
const queryFile = getQueryFile(queryKey);
// Write content to temp file with correct extension for language detection
@@ -498,20 +890,22 @@ export function parseFile(content: string, filePath: string): FoldedFile {
* Much faster than calling parseFile() per file (one process spawn per language vs per file).
*/
export function parseFilesBatch(
files: Array<{ absolutePath: string; relativePath: string; content: string }>
files: Array<{ absolutePath: string; relativePath: string; content: string }>,
projectRoot?: string
): Map<string, FoldedFile> {
const results = new Map<string, FoldedFile>();
const userConfig = projectRoot ? loadUserGrammars(projectRoot) : EMPTY_USER_GRAMMAR_CONFIG;
// Group files by language (and thus by query + grammar)
const languageGroups = new Map<string, typeof files>();
for (const file of files) {
const language = detectLanguage(file.relativePath);
const language = detectLanguageWithUserGrammars(file.relativePath, userConfig);
if (!languageGroups.has(language)) languageGroups.set(language, []);
languageGroups.get(language)!.push(file);
}
for (const [language, groupFiles] of languageGroups) {
const grammarPath = resolveGrammarPath(language);
const grammarPath = resolveGrammarPathWithFallback(language, projectRoot);
if (!grammarPath) {
// No grammar — return empty results for these files
for (const file of groupFiles) {
@@ -524,7 +918,7 @@ export function parseFilesBatch(
continue;
}
const queryKey = getQueryKey(language);
const queryKey = getUserAwareQueryKey(language, userConfig);
const queryFile = getQueryFile(queryKey);
// Run one batch query for all files of this language
@@ -558,6 +952,10 @@ export function parseFilesBatch(
// --- Formatting ---
export function formatFoldedView(file: FoldedFile): string {
if (file.language === "markdown") {
return formatMarkdownFoldedView(file);
}
const parts: string[] = [];
parts.push(`📁 ${file.filePath} (${file.language}, ${file.totalLines} lines)`);
@@ -581,6 +979,64 @@ export function formatFoldedView(file: FoldedFile): string {
return parts.join("\n");
}
function formatMarkdownFoldedView(file: FoldedFile): string {
const parts: string[] = [];
// Total width for the content column (before the line range)
const COL_WIDTH = 56;
parts.push(`📄 ${file.filePath} (${file.language}, ${file.totalLines} lines)`);
for (const sym of file.symbols) {
if (sym.kind === "section") {
// Extract heading level from the signature (count leading # characters)
const hashMatch = sym.signature.match(/^(#{1,6})\s/);
const level = hashMatch ? hashMatch[1].length : 1;
const indent = " ".repeat(level);
const lineRange = `L${sym.lineStart + 1}`;
const content = `${indent}${sym.signature}`;
parts.push(`${content.padEnd(COL_WIDTH)}${lineRange}`);
} else if (sym.kind === "code") {
// Find containing heading level for indentation
const containingLevel = findContainingHeadingLevel(file.symbols, sym.lineStart);
const indent = " ".repeat(containingLevel + 1);
const lineRange = sym.lineStart === sym.lineEnd
? `L${sym.lineStart + 1}`
: `L${sym.lineStart + 1}-${sym.lineEnd + 1}`;
const content = `${indent}${sym.signature}`;
parts.push(`${content.padEnd(COL_WIDTH)}${lineRange}`);
} else if (sym.kind === "metadata") {
const lineRange = sym.lineStart === sym.lineEnd
? `L${sym.lineStart + 1}`
: `L${sym.lineStart + 1}-${sym.lineEnd + 1}`;
const content = ` ${sym.signature}`;
parts.push(`${content.padEnd(COL_WIDTH)}${lineRange}`);
} else if (sym.kind === "reference") {
const containingLevel = findContainingHeadingLevel(file.symbols, sym.lineStart);
const indent = " ".repeat(containingLevel + 1);
const lineRange = `L${sym.lineStart + 1}`;
const content = `${indent}${sym.name}`;
parts.push(`${content.padEnd(COL_WIDTH)}${lineRange}`);
}
}
return parts.join("\n");
}
/**
 * Level (1–6) of the closest section heading strictly before the given line.
 * Scans symbols in order, so the last qualifying heading wins (document
 * order). Returns 0 when no heading precedes the line.
 */
function findContainingHeadingLevel(symbols: CodeSymbol[], lineStart: number): number {
  let level = 0;
  for (const sym of symbols) {
    if (sym.kind !== "section" || sym.lineStart >= lineStart) continue;
    const hashes = sym.signature.match(/^(#{1,6})\s/);
    level = hashes ? hashes[1].length : 1;
  }
  return level;
}
function formatSymbol(sym: CodeSymbol, indent: string): string {
const parts: string[] = [];
@@ -621,7 +1077,8 @@ function getSymbolIcon(kind: CodeSymbol["kind"]): string {
function: "ƒ", method: "ƒ", class: "◆", interface: "◇",
type: "◇", const: "●", variable: "○", export: "→",
struct: "◆", enum: "▣", trait: "◇", impl: "◈",
property: "○", getter: "⇢", setter: "⇠",
property: "○", getter: "⇢", setter: "⇠", mixin: "◈",
section: "§", code: "⌘", metadata: "◊", reference: "↗",
};
return icons[kind] || "·";
}
@@ -647,6 +1104,31 @@ export function unfoldSymbol(content: string, filePath: string, symbolName: stri
const lines = content.split("\n");
// Markdown section unfold: return from heading to next heading of same or higher level
if (file.language === "markdown" && symbol.kind === "section") {
const hashMatch = symbol.signature.match(/^(#{1,6})\s/);
const level = hashMatch ? hashMatch[1].length : 1;
const start = symbol.lineStart;
// Find the next heading at same or higher (lower number) level
let end = lines.length - 1;
for (const sym of file.symbols) {
if (sym.kind === "section" && sym.lineStart > start) {
const otherHashMatch = sym.signature.match(/^(#{1,6})\s/);
const otherLevel = otherHashMatch ? otherHashMatch[1].length : 1;
if (otherLevel <= level) {
end = sym.lineStart - 1;
// Trim trailing blank lines
while (end > start && lines[end].trim() === "") end--;
break;
}
}
}
const extracted = lines.slice(start, end + 1).join("\n");
return `<!-- 📍 ${filePath} L${start + 1}-${end + 1} -->\n${extracted}`;
}
// Include preceding comments/decorators
let start = symbol.lineStart;
for (let i = symbol.lineStart - 1; i >= 0; i--) {
+33 -8
View File
@@ -12,7 +12,7 @@
import { readFile, readdir, stat } from "node:fs/promises";
import { join, relative } from "node:path";
import { parseFilesBatch, formatFoldedView, type FoldedFile } from "./parser.js";
import { parseFilesBatch, formatFoldedView, loadUserGrammars, type FoldedFile } from "./parser.js";
const CODE_EXTENSIONS = new Set([
".js", ".jsx", ".ts", ".tsx", ".mjs", ".cjs",
@@ -22,11 +22,22 @@ const CODE_EXTENSIONS = new Set([
".rb",
".java",
".cs",
".cpp", ".c", ".h", ".hpp",
".cpp", ".cc", ".cxx", ".c", ".h", ".hpp", ".hh",
".swift",
".kt",
".kt", ".kts",
".php",
".vue", ".svelte",
".ex", ".exs",
".lua",
".scala", ".sc",
".sh", ".bash", ".zsh",
".hs",
".zig",
".css", ".scss",
".toml",
".yml", ".yaml",
".sql",
".md", ".mdx",
]);
const IGNORE_DIRS = new Set([
@@ -59,8 +70,9 @@ export interface SymbolMatch {
/**
* Walk a directory recursively, yielding file paths.
* extraExtensions: additional file extensions to include (from user grammar config).
*/
async function* walkDir(dir: string, rootDir: string, maxDepth: number = 20): AsyncGenerator<string> {
async function* walkDir(dir: string, rootDir: string, maxDepth: number = 20, extraExtensions?: Set<string>): AsyncGenerator<string> {
if (maxDepth <= 0) return;
let entries;
@@ -77,10 +89,10 @@ async function* walkDir(dir: string, rootDir: string, maxDepth: number = 20): As
const fullPath = join(dir, entry.name);
if (entry.isDirectory()) {
yield* walkDir(fullPath, rootDir, maxDepth - 1);
yield* walkDir(fullPath, rootDir, maxDepth - 1, extraExtensions);
} else if (entry.isFile()) {
const ext = entry.name.slice(entry.name.lastIndexOf("."));
if (CODE_EXTENSIONS.has(ext)) {
if (CODE_EXTENSIONS.has(ext) || (extraExtensions && extraExtensions.has(ext))) {
yield fullPath;
}
}
@@ -121,16 +133,29 @@ export async function searchCodebase(
maxResults?: number;
includeImports?: boolean;
filePattern?: string;
projectRoot?: string;
} = {}
): Promise<SearchResult> {
const maxResults = options.maxResults || 20;
const queryLower = query.toLowerCase();
const queryParts = queryLower.split(/[\s_\-./]+/).filter(p => p.length > 0);
// Load user grammar config for extra file extensions
const projectRoot = options.projectRoot || rootDir;
const userConfig = loadUserGrammars(projectRoot);
const extraExtensions = new Set<string>();
for (const entry of Object.values(userConfig.grammars)) {
for (const ext of entry.extensions) {
if (!CODE_EXTENSIONS.has(ext)) {
extraExtensions.add(ext);
}
}
}
// Phase 1: Collect files
const filesToParse: Array<{ absolutePath: string; relativePath: string; content: string }> = [];
for await (const filePath of walkDir(rootDir, rootDir)) {
for await (const filePath of walkDir(rootDir, rootDir, 20, extraExtensions.size > 0 ? extraExtensions : undefined)) {
if (options.filePattern) {
const relPath = relative(rootDir, filePath);
if (!relPath.toLowerCase().includes(options.filePattern.toLowerCase())) continue;
@@ -147,7 +172,7 @@ export async function searchCodebase(
}
// Phase 2: Batch parse (one CLI call per language)
const parsedFiles = parseFilesBatch(filesToParse);
const parsedFiles = parseFilesBatch(filesToParse, projectRoot);
// Phase 3: Match query against symbols
const foldedFiles: FoldedFile[] = [];
+6 -3
View File
@@ -3,6 +3,7 @@ import { TableNameRow } from '../../types/database.js';
import { DATA_DIR, DB_PATH, ensureDir } from '../../shared/paths.js';
import { logger } from '../../utils/logger.js';
import { isDirectChild } from '../../shared/path-utils.js';
import { AppError } from '../server/ErrorHandler.js';
import {
ObservationSearchResult,
SessionSummarySearchResult,
@@ -22,6 +23,8 @@ import {
export class SessionSearch {
private db: Database;
private static readonly MISSING_SEARCH_INPUT_MESSAGE = 'Either query or filters required for search';
constructor(dbPath?: string) {
if (!dbPath) {
ensureDir(DATA_DIR);
@@ -280,7 +283,7 @@ export class SessionSearch {
if (!query) {
const filterClause = this.buildFilterClause(filters, params, 'o');
if (!filterClause) {
throw new Error('Either query or filters required for search');
throw new AppError(SessionSearch.MISSING_SEARCH_INPUT_MESSAGE, 400, 'INVALID_SEARCH_REQUEST');
}
const orderClause = this.buildOrderClause(orderBy, false);
@@ -317,7 +320,7 @@ export class SessionSearch {
delete filterOptions.type;
const filterClause = this.buildFilterClause(filterOptions, params, 's');
if (!filterClause) {
throw new Error('Either query or filters required for search');
throw new AppError(SessionSearch.MISSING_SEARCH_INPUT_MESSAGE, 400, 'INVALID_SEARCH_REQUEST');
}
const orderClause = orderBy === 'date_asc'
@@ -551,7 +554,7 @@ export class SessionSearch {
// FILTER-ONLY PATH: When no query text, query user_prompts table directly
if (!query) {
if (baseConditions.length === 0) {
throw new Error('Either query or filters required for search');
throw new AppError(SessionSearch.MISSING_SEARCH_INPUT_MESSAGE, 400, 'INVALID_SEARCH_REQUEST');
}
const whereClause = `WHERE ${baseConditions.join(' AND ')}`;
+251 -53
View File
@@ -14,6 +14,18 @@ import {
} from '../../types/database.js';
import type { PendingMessageStore } from './PendingMessageStore.js';
import { computeObservationContentHash, findDuplicateObservation } from './observations/store.js';
import { parseFileList } from './observations/files.js';
import { DEFAULT_PLATFORM_SOURCE, normalizePlatformSource, sortPlatformSources } from '../../shared/platform-source.js';
function resolveCreateSessionArgs(
customTitle?: string,
platformSource?: string
): { customTitle?: string; platformSource?: string } {
return {
customTitle,
platformSource: platformSource ? normalizePlatformSource(platformSource) : undefined
};
}
/**
* Session data store for SDK sessions, observations, and summaries
@@ -51,6 +63,8 @@ export class SessionStore {
this.addOnUpdateCascadeToForeignKeys();
this.addObservationContentHashColumn();
this.addSessionCustomTitleColumn();
this.addSessionPlatformSourceColumn();
this.addObservationModelColumns();
}
/**
@@ -78,6 +92,7 @@ export class SessionStore {
content_session_id TEXT UNIQUE NOT NULL,
memory_session_id TEXT UNIQUE,
project TEXT NOT NULL,
platform_source TEXT NOT NULL DEFAULT 'claude',
user_prompt TEXT,
started_at TEXT NOT NULL,
started_at_epoch INTEGER NOT NULL,
@@ -875,6 +890,60 @@ export class SessionStore {
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(23, new Date().toISOString());
}
  /**
   * Add platform_source column to sdk_sessions for Claude/Codex isolation (migration 24)
   *
   * Idempotent and self-healing: column presence, index presence, and the
   * schema_versions row are checked independently, so a partially applied
   * migration is completed on the next startup instead of being skipped.
   */
  private addSessionPlatformSourceColumn(): void {
    // Inspect the live schema directly — a recorded version row alone does
    // not prove the DDL actually ran.
    const tableInfo = this.db.query('PRAGMA table_info(sdk_sessions)').all() as TableColumnInfo[];
    const hasColumn = tableInfo.some(col => col.name === 'platform_source');
    const indexInfo = this.db.query('PRAGMA index_list(sdk_sessions)').all() as IndexInfo[];
    const hasIndex = indexInfo.some(index => index.name === 'idx_sdk_sessions_platform_source');
    const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(24) as SchemaVersion | undefined;
    // Everything already in place: migration fully applied.
    if (applied && hasColumn && hasIndex) return;
    if (!hasColumn) {
      // DEFAULT_PLATFORM_SOURCE is a project constant; it is interpolated
      // because an ALTER TABLE ... DEFAULT clause cannot be parameterized.
      this.db.run(`ALTER TABLE sdk_sessions ADD COLUMN platform_source TEXT NOT NULL DEFAULT '${DEFAULT_PLATFORM_SOURCE}'`);
      logger.debug('DB', 'Added platform_source column to sdk_sessions table');
    }
    // Backfill rows written before the column existed or with an empty value.
    this.db.run(`
      UPDATE sdk_sessions
      SET platform_source = '${DEFAULT_PLATFORM_SOURCE}'
      WHERE platform_source IS NULL OR platform_source = ''
    `);
    if (!hasIndex) {
      this.db.run('CREATE INDEX IF NOT EXISTS idx_sdk_sessions_platform_source ON sdk_sessions(platform_source)');
    }
    // INSERT OR IGNORE keeps this safe if version 24 was already recorded.
    this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(24, new Date().toISOString());
  }
/**
* Add generated_by_model and relevance_count columns to observations (migration 26)
*
* Note: Cannot trust schema_versions alone the old MigrationRunner may have
* recorded version 26 without the ALTER TABLE actually succeeding. Always
* check column existence directly.
*/
private addObservationModelColumns(): void {
const columns = this.db.query('PRAGMA table_info(observations)').all() as TableColumnInfo[];
const hasGeneratedByModel = columns.some(col => col.name === 'generated_by_model');
const hasRelevanceCount = columns.some(col => col.name === 'relevance_count');
if (hasGeneratedByModel && hasRelevanceCount) return;
if (!hasGeneratedByModel) {
this.db.run('ALTER TABLE observations ADD COLUMN generated_by_model TEXT');
}
if (!hasRelevanceCount) {
this.db.run('ALTER TABLE observations ADD COLUMN relevance_count INTEGER DEFAULT 0');
}
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(26, new Date().toISOString());
}
/**
* Update the memory session ID for a session
* Called by SDKAgent when it captures the session ID from the first SDK message
@@ -888,6 +957,16 @@ export class SessionStore {
`).run(memorySessionId, sessionDbId);
}
markSessionCompleted(sessionDbId: number): void {
const nowEpoch = Date.now();
const nowIso = new Date(nowEpoch).toISOString();
this.db.prepare(`
UPDATE sdk_sessions
SET status = 'completed', completed_at = ?, completed_at_epoch = ?
WHERE id = ?
`).run(nowIso, nowEpoch, sessionDbId);
}
/**
* Ensures memory_session_id is registered in sdk_sessions before FK-constrained INSERT.
* This fixes Issue #846 where observations fail after worker restart because the
@@ -1002,14 +1081,26 @@ export class SessionStore {
subtitle: string | null;
text: string;
project: string;
platform_source: string;
prompt_number: number | null;
created_at: string;
created_at_epoch: number;
}> {
const stmt = this.db.prepare(`
SELECT id, type, title, subtitle, text, project, prompt_number, created_at, created_at_epoch
FROM observations
ORDER BY created_at_epoch DESC
SELECT
o.id,
o.type,
o.title,
o.subtitle,
o.text,
o.project,
COALESCE(s.platform_source, '${DEFAULT_PLATFORM_SOURCE}') as platform_source,
o.prompt_number,
o.created_at,
o.created_at_epoch
FROM observations o
LEFT JOIN sdk_sessions s ON o.memory_session_id = s.memory_session_id
ORDER BY o.created_at_epoch DESC
LIMIT ?
`);
@@ -1030,16 +1121,30 @@ export class SessionStore {
files_edited: string | null;
notes: string | null;
project: string;
platform_source: string;
prompt_number: number | null;
created_at: string;
created_at_epoch: number;
}> {
const stmt = this.db.prepare(`
SELECT id, request, investigated, learned, completed, next_steps,
files_read, files_edited, notes, project, prompt_number,
created_at, created_at_epoch
FROM session_summaries
ORDER BY created_at_epoch DESC
SELECT
ss.id,
ss.request,
ss.investigated,
ss.learned,
ss.completed,
ss.next_steps,
ss.files_read,
ss.files_edited,
ss.notes,
ss.project,
COALESCE(s.platform_source, '${DEFAULT_PLATFORM_SOURCE}') as platform_source,
ss.prompt_number,
ss.created_at,
ss.created_at_epoch
FROM session_summaries ss
LEFT JOIN sdk_sessions s ON ss.memory_session_id = s.memory_session_id
ORDER BY ss.created_at_epoch DESC
LIMIT ?
`);
@@ -1053,6 +1158,7 @@ export class SessionStore {
id: number;
content_session_id: string;
project: string;
platform_source: string;
prompt_number: number;
prompt_text: string;
created_at: string;
@@ -1063,6 +1169,7 @@ export class SessionStore {
up.id,
up.content_session_id,
s.project,
COALESCE(s.platform_source, '${DEFAULT_PLATFORM_SOURCE}') as platform_source,
up.prompt_number,
up.prompt_text,
up.created_at,
@@ -1079,18 +1186,74 @@ export class SessionStore {
/**
* Get all unique projects from the database (for web UI project filter)
*/
getAllProjects(): string[] {
const stmt = this.db.prepare(`
getAllProjects(platformSource?: string): string[] {
const normalizedPlatformSource = platformSource ? normalizePlatformSource(platformSource) : undefined;
let query = `
SELECT DISTINCT project
FROM sdk_sessions
WHERE project IS NOT NULL AND project != ''
ORDER BY project ASC
`);
`;
const params: unknown[] = [];
const rows = stmt.all() as Array<{ project: string }>;
if (normalizedPlatformSource) {
query += ' AND COALESCE(platform_source, ?) = ?';
params.push(DEFAULT_PLATFORM_SOURCE, normalizedPlatformSource);
}
query += ' ORDER BY project ASC';
const rows = this.db.prepare(query).all(...params) as Array<{ project: string }>;
return rows.map(row => row.project);
}
getProjectCatalog(): {
projects: string[];
sources: string[];
projectsBySource: Record<string, string[]>;
} {
const rows = this.db.prepare(`
SELECT
COALESCE(platform_source, '${DEFAULT_PLATFORM_SOURCE}') as platform_source,
project,
MAX(started_at_epoch) as latest_epoch
FROM sdk_sessions
WHERE project IS NOT NULL AND project != ''
GROUP BY COALESCE(platform_source, '${DEFAULT_PLATFORM_SOURCE}'), project
ORDER BY latest_epoch DESC
`).all() as Array<{ platform_source: string; project: string; latest_epoch: number }>;
const projects: string[] = [];
const seenProjects = new Set<string>();
const projectsBySource: Record<string, string[]> = {};
for (const row of rows) {
const source = normalizePlatformSource(row.platform_source);
if (!projectsBySource[source]) {
projectsBySource[source] = [];
}
if (!projectsBySource[source].includes(row.project)) {
projectsBySource[source].push(row.project);
}
if (!seenProjects.has(row.project)) {
seenProjects.add(row.project);
projects.push(row.project);
}
}
const sources = sortPlatformSources(Object.keys(projectsBySource));
return {
projects,
sources,
projectsBySource: Object.fromEntries(
sources.map(source => [source, projectsBySource[source] || []])
)
};
}
/**
* Get latest user prompt with session info for a Claude session
* Used for syncing prompts to Chroma during session initialization
@@ -1100,6 +1263,7 @@ export class SessionStore {
content_session_id: string;
memory_session_id: string;
project: string;
platform_source: string;
prompt_number: number;
prompt_text: string;
created_at_epoch: number;
@@ -1108,7 +1272,8 @@ export class SessionStore {
SELECT
up.*,
s.memory_session_id,
s.project
s.project,
COALESCE(s.platform_source, '${DEFAULT_PLATFORM_SOURCE}') as platform_source
FROM user_prompts up
JOIN sdk_sessions s ON up.content_session_id = s.content_session_id
WHERE up.content_session_id = ?
@@ -1309,20 +1474,10 @@ export class SessionStore {
for (const row of rows) {
// Parse files_read
if (row.files_read) {
const files = JSON.parse(row.files_read);
if (Array.isArray(files)) {
files.forEach(f => filesReadSet.add(f));
}
}
parseFileList(row.files_read).forEach(f => filesReadSet.add(f));
// Parse files_modified
if (row.files_modified) {
const files = JSON.parse(row.files_modified);
if (Array.isArray(files)) {
files.forEach(f => filesModifiedSet.add(f));
}
}
parseFileList(row.files_modified).forEach(f => filesModifiedSet.add(f));
}
return {
@@ -1339,11 +1494,14 @@ export class SessionStore {
content_session_id: string;
memory_session_id: string | null;
project: string;
platform_source: string;
user_prompt: string;
custom_title: string | null;
} | null {
const stmt = this.db.prepare(`
SELECT id, content_session_id, memory_session_id, project, user_prompt, custom_title
SELECT id, content_session_id, memory_session_id, project,
COALESCE(platform_source, '${DEFAULT_PLATFORM_SOURCE}') as platform_source,
user_prompt, custom_title
FROM sdk_sessions
WHERE id = ?
LIMIT 1
@@ -1361,6 +1519,7 @@ export class SessionStore {
content_session_id: string;
memory_session_id: string;
project: string;
platform_source: string;
user_prompt: string;
custom_title: string | null;
started_at: string;
@@ -1373,7 +1532,9 @@ export class SessionStore {
const placeholders = memorySessionIds.map(() => '?').join(',');
const stmt = this.db.prepare(`
SELECT id, content_session_id, memory_session_id, project, user_prompt, custom_title,
SELECT id, content_session_id, memory_session_id, project,
COALESCE(platform_source, '${DEFAULT_PLATFORM_SOURCE}') as platform_source,
user_prompt, custom_title,
started_at, started_at_epoch, completed_at, completed_at_epoch, status
FROM sdk_sessions
WHERE memory_session_id IN (${placeholders})
@@ -1418,14 +1579,22 @@ export class SessionStore {
* Pure get-or-create: never modifies memory_session_id.
* Multi-terminal isolation is handled by ON UPDATE CASCADE at the schema level.
*/
createSDKSession(contentSessionId: string, project: string, userPrompt: string, customTitle?: string): number {
createSDKSession(
contentSessionId: string,
project: string,
userPrompt: string,
customTitle?: string,
platformSource?: string
): number {
const now = new Date();
const nowEpoch = now.getTime();
const resolved = resolveCreateSessionArgs(customTitle, platformSource);
const normalizedPlatformSource = resolved.platformSource ?? DEFAULT_PLATFORM_SOURCE;
// Session reuse: Return existing session ID if already created for this contentSessionId.
const existing = this.db.prepare(`
SELECT id FROM sdk_sessions WHERE content_session_id = ?
`).get(contentSessionId) as { id: number } | undefined;
SELECT id, platform_source FROM sdk_sessions WHERE content_session_id = ?
`).get(contentSessionId) as { id: number; platform_source: string | null } | undefined;
if (existing) {
// Backfill project if session was created by another hook with empty project
@@ -1436,11 +1605,29 @@ export class SessionStore {
`).run(project, contentSessionId);
}
// Backfill custom_title if provided and not yet set
if (customTitle) {
if (resolved.customTitle) {
this.db.prepare(`
UPDATE sdk_sessions SET custom_title = ?
WHERE content_session_id = ? AND custom_title IS NULL
`).run(customTitle, contentSessionId);
`).run(resolved.customTitle, contentSessionId);
}
if (resolved.platformSource) {
const storedPlatformSource = existing.platform_source?.trim()
? normalizePlatformSource(existing.platform_source)
: undefined;
if (!storedPlatformSource) {
this.db.prepare(`
UPDATE sdk_sessions SET platform_source = ?
WHERE content_session_id = ?
AND COALESCE(platform_source, '') = ''
`).run(resolved.platformSource, contentSessionId);
} else if (storedPlatformSource !== resolved.platformSource) {
throw new Error(
`Platform source conflict for session ${contentSessionId}: existing=${storedPlatformSource}, received=${resolved.platformSource}`
);
}
}
return existing.id;
}
@@ -1451,9 +1638,9 @@ export class SessionStore {
// must NEVER equal contentSessionId - that would inject memory messages into the user's transcript!
this.db.prepare(`
INSERT INTO sdk_sessions
(content_session_id, memory_session_id, project, user_prompt, custom_title, started_at, started_at_epoch, status)
VALUES (?, NULL, ?, ?, ?, ?, ?, 'active')
`).run(contentSessionId, project, userPrompt, customTitle || null, now.toISOString(), nowEpoch);
(content_session_id, memory_session_id, project, platform_source, user_prompt, custom_title, started_at, started_at_epoch, status)
VALUES (?, NULL, ?, ?, ?, ?, ?, ?, 'active')
`).run(contentSessionId, project, normalizedPlatformSource, userPrompt, resolved.customTitle || null, now.toISOString(), nowEpoch);
// Return new ID
const row = this.db.prepare('SELECT id FROM sdk_sessions WHERE content_session_id = ?')
@@ -1517,7 +1704,8 @@ export class SessionStore {
},
promptNumber?: number,
discoveryTokens: number = 0,
overrideTimestampEpoch?: number
overrideTimestampEpoch?: number,
generatedByModel?: string
): { id: number; createdAtEpoch: number } {
// Use override timestamp if provided (for processing backlog messages with original timestamps)
const timestampEpoch = overrideTimestampEpoch ?? Date.now();
@@ -1533,8 +1721,9 @@ export class SessionStore {
const stmt = this.db.prepare(`
INSERT INTO observations
(memory_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch,
generated_by_model)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
const result = stmt.run(
@@ -1552,7 +1741,8 @@ export class SessionStore {
discoveryTokens,
contentHash,
timestampIso,
timestampEpoch
timestampEpoch,
generatedByModel || null
);
return {
@@ -1651,7 +1841,8 @@ export class SessionStore {
} | null,
promptNumber?: number,
discoveryTokens: number = 0,
overrideTimestampEpoch?: number
overrideTimestampEpoch?: number,
generatedByModel?: string
): { observationIds: number[]; summaryId: number | null; createdAtEpoch: number } {
// Use override timestamp if provided
const timestampEpoch = overrideTimestampEpoch ?? Date.now();
@@ -1665,8 +1856,9 @@ export class SessionStore {
const obsStmt = this.db.prepare(`
INSERT INTO observations
(memory_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch,
generated_by_model)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
for (const observation of observations) {
@@ -1693,7 +1885,8 @@ export class SessionStore {
discoveryTokens,
contentHash,
timestampIso,
timestampEpoch
timestampEpoch,
generatedByModel || null
);
observationIds.push(Number(result.lastInsertRowid));
}
@@ -1780,7 +1973,8 @@ export class SessionStore {
_pendingStore: PendingMessageStore,
promptNumber?: number,
discoveryTokens: number = 0,
overrideTimestampEpoch?: number
overrideTimestampEpoch?: number,
generatedByModel?: string
): { observationIds: number[]; summaryId?: number; createdAtEpoch: number } {
// Use override timestamp if provided
const timestampEpoch = overrideTimestampEpoch ?? Date.now();
@@ -1794,8 +1988,9 @@ export class SessionStore {
const obsStmt = this.db.prepare(`
INSERT INTO observations
(memory_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch,
generated_by_model)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
for (const observation of observations) {
@@ -1822,7 +2017,8 @@ export class SessionStore {
discoveryTokens,
contentHash,
timestampIso,
timestampEpoch
timestampEpoch,
generatedByModel || null
);
observationIds.push(Number(result.lastInsertRowid));
}
@@ -2233,9 +2429,9 @@ export class SessionStore {
// Create new manual session
const now = new Date();
this.db.prepare(`
INSERT INTO sdk_sessions (memory_session_id, content_session_id, project, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, 'active')
`).run(memorySessionId, contentSessionId, project, now.toISOString(), now.getTime());
INSERT INTO sdk_sessions (memory_session_id, content_session_id, project, platform_source, started_at, started_at_epoch, status)
VALUES (?, ?, ?, ?, ?, ?, 'active')
`).run(memorySessionId, contentSessionId, project, DEFAULT_PLATFORM_SOURCE, now.toISOString(), now.getTime());
logger.info('SESSION', 'Created manual session', { memorySessionId, project });
@@ -2261,6 +2457,7 @@ export class SessionStore {
content_session_id: string;
memory_session_id: string;
project: string;
platform_source?: string;
user_prompt: string;
started_at: string;
started_at_epoch: number;
@@ -2279,15 +2476,16 @@ export class SessionStore {
const stmt = this.db.prepare(`
INSERT INTO sdk_sessions (
content_session_id, memory_session_id, project, user_prompt,
content_session_id, memory_session_id, project, platform_source, user_prompt,
started_at, started_at_epoch, completed_at, completed_at_epoch, status
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
const result = stmt.run(
session.content_session_id,
session.memory_session_id,
session.project,
normalizePlatformSource(session.platform_source),
session.user_prompt,
session.started_at,
session.started_at_epoch,
+33 -1
View File
@@ -541,6 +541,37 @@ export const migration008: Migration = {
}
};
/**
* Migration 009: Add missing columns to observations table
*
* The generated_by_model column tracks which model generated each observation
* (required for model selection optimization via Thompson Sampling).
* The relevance_count column tracks how many times an observation was reused
* (incremented by the feedback recording pipeline).
*
* Both columns may already exist in databases created by the compiled binary
* (v10.6.3) but are missing from the migration source. This migration
* conditionally adds them.
*/
export const migration009: Migration = {
version: 26,
up: (db: Database) => {
const columns = db.prepare('PRAGMA table_info(observations)').all() as any[];
const hasGeneratedByModel = columns.some((c: any) => c.name === 'generated_by_model');
const hasRelevanceCount = columns.some((c: any) => c.name === 'relevance_count');
if (!hasGeneratedByModel) {
db.run('ALTER TABLE observations ADD COLUMN generated_by_model TEXT');
}
if (!hasRelevanceCount) {
db.run('ALTER TABLE observations ADD COLUMN relevance_count INTEGER DEFAULT 0');
}
},
down: (_db: Database) => {
// SQLite does not support DROP COLUMN in older versions; no-op
}
};
/**
* All migrations in order
*/
@@ -552,5 +583,6 @@ export const migrations: Migration[] = [
migration005,
migration006,
migration007,
migration008
migration008,
migration009
];
+37 -6
View File
@@ -6,6 +6,7 @@ import {
TableNameRow,
SchemaVersion
} from '../../../types/database.js';
import { DEFAULT_PLATFORM_SOURCE } from '../../../shared/platform-source.js';
/**
* MigrationRunner handles all database schema migrations
@@ -35,6 +36,7 @@ export class MigrationRunner {
this.addObservationContentHashColumn();
this.addSessionCustomTitleColumn();
this.createObservationFeedbackTable();
this.addSessionPlatformSourceColumn();
}
/**
@@ -62,6 +64,7 @@ export class MigrationRunner {
content_session_id TEXT UNIQUE NOT NULL,
memory_session_id TEXT UNIQUE,
project TEXT NOT NULL,
platform_source TEXT NOT NULL DEFAULT 'claude',
user_prompt TEXT,
started_at TEXT NOT NULL,
started_at_epoch INTEGER NOT NULL,
@@ -654,10 +657,9 @@ export class MigrationRunner {
this.db.run('BEGIN TRANSACTION');
try {
// ==========================================
// ===================================
// 1. Recreate observations table
// ==========================================
// ===================================
// Drop FTS triggers first (they reference the observations table)
this.db.run('DROP TRIGGER IF EXISTS observations_ai');
this.db.run('DROP TRIGGER IF EXISTS observations_ad');
@@ -730,10 +732,9 @@ export class MigrationRunner {
`);
}
// ==========================================
// ===================================
// 2. Recreate session_summaries table
// ==========================================
// ===================================
// Clean up leftover temp table from a previously-crashed run
this.db.run('DROP TABLE IF EXISTS session_summaries_new');
@@ -891,4 +892,34 @@ export class MigrationRunner {
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(24, new Date().toISOString());
logger.debug('DB', 'Created observation_feedback table for usage tracking');
}
/**
* Add platform_source column to sdk_sessions for Claude/Codex isolation (migration 25)
*/
private addSessionPlatformSourceColumn(): void {
const tableInfo = this.db.query('PRAGMA table_info(sdk_sessions)').all() as TableColumnInfo[];
const hasColumn = tableInfo.some(col => col.name === 'platform_source');
const indexInfo = this.db.query('PRAGMA index_list(sdk_sessions)').all() as IndexInfo[];
const hasIndex = indexInfo.some(index => index.name === 'idx_sdk_sessions_platform_source');
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(25) as SchemaVersion | undefined;
if (applied && hasColumn && hasIndex) return;
if (!hasColumn) {
this.db.run(`ALTER TABLE sdk_sessions ADD COLUMN platform_source TEXT NOT NULL DEFAULT '${DEFAULT_PLATFORM_SOURCE}'`);
logger.debug('DB', 'Added platform_source column to sdk_sessions table');
}
this.db.run(`
UPDATE sdk_sessions
SET platform_source = '${DEFAULT_PLATFORM_SOURCE}'
WHERE platform_source IS NULL OR platform_source = ''
`);
if (!hasIndex) {
this.db.run('CREATE INDEX IF NOT EXISTS idx_sdk_sessions_platform_source ON sdk_sessions(platform_source)');
}
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(25, new Date().toISOString());
}
}
+17 -12
View File
@@ -7,6 +7,21 @@ import { Database } from 'bun:sqlite';
import { logger } from '../../../utils/logger.js';
import type { SessionFilesResult } from './types.js';
/**
* Safely parse a JSON array string from the DB.
* Handles legacy bare-path strings (e.g. "/foo/bar.ts") by wrapping them
* in an array instead of crashing with a SyntaxError (fix for #1359).
*/
export function parseFileList(value: string | null | undefined): string[] {
if (!value) return [];
try {
const parsed = JSON.parse(value);
return Array.isArray(parsed) ? parsed : [String(parsed)];
} catch {
return [value];
}
}
/**
* Get aggregated files from all observations for a session
*/
@@ -30,20 +45,10 @@ export function getFilesForSession(
for (const row of rows) {
// Parse files_read
if (row.files_read) {
const files = JSON.parse(row.files_read);
if (Array.isArray(files)) {
files.forEach(f => filesReadSet.add(f));
}
}
parseFileList(row.files_read).forEach(f => filesReadSet.add(f));
// Parse files_modified
if (row.files_modified) {
const files = JSON.parse(row.files_modified);
if (Array.isArray(files)) {
files.forEach(f => filesModifiedSet.add(f));
}
}
parseFileList(row.files_modified).forEach(f => filesModifiedSet.add(f));
}
return {
+39
View File
@@ -111,3 +111,42 @@ export function getObservationsForSession(
return stmt.all(memorySessionId) as ObservationSessionRow[];
}
/**
* Get observations associated with a given file path, scoped to specific projects.
* Matches on the full file path (not just basename) to avoid cross-project collisions.
*/
export function getObservationsByFilePath(
db: Database,
filePath: string,
options?: { projects?: string[]; limit?: number }
): ObservationRecord[] {
const rawLimit = options?.limit;
const limit = Number.isInteger(rawLimit) && (rawLimit as number) > 0
? Math.min(rawLimit as number, 100)
: 15;
const params: (string | number)[] = [filePath, filePath];
let projectClause = '';
if (options?.projects?.length) {
const placeholders = options.projects.map(() => '?').join(',');
projectClause = `AND project IN (${placeholders})`;
params.push(...options.projects);
}
params.push(limit);
const stmt = db.prepare(`
SELECT *
FROM observations
WHERE (
(files_read LIKE '[%' AND EXISTS (SELECT 1 FROM json_each(files_read) WHERE value = ?))
OR (files_modified LIKE '[%' AND EXISTS (SELECT 1 FROM json_each(files_modified) WHERE value = ?))
)
${projectClause}
ORDER BY created_at_epoch DESC
LIMIT ?
`);
return stmt.all(...params) as ObservationRecord[];
}
+1 -1
View File
@@ -22,7 +22,7 @@ export function computeObservationContentHash(
narrative: string | null
): string {
return createHash('sha256')
.update((memorySessionId || '') + (title || '') + (narrative || ''))
.update([memorySessionId || '', title || '', narrative || ''].join('\x00'))
.digest('hex')
.slice(0, 16);
}
+40 -8
View File
@@ -5,6 +5,17 @@
import type { Database } from 'bun:sqlite';
import { logger } from '../../../utils/logger.js';
import { DEFAULT_PLATFORM_SOURCE, normalizePlatformSource } from '../../../shared/platform-source.js';
function resolveCreateSessionArgs(
customTitle?: string,
platformSource?: string
): { customTitle?: string; platformSource?: string } {
return {
customTitle,
platformSource: platformSource ? normalizePlatformSource(platformSource) : undefined
};
}
/**
* Create a new SDK session (idempotent - returns existing session ID if already exists)
@@ -22,15 +33,18 @@ export function createSDKSession(
contentSessionId: string,
project: string,
userPrompt: string,
customTitle?: string
customTitle?: string,
platformSource?: string
): number {
const now = new Date();
const nowEpoch = now.getTime();
const resolved = resolveCreateSessionArgs(customTitle, platformSource);
const normalizedPlatformSource = resolved.platformSource ?? DEFAULT_PLATFORM_SOURCE;
// Check for existing session
const existing = db.prepare(`
SELECT id FROM sdk_sessions WHERE content_session_id = ?
`).get(contentSessionId) as { id: number } | undefined;
SELECT id, platform_source FROM sdk_sessions WHERE content_session_id = ?
`).get(contentSessionId) as { id: number; platform_source: string | null } | undefined;
if (existing) {
// Backfill project if session was created by another hook with empty project
@@ -41,11 +55,29 @@ export function createSDKSession(
`).run(project, contentSessionId);
}
// Backfill custom_title if provided and not yet set
if (customTitle) {
if (resolved.customTitle) {
db.prepare(`
UPDATE sdk_sessions SET custom_title = ?
WHERE content_session_id = ? AND custom_title IS NULL
`).run(customTitle, contentSessionId);
`).run(resolved.customTitle, contentSessionId);
}
if (resolved.platformSource) {
const storedPlatformSource = existing.platform_source?.trim()
? normalizePlatformSource(existing.platform_source)
: undefined;
if (!storedPlatformSource) {
db.prepare(`
UPDATE sdk_sessions SET platform_source = ?
WHERE content_session_id = ?
AND COALESCE(platform_source, '') = ''
`).run(resolved.platformSource, contentSessionId);
} else if (storedPlatformSource !== resolved.platformSource) {
throw new Error(
`Platform source conflict for session ${contentSessionId}: existing=${storedPlatformSource}, received=${resolved.platformSource}`
);
}
}
return existing.id;
}
@@ -56,9 +88,9 @@ export function createSDKSession(
// must NEVER equal contentSessionId - that would inject memory messages into the user's transcript!
db.prepare(`
INSERT INTO sdk_sessions
(content_session_id, memory_session_id, project, user_prompt, custom_title, started_at, started_at_epoch, status)
VALUES (?, NULL, ?, ?, ?, ?, ?, 'active')
`).run(contentSessionId, project, userPrompt, customTitle || null, now.toISOString(), nowEpoch);
(content_session_id, memory_session_id, project, platform_source, user_prompt, custom_title, started_at, started_at_epoch, status)
VALUES (?, NULL, ?, ?, ?, ?, ?, ?, 'active')
`).run(contentSessionId, project, normalizedPlatformSource, userPrompt, resolved.customTitle || null, now.toISOString(), nowEpoch);
// Return new ID
const row = db.prepare('SELECT id FROM sdk_sessions WHERE content_session_id = ?')
+6 -2
View File
@@ -17,7 +17,9 @@ import type {
*/
export function getSessionById(db: Database, id: number): SessionBasic | null {
const stmt = db.prepare(`
SELECT id, content_session_id, memory_session_id, project, user_prompt, custom_title
SELECT id, content_session_id, memory_session_id, project,
COALESCE(platform_source, 'claude') as platform_source,
user_prompt, custom_title
FROM sdk_sessions
WHERE id = ?
LIMIT 1
@@ -38,7 +40,9 @@ export function getSdkSessionsBySessionIds(
const placeholders = memorySessionIds.map(() => '?').join(',');
const stmt = db.prepare(`
SELECT id, content_session_id, memory_session_id, project, user_prompt, custom_title,
SELECT id, content_session_id, memory_session_id, project,
COALESCE(platform_source, 'claude') as platform_source,
user_prompt, custom_title,
started_at, started_at_epoch, completed_at, completed_at_epoch, status
FROM sdk_sessions
WHERE memory_session_id IN (${placeholders})
+2
View File
@@ -12,6 +12,7 @@ export interface SessionBasic {
content_session_id: string;
memory_session_id: string | null;
project: string;
platform_source: string;
user_prompt: string;
custom_title: string | null;
}
@@ -24,6 +25,7 @@ export interface SessionFull {
content_session_id: string;
memory_session_id: string;
project: string;
platform_source: string;
user_prompt: string;
custom_title: string | null;
started_at: string;
+3 -2
View File
@@ -16,6 +16,7 @@ import { ChromaMcpManager } from './ChromaMcpManager.js';
import { ParsedObservation, ParsedSummary } from '../../sdk/parser.js';
import { SessionStore } from '../sqlite/SessionStore.js';
import { logger } from '../../utils/logger.js';
import { parseFileList } from '../sqlite/observations/files.js';
interface ChromaDocument {
id: string;
@@ -125,8 +126,8 @@ export class ChromaSync {
// Parse JSON fields
const facts = obs.facts ? JSON.parse(obs.facts) : [];
const concepts = obs.concepts ? JSON.parse(obs.concepts) : [];
const files_read = obs.files_read ? JSON.parse(obs.files_read) : [];
const files_modified = obs.files_modified ? JSON.parse(obs.files_modified) : [];
const files_read = parseFileList(obs.files_read);
const files_modified = parseFileList(obs.files_modified);
const baseMetadata: Record<string, string | number> = {
sqlite_id: obs.id,
+10 -6
View File
@@ -9,9 +9,11 @@ import { writeAgentsMd } from '../../utils/agents-md-utils.js';
import { resolveFieldSpec, resolveFields, matchesRule } from './field-utils.js';
import { expandHomePath } from './config.js';
import type { TranscriptSchema, WatchTarget, SchemaEvent } from './types.js';
import { normalizePlatformSource } from '../../shared/platform-source.js';
interface SessionState {
sessionId: string;
platformSource: string;
cwd?: string;
project?: string;
lastUserMessage?: string;
@@ -51,6 +53,7 @@ export class TranscriptEventProcessor {
if (!session) {
session = {
sessionId,
platformSource: normalizePlatformSource(watch.name),
pendingTools: new Map()
};
this.sessions.set(key, session);
@@ -181,7 +184,7 @@ export class TranscriptEventProcessor {
sessionId: session.sessionId,
cwd,
prompt,
platform: 'transcript'
platform: session.platformSource
});
}
@@ -250,7 +253,7 @@ export class TranscriptEventProcessor {
toolName,
toolInput: this.maybeParseJson(fields.toolInput),
toolResponse: this.maybeParseJson(fields.toolResponse),
platform: 'transcript'
platform: session.platformSource
});
}
@@ -263,7 +266,7 @@ export class TranscriptEventProcessor {
cwd: session.cwd ?? process.cwd(),
filePath,
edits: Array.isArray(fields.edits) ? fields.edits : undefined,
platform: 'transcript'
platform: session.platformSource
});
}
@@ -305,7 +308,7 @@ export class TranscriptEventProcessor {
await sessionCompleteHandler.execute({
sessionId: session.sessionId,
cwd: session.cwd ?? process.cwd(),
platform: 'transcript'
platform: session.platformSource
});
await this.updateContext(session, watch);
session.pendingTools.clear();
@@ -325,7 +328,8 @@ export class TranscriptEventProcessor {
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
contentSessionId: session.sessionId,
last_assistant_message: lastAssistantMessage
last_assistant_message: lastAssistantMessage,
platformSource: session.platformSource
})
});
} catch (error) {
@@ -350,7 +354,7 @@ export class TranscriptEventProcessor {
try {
const response = await workerHttpRequest(
`/api/context/inject?projects=${encodeURIComponent(projectsParam)}`
`/api/context/inject?projects=${encodeURIComponent(projectsParam)}&platformSource=${encodeURIComponent(session.platformSource)}`
);
if (!response.ok) return;
+12 -5
View File
@@ -117,15 +117,15 @@ export class TranscriptWatcher {
const files = this.resolveWatchFiles(resolvedPath);
for (const filePath of files) {
await this.addTailer(filePath, watch, schema);
await this.addTailer(filePath, watch, schema, true);
}
const rescanIntervalMs = watch.rescanIntervalMs ?? 5000;
const timer = setInterval(async () => {
const timer = setInterval(async () => {
const newFiles = this.resolveWatchFiles(resolvedPath);
for (const filePath of newFiles) {
if (!this.tailers.has(filePath)) {
await this.addTailer(filePath, watch, schema);
await this.addTailer(filePath, watch, schema, false);
}
}
}, rescanIntervalMs);
@@ -164,13 +164,20 @@ export class TranscriptWatcher {
return /[*?[\]{}()]/.test(inputPath);
}
private async addTailer(filePath: string, watch: WatchTarget, schema: TranscriptSchema): Promise<void> {
private async addTailer(
filePath: string,
watch: WatchTarget,
schema: TranscriptSchema,
initialDiscovery: boolean
): Promise<void> {
if (this.tailers.has(filePath)) return;
const sessionIdOverride = this.extractSessionIdFromPath(filePath);
let offset = this.state.offsets[filePath] ?? 0;
if (offset === 0 && watch.startAtEnd) {
// `startAtEnd` is useful on worker startup to avoid replaying the full backlog,
// but new transcript files must be read from byte 0 or we lose session_meta/user_message.
if (offset === 0 && watch.startAtEnd && initialDiscovery) {
try {
offset = statSync(filePath).size;
} catch {
+89 -76
View File
@@ -80,7 +80,6 @@ import {
cleanStalePidFile,
isProcessAlive,
spawnDaemon,
isPidFileRecent,
touchPidFile
} from './infrastructure/ProcessManager.js';
import {
@@ -88,8 +87,7 @@ import {
waitForHealth,
waitForReadiness,
waitForPortFree,
httpShutdown,
checkVersionMatch
httpShutdown
} from './infrastructure/HealthMonitor.js';
import { performGracefulShutdown } from './infrastructure/GracefulShutdown.js';
@@ -118,6 +116,8 @@ import { SearchManager } from './worker/SearchManager.js';
import { FormattingService } from './worker/FormattingService.js';
import { TimelineService } from './worker/TimelineService.js';
import { SessionEventBroadcaster } from './worker/events/SessionEventBroadcaster.js';
import { DEFAULT_CONFIG_PATH, DEFAULT_STATE_PATH, expandHomePath, loadTranscriptWatchConfig, writeSampleConfig } from './transcripts/config.js';
import { TranscriptWatcher } from './transcripts/watcher.js';
// HTTP route handlers
import { ViewerRoutes } from './worker/http/routes/ViewerRoutes.js';
@@ -131,10 +131,6 @@ import { MemoryRoutes } from './worker/http/routes/MemoryRoutes.js';
// Process management for zombie cleanup (Issue #737)
import { startOrphanReaper, reapOrphanedProcesses, getProcessBySession, ensureProcessExit } from './worker/ProcessRegistry.js';
// Transcript watcher for external CLI session monitoring
import { TranscriptWatcher } from './transcripts/watcher.js';
import { loadTranscriptWatchConfig, expandHomePath, DEFAULT_CONFIG_PATH as TRANSCRIPT_CONFIG_PATH } from './transcripts/config.js';
/**
* Build JSON status output for hook framework communication.
* This is a pure function extracted for testability.
@@ -186,6 +182,9 @@ export class WorkerService {
// Chroma MCP manager (lazy - connects on first use)
private chromaMcpManager: ChromaMcpManager | null = null;
// Transcript watcher for Codex and other transcript-based clients
private transcriptWatcher: TranscriptWatcher | null = null;
// Initialization tracking
private initializationComplete: Promise<void>;
private resolveInitialization!: () => void;
@@ -196,9 +195,6 @@ export class WorkerService {
// Stale session reaper interval (Issue #1168)
private staleSessionReaperInterval: ReturnType<typeof setInterval> | null = null;
// Transcript watcher for external CLI sessions (e.g. Codex, Gemini)
private transcriptWatcher: TranscriptWatcher | null = null;
// AI interaction tracking for health endpoint
private lastAiInteraction: {
timestamp: number;
@@ -431,21 +427,7 @@ export class WorkerService {
this.resolveInitialization();
logger.info('SYSTEM', 'Core initialization complete (DB + search ready)');
// Auto-start transcript watchers if configured
if (existsSync(TRANSCRIPT_CONFIG_PATH)) {
try {
const transcriptConfig = loadTranscriptWatchConfig(TRANSCRIPT_CONFIG_PATH);
if (transcriptConfig.watches.length > 0) {
const transcriptStatePath = expandHomePath(transcriptConfig.stateFile ?? '~/.claude-mem/transcript-watch-state.json');
this.transcriptWatcher = new TranscriptWatcher(transcriptConfig, transcriptStatePath);
await this.transcriptWatcher.start();
logger.info('SYSTEM', `Transcript watcher started with ${transcriptConfig.watches.length} watch target(s)`);
}
} catch (transcriptError) {
logger.warn('SYSTEM', 'Failed to start transcript watcher (non-fatal)', {}, transcriptError as Error);
// Non-fatal — worker continues without transcript watching
}
}
await this.startTranscriptWatcher(settings);
// Auto-backfill Chroma for all projects if out of sync with SQLite (fire-and-forget)
if (this.chromaMcpManager) {
@@ -456,8 +438,13 @@ export class WorkerService {
});
}
// Connect to MCP server
// Mark MCP as externally ready once the bundled stdio server binary exists.
// Codex/Claude Desktop connect to this binary directly; the loopback client
// below is only a best-effort self-check and should not mark health false.
const mcpServerPath = path.join(__dirname, 'mcp-server.cjs');
this.mcpReady = existsSync(mcpServerPath);
// Best-effort loopback MCP self-check
getSupervisor().assertCanSpawn('mcp server');
const transport = new StdioClientTransport({
command: 'node',
@@ -479,7 +466,7 @@ export class WorkerService {
await Promise.race([mcpConnectionPromise, timeoutPromise]);
} catch (connectionError) {
clearTimeout(timeoutId!);
logger.warn('WORKER', 'MCP server connection failed, cleaning up subprocess', {
logger.warn('WORKER', 'MCP loopback self-check failed, cleaning up subprocess', {
error: connectionError instanceof Error ? connectionError.message : String(connectionError)
});
try {
@@ -487,7 +474,10 @@ export class WorkerService {
} catch {
// Best effort: the supervisor handles later process cleanup for survivors.
}
throw connectionError;
logger.info('WORKER', 'Bundled MCP server remains available for external stdio clients', {
path: mcpServerPath
});
return;
}
clearTimeout(timeoutId!);
@@ -502,8 +492,7 @@ export class WorkerService {
getSupervisor().unregisterProcess('mcp-server');
});
}
this.mcpReady = true;
logger.success('WORKER', 'MCP server connected');
logger.success('WORKER', 'MCP loopback self-check connected');
// Start orphan reaper to clean up zombie processes (Issue #737)
this.stopOrphanReaper = startOrphanReaper(() => {
@@ -545,6 +534,48 @@ export class WorkerService {
}
}
/**
* Start transcript watcher for Codex and other transcript-based clients.
* This is intentionally non-fatal so Claude hooks remain usable even if
* transcript ingestion is misconfigured.
*/
private async startTranscriptWatcher(settings: ReturnType<typeof SettingsDefaultsManager.loadFromFile>): Promise<void> {
const transcriptsEnabled = settings.CLAUDE_MEM_TRANSCRIPTS_ENABLED !== 'false';
if (!transcriptsEnabled) {
logger.info('TRANSCRIPT', 'Transcript watcher disabled via CLAUDE_MEM_TRANSCRIPTS_ENABLED=false');
return;
}
const configPath = settings.CLAUDE_MEM_TRANSCRIPTS_CONFIG_PATH || DEFAULT_CONFIG_PATH;
const resolvedConfigPath = expandHomePath(configPath);
try {
if (!existsSync(resolvedConfigPath)) {
writeSampleConfig(configPath);
logger.info('TRANSCRIPT', 'Created default transcript watch config', {
configPath: resolvedConfigPath
});
}
const transcriptConfig = loadTranscriptWatchConfig(configPath);
const statePath = expandHomePath(transcriptConfig.stateFile ?? DEFAULT_STATE_PATH);
this.transcriptWatcher = new TranscriptWatcher(transcriptConfig, statePath);
await this.transcriptWatcher.start();
logger.info('TRANSCRIPT', 'Transcript watcher started', {
configPath: resolvedConfigPath,
statePath,
watches: transcriptConfig.watches.length
});
} catch (error) {
this.transcriptWatcher?.stop();
this.transcriptWatcher = null;
logger.error('TRANSCRIPT', 'Failed to start transcript watcher (continuing without Codex ingestion)', {
configPath: resolvedConfigPath
}, error as Error);
}
}
/**
* Get the appropriate agent based on provider settings.
* Same logic as SessionRoutes.getActiveAgent() for consistency.
@@ -936,6 +967,12 @@ export class WorkerService {
* Shutdown the worker service
*/
async shutdown(): Promise<void> {
if (this.transcriptWatcher) {
this.transcriptWatcher.stop();
this.transcriptWatcher = null;
logger.info('TRANSCRIPT', 'Transcript watcher stopped');
}
// Stop orphan reaper before shutdown (Issue #737)
if (this.stopOrphanReaper) {
this.stopOrphanReaper();
@@ -948,13 +985,6 @@ export class WorkerService {
this.staleSessionReaperInterval = null;
}
// Stop transcript watcher
if (this.transcriptWatcher) {
this.transcriptWatcher.stop();
this.transcriptWatcher = null;
logger.info('SYSTEM', 'Transcript watcher stopped');
}
await performGracefulShutdown({
server: this.server.getHttpServer(),
sessionManager: this.sessionManager,
@@ -997,7 +1027,7 @@ export class WorkerService {
* @param port - The TCP port (used for port-in-use checks and daemon spawn)
* @returns true if worker is healthy (existing or newly started), false on failure
*/
async function ensureWorkerStarted(port: number): Promise<boolean> {
export async function ensureWorkerStarted(port: number): Promise<boolean> {
// Clean stale PID file first (cheap: 1 fs read + 1 signal-0 check)
const pidFileStatus = cleanStalePidFile();
if (pidFileStatus === 'alive') {
@@ -1011,43 +1041,25 @@ async function ensureWorkerStarted(port: number): Promise<boolean> {
return false;
}
// Check if worker is already running and healthy
// Check if worker is already running and healthy.
// NOTE: Version mismatch auto-restart intentionally removed (#1435).
// The marketplace bundle ships with __DEFAULT_PACKAGE_VERSION__ unbaked, causing
// BUILT_IN_VERSION to fall back to "development". This creates a 100% reproducible
// mismatch on every hook call, killing a healthy worker and often failing to restart
// (cold start exceeds POST_SPAWN_WAIT). A working-but-old worker is strictly better
// than a dead worker. Users must manually restart after genuine plugin updates.
// See also: #566, #665, #667, #669, #689, #1124, #1145 (same pattern across 8+ releases).
if (await waitForHealth(port, 1000)) {
const versionCheck = await checkVersionMatch(port);
if (!versionCheck.matches) {
// Guard: If PID file was written recently, another session is likely already
// restarting the worker. Poll health instead of starting a concurrent restart.
// This prevents the "100 sessions all restart simultaneously" storm (#1145).
const RESTART_COORDINATION_THRESHOLD_MS = 15000;
if (isPidFileRecent(RESTART_COORDINATION_THRESHOLD_MS)) {
logger.info('SYSTEM', 'Version mismatch detected but PID file is recent — another restart likely in progress, polling health', {
pluginVersion: versionCheck.pluginVersion,
workerVersion: versionCheck.workerVersion
});
const healthy = await waitForHealth(port, RESTART_COORDINATION_THRESHOLD_MS);
if (healthy) {
logger.info('SYSTEM', 'Worker became healthy after waiting for concurrent restart');
return true;
}
logger.warn('SYSTEM', 'Worker did not become healthy after waiting — proceeding with own restart');
}
logger.info('SYSTEM', 'Worker version mismatch detected - auto-restarting', {
pluginVersion: versionCheck.pluginVersion,
workerVersion: versionCheck.workerVersion
});
await httpShutdown(port);
const freed = await waitForPortFree(port, getPlatformTimeout(HOOK_TIMEOUTS.PORT_IN_USE_WAIT));
if (!freed) {
logger.error('SYSTEM', 'Port did not free up after shutdown for version mismatch restart', { port });
return false;
}
removePidFile();
} else {
logger.info('SYSTEM', 'Worker already running and healthy');
return true;
// Health passed — worker is listening. Also wait for readiness in case
// another hook just spawned it and background init is still running.
// This mirrors the fresh-spawn path (line ~1025) so concurrent hooks
// don't race past a cold-starting worker's initialization guard.
const ready = await waitForReadiness(port, getPlatformTimeout(HOOK_TIMEOUTS.READINESS_WAIT));
if (!ready) {
logger.warn('SYSTEM', 'Worker is alive but readiness timed out — proceeding anyway');
}
logger.info('SYSTEM', 'Worker already running and healthy');
return true;
}
// Check if port is in use by something else
@@ -1096,8 +1108,7 @@ async function ensureWorkerStarted(port: number): Promise<boolean> {
}
clearWorkerSpawnAttempted();
// Touch PID file to signal other sessions that a restart just completed.
// Other sessions checking isPidFileRecent() will see this and skip their own restart.
// Touch PID file to signal other sessions that a spawn just completed.
touchPidFile();
logger.info('SYSTEM', 'Worker started successfully');
return true;
@@ -1307,8 +1318,10 @@ async function main() {
}
// Check if running as main module in both ESM and CommonJS
// The CLAUDE_MEM_MANAGED check handles Bun on Windows where require.main !== module
// in CJS mode despite being the entry point (see #1450)
const isMainModule = typeof require !== 'undefined' && typeof module !== 'undefined'
? require.main === module || !module.parent
? require.main === module || !module.parent || process.env.CLAUDE_MEM_MANAGED === 'true'
: import.meta.url === `file://${process.argv[1]}`
|| process.argv[1]?.endsWith('worker-service')
|| process.argv[1]?.endsWith('worker-service.cjs')
+6
View File
@@ -22,6 +22,7 @@ export interface ActiveSession {
contentSessionId: string; // User's Claude Code session being observed
memorySessionId: string | null; // Memory agent's session ID for resume
project: string;
platformSource: string;
userPrompt: string;
pendingMessages: PendingMessage[]; // Deprecated: now using persistent store, kept for compatibility
abortController: AbortController;
@@ -99,6 +100,7 @@ export interface PaginationParams {
offset: number;
limit: number;
project?: string;
platformSource?: string;
}
// ============================================================================
@@ -119,6 +121,7 @@ export interface Observation {
id: number;
memory_session_id: string; // Renamed from sdk_session_id
project: string;
platform_source: string;
type: string;
title: string;
subtitle: string | null;
@@ -137,6 +140,7 @@ export interface Summary {
id: number;
session_id: string; // content_session_id (from JOIN)
project: string;
platform_source: string;
request: string | null;
investigated: string | null;
learned: string | null;
@@ -151,6 +155,7 @@ export interface UserPrompt {
id: number;
content_session_id: string; // Renamed from claude_session_id
project: string; // From JOIN with sdk_sessions
platform_source: string;
prompt_number: number;
prompt_text: string;
created_at: string;
@@ -161,6 +166,7 @@ export interface DBSession {
id: number;
content_session_id: string; // Renamed from claude_session_id
project: string;
platform_source: string;
user_prompt: string;
memory_session_id: string | null; // Renamed from sdk_session_id
status: 'active' | 'completed' | 'failed';
+68 -8
View File
@@ -18,6 +18,8 @@ import { logger } from '../../utils/logger.js';
import { buildInitPrompt, buildObservationPrompt, buildSummaryPrompt, buildContinuationPrompt } from '../../sdk/prompts.js';
import { SettingsDefaultsManager } from '../../shared/SettingsDefaultsManager.js';
import { getCredential } from '../../shared/EnvManager.js';
import { USER_SETTINGS_PATH } from '../../shared/paths.js';
import { estimateTokens } from '../../shared/timeline-formatting.js';
import type { ActiveSession, ConversationMessage } from '../worker-types.js';
import { ModeManager } from '../domain/ModeManager.js';
import {
@@ -56,6 +58,10 @@ const GEMINI_RPM_LIMITS: Record<GeminiModel, number> = {
// Track last request time for rate limiting
let lastRequestTime = 0;
// Context window limits (prevents O(N²) token cost growth)
const DEFAULT_MAX_CONTEXT_MESSAGES = 20; // Maximum messages to keep in conversation history
const DEFAULT_MAX_ESTIMATED_TOKENS = 100000; // ~100k tokens max context (safety limit)
/**
* Enforce RPM rate limit for Gemini free tier.
* Waits the required time between requests based on model's RPM limit + 100ms safety buffer.
@@ -175,7 +181,9 @@ export class GeminiAgent {
worker,
tokensUsed,
null,
'Gemini'
'Gemini',
undefined,
model
);
} else {
logger.error('SDK', 'Empty Gemini init response - session may lack context', {
@@ -248,7 +256,8 @@ export class GeminiAgent {
tokensUsed,
originalTimestamp,
'Gemini',
lastCwd
lastCwd,
model
);
} else {
logger.warn('SDK', 'Empty Gemini observation response, skipping processing to preserve message', {
@@ -298,7 +307,8 @@ export class GeminiAgent {
tokensUsed,
originalTimestamp,
'Gemini',
lastCwd
lastCwd,
model
);
} else {
logger.warn('SDK', 'Empty Gemini summary response, skipping processing to preserve message', {
@@ -342,6 +352,54 @@ export class GeminiAgent {
}
}
/**
* Truncate conversation history to prevent runaway context costs.
* Keeps most recent messages within both message count and token budget.
 * Returns a new array; it never mutates the original history.
*/
private truncateHistory(history: ConversationMessage[]): ConversationMessage[] {
  const settings = SettingsDefaultsManager.loadFromFile(USER_SETTINGS_PATH);
  // Parse with explicit radix 10 (matching the SettingsRoutes validators) and
  // fall back to the defaults for NaN or non-positive values — a hand-edited
  // settings file with "0" or "-5" must not disable the context window.
  const toPositiveInt = (raw: string | undefined, fallback: number): number => {
    const parsed = parseInt(raw ?? '', 10);
    return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback;
  };
  const MAX_CONTEXT_MESSAGES = toPositiveInt(settings.CLAUDE_MEM_GEMINI_MAX_CONTEXT_MESSAGES, DEFAULT_MAX_CONTEXT_MESSAGES);
  const MAX_ESTIMATED_TOKENS = toPositiveInt(settings.CLAUDE_MEM_GEMINI_MAX_TOKENS, DEFAULT_MAX_ESTIMATED_TOKENS);

  // Fast path: within the message budget AND the token budget → return the
  // original array untouched (callers rely on no mutation either way).
  if (history.length <= MAX_CONTEXT_MESSAGES) {
    const totalTokens = history.reduce((sum, m) => sum + estimateTokens(m.content), 0);
    if (totalTokens <= MAX_ESTIMATED_TOKENS) {
      return history;
    }
  }

  // Sliding window: walk the history newest-first, accumulating messages until
  // either limit would be exceeded. Collect with push() and reverse once at the
  // end — the original unshift()-per-message was accidentally O(n²).
  const keptNewestFirst: ConversationMessage[] = [];
  let tokenCount = 0;
  for (let i = history.length - 1; i >= 0; i--) {
    const msg = history[i];
    const msgTokens = estimateTokens(msg.content);
    // Always include at least the newest message — an empty contents array
    // would cause a hard Gemini API error, which is worse than an oversized request.
    if (keptNewestFirst.length > 0 && (keptNewestFirst.length >= MAX_CONTEXT_MESSAGES || tokenCount + msgTokens > MAX_ESTIMATED_TOKENS)) {
      logger.warn('SDK', 'Context window truncated to prevent runaway costs', {
        originalMessages: history.length,
        keptMessages: keptNewestFirst.length,
        droppedMessages: i + 1,
        estimatedTokens: tokenCount,
        tokenLimit: MAX_ESTIMATED_TOKENS
      });
      break;
    }
    keptNewestFirst.push(msg);
    tokenCount += msgTokens;
  }
  // Restore chronological (oldest-first) order before returning.
  return keptNewestFirst.reverse();
}
/**
* Convert shared ConversationMessage array to Gemini's contents format
* Maps 'assistant' role to 'model' for Gemini API compatibility
@@ -354,8 +412,8 @@ export class GeminiAgent {
}
/**
* Query Gemini via REST API with full conversation history (multi-turn)
* Sends the entire conversation context for coherent responses
* Query Gemini via REST API with truncated conversation history (multi-turn)
* Truncates history to prevent O(N²) token cost growth, then sends for coherent responses
*/
private async queryGeminiMultiTurn(
history: ConversationMessage[],
@@ -363,11 +421,13 @@ export class GeminiAgent {
model: GeminiModel,
rateLimitingEnabled: boolean
): Promise<{ content: string; tokensUsed?: number }> {
const contents = this.conversationToGeminiContents(history);
const totalChars = history.reduce((sum, m) => sum + m.content.length, 0);
const truncatedHistory = this.truncateHistory(history);
const contents = this.conversationToGeminiContents(truncatedHistory);
const totalChars = truncatedHistory.reduce((sum, m) => sum + m.content.length, 0);
logger.debug('SDK', `Querying Gemini multi-turn (${model})`, {
turns: history.length,
turns: truncatedHistory.length,
totalTurns: history.length,
totalChars
});
+6 -3
View File
@@ -131,7 +131,8 @@ export class OpenRouterAgent {
tokensUsed,
null,
'OpenRouter',
undefined // No lastCwd yet - before message processing
undefined, // No lastCwd yet - before message processing
model
);
} else {
logger.error('SDK', 'Empty OpenRouter init response - session may lack context', {
@@ -202,7 +203,8 @@ export class OpenRouterAgent {
tokensUsed,
originalTimestamp,
'OpenRouter',
lastCwd
lastCwd,
model
);
} else if (message.type === 'summarize') {
@@ -244,7 +246,8 @@ export class OpenRouterAgent {
tokensUsed,
originalTimestamp,
'OpenRouter',
lastCwd
lastCwd,
model
);
}
}
+83 -12
View File
@@ -71,14 +71,54 @@ export class PaginationHelper {
/**
* Get paginated observations
*/
getObservations(offset: number, limit: number, project?: string): PaginatedResult<Observation> {
const result = this.paginate<Observation>(
'observations',
'id, memory_session_id, project, type, title, subtitle, narrative, text, facts, concepts, files_read, files_modified, prompt_number, created_at, created_at_epoch',
getObservations(offset: number, limit: number, project?: string, platformSource?: string): PaginatedResult<Observation> {
const db = this.dbManager.getSessionStore().db;
let query = `
SELECT
o.id,
o.memory_session_id,
o.project,
COALESCE(s.platform_source, 'claude') as platform_source,
o.type,
o.title,
o.subtitle,
o.narrative,
o.text,
o.facts,
o.concepts,
o.files_read,
o.files_modified,
o.prompt_number,
o.created_at,
o.created_at_epoch
FROM observations o
LEFT JOIN sdk_sessions s ON o.memory_session_id = s.memory_session_id
`;
const params: unknown[] = [];
const conditions: string[] = [];
if (project) {
conditions.push('o.project = ?');
params.push(project);
}
if (platformSource) {
conditions.push(`COALESCE(s.platform_source, 'claude') = ?`);
params.push(platformSource);
}
if (conditions.length > 0) {
query += ` WHERE ${conditions.join(' AND ')}`;
}
query += ' ORDER BY o.created_at_epoch DESC LIMIT ? OFFSET ?';
params.push(limit + 1, offset);
const results = db.prepare(query).all(...params) as Observation[];
const result: PaginatedResult<Observation> = {
items: results.slice(0, limit),
hasMore: results.length > limit,
offset,
limit,
project
);
limit
};
// Strip project paths from file paths before returning
return {
@@ -90,13 +130,14 @@ export class PaginationHelper {
/**
* Get paginated summaries
*/
getSummaries(offset: number, limit: number, project?: string): PaginatedResult<Summary> {
getSummaries(offset: number, limit: number, project?: string, platformSource?: string): PaginatedResult<Summary> {
const db = this.dbManager.getSessionStore().db;
let query = `
SELECT
ss.id,
s.content_session_id as session_id,
COALESCE(s.platform_source, 'claude') as platform_source,
ss.request,
ss.investigated,
ss.learned,
@@ -110,11 +151,22 @@ export class PaginationHelper {
`;
const params: any[] = [];
const conditions: string[] = [];
if (project) {
query += ' WHERE ss.project = ?';
conditions.push('ss.project = ?');
params.push(project);
}
if (platformSource) {
conditions.push(`COALESCE(s.platform_source, 'claude') = ?`);
params.push(platformSource);
}
if (conditions.length > 0) {
query += ` WHERE ${conditions.join(' AND ')}`;
}
query += ' ORDER BY ss.created_at_epoch DESC LIMIT ? OFFSET ?';
params.push(limit + 1, offset);
@@ -132,21 +184,40 @@ export class PaginationHelper {
/**
* Get paginated user prompts
*/
getPrompts(offset: number, limit: number, project?: string): PaginatedResult<UserPrompt> {
getPrompts(offset: number, limit: number, project?: string, platformSource?: string): PaginatedResult<UserPrompt> {
const db = this.dbManager.getSessionStore().db;
let query = `
SELECT up.id, up.content_session_id, s.project, up.prompt_number, up.prompt_text, up.created_at, up.created_at_epoch
SELECT
up.id,
up.content_session_id,
s.project,
COALESCE(s.platform_source, 'claude') as platform_source,
up.prompt_number,
up.prompt_text,
up.created_at,
up.created_at_epoch
FROM user_prompts up
JOIN sdk_sessions s ON up.content_session_id = s.content_session_id
`;
const params: any[] = [];
const conditions: string[] = [];
if (project) {
query += ' WHERE s.project = ?';
conditions.push('s.project = ?');
params.push(project);
}
if (platformSource) {
conditions.push(`COALESCE(s.platform_source, 'claude') = ?`);
params.push(platformSource);
}
if (conditions.length > 0) {
query += ` WHERE ${conditions.join(' AND ')}`;
}
query += ' ORDER BY up.created_at_epoch DESC LIMIT ? OFFSET ?';
params.push(limit + 1, offset);
+2 -1
View File
@@ -270,7 +270,8 @@ export class SDKAgent {
discoveryTokens,
originalTimestamp,
'SDK',
cwdTracker.lastCwd
cwdTracker.lastCwd,
modelId
);
}
+4
View File
@@ -77,6 +77,9 @@ export class SessionManager {
});
session.project = dbSession.project;
}
if (dbSession.platform_source && dbSession.platform_source !== session.platformSource) {
session.platformSource = dbSession.platform_source;
}
// Update userPrompt for continuation prompts
if (currentUserPrompt) {
@@ -144,6 +147,7 @@ export class SessionManager {
contentSessionId: dbSession.content_session_id,
memorySessionId: null, // Always start fresh - SDK will capture new ID
project: dbSession.project,
platformSource: dbSession.platform_source,
userPrompt,
pendingMessages: [],
abortController: new AbortController(),
@@ -54,7 +54,8 @@ export async function processAgentResponse(
discoveryTokens: number,
originalTimestamp: number | null,
agentName: string,
projectRoot?: string
projectRoot?: string,
modelId?: string
): Promise<void> {
// Track generator activity for stale detection (Issue #1099)
session.lastGeneratorActivity = Date.now();
@@ -115,7 +116,8 @@ export async function processAgentResponse(
summaryForStore,
session.lastPromptNumber,
discoveryTokens,
originalTimestamp ?? undefined
originalTimestamp ?? undefined,
modelId
);
// Log storage result with IDs for end-to-end traceability
@@ -236,6 +238,7 @@ async function syncAndBroadcastObservations(
id: obsId,
memory_session_id: session.memorySessionId,
session_id: session.contentSessionId,
platform_source: session.platformSource,
type: obs.type,
title: obs.title,
subtitle: obs.subtitle,
@@ -325,6 +328,7 @@ async function syncAndBroadcastSummary(
broadcastSummary(worker, {
id: result.summaryId,
session_id: session.contentSessionId,
platform_source: session.platformSource,
request: summary!.request,
investigated: summary!.investigated,
learned: summary!.learned,
+2
View File
@@ -33,6 +33,7 @@ export interface ObservationSSEPayload {
id: number;
memory_session_id: string | null;
session_id: string;
platform_source: string;
type: string;
title: string | null;
subtitle: string | null;
@@ -50,6 +51,7 @@ export interface ObservationSSEPayload {
export interface SummarySSEPayload {
id: number;
session_id: string;
platform_source: string;
request: string | null;
investigated: string | null;
learned: string | null;
@@ -23,6 +23,7 @@ export class SessionEventBroadcaster {
id: number;
content_session_id: string;
project: string;
platform_source: string;
prompt_number: number;
prompt_text: string;
created_at_epoch: number;
+15 -1
View File
@@ -11,6 +11,7 @@
import { Request, Response } from 'express';
import { logger } from '../../../utils/logger.js';
import { AppError } from '../../server/ErrorHandler.js';
export abstract class BaseRouteHandler {
/**
@@ -78,9 +79,22 @@ export abstract class BaseRouteHandler {
* Checks headersSent to avoid "Cannot set headers after they are sent" errors
*/
protected handleError(res: Response, error: Error, context?: string): void {
// [APPROVED OVERRIDE]: Worker routes need centralized AppError translation so
// status/code/details stay consistent across every HTTP handler.
logger.failure('WORKER', context || 'Request failed', {}, error);
if (!res.headersSent) {
res.status(500).json({ error: error.message });
const statusCode = error instanceof AppError ? error.statusCode : 500;
const response: Record<string, unknown> = { error: error.message };
if (error instanceof AppError && error.code) {
response.code = error.code;
}
if (error instanceof AppError && error.details !== undefined) {
response.details = error.details;
}
res.status(statusCode).json(response);
}
}
}
+49 -19
View File
@@ -18,6 +18,8 @@ import { SessionManager } from '../../SessionManager.js';
import { SSEBroadcaster } from '../../SSEBroadcaster.js';
import type { WorkerService } from '../../../worker-service.js';
import { BaseRouteHandler } from '../BaseRouteHandler.js';
import { normalizePlatformSource } from '../../../../shared/platform-source.js';
import { getObservationsByFilePath } from '../../../sqlite/observations/get.js';
export class DataRoutes extends BaseRouteHandler {
constructor(
@@ -39,6 +41,7 @@ export class DataRoutes extends BaseRouteHandler {
// Fetch by ID endpoints
app.get('/api/observation/:id', this.handleGetObservationById.bind(this));
app.get('/api/observations/by-file', this.handleGetObservationsByFile.bind(this));
app.post('/api/observations/batch', this.handleGetObservationsByIds.bind(this));
app.get('/api/session/:id', this.handleGetSessionById.bind(this));
app.post('/api/sdk-sessions/batch', this.handleGetSdkSessionsByIds.bind(this));
@@ -66,8 +69,8 @@ export class DataRoutes extends BaseRouteHandler {
* Get paginated observations
*/
private handleGetObservations = this.wrapHandler((req: Request, res: Response): void => {
const { offset, limit, project } = this.parsePaginationParams(req);
const result = this.paginationHelper.getObservations(offset, limit, project);
const { offset, limit, project, platformSource } = this.parsePaginationParams(req);
const result = this.paginationHelper.getObservations(offset, limit, project, platformSource);
res.json(result);
});
@@ -75,8 +78,8 @@ export class DataRoutes extends BaseRouteHandler {
* Get paginated summaries
*/
private handleGetSummaries = this.wrapHandler((req: Request, res: Response): void => {
const { offset, limit, project } = this.parsePaginationParams(req);
const result = this.paginationHelper.getSummaries(offset, limit, project);
const { offset, limit, project, platformSource } = this.parsePaginationParams(req);
const result = this.paginationHelper.getSummaries(offset, limit, project, platformSource);
res.json(result);
});
@@ -84,8 +87,8 @@ export class DataRoutes extends BaseRouteHandler {
* Get paginated user prompts
*/
private handleGetPrompts = this.wrapHandler((req: Request, res: Response): void => {
const { offset, limit, project } = this.parsePaginationParams(req);
const result = this.paginationHelper.getPrompts(offset, limit, project);
const { offset, limit, project, platformSource } = this.parsePaginationParams(req);
const result = this.paginationHelper.getPrompts(offset, limit, project, platformSource);
res.json(result);
});
@@ -108,6 +111,28 @@ export class DataRoutes extends BaseRouteHandler {
res.json(observation);
});
/**
* Get observations associated with a file path, scoped to projects
* GET /api/observations/by-file?path=<file_path>&projects=<comma,separated>&limit=15
*/
private handleGetObservationsByFile = this.wrapHandler((req: Request, res: Response): void => {
  const filePath = req.query.path as string | undefined;
  if (!filePath) {
    this.badRequest(res, 'path query parameter is required');
    return;
  }

  // Optional comma-separated project scope; empty segments are dropped.
  const rawProjects = req.query.projects as string | undefined;
  const projects = rawProjects ? rawProjects.split(',').filter(Boolean) : undefined;

  // Optional limit: only a finite positive integer is honored; anything
  // else (absent, NaN, zero, negative) means "no explicit limit".
  let limit: number | undefined;
  if (req.query.limit) {
    const parsed = parseInt(req.query.limit as string, 10);
    if (Number.isFinite(parsed) && parsed > 0) {
      limit = parsed;
    }
  }

  const db = this.dbManager.getSessionStore().db;
  const observations = getObservationsByFilePath(db, filePath, { projects, limit });
  res.json({ observations, count: observations.length });
});
/**
* Get observations by array of IDs
* POST /api/observations/batch
@@ -256,19 +281,21 @@ export class DataRoutes extends BaseRouteHandler {
* GET /api/projects
*/
private handleGetProjects = this.wrapHandler((req: Request, res: Response): void => {
const db = this.dbManager.getSessionStore().db;
const store = this.dbManager.getSessionStore();
const rawPlatformSource = req.query.platformSource as string | undefined;
const platformSource = rawPlatformSource ? normalizePlatformSource(rawPlatformSource) : undefined;
const rows = db.prepare(`
SELECT DISTINCT project
FROM observations
WHERE project IS NOT NULL
GROUP BY project
ORDER BY MAX(created_at_epoch) DESC
`).all() as Array<{ project: string }>;
if (platformSource) {
const projects = store.getAllProjects(platformSource);
res.json({
projects,
sources: [platformSource],
projectsBySource: { [platformSource]: projects }
});
return;
}
const projects = rows.map(row => row.project);
res.json({ projects });
res.json(store.getProjectCatalog());
});
/**
@@ -299,12 +326,14 @@ export class DataRoutes extends BaseRouteHandler {
/**
* Parse pagination parameters from request query
*/
private parsePaginationParams(req: Request): { offset: number; limit: number; project?: string } {
private parsePaginationParams(req: Request): { offset: number; limit: number; project?: string; platformSource?: string } {
const offset = parseInt(req.query.offset as string, 10) || 0;
const limit = Math.min(parseInt(req.query.limit as string, 10) || 20, 100); // Max 100
const project = req.query.project as string | undefined;
const rawPlatformSource = req.query.platformSource as string | undefined;
const platformSource = rawPlatformSource ? normalizePlatformSource(rawPlatformSource) : undefined;
return { offset, limit, project };
return { offset, limit, project, platformSource };
}
/**
@@ -473,4 +502,5 @@ export class DataRoutes extends BaseRouteHandler {
clearedCount
});
});
}
@@ -168,6 +168,7 @@ export class SearchRoutes extends BaseRouteHandler {
*/
private handleContextPreview = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
const projectName = req.query.project as string;
const platformSource = req.query.platformSource as string | undefined;
if (!projectName) {
this.badRequest(res, 'Project parameter is required');
@@ -184,9 +185,11 @@ export class SearchRoutes extends BaseRouteHandler {
const contextText = await generateContext(
{
session_id: 'preview-' + Date.now(),
cwd: cwd
cwd: cwd,
projects: [projectName],
platform_source: platformSource
},
true // useColors=true for ANSI terminal output
true // forHuman=true for ANSI terminal output
);
// Return as plain text
@@ -208,8 +211,9 @@ export class SearchRoutes extends BaseRouteHandler {
private handleContextInject = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
// Support both legacy `project` and new `projects` parameter
const projectsParam = (req.query.projects as string) || (req.query.project as string);
const useColors = req.query.colors === 'true';
const forHuman = req.query.colors === 'true';
const full = req.query.full === 'true';
const platformSource = req.query.platformSource as string | undefined;
if (!projectsParam) {
this.badRequest(res, 'Project(s) parameter is required');
@@ -237,9 +241,10 @@ export class SearchRoutes extends BaseRouteHandler {
session_id: 'context-inject-' + Date.now(),
cwd: cwd,
projects: projects,
full
full,
platform_source: platformSource
},
useColors
forHuman
);
// Return as plain text
@@ -22,6 +22,8 @@ import { PrivacyCheckValidator } from '../../validation/PrivacyCheckValidator.js
import { SettingsDefaultsManager } from '../../../../shared/SettingsDefaultsManager.js';
import { USER_SETTINGS_PATH } from '../../../../shared/paths.js';
import { getProcessBySession, ensureProcessExit } from '../../ProcessRegistry.js';
import { getProjectName } from '../../../../utils/project-name.js';
import { normalizePlatformSource } from '../../../../shared/platform-source.js';
export class SessionRoutes extends BaseRouteHandler {
private completionHandler: SessionCompletionHandler;
@@ -40,7 +42,8 @@ export class SessionRoutes extends BaseRouteHandler {
super();
this.completionHandler = new SessionCompletionHandler(
sessionManager,
eventBroadcaster
eventBroadcaster,
dbManager
);
}
@@ -353,6 +356,7 @@ export class SessionRoutes extends BaseRouteHandler {
id: latestPrompt.id,
content_session_id: latestPrompt.content_session_id,
project: latestPrompt.project,
platform_source: latestPrompt.platform_source,
prompt_number: latestPrompt.prompt_number,
prompt_text: latestPrompt.prompt_text,
created_at_epoch: latestPrompt.created_at_epoch
@@ -502,6 +506,8 @@ export class SessionRoutes extends BaseRouteHandler {
*/
private handleObservationsByClaudeId = this.wrapHandler((req: Request, res: Response): void => {
const { contentSessionId, tool_name, tool_input, tool_response, cwd } = req.body;
const platformSource = normalizePlatformSource(req.body.platformSource);
const project = typeof cwd === 'string' && cwd.trim() ? getProjectName(cwd) : '';
if (!contentSessionId) {
return this.badRequest(res, 'Missing contentSessionId');
@@ -536,7 +542,7 @@ export class SessionRoutes extends BaseRouteHandler {
const store = this.dbManager.getSessionStore();
// Get or create session
const sessionDbId = store.createSDKSession(contentSessionId, '', '');
const sessionDbId = store.createSDKSession(contentSessionId, project, '', undefined, platformSource);
const promptNumber = store.getPromptNumberFromUserPrompts(contentSessionId);
// Privacy check: skip if user prompt was entirely private
@@ -600,6 +606,7 @@ export class SessionRoutes extends BaseRouteHandler {
*/
private handleSummarizeByClaudeId = this.wrapHandler((req: Request, res: Response): void => {
const { contentSessionId, last_assistant_message } = req.body;
const platformSource = normalizePlatformSource(req.body.platformSource);
if (!contentSessionId) {
return this.badRequest(res, 'Missing contentSessionId');
@@ -608,7 +615,7 @@ export class SessionRoutes extends BaseRouteHandler {
const store = this.dbManager.getSessionStore();
// Get or create session
const sessionDbId = store.createSDKSession(contentSessionId, '', '');
const sessionDbId = store.createSDKSession(contentSessionId, '', '', undefined, platformSource);
const promptNumber = store.getPromptNumberFromUserPrompts(contentSessionId);
// Privacy check: skip if user prompt was entirely private
@@ -681,6 +688,7 @@ export class SessionRoutes extends BaseRouteHandler {
*/
private handleCompleteByClaudeId = this.wrapHandler(async (req: Request, res: Response): Promise<void> => {
const { contentSessionId } = req.body;
const platformSource = normalizePlatformSource(req.body.platformSource);
logger.info('HTTP', '→ POST /api/sessions/complete', { contentSessionId });
@@ -692,21 +700,20 @@ export class SessionRoutes extends BaseRouteHandler {
// Look up sessionDbId from contentSessionId (createSDKSession is idempotent)
// Pass empty strings - we only need the ID lookup, not to create a new session
const sessionDbId = store.createSDKSession(contentSessionId, '', '');
const sessionDbId = store.createSDKSession(contentSessionId, '', '', undefined, platformSource);
// Check if session is in the active sessions map
const activeSession = this.sessionManager.getSession(sessionDbId);
if (!activeSession) {
// Session may not be in memory (already completed or never initialized)
logger.debug('SESSION', 'session-complete: Session not in active map', {
// Still proceed with DB-backed completion so the row gets marked completed
logger.debug('SESSION', 'session-complete: Session not in active map; continuing with DB-backed completion', {
contentSessionId,
sessionDbId
});
res.json({ status: 'skipped', reason: 'not_active' });
return;
}
// Complete the session (removes from active sessions map)
// Complete the session (removes from active sessions map if present)
// Note: The Stop hook (summarize handler) waits for pending work before calling
// this endpoint. No polling here — that's the hook's responsibility.
await this.completionHandler.completeByDbId(sessionDbId);
@@ -716,7 +723,7 @@ export class SessionRoutes extends BaseRouteHandler {
sessionDbId
});
res.json({ status: 'completed', sessionDbId });
res.json({ status: activeSession ? 'completed' : 'completed_db_only', sessionDbId });
});
/**
@@ -738,11 +745,13 @@ export class SessionRoutes extends BaseRouteHandler {
// may omit prompt/project in their payload (#838, #1049)
const project = req.body.project || 'unknown';
const prompt = req.body.prompt || '[media prompt]';
const platformSource = normalizePlatformSource(req.body.platformSource);
const customTitle = req.body.customTitle || undefined;
logger.info('HTTP', 'SessionRoutes: handleSessionInitByClaudeId called', {
contentSessionId,
project,
platformSource,
prompt_length: prompt?.length,
customTitle
});
@@ -755,7 +764,7 @@ export class SessionRoutes extends BaseRouteHandler {
const store = this.dbManager.getSessionStore();
// Step 1: Create/get SDK session (idempotent INSERT OR IGNORE)
const sessionDbId = store.createSDKSession(contentSessionId, project, prompt, customTitle);
const sessionDbId = store.createSDKSession(contentSessionId, project, prompt, customTitle, platformSource);
// Verify session creation with DB lookup
const dbSession = store.getSessionById(sessionDbId);
@@ -94,6 +94,8 @@ export class SettingsRoutes extends BaseRouteHandler {
'CLAUDE_MEM_GEMINI_API_KEY',
'CLAUDE_MEM_GEMINI_MODEL',
'CLAUDE_MEM_GEMINI_RATE_LIMITING_ENABLED',
'CLAUDE_MEM_GEMINI_MAX_CONTEXT_MESSAGES',
'CLAUDE_MEM_GEMINI_MAX_TOKENS',
// OpenRouter Configuration
'CLAUDE_MEM_OPENROUTER_API_KEY',
'CLAUDE_MEM_OPENROUTER_MODEL',
@@ -248,6 +250,22 @@ export class SettingsRoutes extends BaseRouteHandler {
}
}
// Validate CLAUDE_MEM_GEMINI_MAX_CONTEXT_MESSAGES
if (settings.CLAUDE_MEM_GEMINI_MAX_CONTEXT_MESSAGES) {
const count = parseInt(settings.CLAUDE_MEM_GEMINI_MAX_CONTEXT_MESSAGES, 10);
if (isNaN(count) || count < 1 || count > 100) {
return { valid: false, error: 'CLAUDE_MEM_GEMINI_MAX_CONTEXT_MESSAGES must be between 1 and 100' };
}
}
// Validate CLAUDE_MEM_GEMINI_MAX_TOKENS
if (settings.CLAUDE_MEM_GEMINI_MAX_TOKENS) {
const tokens = parseInt(settings.CLAUDE_MEM_GEMINI_MAX_TOKENS, 10);
if (isNaN(tokens) || tokens < 1000 || tokens > 1000000) {
return { valid: false, error: 'CLAUDE_MEM_GEMINI_MAX_TOKENS must be between 1000 and 1000000' };
}
}
// Validate CLAUDE_MEM_CONTEXT_OBSERVATIONS
if (settings.CLAUDE_MEM_CONTEXT_OBSERVATIONS) {
const obsCount = parseInt(settings.CLAUDE_MEM_CONTEXT_OBSERVATIONS, 10);
@@ -68,6 +68,14 @@ export class ViewerRoutes extends BaseRouteHandler {
* SSE stream endpoint
*/
private handleSSEStream = this.wrapHandler((req: Request, res: Response): void => {
// Guard: if DB is not yet initialized, return 503 before registering client
try {
this.dbManager.getSessionStore();
} catch {
res.status(503).json({ error: 'Service initializing' });
return;
}
// Setup SSE headers
res.setHeader('Content-Type', 'text/event-stream');
res.setHeader('Cache-Control', 'no-cache');
@@ -76,11 +84,13 @@ export class ViewerRoutes extends BaseRouteHandler {
// Add client to broadcaster
this.sseBroadcaster.addClient(res);
// Send initial_load event with projects list
const allProjects = this.dbManager.getSessionStore().getAllProjects();
// Send initial_load event with project/source catalog
const projectCatalog = this.dbManager.getSessionStore().getProjectCatalog();
this.sseBroadcaster.broadcast({
type: 'initial_load',
projects: allProjects,
projects: projectCatalog.projects,
sources: projectCatalog.sources,
projectsBySource: projectCatalog.projectsBySource,
timestamp: Date.now()
});
@@ -11,12 +11,14 @@
import { SessionManager } from '../SessionManager.js';
import { SessionEventBroadcaster } from '../events/SessionEventBroadcaster.js';
import { DatabaseManager } from '../DatabaseManager.js';
import { logger } from '../../../utils/logger.js';
export class SessionCompletionHandler {
constructor(
private sessionManager: SessionManager,
private eventBroadcaster: SessionEventBroadcaster
private eventBroadcaster: SessionEventBroadcaster,
private dbManager: DatabaseManager
) {}
/**
@@ -24,6 +26,9 @@ export class SessionCompletionHandler {
* Used by DELETE /api/sessions/:id and POST /api/sessions/:id/complete
*/
async completeByDbId(sessionDbId: number): Promise<void> {
// Persist completion to database before in-memory cleanup (fix for #1532)
this.dbManager.getSessionStore().markSessionCompleted(sessionDbId);
// Delete from session manager (aborts SDK agent via SIGTERM)
await this.sessionManager.deleteSession(sessionDbId);
+11 -1
View File
@@ -23,6 +23,8 @@ export interface SettingsDefaults {
CLAUDE_MEM_GEMINI_API_KEY: string;
CLAUDE_MEM_GEMINI_MODEL: string; // 'gemini-2.5-flash-lite' | 'gemini-2.5-flash' | 'gemini-3-flash-preview'
CLAUDE_MEM_GEMINI_RATE_LIMITING_ENABLED: string; // 'true' | 'false' - enable rate limiting for free tier
CLAUDE_MEM_GEMINI_MAX_CONTEXT_MESSAGES: string; // Max messages in Gemini context window (prevents O(N²) cost growth)
CLAUDE_MEM_GEMINI_MAX_TOKENS: string; // Max estimated tokens for Gemini context (~100k safety limit)
CLAUDE_MEM_OPENROUTER_API_KEY: string;
CLAUDE_MEM_OPENROUTER_MODEL: string;
CLAUDE_MEM_OPENROUTER_SITE_URL: string;
@@ -49,6 +51,9 @@ export interface SettingsDefaults {
CLAUDE_MEM_CONTEXT_SHOW_LAST_MESSAGE: string;
CLAUDE_MEM_CONTEXT_SHOW_TERMINAL_OUTPUT: string;
CLAUDE_MEM_FOLDER_CLAUDEMD_ENABLED: string;
CLAUDE_MEM_FOLDER_USE_LOCAL_MD: string; // 'true' | 'false' - write to CLAUDE.local.md instead of CLAUDE.md
CLAUDE_MEM_TRANSCRIPTS_ENABLED: string; // 'true' | 'false' - enable transcript watcher ingestion for Codex and other transcript-based clients
CLAUDE_MEM_TRANSCRIPTS_CONFIG_PATH: string; // Path to transcript watcher config JSON
// Process Management
CLAUDE_MEM_MAX_CONCURRENT_AGENTS: string; // Max concurrent Claude SDK agent subprocesses (default: 2)
// Exclusion Settings
@@ -78,7 +83,7 @@ export class SettingsDefaultsManager {
* Default values for all settings
*/
private static readonly DEFAULTS: SettingsDefaults = {
CLAUDE_MEM_MODEL: 'claude-sonnet-4-5',
CLAUDE_MEM_MODEL: 'claude-sonnet-4-6',
CLAUDE_MEM_CONTEXT_OBSERVATIONS: '50',
CLAUDE_MEM_WORKER_PORT: '37777',
CLAUDE_MEM_WORKER_HOST: '127.0.0.1',
@@ -89,6 +94,8 @@ export class SettingsDefaultsManager {
CLAUDE_MEM_GEMINI_API_KEY: '', // Empty by default, can be set via UI or env
CLAUDE_MEM_GEMINI_MODEL: 'gemini-2.5-flash-lite', // Default Gemini model (highest free tier RPM)
CLAUDE_MEM_GEMINI_RATE_LIMITING_ENABLED: 'true', // Rate limiting ON by default for free tier users
CLAUDE_MEM_GEMINI_MAX_CONTEXT_MESSAGES: '20', // Max messages in Gemini context window
CLAUDE_MEM_GEMINI_MAX_TOKENS: '100000', // Max estimated tokens (~100k safety limit)
CLAUDE_MEM_OPENROUTER_API_KEY: '', // Empty by default, can be set via UI or env
CLAUDE_MEM_OPENROUTER_MODEL: 'xiaomi/mimo-v2-flash:free', // Default OpenRouter model (free tier)
CLAUDE_MEM_OPENROUTER_SITE_URL: '', // Optional: for OpenRouter analytics
@@ -115,6 +122,9 @@ export class SettingsDefaultsManager {
CLAUDE_MEM_CONTEXT_SHOW_LAST_MESSAGE: 'false',
CLAUDE_MEM_CONTEXT_SHOW_TERMINAL_OUTPUT: 'true',
CLAUDE_MEM_FOLDER_CLAUDEMD_ENABLED: 'false',
CLAUDE_MEM_FOLDER_USE_LOCAL_MD: 'false', // When true, writes to CLAUDE.local.md instead of CLAUDE.md
CLAUDE_MEM_TRANSCRIPTS_ENABLED: 'true',
CLAUDE_MEM_TRANSCRIPTS_CONFIG_PATH: join(homedir(), '.claude-mem', 'transcript-watch.json'),
// Process Management
CLAUDE_MEM_MAX_CONCURRENT_AGENTS: '2', // Max concurrent Claude SDK agent subprocesses
// Exclusion Settings
+1 -1
View File
@@ -1,7 +1,7 @@
export const HOOK_TIMEOUTS = {
DEFAULT: 300000, // Standard HTTP timeout (5 min for slow systems)
HEALTH_CHECK: 3000, // Worker health check (3s — healthy worker responds in <100ms)
POST_SPAWN_WAIT: 5000, // Wait for daemon to start after spawn (starts in <1s on Linux)
POST_SPAWN_WAIT: 15000, // Wait for daemon to start after spawn (starts in <1s on Linux, 6-8s on macOS with Chroma)
READINESS_WAIT: 30000, // Wait for DB + search init after spawn (typically <5s)
PORT_IN_USE_WAIT: 3000, // Wait when port occupied but health failing
WORKER_STARTUP_WAIT: 1000,
+6 -1
View File
@@ -58,7 +58,12 @@ export function isDirectChild(filePath: string, folderPath: string): boolean {
const folderSegments = normFolder.split('/');
const fileSegments = normFile.split('/');
if (fileSegments.length < 2) return false; // Need at least folder/file
// Handle bare filenames (no directory component, e.g. stored as "dashboard.html").
// These are root-level files and are a direct child only of the root folder.
// Fixes #1514: bare filenames stored in DB were never matched by any folder query.
if (fileSegments.length < 2) {
return normFolder === '' || normFolder === '.';
}
const fileDir = fileSegments.slice(0, -1).join('/'); // Directory part of file
const fileName = fileSegments[fileSegments.length - 1]; // Actual filename
+36
View File
@@ -0,0 +1,36 @@
/** Canonical platform source used whenever none can be determined. */
export const DEFAULT_PLATFORM_SOURCE = 'claude';

/** Trim, lowercase, and collapse internal whitespace runs into single hyphens. */
function sanitizeRawSource(value: string): string {
  return value.trim().toLowerCase().replace(/\s+/g, '-');
}

/**
 * Map a raw platform-source string to its canonical identifier.
 *
 * Null/empty input falls back to DEFAULT_PLATFORM_SOURCE. The legacy
 * 'transcript' label is folded to 'codex', and any sanitized value that
 * contains a known vendor keyword collapses to that vendor; anything
 * else passes through in sanitized form.
 */
export function normalizePlatformSource(value?: string | null): string {
  if (!value) return DEFAULT_PLATFORM_SOURCE;
  const source = sanitizeRawSource(value);
  if (!source) return DEFAULT_PLATFORM_SOURCE;
  if (source === 'transcript') return 'codex';
  // Keyword order matters: codex wins over cursor wins over claude,
  // matching the original precedence.
  for (const vendor of ['codex', 'cursor', 'claude'] as const) {
    if (source.includes(vendor)) return vendor;
  }
  return source;
}
/**
 * Sort platform sources with known vendors first — claude, codex, cursor,
 * in that fixed order — followed by all remaining sources alphabetically.
 * Returns a new array; the input is not mutated.
 */
export function sortPlatformSources(sources: string[]): string[] {
  const priority = ['claude', 'codex', 'cursor'];
  // Unknown sources all share the same (lowest) rank.
  const rank = (source: string): number => {
    const idx = priority.indexOf(source);
    return idx === -1 ? priority.length : idx;
  };
  return [...sources].sort((a, b) => {
    const rankA = rank(a);
    const rankB = rank(b);
    if (rankA !== rankB) return rankA - rankB;
    // Equal known ranks mean identical vendor strings — keep stable.
    if (rankA < priority.length) return 0;
    return a.localeCompare(b);
  });
}
+2 -1
View File
@@ -1,5 +1,6 @@
import { readFileSync, existsSync } from 'fs';
import { logger } from '../utils/logger.js';
import { SYSTEM_REMINDER_REGEX } from '../utils/tag-stripping.js';
/**
* Extract last message of specified role from transcript JSONL file
@@ -48,7 +49,7 @@ export function extractLastMessage(
}
if (stripSystemReminders) {
text = text.replace(/<system-reminder>[\s\S]*?<\/system-reminder>/g, '');
text = text.replace(SYSTEM_REMINDER_REGEX, '');
text = text.replace(/\n{3,}/g, '\n\n').trim();
}
+2
View File
@@ -103,6 +103,7 @@ export interface UserPromptRecord {
prompt_number: number;
prompt_text: string;
project?: string; // From JOIN with sdk_sessions
platform_source?: string;
created_at: string;
created_at_epoch: number;
}
@@ -115,6 +116,7 @@ export interface LatestPromptResult {
content_session_id: string;
memory_session_id: string;
project: string;
platform_source: string;
prompt_number: number;
prompt_text: string;
created_at_epoch: number;
+124 -1
View File
@@ -355,6 +355,14 @@
box-shadow: 0 1px 3px rgba(0, 0, 0, 0.03);
}
.header-main {
display: flex;
align-items: center;
gap: 18px;
min-width: 0;
flex-wrap: wrap;
}
.sidebar-header {
padding: 14px 18px;
border-bottom: 1px solid var(--color-border-primary);
@@ -549,6 +557,42 @@
font-size: 13px;
}
.source-tabs {
display: inline-flex;
align-items: center;
gap: 6px;
flex-wrap: wrap;
}
.source-tab {
background: transparent;
border: 1px solid var(--color-border-primary);
color: var(--color-text-secondary);
border-radius: 999px;
padding: 6px 12px;
font-size: 12px;
line-height: 1;
font-weight: 600;
letter-spacing: 0.01em;
cursor: pointer;
transition: all 0.2s cubic-bezier(0.4, 0, 0.2, 1);
white-space: nowrap;
}
.source-tab:hover {
background: var(--color-bg-card-hover);
border-color: var(--color-border-focus);
color: var(--color-text-primary);
transform: translateY(-1px);
}
.source-tab.active {
background: linear-gradient(135deg, var(--color-bg-button) 0%, var(--color-accent-primary) 100%);
border-color: var(--color-bg-button);
color: var(--color-text-button);
box-shadow: 0 2px 8px rgba(9, 105, 218, 0.18);
}
.settings-btn,
.theme-toggle-btn {
background: var(--color-bg-card);
@@ -887,6 +931,49 @@
letter-spacing: 0.5px;
}
.card-source {
padding: 2px 8px;
border-radius: 999px;
font-weight: 600;
font-size: 10px;
letter-spacing: 0.04em;
text-transform: uppercase;
border: 1px solid transparent;
}
.source-claude {
background: rgba(255, 138, 61, 0.12);
color: #c25a00;
border-color: rgba(255, 138, 61, 0.22);
}
.source-codex {
background: rgba(33, 150, 243, 0.12);
color: #0f5ba7;
border-color: rgba(33, 150, 243, 0.24);
}
.source-cursor {
background: rgba(124, 58, 237, 0.12);
color: #6d28d9;
border-color: rgba(124, 58, 237, 0.24);
}
[data-theme="dark"] .source-claude {
color: #ffb067;
border-color: rgba(255, 176, 103, 0.2);
}
[data-theme="dark"] .source-codex {
color: #8fc7ff;
border-color: rgba(143, 199, 255, 0.2);
}
[data-theme="dark"] .source-cursor {
color: #c4b5fd;
border-color: rgba(196, 181, 253, 0.2);
}
.card-title {
font-size: 17px;
margin-bottom: 14px;
@@ -1483,6 +1570,10 @@
padding: 14px 20px;
}
.header-main {
gap: 12px;
}
.status {
gap: 6px;
}
@@ -1491,6 +1582,11 @@
max-width: 160px;
}
.source-tab {
padding: 6px 10px;
font-size: 11px;
}
/* Hide icon links (docs, github, twitter) on tablet */
.icon-link {
display: none;
@@ -1544,6 +1640,28 @@
gap: 8px;
}
.header-main {
gap: 10px;
}
.source-tabs {
width: 100%;
flex-wrap: nowrap;
overflow-x: auto;
padding-bottom: 2px;
scrollbar-width: none;
}
.source-tabs::-webkit-scrollbar {
display: none;
}
.source-tab {
flex-shrink: 0;
padding: 5px 10px;
font-size: 11px;
}
.logomark {
height: 28px;
}
@@ -1732,6 +1850,11 @@
white-space: nowrap;
}
.preview-selector select:disabled {
opacity: 0.6;
cursor: not-allowed;
}
.preview-selector select {
background: var(--color-bg-card);
border: 1px solid var(--color-border-primary);
@@ -2873,4 +2996,4 @@
<script src="viewer-bundle.js"></script>
</body>
</html>
</html>
+42 -21
View File
@@ -13,39 +13,57 @@ import { mergeAndDeduplicateByProject } from './utils/data';
export function App() {
const [currentFilter, setCurrentFilter] = useState('');
const [currentSource, setCurrentSource] = useState('all');
const [contextPreviewOpen, setContextPreviewOpen] = useState(false);
const [logsModalOpen, setLogsModalOpen] = useState(false);
const [paginatedObservations, setPaginatedObservations] = useState<Observation[]>([]);
const [paginatedSummaries, setPaginatedSummaries] = useState<Summary[]>([]);
const [paginatedPrompts, setPaginatedPrompts] = useState<UserPrompt[]>([]);
const { observations, summaries, prompts, projects, isProcessing, queueDepth, isConnected } = useSSE();
const { observations, summaries, prompts, projects, sources, projectsBySource, isProcessing, queueDepth, isConnected } = useSSE();
const { settings, saveSettings, isSaving, saveStatus } = useSettings();
const { stats, refreshStats } = useStats();
const { preference, resolvedTheme, setThemePreference } = useTheme();
const pagination = usePagination(currentFilter);
const pagination = usePagination(currentFilter, currentSource);
const availableProjects = useMemo(() => {
if (currentSource === 'all') {
return projects;
}
return projectsBySource[currentSource] || [];
}, [currentSource, projects, projectsBySource]);
const matchesSelection = useCallback((item: { project: string; platform_source: string }) => {
const matchesProject = !currentFilter || item.project === currentFilter;
const matchesSource = currentSource === 'all' || (item.platform_source || 'claude') === currentSource;
return matchesProject && matchesSource;
}, [currentFilter, currentSource]);
useEffect(() => {
if (currentFilter && !availableProjects.includes(currentFilter)) {
setCurrentFilter('');
}
}, [availableProjects, currentFilter]);
// Merge SSE live data with paginated data, filtering by project when active
const allObservations = useMemo(() => {
const live = currentFilter
? observations.filter(o => o.project === currentFilter)
: observations;
return mergeAndDeduplicateByProject(live, paginatedObservations);
}, [observations, paginatedObservations, currentFilter]);
const live = observations.filter(matchesSelection);
const paginated = paginatedObservations.filter(matchesSelection);
return mergeAndDeduplicateByProject(live, paginated);
}, [observations, paginatedObservations, matchesSelection]);
const allSummaries = useMemo(() => {
const live = currentFilter
? summaries.filter(s => s.project === currentFilter)
: summaries;
return mergeAndDeduplicateByProject(live, paginatedSummaries);
}, [summaries, paginatedSummaries, currentFilter]);
const live = summaries.filter(matchesSelection);
const paginated = paginatedSummaries.filter(matchesSelection);
return mergeAndDeduplicateByProject(live, paginated);
}, [summaries, paginatedSummaries, matchesSelection]);
const allPrompts = useMemo(() => {
const live = currentFilter
? prompts.filter(p => p.project === currentFilter)
: prompts;
return mergeAndDeduplicateByProject(live, paginatedPrompts);
}, [prompts, paginatedPrompts, currentFilter]);
const live = prompts.filter(matchesSelection);
const paginated = paginatedPrompts.filter(matchesSelection);
return mergeAndDeduplicateByProject(live, paginated);
}, [prompts, paginatedPrompts, matchesSelection]);
// Toggle context preview modal
const toggleContextPreview = useCallback(() => {
@@ -78,24 +96,27 @@ export function App() {
} catch (error) {
console.error('Failed to load more data:', error);
}
}, [currentFilter, pagination.observations, pagination.summaries, pagination.prompts]);
}, [pagination.observations, pagination.summaries, pagination.prompts]);
// Reset paginated data and load first page when filter changes
// Reset paginated data and load first page when project/source changes
useEffect(() => {
setPaginatedObservations([]);
setPaginatedSummaries([]);
setPaginatedPrompts([]);
handleLoadMore();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [currentFilter]);
}, [currentFilter, currentSource]);
return (
<>
<Header
isConnected={isConnected}
projects={projects}
projects={availableProjects}
sources={sources}
currentFilter={currentFilter}
currentSource={currentSource}
onFilterChange={setCurrentFilter}
onSourceChange={setCurrentSource}
isProcessing={isProcessing}
queueDepth={queueDepth}
themePreference={preference}
@@ -136,7 +136,17 @@ export function ContextSettingsModal({
}, [settings]);
// Get context preview based on current form state
const { preview, isLoading, error, projects, selectedProject, setSelectedProject } = useContextPreview(formState);
const {
preview,
isLoading,
error,
projects,
sources,
selectedSource,
setSelectedSource,
selectedProject,
setSelectedProject
} = useContextPreview(formState);
const updateSetting = useCallback((key: keyof Settings, value: string) => {
const newState = { ...formState, [key]: value };
@@ -174,10 +184,23 @@ export function ContextSettingsModal({
<h2>Settings</h2>
<div className="header-controls">
<label className="preview-selector">
Preview for:
Source:
<select
value={selectedSource || ''}
onChange={(e) => setSelectedSource(e.target.value)}
disabled={sources.length === 0}
>
{sources.map(source => (
<option key={source} value={source}>{source}</option>
))}
</select>
</label>
<label className="preview-selector">
Project:
<select
value={selectedProject || ''}
onChange={(e) => setSelectedProject(e.target.value)}
disabled={projects.length === 0}
>
{projects.map(project => (
<option key={project} value={project}>{project}</option>
+44 -10
View File
@@ -7,8 +7,11 @@ import { useSpinningFavicon } from '../hooks/useSpinningFavicon';
interface HeaderProps {
isConnected: boolean;
projects: string[];
sources: string[];
currentFilter: string;
currentSource: string;
onFilterChange: (filter: string) => void;
onSourceChange: (source: string) => void;
isProcessing: boolean;
queueDepth: number;
themePreference: ThemePreference;
@@ -16,11 +19,26 @@ interface HeaderProps {
onContextPreviewToggle: () => void;
}
/** Human-readable label for a source tab id ('all' → 'All'; known vendors get their brand casing; anything else is capitalized). */
function formatSourceLabel(source: string): string {
  const known: Record<string, string> = { all: 'All', claude: 'Claude', codex: 'Codex' };
  return known[source] ?? source.charAt(0).toUpperCase() + source.slice(1);
}
/**
 * Build the ordered tab list: 'all', the built-in vendors, then any extra
 * sources — deduplicated, insertion order preserved, empty strings dropped.
 */
function buildSourceTabs(sources: string[]): string[] {
  const tabs = new Set<string>();
  for (const source of ['all', 'claude', 'codex', ...sources]) {
    if (source) tabs.add(source);
  }
  return [...tabs];
}
export function Header({
isConnected,
projects,
sources,
currentFilter,
currentSource,
onFilterChange,
onSourceChange,
isProcessing,
queueDepth,
themePreference,
@@ -28,20 +46,36 @@ export function Header({
onContextPreviewToggle
}: HeaderProps) {
useSpinningFavicon(isProcessing);
const availableSources = buildSourceTabs(sources);
return (
<div className="header">
<h1>
<div style={{ position: 'relative', display: 'inline-block' }}>
<img src="claude-mem-logomark.webp" alt="" className={`logomark ${isProcessing ? 'spinning' : ''}`} />
{queueDepth > 0 && (
<div className="queue-bubble">
{queueDepth}
</div>
)}
<div className="header-main">
<h1>
<div style={{ position: 'relative', display: 'inline-block' }}>
<img src="claude-mem-logomark.webp" alt="" className={`logomark ${isProcessing ? 'spinning' : ''}`} />
{queueDepth > 0 && (
<div className="queue-bubble">
{queueDepth}
</div>
)}
</div>
<span className="logo-text">claude-mem</span>
</h1>
<div className="source-tabs" role="tablist" aria-label="Context source tabs">
{availableSources.map(source => (
<button
key={source}
type="button"
className={`source-tab ${currentSource === source ? 'active' : ''}`}
onClick={() => onSourceChange(source)}
aria-pressed={currentSource === source}
>
{formatSourceLabel(source)}
</button>
))}
</div>
<span className="logo-text">claude-mem</span>
</h1>
</div>
<div className="status">
<a
href="https://docs.claude-mem.ai"
@@ -52,6 +52,9 @@ export function ObservationCard({ observation }: ObservationCardProps) {
<span className={`card-type type-${observation.type}`}>
{observation.type}
</span>
<span className={`card-source source-${observation.platform_source || 'claude'}`}>
{observation.platform_source || 'claude'}
</span>
<span className="card-project">{observation.project}</span>
</div>
<div className="view-mode-toggles">
+3
View File
@@ -14,6 +14,9 @@ export function PromptCard({ prompt }: PromptCardProps) {
<div className="card-header">
<div className="card-header-left">
<span className="card-type">Prompt</span>
<span className={`card-source source-${prompt.platform_source || 'claude'}`}>
{prompt.platform_source || 'claude'}
</span>
<span className="card-project">{prompt.project}</span>
</div>
</div>
+3
View File
@@ -21,6 +21,9 @@ export function SummaryCard({ summary }: SummaryCardProps) {
<header className="summary-card-header">
<div className="summary-badge-row">
<span className="card-type summary-badge">Session Summary</span>
<span className={`card-source source-${summary.platform_source || 'claude'}`}>
{summary.platform_source || 'claude'}
</span>
<span className="summary-project-badge">{summary.project}</span>
</div>
{summary.request && (
+1 -1
View File
@@ -3,7 +3,7 @@
* Shared across UI components and hooks
*/
export const DEFAULT_SETTINGS = {
CLAUDE_MEM_MODEL: 'claude-sonnet-4-5',
CLAUDE_MEM_MODEL: 'claude-sonnet-4-6',
CLAUDE_MEM_CONTEXT_OBSERVATIONS: '50',
CLAUDE_MEM_WORKER_PORT: '37777',
CLAUDE_MEM_WORKER_HOST: '127.0.0.1',
+75 -12
View File
@@ -1,5 +1,5 @@
import { useState, useEffect, useCallback } from 'react';
import type { Settings } from '../types';
import type { ProjectCatalog, Settings } from '../types';
interface UseContextPreviewResult {
preview: string;
@@ -7,15 +7,31 @@ interface UseContextPreviewResult {
error: string | null;
refresh: () => Promise<void>;
projects: string[];
sources: string[];
selectedSource: string | null;
setSelectedSource: (source: string) => void;
selectedProject: string | null;
setSelectedProject: (project: string) => void;
}
/**
 * Pick the default preview source: 'claude' when present, then 'codex',
 * otherwise the first entry; null for an empty (or empty-string-first) list.
 */
function getPreferredSource(sources: string[]): string | null {
  for (const favorite of ['claude', 'codex']) {
    if (sources.includes(favorite)) return favorite;
  }
  const first = sources[0];
  return first ? first : null;
}
/** Prepend the built-in sources ('claude', 'codex') and deduplicate, preserving first-seen order. */
function withDefaultSources(sources: string[]): string[] {
  return [...new Set(['claude', 'codex', ...sources])];
}
export function useContextPreview(settings: Settings): UseContextPreviewResult {
const [preview, setPreview] = useState<string>('');
const [isLoading, setIsLoading] = useState(false);
const [error, setError] = useState<string | null>(null);
const [catalog, setCatalog] = useState<ProjectCatalog>({ projects: [], sources: [], projectsBySource: {} });
const [projects, setProjects] = useState<string[]>([]);
const [selectedSource, setSelectedSource] = useState<string | null>(null);
const [selectedProject, setSelectedProject] = useState<string | null>(null);
// Fetch projects on mount
@@ -23,11 +39,27 @@ export function useContextPreview(settings: Settings): UseContextPreviewResult {
async function fetchProjects() {
try {
const response = await fetch('/api/projects');
const data = await response.json();
if (data.projects && data.projects.length > 0) {
setProjects(data.projects);
setSelectedProject(data.projects[0]); // Default to first project
const data = await response.json() as ProjectCatalog;
const nextCatalog: ProjectCatalog = {
projects: data.projects || [],
sources: withDefaultSources(data.sources || []),
projectsBySource: data.projectsBySource || {}
};
setCatalog(nextCatalog);
const preferredSource = getPreferredSource(nextCatalog.sources);
setSelectedSource(preferredSource);
if (preferredSource) {
const sourceProjects = nextCatalog.projectsBySource[preferredSource] || [];
setProjects(sourceProjects);
setSelectedProject(sourceProjects[0] || null);
return;
}
setProjects(nextCatalog.projects);
setSelectedProject(nextCatalog.projects[0] || null);
} catch (err) {
console.error('Failed to fetch projects:', err);
}
@@ -35,6 +67,18 @@ export function useContextPreview(settings: Settings): UseContextPreviewResult {
fetchProjects();
}, []);
useEffect(() => {
if (!selectedSource) {
setProjects(catalog.projects);
setSelectedProject(prev => (prev && catalog.projects.includes(prev) ? prev : catalog.projects[0] || null));
return;
}
const sourceProjects = catalog.projectsBySource[selectedSource] || [];
setProjects(sourceProjects);
setSelectedProject(prev => (prev && sourceProjects.includes(prev) ? prev : sourceProjects[0] || null));
}, [catalog, selectedSource]);
const refresh = useCallback(async () => {
if (!selectedProject) {
setPreview('No project selected');
@@ -48,17 +92,25 @@ export function useContextPreview(settings: Settings): UseContextPreviewResult {
project: selectedProject
});
const response = await fetch(`/api/context/preview?${params}`);
const text = await response.text();
if (selectedSource) {
params.append('platformSource', selectedSource);
}
if (response.ok) {
setPreview(text);
} else {
try {
const response = await fetch(`/api/context/preview?${params}`);
const text = await response.text();
if (response.ok) {
setPreview(text);
} else {
setError('Failed to load preview');
}
} catch {
setError('Failed to load preview');
}
setIsLoading(false);
}, [selectedProject]);
}, [selectedProject, selectedSource]);
// Debounced refresh when settings or selectedProject change
useEffect(() => {
@@ -68,5 +120,16 @@ export function useContextPreview(settings: Settings): UseContextPreviewResult {
return () => clearTimeout(timeout);
}, [settings, refresh]);
return { preview, isLoading, error, refresh, projects, selectedProject, setSelectedProject };
return {
preview,
isLoading,
error,
refresh,
projects,
sources: catalog.sources,
selectedSource,
setSelectedSource,
selectedProject,
setSelectedProject
};
}
+23 -10
View File
@@ -14,7 +14,7 @@ type DataItem = Observation | Summary | UserPrompt;
/**
* Generic pagination hook for observations, summaries, and prompts
*/
function usePaginationFor(endpoint: string, dataType: DataType, currentFilter: string) {
function usePaginationFor(endpoint: string, dataType: DataType, currentFilter: string, currentSource: string) {
const [state, setState] = useState<PaginationState>({
isLoading: false,
hasMore: true
@@ -22,7 +22,7 @@ function usePaginationFor(endpoint: string, dataType: DataType, currentFilter: s
// Track offset and filter in refs to handle synchronous resets
const offsetRef = useRef(0);
const lastFilterRef = useRef(currentFilter);
const lastSelectionRef = useRef(`${currentSource}::${currentFilter}`);
const stateRef = useRef(state);
/**
@@ -31,16 +31,17 @@ function usePaginationFor(endpoint: string, dataType: DataType, currentFilter: s
*/
const loadMore = useCallback(async (): Promise<DataItem[]> => {
// Check if filter changed - if so, reset pagination synchronously
const filterChanged = lastFilterRef.current !== currentFilter;
const selectionKey = `${currentSource}::${currentFilter}`;
const filterChanged = lastSelectionRef.current !== selectionKey;
if (filterChanged) {
offsetRef.current = 0;
lastFilterRef.current = currentFilter;
lastSelectionRef.current = selectionKey;
// Reset state both in React state and ref synchronously
const newState = { isLoading: false, hasMore: true };
setState(newState);
stateRef.current = newState; // Update ref immediately to avoid stale checks
stateRef.current = newState; // Update ref immediately to avoid stale checks
}
// Prevent concurrent requests using ref (always current)
@@ -49,6 +50,7 @@ function usePaginationFor(endpoint: string, dataType: DataType, currentFilter: s
return [];
}
stateRef.current = { ...stateRef.current, isLoading: true };
setState(prev => ({ ...prev, isLoading: true }));
// Build query params using current offset from ref
@@ -62,6 +64,10 @@ function usePaginationFor(endpoint: string, dataType: DataType, currentFilter: s
params.append('project', currentFilter);
}
if (currentSource && currentSource !== 'all') {
params.append('platformSource', currentSource);
}
const response = await fetch(`${endpoint}?${params}`);
if (!response.ok) {
@@ -70,6 +76,13 @@ function usePaginationFor(endpoint: string, dataType: DataType, currentFilter: s
const data = await response.json() as { items: DataItem[], hasMore: boolean };
const nextState = {
...stateRef.current,
isLoading: false,
hasMore: data.hasMore
};
stateRef.current = nextState;
setState(prev => ({
...prev,
isLoading: false,
@@ -80,7 +93,7 @@ function usePaginationFor(endpoint: string, dataType: DataType, currentFilter: s
offsetRef.current += UI.PAGINATION_PAGE_SIZE;
return data.items;
}, [currentFilter, endpoint, dataType]);
}, [currentFilter, currentSource, endpoint, dataType]);
return {
...state,
@@ -91,10 +104,10 @@ function usePaginationFor(endpoint: string, dataType: DataType, currentFilter: s
/**
* Hook for paginating observations
*/
export function usePagination(currentFilter: string) {
const observations = usePaginationFor(API_ENDPOINTS.OBSERVATIONS, 'observations', currentFilter);
const summaries = usePaginationFor(API_ENDPOINTS.SUMMARIES, 'summaries', currentFilter);
const prompts = usePaginationFor(API_ENDPOINTS.PROMPTS, 'prompts', currentFilter);
export function usePagination(currentFilter: string, currentSource: string) {
const observations = usePaginationFor(API_ENDPOINTS.OBSERVATIONS, 'observations', currentFilter, currentSource);
const summaries = usePaginationFor(API_ENDPOINTS.SUMMARIES, 'summaries', currentFilter, currentSource);
const prompts = usePaginationFor(API_ENDPOINTS.PROMPTS, 'prompts', currentFilter, currentSource);
return {
observations,
+56 -18
View File
@@ -1,5 +1,5 @@
import { useState, useEffect, useRef } from 'react';
import { Observation, Summary, UserPrompt, StreamEvent } from '../types';
import { Observation, Summary, UserPrompt, StreamEvent, ProjectCatalog } from '../types';
import { API_ENDPOINTS } from '../constants/api';
import { TIMING } from '../constants/timing';
@@ -7,16 +7,42 @@ export function useSSE() {
const [observations, setObservations] = useState<Observation[]>([]);
const [summaries, setSummaries] = useState<Summary[]>([]);
const [prompts, setPrompts] = useState<UserPrompt[]>([]);
const [projects, setProjects] = useState<string[]>([]);
const [catalog, setCatalog] = useState<ProjectCatalog>({
projects: [],
sources: [],
projectsBySource: {}
});
const [isConnected, setIsConnected] = useState(false);
const [isProcessing, setIsProcessing] = useState(false);
const [queueDepth, setQueueDepth] = useState(0);
const eventSourceRef = useRef<EventSource | null>(null);
const reconnectTimeoutRef = useRef<NodeJS.Timeout>();
const updateCatalogForItem = (project: string, platformSource: string) => {
setCatalog(prev => {
const nextProjects = prev.projects.includes(project)
? prev.projects
: [...prev.projects, project];
const nextSources = prev.sources.includes(platformSource)
? prev.sources
: [...prev.sources, platformSource];
const sourceProjects = prev.projectsBySource[platformSource] || [];
return {
projects: nextProjects,
sources: nextSources,
projectsBySource: {
...prev.projectsBySource,
[platformSource]: sourceProjects.includes(project)
? sourceProjects
: [...sourceProjects, project]
}
};
});
};
useEffect(() => {
const connect = () => {
// Clean up existing connection
if (eventSourceRef.current) {
eventSourceRef.current.close();
}
@@ -27,7 +53,6 @@ export function useSSE() {
eventSource.onopen = () => {
console.log('[SSE] Connected');
setIsConnected(true);
// Clear any pending reconnect
if (reconnectTimeoutRef.current) {
clearTimeout(reconnectTimeoutRef.current);
}
@@ -38,9 +63,8 @@ export function useSSE() {
setIsConnected(false);
eventSource.close();
// Reconnect after delay
reconnectTimeoutRef.current = setTimeout(() => {
reconnectTimeoutRef.current = undefined; // Clear before reconnecting
reconnectTimeoutRef.current = undefined;
console.log('[SSE] Attempting to reconnect...');
connect();
}, TIMING.SSE_RECONNECT_DELAY_MS);
@@ -52,32 +76,37 @@ export function useSSE() {
switch (data.type) {
case 'initial_load':
console.log('[SSE] Initial load:', {
projects: data.projects?.length || 0
projects: data.projects?.length || 0,
sources: data.sources?.length || 0
});
setCatalog({
projects: data.projects || [],
sources: data.sources || [],
projectsBySource: data.projectsBySource || {}
});
// Only load projects list - data will come via pagination
setProjects(data.projects || []);
break;
case 'new_observation':
if (data.observation) {
console.log('[SSE] New observation:', data.observation.id);
setObservations(prev => [data.observation, ...prev]);
updateCatalogForItem(data.observation.project, data.observation.platform_source || 'claude');
setObservations(prev => [data.observation!, ...prev]);
}
break;
case 'new_summary':
if (data.summary) {
const summary = data.summary;
console.log('[SSE] New summary:', summary.id);
setSummaries(prev => [summary, ...prev]);
console.log('[SSE] New summary:', data.summary.id);
updateCatalogForItem(data.summary.project, data.summary.platform_source || 'claude');
setSummaries(prev => [data.summary!, ...prev]);
}
break;
case 'new_prompt':
if (data.prompt) {
const prompt = data.prompt;
console.log('[SSE] New prompt:', prompt.id);
setPrompts(prev => [prompt, ...prev]);
console.log('[SSE] New prompt:', data.prompt.id);
updateCatalogForItem(data.prompt.project, data.prompt.platform_source || 'claude');
setPrompts(prev => [data.prompt!, ...prev]);
}
break;
@@ -94,7 +123,6 @@ export function useSSE() {
connect();
// Cleanup on unmount
return () => {
if (eventSourceRef.current) {
eventSourceRef.current.close();
@@ -105,5 +133,15 @@ export function useSSE() {
};
}, []);
return { observations, summaries, prompts, projects, isProcessing, queueDepth, isConnected };
return {
observations,
summaries,
prompts,
projects: catalog.projects,
sources: catalog.sources,
projectsBySource: catalog.projectsBySource,
isProcessing,
queueDepth,
isConnected
};
}
+12
View File
@@ -2,6 +2,7 @@ export interface Observation {
id: number;
memory_session_id: string;
project: string;
platform_source: string;
type: string;
title: string | null;
subtitle: string | null;
@@ -20,6 +21,7 @@ export interface Summary {
id: number;
session_id: string;
project: string;
platform_source: string;
request?: string;
investigated?: string;
learned?: string;
@@ -32,6 +34,7 @@ export interface UserPrompt {
id: number;
content_session_id: string;
project: string;
platform_source: string;
prompt_number: number;
prompt_text: string;
created_at_epoch: number;
@@ -48,10 +51,19 @@ export interface StreamEvent {
summaries?: Summary[];
prompts?: UserPrompt[];
projects?: string[];
sources?: string[];
projectsBySource?: Record<string, string[]>;
observation?: Observation;
summary?: Summary;
prompt?: UserPrompt;
isProcessing?: boolean;
queueDepth?: number;
}
export interface ProjectCatalog {
projects: string[];
sources: string[];
projectsBySource: Record<string, string[]>;
}
export interface Settings {
+37 -14
View File
@@ -1,9 +1,13 @@
/**
* CLAUDE.md File Utilities
* CLAUDE.md / CLAUDE.local.md File Utilities
*
* Shared utilities for writing folder-level CLAUDE.md files with
* Shared utilities for writing folder-level context files with
* auto-generated context sections. Preserves user content outside
* <claude-mem-context> tags.
*
* When CLAUDE_MEM_FOLDER_USE_LOCAL_MD is 'true', writes to CLAUDE.local.md
* instead of CLAUDE.md. This keeps auto-generated context in a personal,
* gitignored file separate from shared project instructions.
*/
import { existsSync, readFileSync, writeFileSync, renameSync } from 'fs';
@@ -16,6 +20,22 @@ import { workerHttpRequest } from '../shared/worker-utils.js';
const SETTINGS_PATH = path.join(os.homedir(), '.claude-mem', 'settings.json');
/** Default target filename */
const CLAUDE_MD_FILENAME = 'CLAUDE.md';
/** Alternative target filename for personal/local context */
const CLAUDE_LOCAL_MD_FILENAME = 'CLAUDE.local.md';
/**
* Get the target filename based on settings.
* Returns 'CLAUDE.local.md' when CLAUDE_MEM_FOLDER_USE_LOCAL_MD is 'true',
* otherwise returns 'CLAUDE.md'.
*/
export function getTargetFilename(settings?: ReturnType<typeof SettingsDefaultsManager.loadFromFile>): string {
const s = settings ?? SettingsDefaultsManager.loadFromFile(SETTINGS_PATH);
return s.CLAUDE_MEM_FOLDER_USE_LOCAL_MD === 'true' ? CLAUDE_LOCAL_MD_FILENAME : CLAUDE_MD_FILENAME;
}
/**
* Check for consecutive duplicate path segments like frontend/frontend/ or src/src/.
* This catches paths created when cwd already includes the directory name (Issue #814).
@@ -112,14 +132,16 @@ export function replaceTaggedContent(existingContent: string, newContent: string
*
* @param folderPath - Absolute path to the folder (must already exist)
* @param newContent - Content to write inside tags
* @param targetFilename - Target filename (default: determined by settings)
*/
export function writeClaudeMdToFolder(folderPath: string, newContent: string): void {
export function writeClaudeMdToFolder(folderPath: string, newContent: string, targetFilename?: string): void {
const resolvedPath = path.resolve(folderPath);
// Never write inside .git directories — corrupts refs (#1165)
if (resolvedPath.includes('/.git/') || resolvedPath.includes('\\.git\\') || resolvedPath.endsWith('/.git') || resolvedPath.endsWith('\\.git')) return;
const claudeMdPath = path.join(folderPath, 'CLAUDE.md');
const filename = targetFilename ?? getTargetFilename();
const claudeMdPath = path.join(folderPath, filename);
const tempFile = `${claudeMdPath}.tmp`;
// Only write to folders that already exist - never create new directories
@@ -329,9 +351,10 @@ export async function updateFolderClaudeMdFiles(
_port: number,
projectRoot?: string
): Promise<void> {
// Load settings to get configurable observation limit and exclude list
// Load settings to get configurable observation limit, exclude list, and target filename
const settings = SettingsDefaultsManager.loadFromFile(SETTINGS_PATH);
const limit = parseInt(settings.CLAUDE_MEM_CONTEXT_OBSERVATIONS, 10) || 50;
const targetFilename = getTargetFilename(settings);
// Parse exclude paths from settings
let folderMdExcludePaths: string[] = [];
@@ -349,18 +372,18 @@ export async function updateFolderClaudeMdFiles(
// See: https://github.com/thedotmack/claude-mem/issues/859
const foldersWithActiveClaudeMd = new Set<string>();
// First pass: identify folders with actively-used CLAUDE.md files
// First pass: identify folders with actively-used CLAUDE.md or CLAUDE.local.md files
for (const filePath of filePaths) {
if (!filePath) continue;
const basename = path.basename(filePath);
if (basename === 'CLAUDE.md') {
if (basename === CLAUDE_MD_FILENAME || basename === CLAUDE_LOCAL_MD_FILENAME) {
let absoluteFilePath = filePath;
if (projectRoot && !path.isAbsolute(filePath)) {
absoluteFilePath = path.join(projectRoot, filePath);
}
const folderPath = path.dirname(absoluteFilePath);
foldersWithActiveClaudeMd.add(folderPath);
logger.debug('FOLDER_INDEX', 'Detected active CLAUDE.md, will skip folder', { folderPath });
logger.debug('FOLDER_INDEX', 'Detected active context file, will skip folder', { folderPath, basename });
}
}
@@ -435,24 +458,24 @@ export async function updateFolderClaudeMdFiles(
const formatted = formatTimelineForClaudeMd(result.content[0].text);
// Fix for #794: Don't create new CLAUDE.md files if there's no activity
// Fix for #794: Don't create new context files if there's no activity
// But update existing ones to show "No recent activity" if they already exist
const claudeMdPath = path.join(folderPath, 'CLAUDE.md');
const claudeMdPath = path.join(folderPath, targetFilename);
const hasNoActivity = formatted.includes('*No recent activity*');
const fileExists = existsSync(claudeMdPath);
if (hasNoActivity && !fileExists) {
logger.debug('FOLDER_INDEX', 'Skipping empty CLAUDE.md creation', { folderPath });
logger.debug('FOLDER_INDEX', 'Skipping empty context file creation', { folderPath, targetFilename });
continue;
}
writeClaudeMdToFolder(folderPath, formatted);
writeClaudeMdToFolder(folderPath, formatted, targetFilename);
logger.debug('FOLDER_INDEX', 'Updated CLAUDE.md', { folderPath });
logger.debug('FOLDER_INDEX', 'Updated context file', { folderPath, targetFilename });
} catch (error) {
// Fire-and-forget: log warning but don't fail
const err = error as Error;
logger.error('FOLDER_INDEX', 'Failed to update CLAUDE.md', {
logger.error('FOLDER_INDEX', `Failed to update ${targetFilename}`, {
folderPath,
errorMessage: err.message,
errorStack: err.stack
+20 -4
View File
@@ -1,12 +1,24 @@
import { homedir } from 'os'
import path from 'path';
import { logger } from './logger.js';
import { detectWorktree } from './worktree.js';
/**
* Expand leading ~ to the user's home directory.
* Handles "~", "~/", and "~/subpath" but not "~user/" (which is rare in cwd).
*/
function expandTilde(p: string): string {
if (p === '~' || p.startsWith('~/')) {
return p.replace(/^~/, homedir())
}
return p
}
/**
* Extract project name from working directory path
* Handles edge cases: null/undefined cwd, drive roots, trailing slashes
* Handles edge cases: null/undefined cwd, drive roots, trailing slashes, unexpanded ~
*
* @param cwd - Current working directory (absolute path)
* @param cwd - Current working directory (absolute path, or ~-prefixed path)
* @returns Project name or "unknown-project" if extraction fails
*/
export function getProjectName(cwd: string | null | undefined): string {
@@ -15,8 +27,11 @@ export function getProjectName(cwd: string | null | undefined): string {
return 'unknown-project';
}
// Expand leading ~ before path operations
const expanded = expandTilde(cwd)
// Extract basename (handles trailing slashes automatically)
const basename = path.basename(cwd);
const basename = path.basename(expanded);
// Edge case: Drive roots on Windows (C:\, J:\) or Unix root (/)
// path.basename('C:\') returns '' (empty string)
@@ -69,7 +84,8 @@ export function getProjectContext(cwd: string | null | undefined): ProjectContex
return { primary, parent: null, isWorktree: false, allProjects: [primary] };
}
const worktreeInfo = detectWorktree(cwd);
const expandedCwd = expandTilde(cwd);
const worktreeInfo = detectWorktree(expandedCwd);
if (worktreeInfo.isWorktree && worktreeInfo.parentProjectName) {
// In a worktree: include parent first for chronological ordering

Some files were not shown because too many files have changed in this diff Show More