feat: inject file observation timeline on PreToolUse Read hook
When Claude reads a file, the PreToolUse hook queries for existing observations about that file and injects the timeline into context via additionalContext + permissionDecision: allow. This prevents duplicate observations and saves the tokens that would otherwise be spent actively rediscovering past work. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -58,6 +58,18 @@
|
||||
]
|
||||
}
|
||||
],
|
||||
"PreToolUse": [
|
||||
{
|
||||
"matcher": "Read",
|
||||
"hooks": [
|
||||
{
|
||||
"type": "command",
|
||||
"command": "_R=\"${CLAUDE_PLUGIN_ROOT}\"; [ -z \"$_R\" ] && _R=\"$HOME/.claude/plugins/marketplaces/thedotmack/plugin\"; node \"$_R/scripts/bun-runner.js\" \"$_R/scripts/worker-service.cjs\" hook claude-code file-context",
|
||||
"timeout": 2000
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"Stop": [
|
||||
{
|
||||
"hooks": [
|
||||
|
||||
File diff suppressed because one or more lines are too long
+153
-142
File diff suppressed because one or more lines are too long
+11
-11
File diff suppressed because one or more lines are too long
@@ -0,0 +1,142 @@
|
||||
/**
|
||||
* File Context Handler - PreToolUse
|
||||
*
|
||||
* Injects relevant observation history when Claude reads/edits a file,
|
||||
* so it can avoid duplicating past work.
|
||||
*/
|
||||
|
||||
import type { EventHandler, NormalizedHookInput, HookResult } from '../types.js';
|
||||
import { ensureWorkerRunning, workerHttpRequest } from '../../shared/worker-utils.js';
|
||||
import { logger } from '../../utils/logger.js';
|
||||
import { HOOK_EXIT_CODES } from '../../shared/hook-constants.js';
|
||||
import { isProjectExcluded } from '../../utils/project-filter.js';
|
||||
import { SettingsDefaultsManager } from '../../shared/SettingsDefaultsManager.js';
|
||||
import { USER_SETTINGS_PATH } from '../../shared/paths.js';
|
||||
|
||||
const TYPE_ICONS: Record<string, string> = {
|
||||
decision: '\u2696\uFE0F',
|
||||
bugfix: '\uD83D\uDD34',
|
||||
feature: '\uD83D\uDFE3',
|
||||
refactor: '\uD83D\uDD04',
|
||||
discovery: '\uD83D\uDD35',
|
||||
change: '\u2705',
|
||||
};
|
||||
|
||||
function compactTime(timeStr: string): string {
|
||||
return timeStr.toLowerCase().replace(' am', 'a').replace(' pm', 'p');
|
||||
}
|
||||
|
||||
function formatTime(epoch: number): string {
|
||||
const date = new Date(epoch);
|
||||
return date.toLocaleString('en-US', { hour: 'numeric', minute: '2-digit', hour12: true });
|
||||
}
|
||||
|
||||
function formatDate(epoch: number): string {
|
||||
const date = new Date(epoch);
|
||||
return date.toLocaleString('en-US', { month: 'short', day: 'numeric', year: 'numeric' });
|
||||
}
|
||||
|
||||
interface ObservationRow {
|
||||
id: number;
|
||||
title: string | null;
|
||||
type: string;
|
||||
created_at_epoch: number;
|
||||
}
|
||||
|
||||
function formatFileTimeline(observations: ObservationRow[], filePath: string): string {
|
||||
// Group observations by day
|
||||
const byDay = new Map<string, ObservationRow[]>();
|
||||
for (const obs of observations) {
|
||||
const day = formatDate(obs.created_at_epoch);
|
||||
if (!byDay.has(day)) {
|
||||
byDay.set(day, []);
|
||||
}
|
||||
byDay.get(day)!.push(obs);
|
||||
}
|
||||
|
||||
// Sort days chronologically
|
||||
const sortedDays = Array.from(byDay.entries()).sort((a, b) => {
|
||||
const aEpoch = a[1][0].created_at_epoch;
|
||||
const bEpoch = b[1][0].created_at_epoch;
|
||||
return aEpoch - bEpoch;
|
||||
});
|
||||
|
||||
const lines: string[] = [
|
||||
`Existing observations for this file \u2014 review via get_observations([IDs]) to avoid duplicates:`,
|
||||
];
|
||||
|
||||
for (const [day, dayObservations] of sortedDays) {
|
||||
lines.push(`### ${day}`);
|
||||
for (const obs of dayObservations) {
|
||||
const title = obs.title || 'Untitled';
|
||||
const icon = TYPE_ICONS[obs.type] || '\u2753';
|
||||
const time = compactTime(formatTime(obs.created_at_epoch));
|
||||
lines.push(`${obs.id} ${time} ${icon} ${title}`);
|
||||
}
|
||||
}
|
||||
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
export const fileContextHandler: EventHandler = {
|
||||
async execute(input: NormalizedHookInput): Promise<HookResult> {
|
||||
// Extract file_path from toolInput
|
||||
const toolInput = input.toolInput as Record<string, unknown> | undefined;
|
||||
const filePath = toolInput?.file_path as string | undefined;
|
||||
|
||||
if (!filePath) {
|
||||
return { continue: true, suppressOutput: true };
|
||||
}
|
||||
|
||||
// Check if project is excluded from tracking
|
||||
const settings = SettingsDefaultsManager.loadFromFile(USER_SETTINGS_PATH);
|
||||
if (input.cwd && isProjectExcluded(input.cwd, settings.CLAUDE_MEM_EXCLUDED_PROJECTS)) {
|
||||
logger.debug('HOOK', 'Project excluded from tracking, skipping file context', { cwd: input.cwd });
|
||||
return { continue: true, suppressOutput: true };
|
||||
}
|
||||
|
||||
// Ensure worker is running
|
||||
const workerReady = await ensureWorkerRunning();
|
||||
if (!workerReady) {
|
||||
return { continue: true, suppressOutput: true };
|
||||
}
|
||||
|
||||
// Query worker for observations related to this file
|
||||
try {
|
||||
const queryParams = new URLSearchParams({ path: filePath });
|
||||
if (input.cwd) {
|
||||
queryParams.set('project', input.cwd);
|
||||
}
|
||||
|
||||
const response = await workerHttpRequest(`/api/observations/by-file?${queryParams.toString()}`, {
|
||||
method: 'GET',
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
logger.warn('HOOK', 'File context query failed, skipping', { status: response.status, filePath });
|
||||
return { continue: true, suppressOutput: true };
|
||||
}
|
||||
|
||||
const data = await response.json() as { observations: ObservationRow[]; count: number };
|
||||
|
||||
if (!data.observations || data.observations.length === 0) {
|
||||
return { continue: true, suppressOutput: true };
|
||||
}
|
||||
|
||||
const timeline = formatFileTimeline(data.observations, filePath);
|
||||
|
||||
return {
|
||||
hookSpecificOutput: {
|
||||
hookEventName: 'PreToolUse',
|
||||
permissionDecision: 'allow',
|
||||
additionalContext: timeline,
|
||||
},
|
||||
};
|
||||
} catch (error) {
|
||||
logger.warn('HOOK', 'File context fetch error, skipping', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
return { continue: true, suppressOutput: true };
|
||||
}
|
||||
},
|
||||
};
|
||||
@@ -13,6 +13,7 @@ import { observationHandler } from './observation.js';
|
||||
import { summarizeHandler } from './summarize.js';
|
||||
import { userMessageHandler } from './user-message.js';
|
||||
import { fileEditHandler } from './file-edit.js';
|
||||
import { fileContextHandler } from './file-context.js';
|
||||
import { sessionCompleteHandler } from './session-complete.js';
|
||||
|
||||
export type EventType =
|
||||
@@ -22,7 +23,8 @@ export type EventType =
|
||||
| 'summarize' // Stop - generate summary (phase 1)
|
||||
| 'session-complete' // Stop - complete session (phase 2) - fixes #842
|
||||
| 'user-message' // SessionStart (parallel) - display to user
|
||||
| 'file-edit'; // Cursor afterFileEdit
|
||||
| 'file-edit' // Cursor afterFileEdit
|
||||
| 'file-context'; // PreToolUse - inject file observation history
|
||||
|
||||
const handlers: Record<EventType, EventHandler> = {
|
||||
'context': contextHandler,
|
||||
@@ -31,7 +33,8 @@ const handlers: Record<EventType, EventHandler> = {
|
||||
'summarize': summarizeHandler,
|
||||
'session-complete': sessionCompleteHandler,
|
||||
'user-message': userMessageHandler,
|
||||
'file-edit': fileEditHandler
|
||||
'file-edit': fileEditHandler,
|
||||
'file-context': fileContextHandler
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -64,4 +67,5 @@ export { observationHandler } from './observation.js';
|
||||
export { summarizeHandler } from './summarize.js';
|
||||
export { userMessageHandler } from './user-message.js';
|
||||
export { fileEditHandler } from './file-edit.js';
|
||||
export { fileContextHandler } from './file-context.js';
|
||||
export { sessionCompleteHandler } from './session-complete.js';
|
||||
|
||||
+6
-1
@@ -15,7 +15,12 @@ export interface NormalizedHookInput {
|
||||
export interface HookResult {
|
||||
continue?: boolean;
|
||||
suppressOutput?: boolean;
|
||||
hookSpecificOutput?: { hookEventName: string; additionalContext: string };
|
||||
hookSpecificOutput?: {
|
||||
hookEventName: string;
|
||||
additionalContext: string;
|
||||
permissionDecision?: string;
|
||||
permissionDecisionReason?: string;
|
||||
};
|
||||
systemMessage?: string;
|
||||
exitCode?: number;
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
*/
|
||||
|
||||
import { Database } from 'bun:sqlite';
|
||||
import path from 'path';
|
||||
import { logger } from '../../../utils/logger.js';
|
||||
import type { ObservationRecord } from '../../../types/database.js';
|
||||
import type { GetObservationsByIdsOptions, ObservationSessionRow } from './types.js';
|
||||
@@ -111,3 +112,42 @@ export function getObservationsForSession(
|
||||
|
||||
return stmt.all(memorySessionId) as ObservationSessionRow[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get observations associated with a given file path.
|
||||
* Searches both files_read and files_modified using basename matching
|
||||
* to handle differing absolute paths across sessions.
|
||||
*/
|
||||
export function getObservationsByFilePath(
|
||||
db: Database,
|
||||
filePath: string,
|
||||
options?: { project?: string; limit?: number }
|
||||
): ObservationRecord[] {
|
||||
const basename = path.basename(filePath);
|
||||
const likePattern = `%${basename}%`;
|
||||
const limit = options?.limit ?? 30;
|
||||
|
||||
const additionalConditions: string[] = [];
|
||||
const params: any[] = [likePattern, likePattern];
|
||||
|
||||
if (options?.project) {
|
||||
additionalConditions.push('AND project = ?');
|
||||
params.push(options.project);
|
||||
}
|
||||
|
||||
const additionalWhere = additionalConditions.join(' ');
|
||||
|
||||
const stmt = db.prepare(`
|
||||
SELECT *
|
||||
FROM observations
|
||||
WHERE (
|
||||
EXISTS (SELECT 1 FROM json_each(files_read) WHERE value LIKE ?)
|
||||
OR EXISTS (SELECT 1 FROM json_each(files_modified) WHERE value LIKE ?)
|
||||
)
|
||||
${additionalWhere}
|
||||
ORDER BY created_at_epoch DESC
|
||||
LIMIT ${limit}
|
||||
`);
|
||||
|
||||
return stmt.all(...params) as ObservationRecord[];
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@ import { SessionManager } from '../../SessionManager.js';
|
||||
import { SSEBroadcaster } from '../../SSEBroadcaster.js';
|
||||
import type { WorkerService } from '../../../worker-service.js';
|
||||
import { BaseRouteHandler } from '../BaseRouteHandler.js';
|
||||
import { getObservationsByFilePath } from '../../../sqlite/observations/get.js';
|
||||
|
||||
export class DataRoutes extends BaseRouteHandler {
|
||||
constructor(
|
||||
@@ -39,6 +40,7 @@ export class DataRoutes extends BaseRouteHandler {
|
||||
|
||||
// Fetch by ID endpoints
|
||||
app.get('/api/observation/:id', this.handleGetObservationById.bind(this));
|
||||
app.get('/api/observations/by-file', this.handleGetObservationsByFile.bind(this));
|
||||
app.post('/api/observations/batch', this.handleGetObservationsByIds.bind(this));
|
||||
app.get('/api/session/:id', this.handleGetSessionById.bind(this));
|
||||
app.post('/api/sdk-sessions/batch', this.handleGetSdkSessionsByIds.bind(this));
|
||||
@@ -108,6 +110,26 @@ export class DataRoutes extends BaseRouteHandler {
|
||||
res.json(observation);
|
||||
});
|
||||
|
||||
/**
|
||||
* Get observations associated with a file path
|
||||
* GET /api/observations/by-file?path=<file_path>&project=<project>&limit=30
|
||||
*/
|
||||
private handleGetObservationsByFile = this.wrapHandler((req: Request, res: Response): void => {
|
||||
const filePath = req.query.path as string | undefined;
|
||||
if (!filePath) {
|
||||
this.badRequest(res, 'path query parameter is required');
|
||||
return;
|
||||
}
|
||||
|
||||
const project = req.query.project as string | undefined;
|
||||
const limit = req.query.limit ? parseInt(req.query.limit as string, 10) : undefined;
|
||||
|
||||
const db = this.dbManager.getSessionStore().db;
|
||||
const observations = getObservationsByFilePath(db, filePath, { project, limit });
|
||||
|
||||
res.json({ observations, count: observations.length });
|
||||
});
|
||||
|
||||
/**
|
||||
* Get observations by array of IDs
|
||||
* POST /api/observations/batch
|
||||
|
||||
Reference in New Issue
Block a user