feat: Add ROI tracking with discovery_tokens for observations and session summaries (#111)

* feat: Add discovery_tokens for ROI tracking in observations and session summaries

- Introduced `discovery_tokens` column in `observations` and `session_summaries` tables to track token costs associated with discovering and creating each observation and summary.
- Updated relevant services and hooks to calculate and display ROI metrics based on discovery tokens.
- Enhanced context economics reporting to include savings from reusing previous observations.
- Implemented migration to ensure the new column is added to existing tables.
- Adjusted data models and sync processes to accommodate the new `discovery_tokens` field.

* refactor: streamline context hook by removing unused functions and updating terminology

- Removed the estimateTokens and getObservations helper functions as they were not utilized.
- Updated the legend and output messages to replace "discovery" with "work" for clarity.
- Changed the emoji representation for different observation types to better reflect their purpose.
- Enhanced output formatting for improved readability and understanding of token usage.

* Refactor user-message-hook and context-hook for improved clarity and functionality

- Updated user-message-hook.js to enhance error messaging and improve variable naming for clarity.
- Modified context-hook.ts to include a new column key section, improved context index instructions, and added emoji icons for observation types.
- Adjusted footer messages in context-hook.ts to emphasize token savings and access to past research.
- Changed user-message-hook.ts to update the feedback and support message for clarity.

* fix: Critical ROI tracking fixes from PR review

Addresses critical findings from PR #111 review:

1. **Fixed incorrect discovery token calculation** (src/services/worker/SDKAgent.ts)
   - Changed from passing cumulative total to per-response delta
   - Now correctly tracks token cost for each observation/summary
   - Captures token state before/after response processing
   - Prevents all observations getting inflated cumulative values

2. **Fixed schema version mismatch** (src/services/sqlite/SessionStore.ts)
   - Changed ensureDiscoveryTokensColumn() from version 11 to version 7
   - Now matches migration007 definition in migrations.ts
   - Ensures consistent version tracking across migration system

These fixes ensure ROI metrics accurately reflect token costs.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>

---------

Co-authored-by: Claude <noreply@anthropic.com>
This commit is contained in: authored by Alex Newman on 2025-11-15 19:34:53 -05:00, committed by GitHub.
parent 0f96476987
commit 3cbc041c8b
17 changed files with 477 additions and 307 deletions
+112 -55
View File
@@ -71,6 +71,7 @@ interface Observation {
concepts: string | null;
files_read: string | null;
files_modified: string | null;
discovery_tokens: number | null;
created_at: string;
created_at_epoch: number;
}
@@ -130,12 +131,6 @@ function formatDate(dateStr: string): string {
});
}
// Helper: rough token estimate for a text value (null-safe).
// Divides character count by CHARS_PER_TOKEN_ESTIMATE, rounding up.
function estimateTokens(text: string | null): number {
  return text ? Math.ceil(text.length / CHARS_PER_TOKEN_ESTIMATE) : 0;
}
// Helper: Convert absolute paths to relative paths
function toRelativePath(filePath: string, cwd: string): string {
if (path.isAbsolute(filePath)) {
@@ -154,24 +149,6 @@ function renderSummaryField(label: string, value: string | null, color: string,
return [`**${label}**: ${value}`, ''];
}
// Helper: Get all observations for given sessions.
// Returns [] immediately (no DB round-trip) when no session ids are given.
function getObservations(db: SessionStore, sessionIds: string[]): Observation[] {
if (sessionIds.length === 0) return [];
// One "?" placeholder per session id, joined for the IN (...) clause below.
const placeholders = sessionIds.map(() => '?').join(',');
// NOTE(review): rows are cast to Observation[], but the SELECT list does not
// include discovery_tokens (the Observation interface declares it), so that
// field will be undefined on the returned objects — confirm callers tolerate it.
const observations = db.db.prepare(`
SELECT
id, sdk_session_id, type, title, subtitle, narrative,
facts, concepts, files_read, files_modified,
created_at, created_at_epoch
FROM observations
WHERE sdk_session_id IN (${placeholders})
ORDER BY created_at_epoch DESC
`).all(...sessionIds) as Observation[];
return observations;
}
/**
* Context Hook Main Logic
*/
@@ -187,7 +164,7 @@ async function contextHook(input?: SessionStartInput, useColors: boolean = false
const allObservations = db.db.prepare(`
SELECT
id, sdk_session_id, type, title, subtitle, narrative,
facts, concepts, files_read, files_modified,
facts, concepts, files_read, files_modified, discovery_tokens,
created_at, created_at_epoch
FROM observations
WHERE project = ?
@@ -239,25 +216,74 @@ async function contextHook(input?: SessionStartInput, useColors: boolean = false
if (timelineObs.length > 0) {
// Legend/Key
if (useColors) {
output.push(`${colors.dim}Legend: 🎯 session-request | 🔴 bugfix | 🟣 feature | 🔄 refactor | ✅ change | 🔵 discovery | 🧠 decision${colors.reset}`);
output.push('');
output.push(`${colors.dim}Legend: 🎯 session-request | 🔴 bugfix | 🟣 feature | 🔄 refactor | ✅ change | 🔵 discovery | ⚖️ decision${colors.reset}`);
} else {
output.push(`**Legend:** 🎯 session-request | 🔴 bugfix | 🟣 feature | 🔄 refactor | ✅ change | 🔵 discovery | 🧠 decision`);
output.push('');
output.push(`**Legend:** 🎯 session-request | 🔴 bugfix | 🟣 feature | 🔄 refactor | ✅ change | 🔵 discovery | ⚖️ decision`);
}
output.push('');
// Progressive Disclosure Usage Instructions
// Column Key
if (useColors) {
output.push(`${colors.dim}💡 Progressive Disclosure: This index shows WHAT exists (titles) and retrieval COST (token counts).${colors.reset}`);
output.push(`${colors.dim} → Use MCP search tools to fetch full observation details on-demand (Layer 2)${colors.reset}`);
output.push(`${colors.dim} → Prefer searching observations over re-reading code for past decisions and learnings${colors.reset}`);
output.push(`${colors.dim} → Critical types (🔴 bugfix, 🧠 decision) often worth fetching immediately${colors.reset}`);
output.push(`${colors.bright}💡 Column Key${colors.reset}`);
output.push(`${colors.dim} Read: Tokens to read this observation (cost to learn it now)${colors.reset}`);
output.push(`${colors.dim} Work: Tokens spent on work that produced this record (🔍 research, 🛠️ building, ⚖️ deciding)${colors.reset}`);
} else {
output.push(`💡 **Column Key**:`);
output.push(`- **Read**: Tokens to read this observation (cost to learn it now)`);
output.push(`- **Work**: Tokens spent on work that produced this record (🔍 research, 🛠️ building, ⚖️ deciding)`);
}
output.push('');
// Context Index Usage Instructions
if (useColors) {
output.push(`${colors.dim}💡 Context Index: This semantic index (titles, types, files, tokens) is usually sufficient to understand past work.${colors.reset}`);
output.push('');
output.push(`${colors.dim}When you need implementation details, rationale, or debugging context:${colors.reset}`);
output.push(`${colors.dim} - Use the mem-search skill to fetch full observations on-demand${colors.reset}`);
output.push(`${colors.dim} - Critical types (🔴 bugfix, ⚖️ decision) often need detailed fetching${colors.reset}`);
output.push(`${colors.dim} - Trust this index over re-reading code for past decisions and learnings${colors.reset}`);
} else {
output.push(`💡 **Context Index:** This semantic index (titles, types, files, tokens) is usually sufficient to understand past work.`);
output.push('');
output.push(`When you need implementation details, rationale, or debugging context:`);
output.push(`- Use the mem-search skill to fetch full observations on-demand`);
output.push(`- Critical types (🔴 bugfix, ⚖️ decision) often need detailed fetching`);
output.push(`- Trust this index over re-reading code for past decisions and learnings`);
}
output.push('');
// Section 1: Aggregate ROI Metrics
const totalObservations = observations.length;
const totalReadTokens = observations.reduce((sum, obs) => {
// Estimate read tokens from observation size
const obsSize = (obs.title?.length || 0) +
(obs.subtitle?.length || 0) +
(obs.narrative?.length || 0) +
JSON.stringify(obs.facts || []).length;
return sum + Math.ceil(obsSize / CHARS_PER_TOKEN_ESTIMATE);
}, 0);
const totalDiscoveryTokens = observations.reduce((sum, obs) => sum + (obs.discovery_tokens || 0), 0);
const savings = totalDiscoveryTokens - totalReadTokens;
const savingsPercent = totalDiscoveryTokens > 0
? Math.round((savings / totalDiscoveryTokens) * 100)
: 0;
// Display Context Economics section
if (useColors) {
output.push(`${colors.bright}${colors.cyan}📊 Context Economics${colors.reset}`);
output.push(`${colors.dim} Loading: ${totalObservations} observations (${totalReadTokens.toLocaleString()} tokens to read)${colors.reset}`);
output.push(`${colors.dim} Work investment: ${totalDiscoveryTokens.toLocaleString()} tokens spent on research, building, and decisions${colors.reset}`);
if (totalDiscoveryTokens > 0) {
output.push(`${colors.green} Your savings: ${savings.toLocaleString()} tokens (${savingsPercent}% reduction from reuse)${colors.reset}`);
}
output.push('');
} else {
output.push(`💡 **Progressive Disclosure:** This index shows WHAT exists (titles) and retrieval COST (token counts).`);
output.push(`- Use MCP search tools to fetch full observation details on-demand (Layer 2)`);
output.push(`- Prefer searching observations over re-reading code for past decisions and learnings`);
output.push(`- Critical types (🔴 bugfix, 🧠 decision) often worth fetching immediately`);
output.push(`📊 **Context Economics**:`);
output.push(`- Loading: ${totalObservations} observations (${totalReadTokens.toLocaleString()} tokens to read)`);
output.push(`- Work investment: ${totalDiscoveryTokens.toLocaleString()} tokens spent on research, building, and decisions`);
if (totalDiscoveryTokens > 0) {
output.push(`- Your savings: ${savings.toLocaleString()} tokens (${savingsPercent}% reduction from reuse)`);
}
output.push('');
}
@@ -380,8 +406,8 @@ async function contextHook(input?: SessionStartInput, useColors: boolean = false
// Table header (markdown only)
if (!useColors) {
output.push(`| ID | Time | T | Title | Tokens |`);
output.push(`|----|------|---|-------|--------|`);
output.push(`| ID | Time | T | Title | Read | Work |`);
output.push(`|----|------|---|-------|------|------|`);
}
currentFile = file;
@@ -389,10 +415,11 @@ async function contextHook(input?: SessionStartInput, useColors: boolean = false
lastTime = '';
}
// Render observation row
let icon = '•';
const time = formatTime(obs.created_at);
const title = obs.title || 'Untitled';
// Map observation type to emoji
// Map observation type to emoji icon
let icon = '•';
switch (obs.type) {
case 'bugfix':
icon = '🔴';
@@ -410,15 +437,40 @@ async function contextHook(input?: SessionStartInput, useColors: boolean = false
icon = '🔵';
break;
case 'decision':
icon = '🧠';
icon = '⚖️';
break;
default:
icon = '•';
}
const time = formatTime(obs.created_at);
const title = obs.title || 'Untitled';
const tokens = estimateTokens(obs.narrative);
// Section 2: Calculate read tokens (estimate from observation size)
const obsSize = (obs.title?.length || 0) +
(obs.subtitle?.length || 0) +
(obs.narrative?.length || 0) +
JSON.stringify(obs.facts || []).length;
const readTokens = Math.ceil(obsSize / CHARS_PER_TOKEN_ESTIMATE);
// Get discovery tokens (handle old observations without this field)
const discoveryTokens = obs.discovery_tokens || 0;
// Map observation type to work emoji
let workEmoji = '🔍'; // default to research/discovery
switch (obs.type) {
case 'discovery':
workEmoji = '🔍'; // research/exploration
break;
case 'change':
case 'feature':
case 'bugfix':
case 'refactor':
workEmoji = '🛠️'; // building/modifying
break;
case 'decision':
workEmoji = '⚖️'; // decision-making
break;
}
const discoveryDisplay = discoveryTokens > 0 ? `${workEmoji} ${discoveryTokens.toLocaleString()}` : '-';
const showTime = time !== lastTime;
const timeDisplay = showTime ? time : '';
@@ -426,10 +478,11 @@ async function contextHook(input?: SessionStartInput, useColors: boolean = false
if (useColors) {
const timePart = showTime ? `${colors.dim}${time}${colors.reset}` : ' '.repeat(time.length);
const tokensPart = tokens > 0 ? `${colors.dim}(~${tokens}t)${colors.reset}` : '';
output.push(` ${colors.dim}#${obs.id}${colors.reset} ${timePart} ${icon} ${title} ${tokensPart}`);
const readPart = readTokens > 0 ? `${colors.dim}(~${readTokens}t)${colors.reset}` : '';
const discoveryPart = discoveryTokens > 0 ? `${colors.dim}(${workEmoji} ${discoveryTokens.toLocaleString()}t)${colors.reset}` : '';
output.push(` ${colors.dim}#${obs.id}${colors.reset} ${timePart} ${icon} ${title} ${readPart} ${discoveryPart}`);
} else {
output.push(`| #${obs.id} | ${timeDisplay || '″'} | ${icon} | ${title} | ~${tokens} |`);
output.push(`| #${obs.id} | ${timeDisplay || '″'} | ${icon} | ${title} | ~${readTokens} | ${discoveryDisplay} |`);
}
}
}
@@ -456,11 +509,15 @@ async function contextHook(input?: SessionStartInput, useColors: boolean = false
output.push(...renderSummaryField('Next Steps', mostRecentSummary.next_steps, colors.magenta, useColors));
}
// Footer with MCP search instructions
if (useColors) {
output.push(`${colors.dim}Use claude-mem MCP search to access records with the given ID${colors.reset}`);
} else {
output.push(`*Use claude-mem MCP search to access records with the given ID*`);
// Footer with token savings message
if (totalDiscoveryTokens > 0 && savings > 0) {
const workTokensK = Math.round(totalDiscoveryTokens / 1000);
output.push('');
if (useColors) {
output.push(`${colors.dim}💰 Access ${workTokensK}k tokens of past research & decisions for just ${totalReadTokens.toLocaleString()}t. Use claude-mem search to access memories by ID instead of re-reading files.${colors.reset}`);
} else {
output.push(`💰 Access ${workTokensK}k tokens of past research & decisions for just ${totalReadTokens.toLocaleString()}t. Use claude-mem search to access memories by ID instead of re-reading files.`);
}
}
}
+1 -1
View File
@@ -52,7 +52,7 @@ try {
"\n\n📝 Claude-Mem Context Loaded\n" +
" ️ Note: This appears as stderr but is informational only\n\n" +
output +
"\n\n💬 New! Connect with Claude-Mem\nhttps://github.com/thedotmack/claude-mem/discussions/110\n" +
"\n\n💬 Feedback & Support\nhttps://github.com/thedotmack/claude-mem/discussions/110\n" +
`\n📺 Watch live in browser http://localhost:${port}/\n`
);
+6 -4
View File
@@ -264,6 +264,7 @@ export class SessionSearch {
const sql = `
SELECT
o.*,
o.discovery_tokens,
observations_fts.rank as rank
FROM observations o
JOIN observations_fts ON o.id = observations_fts.rowid
@@ -326,6 +327,7 @@ export class SessionSearch {
const sql = `
SELECT
s.*,
s.discovery_tokens,
session_summaries_fts.rank as rank
FROM session_summaries s
JOIN session_summaries_fts ON s.id = session_summaries_fts.rowid
@@ -368,7 +370,7 @@ export class SessionSearch {
const orderClause = this.buildOrderClause(orderBy, false);
const sql = `
SELECT o.*
SELECT o.*, o.discovery_tokens
FROM observations o
WHERE ${filterClause}
${orderClause}
@@ -396,7 +398,7 @@ export class SessionSearch {
const orderClause = this.buildOrderClause(orderBy, false);
const observationsSql = `
SELECT o.*
SELECT o.*, o.discovery_tokens
FROM observations o
WHERE ${filterClause}
${orderClause}
@@ -440,7 +442,7 @@ export class SessionSearch {
sessionParams.push(`%${filePath}%`, `%${filePath}%`);
const sessionsSql = `
SELECT s.*
SELECT s.*, s.discovery_tokens
FROM session_summaries s
WHERE ${baseConditions.join(' AND ')}
ORDER BY s.created_at_epoch DESC
@@ -470,7 +472,7 @@ export class SessionSearch {
const orderClause = this.buildOrderClause(orderBy, false);
const sql = `
SELECT o.*
SELECT o.*, o.discovery_tokens
FROM observations o
WHERE ${filterClause}
${orderClause}
+45 -6
View File
@@ -28,6 +28,7 @@ export class SessionStore {
this.addObservationHierarchicalFields();
this.makeObservationsTextNullable();
this.createUserPromptsTable();
this.ensureDiscoveryTokensColumn();
}
/**
@@ -492,6 +493,40 @@ export class SessionStore {
}
}
/**
* Ensure discovery_tokens column exists (migration 7)
*/
private ensureDiscoveryTokensColumn(): void {
try {
// Check if migration already applied
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(7) as {version: number} | undefined;
if (applied) return;
// Check if discovery_tokens column exists in observations table
const observationsInfo = this.db.pragma('table_info(observations)');
const obsHasDiscoveryTokens = (observationsInfo as any[]).some((col: any) => col.name === 'discovery_tokens');
if (!obsHasDiscoveryTokens) {
this.db.exec('ALTER TABLE observations ADD COLUMN discovery_tokens INTEGER DEFAULT 0');
console.error('[SessionStore] Added discovery_tokens column to observations table');
}
// Check if discovery_tokens column exists in session_summaries table
const summariesInfo = this.db.pragma('table_info(session_summaries)');
const sumHasDiscoveryTokens = (summariesInfo as any[]).some((col: any) => col.name === 'discovery_tokens');
if (!sumHasDiscoveryTokens) {
this.db.exec('ALTER TABLE session_summaries ADD COLUMN discovery_tokens INTEGER DEFAULT 0');
console.error('[SessionStore] Added discovery_tokens column to session_summaries table');
}
// Record migration
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(7, new Date().toISOString());
} catch (error: any) {
console.error('[SessionStore] Discovery tokens migration error:', error.message);
}
}
/**
* Get recent session summaries for a project
*/
@@ -1074,7 +1109,8 @@ export class SessionStore {
files_read: string[];
files_modified: string[];
},
promptNumber?: number
promptNumber?: number,
discoveryTokens: number = 0
): { id: number; createdAtEpoch: number } {
const now = new Date();
const nowEpoch = now.getTime();
@@ -1105,8 +1141,8 @@ export class SessionStore {
const stmt = this.db.prepare(`
INSERT INTO observations
(sdk_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
files_read, files_modified, prompt_number, discovery_tokens, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
const result = stmt.run(
@@ -1121,6 +1157,7 @@ export class SessionStore {
JSON.stringify(observation.files_read),
JSON.stringify(observation.files_modified),
promptNumber || null,
discoveryTokens,
now.toISOString(),
nowEpoch
);
@@ -1146,7 +1183,8 @@ export class SessionStore {
next_steps: string;
notes: string | null;
},
promptNumber?: number
promptNumber?: number,
discoveryTokens: number = 0
): { id: number; createdAtEpoch: number } {
const now = new Date();
const nowEpoch = now.getTime();
@@ -1177,8 +1215,8 @@ export class SessionStore {
const stmt = this.db.prepare(`
INSERT INTO session_summaries
(sdk_session_id, project, request, investigated, learned, completed,
next_steps, notes, prompt_number, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
next_steps, notes, prompt_number, discovery_tokens, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
const result = stmt.run(
@@ -1191,6 +1229,7 @@ export class SessionStore {
summary.next_steps,
summary.notes,
promptNumber || null,
discoveryTokens,
now.toISOString(),
nowEpoch
);
+26 -1
View File
@@ -471,6 +471,30 @@ export const migration006: Migration = {
}
};
/**
 * Migration 007 - Add discovery_tokens column for ROI metrics
 * Tracks token cost of discovering/creating each observation and summary.
 * DEFAULT 0 ensures pre-existing rows read as "no recorded discovery cost"
 * rather than NULL. Must stay in sync with SessionStore.ensureDiscoveryTokensColumn(),
 * which applies the same change under version 7.
 */
export const migration007: Migration = {
version: 7,
up: (db: Database) => {
// Add discovery_tokens to observations table
db.run(`ALTER TABLE observations ADD COLUMN discovery_tokens INTEGER DEFAULT 0`);
// Add discovery_tokens to session_summaries table
db.run(`ALTER TABLE session_summaries ADD COLUMN discovery_tokens INTEGER DEFAULT 0`);
console.log('✅ Added discovery_tokens columns for ROI tracking');
},
// Intentional no-op rollback: see warnings below for the manual procedure.
down: (db: Database) => {
// Note: SQLite doesn't support DROP COLUMN in all versions
// In production, would need to recreate tables without these columns
console.log('⚠️ Warning: SQLite ALTER TABLE DROP COLUMN not fully supported');
console.log('⚠️ To rollback, manually recreate the observations and session_summaries tables');
}
};
/**
* All migrations in order
*/
@@ -480,5 +504,6 @@ export const migrations: Migration[] = [
migration003,
migration004,
migration005,
migration006
migration006,
migration007
];
+2
View File
@@ -215,6 +215,7 @@ export interface ObservationRow {
files_read: string | null; // JSON array
files_modified: string | null; // JSON array
prompt_number: number | null;
discovery_tokens: number; // ROI metrics: tokens spent discovering this observation
created_at: string;
created_at_epoch: number;
}
@@ -232,6 +233,7 @@ export interface SessionSummaryRow {
files_edited: string | null; // JSON array
notes: string | null;
prompt_number: number | null;
discovery_tokens: number; // ROI metrics: cumulative tokens spent in this session
created_at: string;
created_at_epoch: number;
}
+8 -2
View File
@@ -36,6 +36,7 @@ interface StoredObservation {
files_read: string | null; // JSON
files_modified: string | null; // JSON
prompt_number: number;
discovery_tokens: number; // ROI metrics
created_at: string;
created_at_epoch: number;
}
@@ -51,6 +52,7 @@ interface StoredSummary {
next_steps: string | null;
notes: string | null;
prompt_number: number;
discovery_tokens: number; // ROI metrics
created_at: string;
created_at_epoch: number;
}
@@ -345,7 +347,8 @@ export class ChromaSync {
project: string,
obs: ParsedObservation,
promptNumber: number,
createdAtEpoch: number
createdAtEpoch: number,
discoveryTokens: number = 0
): Promise<void> {
// Convert ParsedObservation to StoredObservation format
const stored: StoredObservation = {
@@ -362,6 +365,7 @@ export class ChromaSync {
files_read: JSON.stringify(obs.files_read),
files_modified: JSON.stringify(obs.files_modified),
prompt_number: promptNumber,
discovery_tokens: discoveryTokens,
created_at: new Date(createdAtEpoch * 1000).toISOString(),
created_at_epoch: createdAtEpoch
};
@@ -387,7 +391,8 @@ export class ChromaSync {
project: string,
summary: ParsedSummary,
promptNumber: number,
createdAtEpoch: number
createdAtEpoch: number,
discoveryTokens: number = 0
): Promise<void> {
// Convert ParsedSummary to StoredSummary format
const stored: StoredSummary = {
@@ -401,6 +406,7 @@ export class ChromaSync {
next_steps: summary.next_steps,
notes: summary.notes,
prompt_number: promptNumber,
discovery_tokens: discoveryTokens,
created_at: new Date(createdAtEpoch * 1000).toISOString(),
created_at_epoch: createdAtEpoch
};
+2
View File
@@ -19,6 +19,8 @@ export interface ActiveSession {
generatorPromise: Promise<void> | null;
lastPromptNumber: number;
startTime: number;
cumulativeInputTokens: number; // Track input tokens for discovery cost
cumulativeOutputTokens: number; // Track output tokens for discovery cost
}
export interface PendingMessage {
+40 -7
View File
@@ -85,6 +85,34 @@ export class SDKAgent {
const responseSize = textContent.length;
// Capture token state BEFORE updating (for delta calculation)
const tokensBeforeResponse = session.cumulativeInputTokens + session.cumulativeOutputTokens;
// Extract and track token usage
const usage = message.message.usage;
if (usage) {
session.cumulativeInputTokens += usage.input_tokens || 0;
session.cumulativeOutputTokens += usage.output_tokens || 0;
// Cache creation counts as discovery, cache read doesn't
if (usage.cache_creation_input_tokens) {
session.cumulativeInputTokens += usage.cache_creation_input_tokens;
}
logger.debug('SDK', 'Token usage captured', {
sessionId: session.sessionDbId,
inputTokens: usage.input_tokens,
outputTokens: usage.output_tokens,
cacheCreation: usage.cache_creation_input_tokens || 0,
cacheRead: usage.cache_read_input_tokens || 0,
cumulativeInput: session.cumulativeInputTokens,
cumulativeOutput: session.cumulativeOutputTokens
});
}
// Calculate discovery tokens (delta for this response only)
const discoveryTokens = (session.cumulativeInputTokens + session.cumulativeOutputTokens) - tokensBeforeResponse;
// Only log non-empty responses (filter out noise)
if (responseSize > 0) {
const truncatedResponse = responseSize > 100
@@ -95,8 +123,8 @@ export class SDKAgent {
promptNumber: session.lastPromptNumber
}, truncatedResponse);
// Parse and process response
await this.processSDKResponse(session, textContent, worker);
// Parse and process response with discovery token delta
await this.processSDKResponse(session, textContent, worker, discoveryTokens);
}
}
@@ -218,8 +246,9 @@ export class SDKAgent {
/**
* Process SDK response text (parse XML, save to database, sync to Chroma)
* @param discoveryTokens - Token cost for discovering this response (delta, not cumulative)
*/
private async processSDKResponse(session: ActiveSession, text: string, worker?: any): Promise<void> {
private async processSDKResponse(session: ActiveSession, text: string, worker: any | undefined, discoveryTokens: number): Promise<void> {
// Parse observations
const observations = parseObservations(text, session.claudeSessionId);
@@ -229,7 +258,8 @@ export class SDKAgent {
session.claudeSessionId,
session.project,
obs,
session.lastPromptNumber
session.lastPromptNumber,
discoveryTokens
);
// Log observation details
@@ -253,7 +283,8 @@ export class SDKAgent {
session.project,
obs,
session.lastPromptNumber,
createdAtEpoch
createdAtEpoch,
discoveryTokens
).then(() => {
const chromaDuration = Date.now() - chromaStart;
logger.debug('CHROMA', 'Observation synced', {
@@ -305,7 +336,8 @@ export class SDKAgent {
session.claudeSessionId,
session.project,
summary,
session.lastPromptNumber
session.lastPromptNumber,
discoveryTokens
);
// Log summary details
@@ -326,7 +358,8 @@ export class SDKAgent {
session.project,
summary,
session.lastPromptNumber,
createdAtEpoch
createdAtEpoch,
discoveryTokens
).then(() => {
const chromaDuration = Date.now() - chromaStart;
logger.debug('CHROMA', 'Summary synced', {
+3 -1
View File
@@ -89,7 +89,9 @@ export class SessionManager {
abortController: new AbortController(),
generatorPromise: null,
lastPromptNumber: promptNumber || this.dbManager.getSessionStore().getPromptCounter(sessionDbId),
startTime: Date.now()
startTime: Date.now(),
cumulativeInputTokens: 0,
cumulativeOutputTokens: 0
};
this.sessions.set(sessionDbId, session);