Merge branch 'pr-1602' into integration/validation-batch

This commit is contained in:
Alex Newman
2026-04-06 14:19:02 -07:00
6 changed files with 73 additions and 22 deletions
+21 -12
View File
@@ -1508,7 +1508,8 @@ export class SessionStore {
}, },
promptNumber?: number, promptNumber?: number,
discoveryTokens: number = 0, discoveryTokens: number = 0,
overrideTimestampEpoch?: number overrideTimestampEpoch?: number,
generatedByModel?: string
): { id: number; createdAtEpoch: number } { ): { id: number; createdAtEpoch: number } {
// Use override timestamp if provided (for processing backlog messages with original timestamps) // Use override timestamp if provided (for processing backlog messages with original timestamps)
const timestampEpoch = overrideTimestampEpoch ?? Date.now(); const timestampEpoch = overrideTimestampEpoch ?? Date.now();
@@ -1524,8 +1525,9 @@ export class SessionStore {
const stmt = this.db.prepare(` const stmt = this.db.prepare(`
INSERT INTO observations INSERT INTO observations
(memory_session_id, project, type, title, subtitle, facts, narrative, concepts, (memory_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch) files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch,
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) generated_by_model)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`); `);
const result = stmt.run( const result = stmt.run(
@@ -1543,7 +1545,8 @@ export class SessionStore {
discoveryTokens, discoveryTokens,
contentHash, contentHash,
timestampIso, timestampIso,
timestampEpoch timestampEpoch,
generatedByModel || null
); );
return { return {
@@ -1642,7 +1645,8 @@ export class SessionStore {
} | null, } | null,
promptNumber?: number, promptNumber?: number,
discoveryTokens: number = 0, discoveryTokens: number = 0,
overrideTimestampEpoch?: number overrideTimestampEpoch?: number,
generatedByModel?: string
): { observationIds: number[]; summaryId: number | null; createdAtEpoch: number } { ): { observationIds: number[]; summaryId: number | null; createdAtEpoch: number } {
// Use override timestamp if provided // Use override timestamp if provided
const timestampEpoch = overrideTimestampEpoch ?? Date.now(); const timestampEpoch = overrideTimestampEpoch ?? Date.now();
@@ -1656,8 +1660,9 @@ export class SessionStore {
const obsStmt = this.db.prepare(` const obsStmt = this.db.prepare(`
INSERT INTO observations INSERT INTO observations
(memory_session_id, project, type, title, subtitle, facts, narrative, concepts, (memory_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch) files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch,
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) generated_by_model)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`); `);
for (const observation of observations) { for (const observation of observations) {
@@ -1684,7 +1689,8 @@ export class SessionStore {
discoveryTokens, discoveryTokens,
contentHash, contentHash,
timestampIso, timestampIso,
timestampEpoch timestampEpoch,
generatedByModel || null
); );
observationIds.push(Number(result.lastInsertRowid)); observationIds.push(Number(result.lastInsertRowid));
} }
@@ -1771,7 +1777,8 @@ export class SessionStore {
_pendingStore: PendingMessageStore, _pendingStore: PendingMessageStore,
promptNumber?: number, promptNumber?: number,
discoveryTokens: number = 0, discoveryTokens: number = 0,
overrideTimestampEpoch?: number overrideTimestampEpoch?: number,
generatedByModel?: string
): { observationIds: number[]; summaryId?: number; createdAtEpoch: number } { ): { observationIds: number[]; summaryId?: number; createdAtEpoch: number } {
// Use override timestamp if provided // Use override timestamp if provided
const timestampEpoch = overrideTimestampEpoch ?? Date.now(); const timestampEpoch = overrideTimestampEpoch ?? Date.now();
@@ -1785,8 +1792,9 @@ export class SessionStore {
const obsStmt = this.db.prepare(` const obsStmt = this.db.prepare(`
INSERT INTO observations INSERT INTO observations
(memory_session_id, project, type, title, subtitle, facts, narrative, concepts, (memory_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch) files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch,
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) generated_by_model)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`); `);
for (const observation of observations) { for (const observation of observations) {
@@ -1813,7 +1821,8 @@ export class SessionStore {
discoveryTokens, discoveryTokens,
contentHash, contentHash,
timestampIso, timestampIso,
timestampEpoch timestampEpoch,
generatedByModel || null
); );
observationIds.push(Number(result.lastInsertRowid)); observationIds.push(Number(result.lastInsertRowid));
} }
+33 -1
View File
@@ -541,6 +541,37 @@ export const migration008: Migration = {
} }
}; };
/**
 * Migration 009: Add missing columns to observations table
 *
 * The generated_by_model column tracks which model generated each observation
 * (required for model selection optimization via Thompson Sampling).
 * The relevance_count column tracks how many times an observation was reused
 * (incremented by the feedback recording pipeline).
 *
 * Both columns may already exist in databases created by the compiled binary
 * (v10.6.3) but are missing from the migration source. This migration
 * therefore checks PRAGMA table_info first and only adds the columns that
 * are absent, making it safe (idempotent) on both old and new databases.
 */
export const migration009: Migration = {
  version: 26,
  up: (db: Database) => {
    // PRAGMA table_info yields one row per column; only the `name` field is
    // needed here, so narrow the rows to that shape instead of `any[]`.
    const columns = db
      .prepare('PRAGMA table_info(observations)')
      .all() as Array<{ name: string }>;
    const hasColumn = (name: string): boolean =>
      columns.some((c) => c.name === name);

    if (!hasColumn('generated_by_model')) {
      db.run('ALTER TABLE observations ADD COLUMN generated_by_model TEXT');
    }
    if (!hasColumn('relevance_count')) {
      db.run('ALTER TABLE observations ADD COLUMN relevance_count INTEGER DEFAULT 0');
    }
  },
  down: (_db: Database) => {
    // SQLite does not support DROP COLUMN in older versions; no-op
  }
};
/** /**
* All migrations in order * All migrations in order
*/ */
@@ -552,5 +583,6 @@ export const migrations: Migration[] = [
migration005, migration005,
migration006, migration006,
migration007, migration007,
migration008 migration008,
migration009
]; ];
+7 -3
View File
@@ -175,7 +175,9 @@ export class GeminiAgent {
worker, worker,
tokensUsed, tokensUsed,
null, null,
'Gemini' 'Gemini',
undefined,
model
); );
} else { } else {
logger.error('SDK', 'Empty Gemini init response - session may lack context', { logger.error('SDK', 'Empty Gemini init response - session may lack context', {
@@ -248,7 +250,8 @@ export class GeminiAgent {
tokensUsed, tokensUsed,
originalTimestamp, originalTimestamp,
'Gemini', 'Gemini',
lastCwd lastCwd,
model
); );
} else { } else {
logger.warn('SDK', 'Empty Gemini observation response, skipping processing to preserve message', { logger.warn('SDK', 'Empty Gemini observation response, skipping processing to preserve message', {
@@ -298,7 +301,8 @@ export class GeminiAgent {
tokensUsed, tokensUsed,
originalTimestamp, originalTimestamp,
'Gemini', 'Gemini',
lastCwd lastCwd,
model
); );
} else { } else {
logger.warn('SDK', 'Empty Gemini summary response, skipping processing to preserve message', { logger.warn('SDK', 'Empty Gemini summary response, skipping processing to preserve message', {
+6 -3
View File
@@ -131,7 +131,8 @@ export class OpenRouterAgent {
tokensUsed, tokensUsed,
null, null,
'OpenRouter', 'OpenRouter',
undefined // No lastCwd yet - before message processing undefined, // No lastCwd yet - before message processing
model
); );
} else { } else {
logger.error('SDK', 'Empty OpenRouter init response - session may lack context', { logger.error('SDK', 'Empty OpenRouter init response - session may lack context', {
@@ -202,7 +203,8 @@ export class OpenRouterAgent {
tokensUsed, tokensUsed,
originalTimestamp, originalTimestamp,
'OpenRouter', 'OpenRouter',
lastCwd lastCwd,
model
); );
} else if (message.type === 'summarize') { } else if (message.type === 'summarize') {
@@ -244,7 +246,8 @@ export class OpenRouterAgent {
tokensUsed, tokensUsed,
originalTimestamp, originalTimestamp,
'OpenRouter', 'OpenRouter',
lastCwd lastCwd,
model
); );
} }
} }
+2 -1
View File
@@ -270,7 +270,8 @@ export class SDKAgent {
discoveryTokens, discoveryTokens,
originalTimestamp, originalTimestamp,
'SDK', 'SDK',
cwdTracker.lastCwd cwdTracker.lastCwd,
modelId
); );
} }
@@ -54,7 +54,8 @@ export async function processAgentResponse(
discoveryTokens: number, discoveryTokens: number,
originalTimestamp: number | null, originalTimestamp: number | null,
agentName: string, agentName: string,
projectRoot?: string projectRoot?: string,
modelId?: string
): Promise<void> { ): Promise<void> {
// Track generator activity for stale detection (Issue #1099) // Track generator activity for stale detection (Issue #1099)
session.lastGeneratorActivity = Date.now(); session.lastGeneratorActivity = Date.now();
@@ -115,7 +116,8 @@ export async function processAgentResponse(
summaryForStore, summaryForStore,
session.lastPromptNumber, session.lastPromptNumber,
discoveryTokens, discoveryTokens,
originalTimestamp ?? undefined originalTimestamp ?? undefined,
modelId
); );
// Log storage result with IDs for end-to-end traceability // Log storage result with IDs for end-to-end traceability