feat: disable subagent summaries, label subagent observations (#2073)

* feat: disable subagent summaries and label subagent observations

Detect Claude Code subagent hook context via `agent_id`/`agent_type` on
stdin, short-circuit the Stop-hook summary path when present, and thread
the subagent identity end-to-end onto observation rows (new `agent_type`
and `agent_id` columns, migration 010 at version 27). Main-session rows
remain NULL; content-hash dedup is unchanged.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>

* fix: address PR #2073 review feedback

- Narrow summarize subagent guard to agentId only so --agent-started
  main sessions still own their summary (agentType alone is main-session).
- Remove now-dead agentId/agentType spreads from the summarize POST body.
- Always overwrite pendingAgentId/pendingAgentType in SDK/Gemini/OpenRouter
  agents (clears stale subagent identity on main-session messages after
  a subagent message in the same batch).
- Add idx_observations_agent_id index in migration 010 + the mirror
  migration in SessionStore + the runner.
- Replace console.log in migration010 with logger.debug.
- Update summarize test: agentType alone no longer short-circuits.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>

* fix: address CodeRabbit + claude-review iteration 4 feedback

- SessionRoutes.handleSummarizeByClaudeId: narrow worker-side guard to
  agentId only (matches hook-side). agentType alone = --agent main
  session, which still owns its summary.
- ResponseProcessor: wrap storeObservations in try/finally so
  pendingAgentId/Type clear even if storage throws. Prevents stale
  subagent identity from leaking into the next batch on error.
- SessionStore.importObservation + bulk.importObservation: persist
  agent_type/agent_id so backup/import round-trips preserve subagent
  attribution.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>

* polish: claude-review iteration 5 cleanup

- Use ?? not || for nullable subagent fields in PendingMessageStore
  (prevents treating empty string as null).
- Simplify observation.ts body spread — include fields unconditionally;
  JSON.stringify drops undefined anyway.
- Narrow any[] to Array<{ name: string }> in migration010 column checks.
- Add trailing newline to migrations.ts.
- Document in observations/store.ts why the dedup hash intentionally
  excludes agent fields.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>

* polish: claude-review iteration 7 feedback

- claude-code adapter: add 128-char safety cap on agent_id/agent_type
  so a malformed Claude Code payload cannot balloon DB rows. Empty
  strings are now also treated as absent.
- migration010: state-aware debug log lists only columns actually
  added; idempotent re-runs log "already present; ensured indexes".
- Add 3 adapter tests covering the length cap boundary and empty-string
  rejection.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>

* perf: skip subagent summary before worker bootstrap

Move the agentId short-circuit above ensureWorkerRunning() so a Stop
hook fired inside a subagent does not trigger worker startup just to
return early. Addresses CodeRabbit nit on summarize.ts:36-47.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>

---------

Co-authored-by: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Alex Newman
2026-04-19 14:58:01 -07:00
committed by GitHub
parent 306a0b1de9
commit 789efe4234
27 changed files with 1381 additions and 361 deletions
+12 -4
View File
@@ -24,6 +24,9 @@ export interface PersistentPendingMessage {
created_at_epoch: number;
started_processing_at_epoch: number | null;
completed_at_epoch: number | null;
// Claude Code subagent identity — NULL for main-session messages.
agent_type: string | null;
agent_id: string | null;
}
/**
@@ -64,8 +67,9 @@ export class PendingMessageStore {
session_db_id, content_session_id, message_type,
tool_name, tool_input, tool_response, cwd,
last_assistant_message,
prompt_number, status, retry_count, created_at_epoch
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 'pending', 0, ?)
prompt_number, status, retry_count, created_at_epoch,
agent_type, agent_id
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 'pending', 0, ?, ?, ?)
`);
const result = stmt.run(
@@ -78,7 +82,9 @@ export class PendingMessageStore {
message.cwd || null,
message.last_assistant_message || null,
message.prompt_number || null,
now
now,
message.agentType ?? null,
message.agentId ?? null
);
return result.lastInsertRowid as number;
@@ -496,7 +502,9 @@ export class PendingMessageStore {
tool_response: persistent.tool_response ? JSON.parse(persistent.tool_response) : undefined,
prompt_number: persistent.prompt_number || undefined,
cwd: persistent.cwd || undefined,
last_assistant_message: persistent.last_assistant_message || undefined
last_assistant_message: persistent.last_assistant_message || undefined,
agentId: persistent.agent_id ?? undefined,
agentType: persistent.agent_type ?? undefined
};
}
}
+64 -8
View File
@@ -66,6 +66,7 @@ export class SessionStore {
this.addSessionPlatformSourceColumn();
this.addObservationModelColumns();
this.ensureMergedIntoProjectColumns();
this.addObservationSubagentColumns();
}
/**
@@ -975,6 +976,44 @@ export class SessionStore {
);
}
/**
 * Schema migration 27: add the Claude Code subagent identity columns
 * (agent_type, agent_id) to observations and pending_messages.
 * Kept in lockstep with MigrationRunner.addObservationSubagentColumns so
 * bundled artifacts that embed SessionStore directly (e.g.
 * context-generator.cjs) end up with an identical schema.
 */
private addObservationSubagentColumns(): void {
  const versionRow = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(27) as SchemaVersion | undefined;

  // True when `column` already exists in the given PRAGMA table_info result.
  const hasColumn = (cols: TableColumnInfo[], column: string): boolean =>
    cols.some(col => col.name === column);

  const observationCols = this.db.query('PRAGMA table_info(observations)').all() as TableColumnInfo[];
  if (!hasColumn(observationCols, 'agent_type')) {
    this.db.run('ALTER TABLE observations ADD COLUMN agent_type TEXT');
  }
  if (!hasColumn(observationCols, 'agent_id')) {
    this.db.run('ALTER TABLE observations ADD COLUMN agent_id TEXT');
  }
  // CREATE INDEX IF NOT EXISTS is idempotent, so always ensure both indexes.
  this.db.run('CREATE INDEX IF NOT EXISTS idx_observations_agent_type ON observations(agent_type)');
  this.db.run('CREATE INDEX IF NOT EXISTS idx_observations_agent_id ON observations(agent_id)');

  // An empty PRAGMA result means pending_messages does not exist in this
  // database; in that case skip the queue-side ALTERs entirely.
  const queueCols = this.db.query('PRAGMA table_info(pending_messages)').all() as TableColumnInfo[];
  if (queueCols.length > 0) {
    if (!hasColumn(queueCols, 'agent_type')) {
      this.db.run('ALTER TABLE pending_messages ADD COLUMN agent_type TEXT');
    }
    if (!hasColumn(queueCols, 'agent_id')) {
      this.db.run('ALTER TABLE pending_messages ADD COLUMN agent_id TEXT');
    }
  }

  // Record version 27 once; OR IGNORE guards against a racing writer.
  if (!versionRow) {
    this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(27, new Date().toISOString());
  }
}
/**
* Update the memory session ID for a session
* Called by SDKAgent when it captures the session ID from the first SDK message
@@ -1734,6 +1773,8 @@ export class SessionStore {
concepts: string[];
files_read: string[];
files_modified: string[];
agent_type?: string | null;
agent_id?: string | null;
},
promptNumber?: number,
discoveryTokens: number = 0,
@@ -1754,9 +1795,9 @@ export class SessionStore {
const stmt = this.db.prepare(`
INSERT INTO observations
(memory_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch,
files_read, files_modified, prompt_number, discovery_tokens, agent_type, agent_id, content_hash, created_at, created_at_epoch,
generated_by_model)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
const result = stmt.run(
@@ -1772,6 +1813,8 @@ export class SessionStore {
JSON.stringify(observation.files_modified),
promptNumber || null,
discoveryTokens,
observation.agent_type ?? null,
observation.agent_id ?? null,
contentHash,
timestampIso,
timestampEpoch,
@@ -1863,6 +1906,8 @@ export class SessionStore {
concepts: string[];
files_read: string[];
files_modified: string[];
agent_type?: string | null;
agent_id?: string | null;
}>,
summary: {
request: string;
@@ -1889,9 +1934,9 @@ export class SessionStore {
const obsStmt = this.db.prepare(`
INSERT INTO observations
(memory_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch,
files_read, files_modified, prompt_number, discovery_tokens, agent_type, agent_id, content_hash, created_at, created_at_epoch,
generated_by_model)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
for (const observation of observations) {
@@ -1916,6 +1961,8 @@ export class SessionStore {
JSON.stringify(observation.files_modified),
promptNumber || null,
discoveryTokens,
observation.agent_type ?? null,
observation.agent_id ?? null,
contentHash,
timestampIso,
timestampEpoch,
@@ -1993,6 +2040,8 @@ export class SessionStore {
concepts: string[];
files_read: string[];
files_modified: string[];
agent_type?: string | null;
agent_id?: string | null;
}>,
summary: {
request: string;
@@ -2021,9 +2070,9 @@ export class SessionStore {
const obsStmt = this.db.prepare(`
INSERT INTO observations
(memory_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch,
files_read, files_modified, prompt_number, discovery_tokens, agent_type, agent_id, content_hash, created_at, created_at_epoch,
generated_by_model)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
for (const observation of observations) {
@@ -2048,6 +2097,8 @@ export class SessionStore {
JSON.stringify(observation.files_modified),
promptNumber || null,
discoveryTokens,
observation.agent_type ?? null,
observation.agent_id ?? null,
contentHash,
timestampIso,
timestampEpoch,
@@ -2608,6 +2659,8 @@ export class SessionStore {
discovery_tokens: number;
created_at: string;
created_at_epoch: number;
agent_type?: string | null;
agent_id?: string | null;
}): { imported: boolean; id: number } {
// Check if observation already exists
const existing = this.db.prepare(`
@@ -2623,8 +2676,9 @@ export class SessionStore {
INSERT INTO observations (
memory_session_id, project, text, type, title, subtitle,
facts, narrative, concepts, files_read, files_modified,
prompt_number, discovery_tokens, created_at, created_at_epoch
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
prompt_number, discovery_tokens, agent_type, agent_id,
created_at, created_at_epoch
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
const result = stmt.run(
@@ -2641,6 +2695,8 @@ export class SessionStore {
obs.files_modified,
obs.prompt_number,
obs.discovery_tokens || 0,
obs.agent_type ?? null,
obs.agent_id ?? null,
obs.created_at,
obs.created_at_epoch
);
+7 -2
View File
@@ -141,6 +141,8 @@ export function importObservation(
discovery_tokens: number;
created_at: string;
created_at_epoch: number;
agent_type?: string | null;
agent_id?: string | null;
}
): ImportResult {
// Check if observation already exists
@@ -163,8 +165,9 @@ export function importObservation(
INSERT INTO observations (
memory_session_id, project, text, type, title, subtitle,
facts, narrative, concepts, files_read, files_modified,
prompt_number, discovery_tokens, created_at, created_at_epoch
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
prompt_number, discovery_tokens, agent_type, agent_id,
created_at, created_at_epoch
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
const result = stmt.run(
@@ -181,6 +184,8 @@ export function importObservation(
obs.files_modified,
obs.prompt_number,
obs.discovery_tokens || 0,
obs.agent_type ?? null,
obs.agent_id ?? null,
obs.created_at,
obs.created_at_epoch
);
+59 -2
View File
@@ -1,5 +1,6 @@
import { Database } from 'bun:sqlite';
import { Migration } from './Database.js';
import { logger } from '../../utils/logger.js';
// Re-export MigrationRunner for SessionStore migration extraction
export { MigrationRunner } from './migrations/runner.js';
@@ -572,6 +573,61 @@ export const migration009: Migration = {
}
};
/**
 * Migration 010 (schema version 27): label observations — and their queue
 * rows — with the originating Claude Code subagent identity.
 *
 * Hooks fired inside a subagent carry agent_id and agent_type on the stdin
 * payload; that identity flows hook → worker → pending_messages → SDK storage
 * so each observation row can be attributed to the subagent that produced it.
 * Rows created by the main session leave both columns NULL.
 */
export const migration010: Migration = {
  version: 27,
  up: (db: Database) => {
    const createdColumns: string[] = [];

    // Set of column names currently on `table` (empty when the table itself
    // does not exist — PRAGMA table_info returns no rows for missing tables).
    const columnNames = (table: string): Set<string> => {
      const rows = db.prepare(`PRAGMA table_info(${table})`).all() as Array<{ name: string }>;
      return new Set(rows.map(row => row.name));
    };

    const obsNames = columnNames('observations');
    if (!obsNames.has('agent_type')) {
      db.run('ALTER TABLE observations ADD COLUMN agent_type TEXT');
      createdColumns.push('observations.agent_type');
    }
    if (!obsNames.has('agent_id')) {
      db.run('ALTER TABLE observations ADD COLUMN agent_id TEXT');
      createdColumns.push('observations.agent_id');
    }
    // Index creation is idempotent, so ensure both exist on every run.
    db.run('CREATE INDEX IF NOT EXISTS idx_observations_agent_type ON observations(agent_type)');
    db.run('CREATE INDEX IF NOT EXISTS idx_observations_agent_id ON observations(agent_id)');

    // Mirror the same fields onto the pending_messages queue so the label
    // survives worker restarts between enqueue and SDK-agent processing.
    const queueNames = columnNames('pending_messages');
    if (queueNames.size > 0) {
      if (!queueNames.has('agent_type')) {
        db.run('ALTER TABLE pending_messages ADD COLUMN agent_type TEXT');
        createdColumns.push('pending_messages.agent_type');
      }
      if (!queueNames.has('agent_id')) {
        db.run('ALTER TABLE pending_messages ADD COLUMN agent_id TEXT');
        createdColumns.push('pending_messages.agent_id');
      }
    }

    // State-aware summary: list only what this run actually added.
    const summary = createdColumns.length > 0
      ? `[migration010] Added columns: ${createdColumns.join(', ')}`
      : '[migration010] Subagent identity columns already present; ensured indexes';
    logger.debug('DB', summary);
  },
  down: (_db: Database) => {
    // Intentional no-op: SQLite DROP COLUMN support is not reliable here.
  }
};
/**
* All migrations in order
*/
@@ -584,5 +640,6 @@ export const migrations: Migration[] = [
migration006,
migration007,
migration008,
migration009
];
migration009,
migration010
];
+48
View File
@@ -38,6 +38,7 @@ export class MigrationRunner {
this.createObservationFeedbackTable();
this.addSessionPlatformSourceColumn();
this.ensureMergedIntoProjectColumns();
this.addObservationSubagentColumns();
}
/**
@@ -952,4 +953,51 @@ export class MigrationRunner {
'CREATE INDEX IF NOT EXISTS idx_summaries_merged_into ON session_summaries(merged_into_project)'
);
}
/**
* Add agent_type and agent_id columns to observations and pending_messages (migration 27).
*
* Labels observation rows with the originating Claude Code subagent identity so
* downstream queries can distinguish main-session work from subagent work.
* Main-session rows keep NULL for both columns.
*
* Also threads the same columns through pending_messages so the label survives
* between enqueue (hook) and SDK-agent processing (which re-inserts into observations).
*/
private addObservationSubagentColumns(): void {
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(27) as SchemaVersion | undefined;
const obsCols = this.db.query('PRAGMA table_info(observations)').all() as TableColumnInfo[];
const obsHasAgentType = obsCols.some(c => c.name === 'agent_type');
const obsHasAgentId = obsCols.some(c => c.name === 'agent_id');
if (!obsHasAgentType) {
this.db.run('ALTER TABLE observations ADD COLUMN agent_type TEXT');
logger.debug('DB', 'Added agent_type column to observations table');
}
if (!obsHasAgentId) {
this.db.run('ALTER TABLE observations ADD COLUMN agent_id TEXT');
logger.debug('DB', 'Added agent_id column to observations table');
}
this.db.run('CREATE INDEX IF NOT EXISTS idx_observations_agent_type ON observations(agent_type)');
this.db.run('CREATE INDEX IF NOT EXISTS idx_observations_agent_id ON observations(agent_id)');
const pendingCols = this.db.query('PRAGMA table_info(pending_messages)').all() as TableColumnInfo[];
if (pendingCols.length > 0) {
const pendingHasAgentType = pendingCols.some(c => c.name === 'agent_type');
const pendingHasAgentId = pendingCols.some(c => c.name === 'agent_id');
if (!pendingHasAgentType) {
this.db.run('ALTER TABLE pending_messages ADD COLUMN agent_type TEXT');
logger.debug('DB', 'Added agent_type column to pending_messages table');
}
if (!pendingHasAgentId) {
this.db.run('ALTER TABLE pending_messages ADD COLUMN agent_id TEXT');
logger.debug('DB', 'Added agent_id column to pending_messages table');
}
}
if (!applied) {
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(27, new Date().toISOString());
}
}
}
+6 -2
View File
@@ -15,6 +15,8 @@ const DEDUP_WINDOW_MS = 30_000;
/**
* Compute a short content hash for deduplication.
* Uses (memory_session_id, title, narrative) as the semantic identity of an observation.
* Subagent fields (agent_type, agent_id) are intentionally excluded so the same work
* described once by a subagent and once by its parent deduplicates across contexts.
*/
export function computeObservationContentHash(
memorySessionId: string,
@@ -75,8 +77,8 @@ export function storeObservation(
const stmt = db.prepare(`
INSERT INTO observations
(memory_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
files_read, files_modified, prompt_number, discovery_tokens, agent_type, agent_id, content_hash, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
const result = stmt.run(
@@ -92,6 +94,8 @@ export function storeObservation(
JSON.stringify(observation.files_modified),
promptNumber || null,
discoveryTokens,
observation.agent_type ?? null,
observation.agent_id ?? null,
contentHash,
timestampIso,
timestampEpoch
@@ -16,6 +16,9 @@ export interface ObservationInput {
concepts: string[];
files_read: string[];
files_modified: string[];
// Claude Code subagent identity — NULL for main-session rows.
agent_type?: string | null;
agent_id?: string | null;
}
/**
+8 -4
View File
@@ -68,8 +68,8 @@ export function storeObservationsAndMarkComplete(
const obsStmt = db.prepare(`
INSERT INTO observations
(memory_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
files_read, files_modified, prompt_number, discovery_tokens, agent_type, agent_id, content_hash, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
for (const observation of observations) {
@@ -93,6 +93,8 @@ export function storeObservationsAndMarkComplete(
JSON.stringify(observation.files_modified),
promptNumber || null,
discoveryTokens,
observation.agent_type ?? null,
observation.agent_id ?? null,
contentHash,
timestampIso,
timestampEpoch
@@ -187,8 +189,8 @@ export function storeObservations(
const obsStmt = db.prepare(`
INSERT INTO observations
(memory_session_id, project, type, title, subtitle, facts, narrative, concepts,
files_read, files_modified, prompt_number, discovery_tokens, content_hash, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
files_read, files_modified, prompt_number, discovery_tokens, agent_type, agent_id, content_hash, created_at, created_at_epoch)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
for (const observation of observations) {
@@ -212,6 +214,8 @@ export function storeObservations(
JSON.stringify(observation.files_modified),
promptNumber || null,
discoveryTokens,
observation.agent_type ?? null,
observation.agent_id ?? null,
contentHash,
timestampIso,
timestampEpoch