fix: enhance title sanitization per PR #1641 review (round 4)

Collapse runs of whitespace into a single space, trim the result, and raise the
maximum length to 160 characters for observation titles in the file-context deny reason.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Alex Newman
2026-04-07 14:18:22 -07:00
parent 753a993647
commit a0e895b53b
3 changed files with 3 additions and 3 deletions
+1 -1
View File
@@ -942,7 +942,7 @@ View Observations Live @ http://localhost:${i}`:void 0;return{hookSpecificOutput
`+String.fromCodePoint(128172)+` Community https://discord.gg/J4wttp9vDu
`+String.fromCodePoint(128250)+` Watch live in browser http://localhost:${r}/
`)}catch{}return{exitCode:it.SUCCESS}}}});function qxe(t){return t.toLowerCase().replace(" am","a").replace(" pm","p")}function Hxe(t){return new Date(t).toLocaleString("en-US",{hour:"numeric",minute:"2-digit",hour12:!0})}function Zxe(t){return new Date(t).toLocaleString("en-US",{month:"short",day:"numeric",year:"numeric"})}function Bxe(t,e,r){let n=new Set,i=[];for(let o of t){let a=o.memory_session_id??`no-session-${o.id}`;n.has(a)||(n.add(a),i.push(o))}let s=i.map(o=>{let a=Ca(o.files_read),c=Ca(o.files_modified),u=a.length+c.length,l=e.replace(/\\/g,"/"),d=c.some(f=>f.replace(/\\/g,"/")===l),p=0;return d&&(p+=2),u<=3?p+=2:u<=8&&(p+=1),{obs:o,specificityScore:p}});return s.sort((o,a)=>a.specificityScore-o.specificityScore),s.slice(0,r).map(o=>o.obs)}function Gxe(t,e){let r=e.replace(/\\/g,"\\\\").replace(/"/g,'\\"').replace(/\n/g,"\\n"),n=new Map;for(let l of t){let d=Zxe(l.created_at_epoch);n.has(d)||n.set(d,[]),n.get(d).push(l)}let i=Array.from(n.entries()).sort((l,d)=>{let p=Math.min(...l[1].map(m=>m.created_at_epoch)),f=Math.min(...d[1].map(m=>m.created_at_epoch));return p-f}),s=new Date,o=s.toLocaleDateString("en-CA"),a=s.toLocaleTimeString("en-US",{hour:"numeric",minute:"2-digit",hour12:!0}).toLowerCase().replace(" ",""),c=s.toLocaleTimeString("en-US",{timeZoneName:"short"}).split(" ").pop(),u=[`Current: ${o} ${a} ${c}`,"This file has prior observations. Only line 1 was read to save tokens.","- **Already know enough?** The timeline below may be all you need (semantic priming).","- **Need details?** get_observations([IDs]) \u2014 ~300 tokens each.","- **Need full file?** Read again with offset/limit for the section you need.",`- **Need to edit?** Edit works \u2014 the file is registered as read. 
Use smart_outline("${r}") for line numbers.`];for(let[l,d]of i){let p=[...d].sort((f,m)=>f.created_at_epoch-m.created_at_epoch);u.push(`### ${l}`);for(let f of p){let m=(f.title||"Untitled").replace(/[\r\n\t]/g," ").slice(0,120),h=Fxe[f.type]||"\u2753",g=qxe(Hxe(f.created_at_epoch));u.push(`${f.id} ${g} ${h} ${m}`)}}return u.join(`
`)}catch{}return{exitCode:it.SUCCESS}}}});function qxe(t){return t.toLowerCase().replace(" am","a").replace(" pm","p")}function Hxe(t){return new Date(t).toLocaleString("en-US",{hour:"numeric",minute:"2-digit",hour12:!0})}function Zxe(t){return new Date(t).toLocaleString("en-US",{month:"short",day:"numeric",year:"numeric"})}function Bxe(t,e,r){let n=new Set,i=[];for(let o of t){let a=o.memory_session_id??`no-session-${o.id}`;n.has(a)||(n.add(a),i.push(o))}let s=i.map(o=>{let a=Ca(o.files_read),c=Ca(o.files_modified),u=a.length+c.length,l=e.replace(/\\/g,"/"),d=c.some(f=>f.replace(/\\/g,"/")===l),p=0;return d&&(p+=2),u<=3?p+=2:u<=8&&(p+=1),{obs:o,specificityScore:p}});return s.sort((o,a)=>a.specificityScore-o.specificityScore),s.slice(0,r).map(o=>o.obs)}function Gxe(t,e){let r=e.replace(/\\/g,"\\\\").replace(/"/g,'\\"').replace(/\n/g,"\\n"),n=new Map;for(let l of t){let d=Zxe(l.created_at_epoch);n.has(d)||n.set(d,[]),n.get(d).push(l)}let i=Array.from(n.entries()).sort((l,d)=>{let p=Math.min(...l[1].map(m=>m.created_at_epoch)),f=Math.min(...d[1].map(m=>m.created_at_epoch));return p-f}),s=new Date,o=s.toLocaleDateString("en-CA"),a=s.toLocaleTimeString("en-US",{hour:"numeric",minute:"2-digit",hour12:!0}).toLowerCase().replace(" ",""),c=s.toLocaleTimeString("en-US",{timeZoneName:"short"}).split(" ").pop(),u=[`Current: ${o} ${a} ${c}`,"This file has prior observations. Only line 1 was read to save tokens.","- **Already know enough?** The timeline below may be all you need (semantic priming).","- **Need details?** get_observations([IDs]) \u2014 ~300 tokens each.","- **Need full file?** Read again with offset/limit for the section you need.",`- **Need to edit?** Edit works \u2014 the file is registered as read. 
Use smart_outline("${r}") for line numbers.`];for(let[l,d]of i){let p=[...d].sort((f,m)=>f.created_at_epoch-m.created_at_epoch);u.push(`### ${l}`);for(let f of p){let m=(f.title||"Untitled").replace(/[\r\n\t]+/g," ").replace(/\s+/g," ").trim().slice(0,160),h=Fxe[f.type]||"\u2753",g=qxe(Hxe(f.created_at_epoch));u.push(`${f.id} ${g} ${h} ${m}`)}}return u.join(`
`)}var Y5,Va,zxe,Lxe,Uxe,Fxe,FR,qR=Ee(()=>{"use strict";fr();te();Fs();Y5=require("fs"),Va=Pe(require("path"),1);y_();tr();It();Za();zxe=1500,Lxe=40,Uxe=15,Fxe={decision:"\u2696\uFE0F",bugfix:"\u{1F534}",feature:"\u{1F7E3}",refactor:"\u{1F504}",discovery:"\u{1F535}",change:"\u2705"};FR={async execute(t){let r=t.toolInput?.file_path;if(!r)return{continue:!0,suppressOutput:!0};try{let s=Va.default.isAbsolute(r)?r:Va.default.resolve(t.cwd||process.cwd(),r);if((0,Y5.statSync)(s).size<zxe)return{continue:!0,suppressOutput:!0}}catch(s){if(s.code==="ENOENT")return{continue:!0,suppressOutput:!0}}let n=ye.loadFromFile(vt);if(t.cwd&&fl(t.cwd,n.CLAUDE_MEM_EXCLUDED_PROJECTS))return _.debug("HOOK","Project excluded from tracking, skipping file context",{cwd:t.cwd}),{continue:!0,suppressOutput:!0};if(!await sr())return{continue:!0,suppressOutput:!0};try{let s=pl(t.cwd),o=t.cwd||process.cwd(),a=Va.default.isAbsolute(r)?r:Va.default.resolve(o,r),c=Va.default.relative(o,a).split(Va.default.sep).join("/"),u=new URLSearchParams({path:c});s.allProjects.length>0&&u.set("projects",s.allProjects.join(",")),u.set("limit",String(Lxe));let l=await Qe(`/api/observations/by-file?${u.toString()}`,{method:"GET"});if(!l.ok)return _.warn("HOOK","File context query failed, skipping",{status:l.status,filePath:r}),{continue:!0,suppressOutput:!0};let d=await l.json();if(!d.observations||d.observations.length===0)return{continue:!0,suppressOutput:!0};let p=Bxe(d.observations,c,Uxe);return p.length===0?{continue:!0,suppressOutput:!0}:{hookSpecificOutput:{hookEventName:"PreToolUse",additionalContext:Gxe(p,r),permissionDecision:"allow",updatedInput:{file_path:r,limit:1}}}}catch(s){return _.warn("HOOK","File context fetch error, skipping",{error:s instanceof Error?s.message:String(s)}),{continue:!0,suppressOutput:!0}}}}});function Q5(t){let e=Wxe[t];return e||(_.warn("HOOK",`Unknown event type: ${t}, returning no-op`),{async execute(){return{continue:!0,suppressOutput:!0,exitCode:it.SUCCESS}}})}var 
Wxe,e3=Ee(()=>{"use strict";Pn();te();DR();__();b_();zR();UR();S_();qR();x_();DR();__();b_();zR();UR();S_();qR();x_();Wxe={context:MR,"session-init":nm,observation:im,summarize:jR,"session-complete":om,"user-message":LR,"file-edit":sm,"file-context":FR}});var r3={};Fn(r3,{hookCommand:()=>Vxe,isWorkerUnavailableError:()=>t3});function t3(t){let e=t instanceof Error?t.message:String(t),r=e.toLowerCase();return["econnrefused","econnreset","epipe","etimedout","enotfound","econnaborted","enetunreach","ehostunreach","fetch failed","unable to connect","socket hang up"].some(i=>r.includes(i))||r.includes("timed out")||r.includes("timeout")||/failed:\s*5\d{2}/.test(e)||/status[:\s]+5\d{2}/.test(e)||/failed:\s*429/.test(e)||/status[:\s]+429/.test(e)?!0:(/failed:\s*4\d{2}/.test(e)||/status[:\s]+4\d{2}/.test(e)||t instanceof TypeError||t instanceof ReferenceError||t instanceof SyntaxError,!1)}async function Vxe(t,e,r={}){let n=process.stderr.write.bind(process.stderr);process.stderr.write=(()=>!0);try{let i=W5(t),s=Q5(e),o=await D5(),a=i.normalizeInput(o);a.platform=t;let c=await s.execute(a),u=i.formatOutput(c);console.log(JSON.stringify(u));let l=c.exitCode??it.SUCCESS;return r.skipExit||process.exit(l),l}catch(i){return t3(i)?(_.warn("HOOK",`Worker unavailable, skipping hook: ${i instanceof Error?i.message:i}`),r.skipExit||process.exit(it.SUCCESS),it.SUCCESS):(_.error("HOOK",`Hook error: ${i instanceof Error?i.message:i}`,{},i instanceof Error?i:void 0),r.skipExit||process.exit(it.BLOCKING_ERROR),it.BLOCKING_ERROR)}finally{process.stderr.write=n}}var n3=Ee(()=>{"use strict";j5();V5();e3();Pn();te()});var ZR={};Fn(ZR,{cleanClaudeMd:()=>awe,generateClaudeMd:()=>owe});function Xxe(t){return Jxe[t]||"\u{1F4DD}"}function Yxe(t){let e=(t.title?.length||0)+(t.subtitle?.length||0)+(t.narrative?.length||0)+(t.facts?.length||0);return Math.ceil(e/4)}function Qxe(t){let e=new Set;try{let n=(0,o3.execSync)("git ls-files",{cwd:t,encoding:"utf-8",maxBuffer:52428800}).trim().split(`
`).filter(i=>i);for(let i of n){let s=lr.default.join(t,i),o=lr.default.dirname(s);for(;o.length>t.length&&o.startsWith(t);)e.add(o),o=lr.default.dirname(o)}}catch(r){_.warn("CLAUDE_MD","git ls-files failed, falling back to directory walk",{error:String(r)}),a3(t,e)}return e}function a3(t,e,r=0){if(r>10)return;let n=["node_modules",".git",".next","dist","build",".cache","__pycache__",".venv","venv",".idea",".vscode","coverage",".claude-mem",".open-next",".turbo"];try{let i=(0,dr.readdirSync)(t,{withFileTypes:!0});for(let s of i){if(!s.isDirectory()||n.includes(s.name)||s.name.startsWith(".")&&s.name!==".claude")continue;let o=lr.default.join(t,s.name);e.add(o),a3(o,e,r+1)}}catch{}}function ewe(t,e){let r=n=>{if(!n)return!1;try{let i=JSON.parse(n);if(Array.isArray(i))return i.some(s=>Ra(s,e))}catch{}return!1};return r(t.files_modified)||r(t.files_read)}function twe(t,e,r,n){let i=n*3,s=`
SELECT o.*, o.discovery_tokens