@@ -16,7 +16,6 @@ import {
     TLLMChatResponse,
     BasicCredentials,
     TAnthropicRequestBody,
-    TLLMConnectorParams,
     ILLMRequestContext,
 } from '@sre/types/LLM.types';
 
@@ -81,10 +80,14 @@ export class AnthropicConnector extends LLMConnector {
         }
 
         const textBlock = result?.content?.find((block) => block.type === 'text');
-        const content = textBlock?.text || '';
+        let content = textBlock?.text || '';
 
         const usage = result?.usage;
 
+        if (this.hasPrefillText(body.messages)) {
+            content = `${PREFILL_TEXT_FOR_JSON_RESPONSE}${content}`;
+        }
+
         this.reportUsage(usage, {
             modelEntryName: context.modelEntryName,
             keySource: context.isUserKey ? APIKeySource.User : APIKeySource.Smyth,
@@ -116,6 +119,10 @@ export class AnthropicConnector extends LLMConnector {
         let toolsData: ToolData[] = [];
         let thinkingBlocks: any[] = []; // To preserve thinking blocks
 
+        // Determine if we need to inject prefill text and track if it's been injected
+        const needsPrefillInjection = this.hasPrefillText(body.messages);
+        let prefillInjected = false;
+
         stream.on('streamEvent', (event: any) => {
             if (event.message?.usage) {
                 //console.log('usage', event.message?.usage);
@@ -127,7 +134,14 @@ export class AnthropicConnector extends LLMConnector {
 
             emitter.emit('error', error);
         });
+
         stream.on('text', (text: string) => {
+            // Inject prefill text only once at the very beginning if needed
+            if (needsPrefillInjection && !prefillInjected) {
+                text = `${PREFILL_TEXT_FOR_JSON_RESPONSE}${text}`;
+                prefillInjected = true;
+            }
+
             emitter.emit('content', text);
         });
 
@@ -629,4 +643,16 @@ export class AnthropicConnector extends LLMConnector {
             throw error;
         }
     }
+
+    private hasPrefillText(messages: Anthropic.MessageParam[]) {
+        for (let i = messages.length - 1; i >= 0; i--) {
+            const message = messages[i];
+
+            if (message?.role === TLLMMessageRole.Assistant && message?.content === PREFILL_TEXT_FOR_JSON_RESPONSE) {
+                return true;
+            }
+        }
+
+        return false;
+    }
 }
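
The prefill handling added above is needed because Anthropic continues generation from a trailing assistant message but does not repeat that prefill text in its response, so the connector must prepend it again before the output can be parsed as JSON. A minimal standalone sketch of the idea (assuming for illustration that PREFILL_TEXT_FOR_JSON_RESPONSE is simply '{'; the real constant, messages, and completion below are hypothetical):

const PREFILL_TEXT_FOR_JSON_RESPONSE = '{'; // assumed value, for illustration only

// The request ends with an assistant prefill turn, nudging the model toward JSON output.
const messages = [
    { role: 'user', content: 'Return the user as JSON with a "name" field.' },
    { role: 'assistant', content: PREFILL_TEXT_FOR_JSON_RESPONSE },
];

// Hypothetical raw completion: it continues from the prefill and omits it.
const rawCompletion = '"name": "Ada"}';

// Without re-attaching the prefill, JSON.parse(rawCompletion) would throw.
const full = `${PREFILL_TEXT_FOR_JSON_RESPONSE}${rawCompletion}`;
console.log(JSON.parse(full)); // { name: 'Ada' }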