@@ -172,52 +172,10 @@ class Anthropic extends BaseLLM {
     }
   }
 
-  protected async *_streamChat(
-    messages: ChatMessage[],
-    signal: AbortSignal,
-    options: CompletionOptions,
+  async *handleResponse(
+    response: any,
+    stream: boolean | undefined,
   ): AsyncGenerator<ChatMessage> {
-    if (!this.apiKey || this.apiKey === "") {
-      throw new Error(
-        "Request not sent. You have an Anthropic model configured in your config.json, but the API key is not set.",
-      );
-    }
-
-    const systemMessage = stripImages(
-      messages.filter((m) => m.role === "system")[0]?.content ?? "",
-    );
-    const shouldCacheSystemMessage = !!(
-      this.cacheBehavior?.cacheSystemMessage && systemMessage
-    );
-
-    const msgs = this.convertMessages(messages);
-    const response = await this.fetch(new URL("messages", this.apiBase), {
-      method: "POST",
-      headers: {
-        "Content-Type": "application/json",
-        Accept: "application/json",
-        "anthropic-version": "2023-06-01",
-        "x-api-key": this.apiKey as string,
-        ...(shouldCacheSystemMessage || this.cacheBehavior?.cacheConversation
-          ? { "anthropic-beta": "prompt-caching-2024-07-31" }
-          : {}),
-      },
-      body: JSON.stringify({
-        ...this.convertArgs(options),
-        messages: msgs,
-        system: shouldCacheSystemMessage
-          ? [
-              {
-                type: "text",
-                text: systemMessage,
-                cache_control: { type: "ephemeral" },
-              },
-            ]
-          : systemMessage,
-      }),
-      signal,
-    });
-
     if (response.status === 499) {
       return; // Aborted by user
     }
@@ -237,7 +195,7 @@ class Anthropic extends BaseLLM {
       );
     }
 
-    if (options.stream === false) {
+    if (stream === false) {
       const data = await response.json();
       const cost = data.usage
         ? {
@@ -348,6 +306,55 @@ class Anthropic extends BaseLLM {
       usage,
     };
   }
+
+  protected async *_streamChat(
+    messages: ChatMessage[],
+    signal: AbortSignal,
+    options: CompletionOptions,
+  ): AsyncGenerator<ChatMessage> {
+    if (!this.apiKey || this.apiKey === "") {
+      throw new Error(
+        "Request not sent. You have an Anthropic model configured in your config.json, but the API key is not set.",
+      );
+    }
+
+    const systemMessage = stripImages(
+      messages.filter((m) => m.role === "system")[0]?.content ?? "",
+    );
+    const shouldCacheSystemMessage = !!(
+      this.cacheBehavior?.cacheSystemMessage && systemMessage
+    );
+
+    const msgs = this.convertMessages(messages);
+    const response = await this.fetch(new URL("messages", this.apiBase), {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        Accept: "application/json",
+        "anthropic-version": "2023-06-01",
+        "x-api-key": this.apiKey as string,
+        ...(shouldCacheSystemMessage || this.cacheBehavior?.cacheConversation
+          ? { "anthropic-beta": "prompt-caching-2024-07-31" }
+          : {}),
+      },
+      body: JSON.stringify({
+        ...this.convertArgs(options),
+        messages: msgs,
+        system: shouldCacheSystemMessage
+          ? [
+              {
+                type: "text",
+                text: systemMessage,
+                cache_control: { type: "ephemeral" },
+              },
+            ]
+          : systemMessage,
+      }),
+      signal,
+    });
+
+    yield* this.handleResponse(response, options.stream);
+  }
 }
 
 export default Anthropic;
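
Review note: after this change, _streamChat is only responsible for building and sending the request, while the 499 abort check and the streaming vs. non-streaming branches live in handleResponse(response, stream). A minimal sketch of why that split is useful, under stated assumptions: the class AnthropicProxy and the "proxy/messages" path below are hypothetical and not part of this PR; ChatMessage, CompletionOptions, and the helper methods are the ones already used in the diff, assumed to be importable from the same module.

// Hypothetical sketch only: a subclass that builds its request differently
// could still reuse the extracted response handling.
class AnthropicProxy extends Anthropic {
  protected async *_streamChat(
    messages: ChatMessage[],
    signal: AbortSignal,
    options: CompletionOptions,
  ): AsyncGenerator<ChatMessage> {
    // Illustrative endpoint; not part of the real provider.
    const response = await this.fetch(new URL("proxy/messages", this.apiBase), {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        ...this.convertArgs(options),
        messages: this.convertMessages(messages),
      }),
      signal,
    });

    // Shared handling of the abort check and the stream / non-stream paths.
    yield* this.handleResponse(response, options.stream);
  }
}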