@@ -7,9 +7,26 @@ import * as assert from 'assert';
import * as positron from 'positron';
import * as vscode from 'vscode';
import * as sinon from 'sinon';
-import { AnthropicLanguageModel } from '../anthropic';
+import { AnthropicLanguageModel, CacheControlOptions } from '../anthropic';
import { ModelConfig } from '../config';
import { EMPTY_TOOL_RESULT_PLACEHOLDER } from '../utils.js';
+import Anthropic from '@anthropic-ai/sdk';
+import { MessageStream } from '@anthropic-ai/sdk/lib/MessageStream.js';
+import { mock } from './utils.js';
+
+class MockAnthropicClient {
+	messages = {
+		stream: sinon.stub<
+			Parameters<Anthropic['messages']['stream']>,
+			ReturnType<Anthropic['messages']['stream']>
+		>().returns(mock<MessageStream>({
+			on: (event, listener) => mock<MessageStream>({}),
+			abort: () => { },
+			done: () => Promise.resolve(),
+			finalMessage: () => Promise.resolve(mock<Anthropic.Message>({})),
+		}))
+	};
+}

type ChatMessageValidateInfo = {
	testName: string;
@@ -19,22 +36,13 @@ type ChatMessageValidateInfo = {

suite('AnthropicLanguageModel', () => {
	let model: AnthropicLanguageModel;
-	let mockClient: any;
+	let mockClient: MockAnthropicClient;
	let mockProgress: vscode.Progress<vscode.ChatResponseFragment2>;
	let mockCancellationToken: vscode.CancellationToken;

	setup(() => {
		// Create a mock Anthropic client
-		mockClient = {
-			messages: {
-				stream: sinon.stub().returns({
-					on: sinon.stub(),
-					abort: sinon.stub(),
-					done: sinon.stub().resolves(),
-					finalMessage: sinon.stub().resolves({}),
-				})
-			}
-		};
+		mockClient = new MockAnthropicClient();

		// Create a mock configuration
		const config: ModelConfig = {
@@ -47,10 +55,7 @@ suite('AnthropicLanguageModel', () => {
		};

		// Create an instance of the AnthropicLanguageModel
-		model = new AnthropicLanguageModel(config);
-
-		// Replace the client with our mock
-		(model as any)._client = mockClient;
+		model = new AnthropicLanguageModel(config, mockClient as unknown as Anthropic);

		// Create mock progress
		mockProgress = {
@@ -233,11 +238,165 @@ suite('AnthropicLanguageModel', () => {
				const streamCall = mockClient.messages.stream.getCall(0);
				assert.ok(streamCall, 'Stream method was not called');

-				const messagesPassedToAnthropicClient: vscode.LanguageModelChatMessage2[] = streamCall.args[0].messages;
+				const messagesPassedToAnthropicClient = streamCall.args[0].messages;
				assert.strictEqual(messagesPassedToAnthropicClient.length, 1, 'Exactly one message should be passed to the Anthropic client');

+				assert.ok(typeof messagesPassedToAnthropicClient[0].content !== 'string', 'Expected a content block object, got a string');
				testCase.validate(messagesPassedToAnthropicClient[0].content);
			});
		});
	});
+
+	test('provideLanguageModelResponse cache_control default behavior', async () => {
+		const toolA = {
+			name: 'toolA',
+			description: 'Tool A',
+			inputSchema: { type: 'object' as const, properties: {} }
+		} satisfies vscode.LanguageModelChatTool;
+		const toolB = {
+			name: 'toolB',
+			description: 'Tool B',
+			inputSchema: { type: 'object' as const, properties: {} }
+		} satisfies vscode.LanguageModelChatTool;
+		const system = 'System prompt';
+
+		// Call the method under test.
+		await model.provideLanguageModelResponse(
+			[
+				vscode.LanguageModelChatMessage.User('Hi'),
+				vscode.LanguageModelChatMessage.User('Bye'),
+			],
+			{
+				// Define the request tools, not sorted by name, so we can test sorting behavior.
+				tools: [toolB, toolA],
+				modelOptions: { system },
+			},
+			'test-extension',
+			mockProgress,
+			mockCancellationToken
+		);
+
+		sinon.assert.calledOnce(mockClient.messages.stream);
+		const body = mockClient.messages.stream.getCall(0).args[0];
+
+		assert.deepStrictEqual(body.tools, [
+			{
+				name: toolA.name,
+				description: toolA.description,
+				input_schema: toolA.inputSchema,
+			},
+			{
+				name: toolB.name,
+				description: toolB.description,
+				input_schema: toolB.inputSchema,
+				cache_control: { type: 'ephemeral' },
+			},
+		] satisfies Anthropic.ToolUnion[], 'Unexpected tools in request body');
+
+		assert.deepStrictEqual(body.system, [
+			{
+				type: 'text',
+				text: system,
+				cache_control: { type: 'ephemeral' },
+			},
+		] satisfies Anthropic.TextBlockParam[], 'Unexpected system prompt in request body');
+
+		assert.deepStrictEqual(body.messages, [
+			{ role: 'user', content: [{ type: 'text', text: 'Hi' }] },
+			{ role: 'user', content: [{ type: 'text', text: 'Bye' }] },
+		] satisfies Anthropic.MessageCreateParams['messages'], 'Unexpected user messages in request body');
+	});
+
+	test('provideLanguageModelResponse cache_control last user message enabled', async () => {
+		// Call the method under test.
+		await model.provideLanguageModelResponse(
+			[
+				vscode.LanguageModelChatMessage.User('Hi'),
+				vscode.LanguageModelChatMessage.User('Bye'),
+			],
+			{
+				modelOptions: {
+					cacheControl: {
+						lastUserMessage: true,
+					} satisfies CacheControlOptions,
+				},
+			},
+			'test-extension',
+			mockProgress,
+			mockCancellationToken
+		);
+
+		sinon.assert.calledOnce(mockClient.messages.stream);
+		const body = mockClient.messages.stream.getCall(0).args[0];
+
+		assert.deepStrictEqual(body.messages, [
+			{ role: 'user', content: [{ type: 'text', text: 'Hi' }] },
+			{ role: 'user', content: [{ type: 'text', text: 'Bye', cache_control: { type: 'ephemeral' } }] },
+		] satisfies Anthropic.MessageCreateParams['messages'], 'Unexpected user messages in request body');
+	});
+
+	test('provideLanguageModelResponse cache_control all disabled', async () => {
+		const toolA = {
+			name: 'toolA',
+			description: 'Tool A',
+			inputSchema: { type: 'object' as const, properties: {} }
+		} satisfies vscode.LanguageModelChatTool;
+		const toolB = {
+			name: 'toolB',
+			description: 'Tool B',
+			inputSchema: { type: 'object' as const, properties: {} }
+		} satisfies vscode.LanguageModelChatTool;
+		const system = 'System prompt';
+
+		// Call the method under test with all cacheControl options explicitly disabled.
+		await model.provideLanguageModelResponse(
+			[
+				vscode.LanguageModelChatMessage.User('Hi'),
+				vscode.LanguageModelChatMessage.User('Bye'),
+			],
+			{
+				// Define the request tools, not sorted by name, so we can test sorting behavior.
+				tools: [toolB, toolA],
+				modelOptions: {
+					system,
+					cacheControl: {
+						lastTool: false,
+						system: false,
+						lastUserMessage: false,
+					} satisfies CacheControlOptions,
+				},
+			},
+			'test-extension',
+			mockProgress,
+			mockCancellationToken
+		);
+
+		sinon.assert.calledOnce(mockClient.messages.stream);
+		const body = mockClient.messages.stream.getCall(0).args[0];
+
+		assert.deepStrictEqual(body.tools, [
+			{
+				name: toolA.name,
+				description: toolA.description,
+				input_schema: toolA.inputSchema,
+			},
+			{
+				name: toolB.name,
+				description: toolB.description,
+				input_schema: toolB.inputSchema,
+			},
+		] satisfies Anthropic.ToolUnion[], 'Unexpected tools in request body');
+
+		assert.deepStrictEqual(body.system, [
+			{
+				type: 'text',
+				text: system,
+			},
+		] satisfies Anthropic.TextBlockParam[], 'Unexpected system prompt in request body');
+
+		assert.deepStrictEqual(body.messages, [
+			{ role: 'user', content: [{ type: 'text', text: 'Hi' }] },
+			{ role: 'user', content: [{ type: 'text', text: 'Bye' }] },
+		] satisfies Anthropic.MessageCreateParams['messages'], 'Unexpected user messages in request body');
+	});
});