@@ -18,11 +18,11 @@ public async Task Llama2_UseEnum_SuccessAsync()
 
         // Assert
         _ = await client.Received().GetTextCompletionAsync(
-            Arg.Is<ModelRequest<TextGenerationInput, ITextGenerationParameters>>(
-                s => s.Input.Messages == Cases.TextMessages
-                     && s.Model == "llama2-13b-chat-v2"
-                     && s.Parameters != null
-                     && s.Parameters.ResultFormat == ResultFormats.Message));
+            Arg.Is<ModelRequest<TextGenerationInput, ITextGenerationParameters>>(s
+                => s.Input.Messages == Cases.TextMessages
+                   && s.Model == "llama2-13b-chat-v2"
+                   && s.Parameters != null
+                   && s.Parameters.ResultFormat == ResultFormats.Message));
     }
 
     [Fact]
@@ -32,7 +32,10 @@ public async Task Llama2_UseInvalidEnum_SuccessAsync()
         var client = Substitute.For<IDashScopeClient>();
 
         // Act
-        var act = async () => await client.GetLlama2TextCompletionAsync(Llama2Model.Chat13Bv2, Cases.TextMessages, ResultFormats.Message);
+        var act = async () => await client.GetLlama2TextCompletionAsync(
+            (Llama2Model)(-1),
+            Cases.TextMessages,
+            ResultFormats.Message);
 
         // Assert
         await Assert.ThrowsAsync<ArgumentOutOfRangeException>(act);
@@ -49,10 +52,10 @@ public async Task Llama2_CustomModel_SuccessAsync()
 
         // Assert
         _ = await client.Received().GetTextCompletionAsync(
-            Arg.Is<ModelRequest<TextGenerationInput, ITextGenerationParameters>>(
-                s => s.Input.Messages == Cases.TextMessages
-                     && s.Model == Cases.CustomModelName
-                     && s.Parameters != null
-                     && s.Parameters.ResultFormat == ResultFormats.Message));
+            Arg.Is<ModelRequest<TextGenerationInput, ITextGenerationParameters>>(s
+                => s.Input.Messages == Cases.TextMessages
+                   && s.Model == Cases.CustomModelName
+                   && s.Parameters != null
+                   && s.Parameters.ResultFormat == ResultFormats.Message));
     }
 }