@@ -191,7 +191,7 @@ async def llm_general_chat_funcs_test(llm: BaseLLM, prompt: str, messages: list[
 BEDROCK_PROVIDER_REQUEST_BODY = {
     "mistral": {"prompt": "", "max_tokens": 0, "stop": [], "temperature": 0.0, "top_p": 0.0, "top_k": 0},
     "meta": {"prompt": "", "temperature": 0.0, "top_p": 0.0, "max_gen_len": 0},
-    "ai21": {
+    "ai21-j2": {
         "prompt": "",
         "temperature": 0.0,
         "topP": 0.0,
@@ -201,6 +201,16 @@ async def llm_general_chat_funcs_test(llm: BaseLLM, prompt: str, messages: list[
         "presencePenalty": {"scale": 0.0},
         "frequencyPenalty": {"scale": 0.0},
     },
+    "ai21-jamba": {
+        "messages": [],
+        "temperature": 0.0,
+        "topP": 0.0,
+        "max_tokens": 0,
+        "stopSequences": [],
+        "countPenalty": {"scale": 0.0},
+        "presencePenalty": {"scale": 0.0},
+        "frequencyPenalty": {"scale": 0.0},
+    },
     "cohere": {
         "prompt": "",
         "temperature": 0.0,
@@ -214,6 +224,20 @@ async def llm_general_chat_funcs_test(llm: BaseLLM, prompt: str, messages: list[
         "logit_bias": {},
         "truncate": "NONE",
     },
+    "cohere-command-r": {
+        "message": [],
+        "chat_history": [],
+        "temperature": 0.0,
+        "p": 0.0,
+        "k": 0.0,
+        "max_tokens": 0,
+        "stop_sequences": [],
+        "return_likelihoods": "NONE",
+        "stream": False,
+        "num_generations": 0,
+        "logit_bias": {},
+        "truncate": "NONE",
+    },
     "anthropic": {
         "anthropic_version": "bedrock-2023-05-31",
         "max_tokens": 0,
@@ -233,12 +257,20 @@ async def llm_general_chat_funcs_test(llm: BaseLLM, prompt: str, messages: list[
 BEDROCK_PROVIDER_RESPONSE_BODY = {
     "mistral": {"outputs": [{"text": "Hello World", "stop_reason": ""}]},
     "meta": {"generation": "Hello World", "prompt_token_count": 0, "generation_token_count": 0, "stop_reason": ""},
-    "ai21": {
+    "ai21-jamba": {
         "id": "",
         "prompt": {"text": "Hello World", "tokens": []},
-        "completions": [
-            {"data": {"text": "Hello World", "tokens": []}, "finishReason": {"reason": "length", "length": 2}}
-        ],
+        "choices": [{"message": {"content": "Hello World"}}],
+    },
+    "ai21-jamba-stream": {
+        "id": "",
+        "prompt": {"text": "Hello World", "tokens": []},
+        "choices": [{"delta": {"content": "Hello World"}}],
+    },
+    "ai21-j2": {
+        "id": "",
+        "prompt": {"text": "Hello World", "tokens": []},
+        "completions": [{"data": {"text": "Hello World"}, "finishReason": {"reason": "length", "length": 2}}],
     },
     "cohere": {
         "generations": [
@@ -255,6 +287,21 @@ async def llm_general_chat_funcs_test(llm: BaseLLM, prompt: str, messages: list[
         "id": "",
         "prompt": "",
     },
+    "cohere-command-r": {
+        "generations": [
+            {
+                "finish_reason": "",
+                "id": "",
+                "text": "Hello World",
+                "likelihood": 0.0,
+                "token_likelihoods": [{"token": 0.0}],
+                "is_finished": True,
+                "index": 0,
+            }
+        ],
+        "id": "",
+        "prompt": "",
+    },
     "anthropic": {
         "id": "",
         "model": "",
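
For context, a minimal sketch of how request/response fixtures like these are commonly consumed in a test, assuming a boto3-style `invoke_model` mock; the `provider` value, model ID, and assertion below are illustrative and not part of this commit:

```python
import io
import json
from unittest.mock import MagicMock

# Assumes the BEDROCK_PROVIDER_REQUEST_BODY / BEDROCK_PROVIDER_RESPONSE_BODY
# fixtures defined above are in scope (e.g. imported from the test module).
provider = "cohere-command-r"  # hypothetical choice; any fixture key would do

# Fake Bedrock runtime client whose invoke_model returns the canned response body.
client = MagicMock()
client.invoke_model.return_value = {
    "body": io.BytesIO(json.dumps(BEDROCK_PROVIDER_RESPONSE_BODY[provider]).encode("utf-8"))
}

# Send a request built from the matching template and check the parsed completion.
response = client.invoke_model(
    modelId="cohere.command-r-v1:0",  # illustrative model ID
    body=json.dumps(BEDROCK_PROVIDER_REQUEST_BODY[provider]),
)
assert json.loads(response["body"].read())["generations"][0]["text"] == "Hello World"
```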