@@ -6455,8 +6455,7 @@
         "input_cost_per_token": 3.5e-07,
         "output_cost_per_token": 1.4e-06,
         "litellm_provider": "perplexity",
-        "mode": "chat",
-        "supports_tool_choice": true
+        "mode": "chat"
     },
     "perplexity/codellama-70b-instruct": {
         "max_tokens": 16384,
@@ -6465,8 +6464,7 @@
         "input_cost_per_token": 7e-07,
         "output_cost_per_token": 2.8e-06,
         "litellm_provider": "perplexity",
-        "mode": "chat",
-        "supports_tool_choice": true
+        "mode": "chat"
     },
     "perplexity/llama-3.1-70b-instruct": {
         "max_tokens": 131072,
@@ -6475,8 +6473,7 @@
         "input_cost_per_token": 1e-06,
         "output_cost_per_token": 1e-06,
         "litellm_provider": "perplexity",
-        "mode": "chat",
-        "supports_tool_choice": true
+        "mode": "chat"
     },
     "perplexity/llama-3.1-8b-instruct": {
         "max_tokens": 131072,
@@ -6485,8 +6482,7 @@
         "input_cost_per_token": 2e-07,
         "output_cost_per_token": 2e-07,
         "litellm_provider": "perplexity",
-        "mode": "chat",
-        "supports_tool_choice": true
+        "mode": "chat"
     },
     "perplexity/llama-3.1-sonar-huge-128k-online": {
         "max_tokens": 127072,
@@ -6496,8 +6492,7 @@
         "output_cost_per_token": 5e-06,
         "litellm_provider": "perplexity",
         "mode": "chat",
-        "deprecation_date": "2025-02-22",
-        "supports_tool_choice": true
+        "deprecation_date": "2025-02-22"
     },
     "perplexity/llama-3.1-sonar-large-128k-online": {
         "max_tokens": 127072,
@@ -6507,8 +6502,7 @@
         "output_cost_per_token": 1e-06,
         "litellm_provider": "perplexity",
         "mode": "chat",
-        "deprecation_date": "2025-02-22",
-        "supports_tool_choice": true
+        "deprecation_date": "2025-02-22"
     },
     "perplexity/llama-3.1-sonar-large-128k-chat": {
         "max_tokens": 131072,
@@ -6518,8 +6512,7 @@
         "output_cost_per_token": 1e-06,
         "litellm_provider": "perplexity",
         "mode": "chat",
-        "deprecation_date": "2025-02-22",
-        "supports_tool_choice": true
+        "deprecation_date": "2025-02-22"
     },
     "perplexity/llama-3.1-sonar-small-128k-chat": {
         "max_tokens": 131072,
@@ -6529,8 +6522,7 @@
         "output_cost_per_token": 2e-07,
         "litellm_provider": "perplexity",
         "mode": "chat",
-        "deprecation_date": "2025-02-22",
-        "supports_tool_choice": true
+        "deprecation_date": "2025-02-22"
     },
     "perplexity/llama-3.1-sonar-small-128k-online": {
         "max_tokens": 127072,
@@ -6540,8 +6532,7 @@
         "output_cost_per_token": 2e-07,
         "litellm_provider": "perplexity",
         "mode": "chat",
-        "deprecation_date": "2025-02-22",
-        "supports_tool_choice": true
+        "deprecation_date": "2025-02-22"
     },
     "perplexity/pplx-7b-chat": {
         "max_tokens": 8192,
@@ -6550,8 +6541,7 @@
         "input_cost_per_token": 7e-08,
         "output_cost_per_token": 2.8e-07,
         "litellm_provider": "perplexity",
-        "mode": "chat",
-        "supports_tool_choice": true
+        "mode": "chat"
     },
     "perplexity/pplx-70b-chat": {
         "max_tokens": 4096,
@@ -6560,8 +6550,7 @@
         "input_cost_per_token": 7e-07,
         "output_cost_per_token": 2.8e-06,
         "litellm_provider": "perplexity",
-        "mode": "chat",
-        "supports_tool_choice": true
+        "mode": "chat"
     },
     "perplexity/pplx-7b-online": {
         "max_tokens": 4096,
@@ -6571,8 +6560,7 @@
         "output_cost_per_token": 2.8e-07,
         "input_cost_per_request": 0.005,
         "litellm_provider": "perplexity",
-        "mode": "chat",
-        "supports_tool_choice": true
+        "mode": "chat"
     },
     "perplexity/pplx-70b-online": {
         "max_tokens": 4096,
@@ -6582,8 +6570,7 @@
         "output_cost_per_token": 2.8e-06,
         "input_cost_per_request": 0.005,
         "litellm_provider": "perplexity",
-        "mode": "chat",
-        "supports_tool_choice": true
+        "mode": "chat"
     },
     "perplexity/llama-2-70b-chat": {
         "max_tokens": 4096,
@@ -6592,8 +6579,7 @@
         "input_cost_per_token": 7e-07,
         "output_cost_per_token": 2.8e-06,
         "litellm_provider": "perplexity",
-        "mode": "chat",
-        "supports_tool_choice": true
+        "mode": "chat"
     },
     "perplexity/mistral-7b-instruct": {
         "max_tokens": 4096,
@@ -6602,8 +6588,7 @@
         "input_cost_per_token": 7e-08,
         "output_cost_per_token": 2.8e-07,
         "litellm_provider": "perplexity",
-        "mode": "chat",
-        "supports_tool_choice": true
+        "mode": "chat"
     },
     "perplexity/mixtral-8x7b-instruct": {
         "max_tokens": 4096,
@@ -6612,8 +6597,7 @@
         "input_cost_per_token": 7e-08,
         "output_cost_per_token": 2.8e-07,
         "litellm_provider": "perplexity",
-        "mode": "chat",
-        "supports_tool_choice": true
+        "mode": "chat"
     },
     "perplexity/sonar-small-chat": {
         "max_tokens": 16384,
@@ -6622,8 +6606,7 @@
         "input_cost_per_token": 7e-08,
         "output_cost_per_token": 2.8e-07,
         "litellm_provider": "perplexity",
-        "mode": "chat",
-        "supports_tool_choice": true
+        "mode": "chat"
     },
     "perplexity/sonar-small-online": {
         "max_tokens": 12000,
@@ -6633,8 +6616,7 @@
         "output_cost_per_token": 2.8e-07,
         "input_cost_per_request": 0.005,
         "litellm_provider": "perplexity",
-        "mode": "chat",
-        "supports_tool_choice": true
+        "mode": "chat"
     },
     "perplexity/sonar-medium-chat": {
         "max_tokens": 16384,
@@ -6643,8 +6625,7 @@
         "input_cost_per_token": 6e-07,
         "output_cost_per_token": 1.8e-06,
         "litellm_provider": "perplexity",
-        "mode": "chat",
-        "supports_tool_choice": true
+        "mode": "chat"
     },
     "perplexity/sonar-medium-online": {
         "max_tokens": 12000,
@@ -6654,8 +6635,7 @@
         "output_cost_per_token": 1.8e-06,
         "input_cost_per_request": 0.005,
         "litellm_provider": "perplexity",
-        "mode": "chat",
-        "supports_tool_choice": true
+        "mode": "chat"
     },
     "fireworks_ai/accounts/fireworks/models/llama-v3p2-1b-instruct": {
         "max_tokens": 16384,
@@ -8429,7 +8409,9 @@
         "input_cost_per_token": 7.2e-07,
         "output_cost_per_token": 7.2e-07,
         "litellm_provider": "bedrock_converse",
-        "mode": "chat"
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_tool_choice": false
     },
     "together_ai/meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": {
         "input_cost_per_token": 1.8e-07,
@@ -9194,5 +9176,94 @@
         "input_cost_per_second": 3.333e-05,
         "output_cost_per_second": 0.0,
         "litellm_provider": "assemblyai"
+    },
+    "azure/gpt-3.5-turbo-0125": {
+        "max_tokens": 4096,
+        "max_input_tokens": 16384,
+        "max_output_tokens": 4096,
+        "input_cost_per_token": 5e-07,
+        "output_cost_per_token": 1.5e-06,
+        "litellm_provider": "azure",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "deprecation_date": "2025-03-31",
+        "supports_tool_choice": true
+    },
+    "azure/gpt-3.5-turbo": {
+        "max_tokens": 4096,
+        "max_input_tokens": 4097,
+        "max_output_tokens": 4096,
+        "input_cost_per_token": 5e-07,
+        "output_cost_per_token": 1.5e-06,
+        "litellm_provider": "azure",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_tool_choice": true
+    },
+    "gemini-2.0-pro-exp-02-05": {
+        "max_tokens": 8192,
+        "max_input_tokens": 2097152,
+        "max_output_tokens": 8192,
+        "max_images_per_prompt": 3000,
+        "max_videos_per_prompt": 10,
+        "max_video_length": 1,
+        "max_audio_length_hours": 8.4,
+        "max_audio_per_prompt": 1,
+        "max_pdf_size_mb": 30,
+        "input_cost_per_image": 0,
+        "input_cost_per_video_per_second": 0,
+        "input_cost_per_audio_per_second": 0,
+        "input_cost_per_token": 0,
+        "input_cost_per_character": 0,
+        "input_cost_per_token_above_128k_tokens": 0,
+        "input_cost_per_character_above_128k_tokens": 0,
+        "input_cost_per_image_above_128k_tokens": 0,
+        "input_cost_per_video_per_second_above_128k_tokens": 0,
+        "input_cost_per_audio_per_second_above_128k_tokens": 0,
+        "output_cost_per_token": 0,
+        "output_cost_per_character": 0,
+        "output_cost_per_token_above_128k_tokens": 0,
+        "output_cost_per_character_above_128k_tokens": 0,
+        "litellm_provider": "vertex_ai-language-models",
+        "mode": "chat",
+        "supports_system_messages": true,
+        "supports_function_calling": true,
+        "supports_vision": true,
+        "supports_audio_input": true,
+        "supports_video_input": true,
+        "supports_pdf_input": true,
+        "supports_response_schema": true,
+        "supports_tool_choice": true,
+        "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
+    },
+    "us.meta.llama3-3-70b-instruct-v1:0": {
+        "max_tokens": 4096,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 4096,
+        "input_cost_per_token": 7.2e-07,
+        "output_cost_per_token": 7.2e-07,
+        "litellm_provider": "bedrock_converse",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_tool_choice": false
+    },
+    "perplexity/sonar": {
+        "max_tokens": 127072,
+        "max_input_tokens": 127072,
+        "max_output_tokens": 127072,
+        "input_cost_per_token": 1e-06,
+        "output_cost_per_token": 1e-06,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    },
+    "perplexity/sonar-pro": {
+        "max_tokens": 200000,
+        "max_input_tokens": 200000,
+        "max_output_tokens": 8096,
+        "input_cost_per_token": 3e-06,
+        "output_cost_per_token": 1.5e-05,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    }
     }
 }
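
For reference, a minimal sketch of how the per-token pricing fields in these entries translate into a per-request cost, using the "perplexity/sonar-pro" values added in the last hunk. The pricing constants are copied from the diff; the token counts and helper name are illustrative only and are not part of litellm's API.

# Illustrative cost math for the "perplexity/sonar-pro" entry added above.
# Constants come from the diff; token counts are made up for the example.
INPUT_COST_PER_TOKEN = 3e-06
OUTPUT_COST_PER_TOKEN = 1.5e-05

def request_cost(prompt_tokens: int, completion_tokens: int) -> float:
    """USD spend for a single call under simple per-token pricing."""
    return (prompt_tokens * INPUT_COST_PER_TOKEN
            + completion_tokens * OUTPUT_COST_PER_TOKEN)

print(f"${request_cost(1200, 400):.6f}")  # -> $0.009600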