Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
426 changes: 426 additions & 0 deletions docs/my-website/docs/providers/aibadgr.md

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions docs/my-website/sidebars.js
Original file line number Diff line number Diff line change
Expand Up @@ -667,6 +667,7 @@ const sidebars = {
},
"providers/litellm_proxy",
"providers/ai21",
"providers/aibadgr",
"providers/aiml",
"providers/aleph_alpha",
"providers/anyscale",
Expand Down
1 change: 1 addition & 0 deletions litellm/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -610,6 +610,7 @@
"hyperbolic",
"vercel_ai_gateway",
"aiml",
"aibadgr",
"wandb",
"cometapi",
"clarifai",
Expand Down
5 changes: 5 additions & 0 deletions litellm/llms/openai_like/providers.json
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
{
"aibadgr": {
"base_url": "https://aibadgr.com/api/v1",
"api_key_env": "AIBADGR_API_KEY",
"api_base_env": "AIBADGR_BASE_URL"
},
"publicai": {
"base_url": "https://api.publicai.co/v1",
"api_key_env": "PUBLICAI_API_KEY",
Expand Down
1 change: 1 addition & 0 deletions litellm/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -4483,6 +4483,7 @@ def embedding( # noqa: PLR0915
or custom_llm_provider == "together_ai"
or custom_llm_provider == "nvidia_nim"
or custom_llm_provider == "litellm_proxy"
or custom_llm_provider == "aibadgr"
):
api_base = (
api_base
Expand Down
1 change: 1 addition & 0 deletions litellm/types/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -3001,6 +3001,7 @@ class LlmProviders(str, Enum):
STABILITY = "stability"
HEROKU = "heroku"
AIML = "aiml"
AIBADGR = "aibadgr"
COMETAPI = "cometapi"
OCI = "oci"
AUTO_ROUTER = "auto_router"
Expand Down
17 changes: 17 additions & 0 deletions provider_endpoints_support.json
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,23 @@
"a2a": true
}
},
"aibadgr": {
"display_name": "AI Badgr (`aibadgr`)",
"url": "https://docs.litellm.ai/docs/providers/aibadgr",
"endpoints": {
"chat_completions": true,
"messages": true,
"responses": false,
"embeddings": true,
"image_generations": false,
"audio_transcriptions": false,
"audio_speech": false,
"moderations": false,
"batches": false,
"rerank": false,
"a2a": true
}
},
"ai21": {
"display_name": "AI21 (`ai21`)",
"url": "https://docs.litellm.ai/docs/providers/ai21",
Expand Down
74 changes: 71 additions & 3 deletions tests/test_litellm/llms/openai_like/test_json_providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,16 @@ def test_load_json_providers(self):
"""Test that JSON providers load correctly"""
from litellm.llms.openai_like.json_loader import JSONProviderRegistry

# Verify aibadgr is loaded
assert JSONProviderRegistry.exists("aibadgr")

# Get aibadgr config
aibadgr = JSONProviderRegistry.get("aibadgr")
assert aibadgr is not None
assert aibadgr.base_url == "https://aibadgr.com/api/v1"
assert aibadgr.api_key_env == "AIBADGR_API_KEY"
assert aibadgr.api_base_env == "AIBADGR_BASE_URL"

# Verify publicai is loaded
assert JSONProviderRegistry.exists("publicai")

Expand All @@ -42,6 +52,7 @@ def test_dynamic_config_generation(self):
from litellm.llms.openai_like.dynamic_config import create_config_class
from litellm.llms.openai_like.json_loader import JSONProviderRegistry

# Test PublicAI
provider = JSONProviderRegistry.get("publicai")
config_class = create_config_class(provider)
config = config_class()
Expand All @@ -57,6 +68,22 @@ def test_dynamic_config_generation(self):
assert api_base == "https://custom.api.com"
assert api_key == "test-key"

# Test AI Badgr
provider = JSONProviderRegistry.get("aibadgr")
config_class = create_config_class(provider)
config = config_class()

# Test API info resolution
api_base, api_key = config._get_openai_compatible_provider_info(None, None)
assert api_base == "https://aibadgr.com/api/v1"

# Test with custom base
api_base, api_key = config._get_openai_compatible_provider_info(
"https://custom-aibadgr.com/api/v1", "test-key"
)
assert api_base == "https://custom-aibadgr.com/api/v1"
assert api_key == "test-key"

def test_parameter_mapping(self):
"""Test parameter mapping works"""
from litellm.llms.openai_like.dynamic_config import create_config_class
Expand Down Expand Up @@ -103,6 +130,19 @@ def test_provider_resolution(self):
get_llm_provider,
)

# Test aibadgr provider resolution
model, provider, api_key, api_base = get_llm_provider(
model="aibadgr/premium",
custom_llm_provider=None,
api_base=None,
api_key=None,
)

assert model == "premium"
assert provider == "aibadgr"
assert api_base == "https://aibadgr.com/api/v1"

# Test publicai provider resolution
model, provider, api_key, api_base = get_llm_provider(
model="publicai/gpt-4",
custom_llm_provider=None,
Expand All @@ -114,18 +154,46 @@ def test_provider_resolution(self):
assert provider == "publicai"
assert api_base == "https://api.publicai.co/v1"

def test_aibadgr_tier_models(self):
    """Each AI Badgr tier alias must resolve to the aibadgr provider.

    For every tier name (budget, basic, normal, premium), resolving
    ``aibadgr/<tier>`` should strip the provider prefix, report the
    provider as ``aibadgr``, and fall back to the default API base.
    """
    from litellm.litellm_core_utils.get_llm_provider_logic import (
        get_llm_provider,
    )

    expected_base = "https://aibadgr.com/api/v1"

    # All four pricing tiers share identical resolution behavior.
    for tier_name in ("budget", "basic", "normal", "premium"):
        resolved_model, resolved_provider, _key, resolved_base = get_llm_provider(
            model=f"aibadgr/{tier_name}",
            custom_llm_provider=None,
            api_base=None,
            api_key=None,
        )
        assert resolved_model == tier_name
        assert resolved_provider == "aibadgr"
        assert resolved_base == expected_base

def test_provider_config_manager(self):
    """ProviderConfigManager must hand back JSON-driven chat configs.

    Checks that both the PublicAI and AI Badgr providers yield a
    non-None config whose ``custom_llm_provider`` matches the
    provider's registry key.
    """
    from litellm import LlmProviders
    from litellm.utils import ProviderConfigManager

    # (model name, provider enum member, expected provider key)
    cases = [
        ("gpt-4", LlmProviders.PUBLICAI, "publicai"),
        ("premium", LlmProviders.AIBADGR, "aibadgr"),
    ]

    for model_name, provider_enum, expected_key in cases:
        chat_config = ProviderConfigManager.get_provider_chat_config(
            model=model_name, provider=provider_enum
        )
        assert chat_config is not None
        assert chat_config.custom_llm_provider == expected_key


class TestPublicAIIntegration:
"""Integration tests for PublicAI provider"""
Expand Down Expand Up @@ -283,9 +351,9 @@ def test_publicai_content_list_conversion(self):
test_loader.test_parameter_mapping()
print(" ✓ Parameter mapping works")

print("\n4. Testing excluded params...")
test_loader.test_excluded_params()
print(" ✓ Excluded params work")
print("\n4. Testing supported params...")
test_loader.test_supported_params()
print(" ✓ Supported params work")

print("\n5. Testing provider resolution...")
test_loader.test_provider_resolution()
Expand Down
Loading