Skip to content

Commit 7949b9a

Browse files
tests
1 parent 2306411 commit 7949b9a

File tree

4 files changed

+550
-2
lines changed

4 files changed

+550
-2
lines changed

litellm/llms/morph/rerank/transformation.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,12 +2,11 @@
22

33
import httpx
44

5-
import litellm
65
from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
76
from litellm.llms.base_llm.chat.transformation import BaseLLMException
87
from litellm.llms.base_llm.rerank.transformation import BaseRerankConfig
98
from litellm.secret_managers.main import get_secret_str
10-
from litellm.types.rerank import OptionalRerankParams, RerankRequest
9+
from litellm.types.rerank import OptionalRerankParams
1110
from litellm.types.utils import RerankResponse
1211

1312

Lines changed: 215 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,215 @@
1+
"""
2+
Unit tests for Morph configuration.
3+
4+
These tests validate the MorphChatConfig class which extends OpenAILikeChatConfig.
5+
Morph is an OpenAI-compatible provider with a few customizations.
6+
"""
7+
8+
import os
9+
import sys
10+
from typing import Dict, List, Optional
11+
from unittest.mock import patch
12+
13+
import pytest
14+
15+
sys.path.insert(
16+
0, os.path.abspath("../../../../..")
17+
) # Adds the parent directory to the system path
18+
19+
from litellm.llms.morph.chat.transformation import MorphChatConfig
20+
21+
22+
class TestMorphChatConfig:
    """Unit tests for MorphChatConfig (OpenAI-compatible chat provider config).

    Covers environment validation, provider info resolution, missing-key
    error handling, inheritance, and mocked end-to-end completion calls
    via the ``respx_mock`` fixture (pytest-respx plugin).
    """

    def test_validate_environment(self):
        """validate_environment must inject Bearer auth and JSON content-type headers."""
        config = MorphChatConfig()
        headers = {}
        api_key = "fake-morph-key"

        result = config.validate_environment(
            headers=headers,
            model="morph/apply-v1",
            messages=[{"role": "user", "content": "Hello"}],
            optional_params={},
            litellm_params={},
            api_key=api_key,
            api_base="https://api.morphllm.com/v1",
        )

        # Verify headers
        assert result["Authorization"] == f"Bearer {api_key}"
        assert result["Content-Type"] == "application/json"

    def test_get_openai_compatible_provider_info(self):
        """With no explicit api_base, the default Morph endpoint must be returned."""
        config = MorphChatConfig()
        api_key = "fake-morph-key"

        result = config._get_openai_compatible_provider_info(
            api_base=None,
            api_key=api_key,
        )

        # Verify correct API base is returned, and the key passes through unchanged
        assert result[0] == "https://api.morphllm.com/v1"
        assert result[1] == api_key

    def test_missing_api_key(self):
        """A missing API key must raise a ValueError with a descriptive message."""
        config = MorphChatConfig()

        with pytest.raises(ValueError) as excinfo:
            config.validate_environment(
                headers={},
                model="morph/apply-v1",
                messages=[{"role": "user", "content": "Hello"}],
                optional_params={},
                litellm_params={},
                api_key=None,
                api_base="https://api.morphllm.com/v1",
            )

        assert "Morph API key is required" in str(excinfo.value)

    def test_inheritance(self):
        """MorphChatConfig must subclass OpenAILikeChatConfig and expose its API."""
        config = MorphChatConfig()

        from litellm.llms.openai_like.chat.transformation import OpenAILikeChatConfig

        assert isinstance(config, OpenAILikeChatConfig)
        assert hasattr(config, "_get_openai_compatible_provider_info")

    def test_morph_completion_mock(self, respx_mock):
        """
        Mock test for Morph completion using the model format from docs.
        This test mocks the actual HTTP request to test the integration properly.
        """
        # NOTE: `respx_mock` is the pytest-respx fixture; no direct `respx`
        # import is needed (the previous unused `import respx` was removed).
        from litellm import completion

        # Set up test parameters
        api_key = "fake-morph-key"
        api_base = "https://api.morphllm.com/v1"
        model = "morph/apply-v1"

        # Mock the HTTP request to the Morph API
        respx_mock.post(f"{api_base}/chat/completions").respond(
            json={
                "id": "chatcmpl-123",
                "object": "chat.completion",
                "created": 1677652288,
                "model": "apply-v1",
                "choices": [
                    {
                        "index": 0,
                        "message": {
                            "role": "assistant",
                            "content": "```python\nprint(\"Hi from LiteLLM!\")\n```\n\nThis simple Python code prints a greeting message from LiteLLM.",
                        },
                        "finish_reason": "stop",
                    }
                ],
                "usage": {"prompt_tokens": 9, "completion_tokens": 12, "total_tokens": 21},
            },
            status_code=200,
        )

        # Make the actual API call through LiteLLM
        response = completion(
            model=model,
            messages=[{"role": "user", "content": "write code for saying hi from LiteLLM"}],
            api_key=api_key,
            api_base=api_base,
        )

        # Verify response structure
        assert response is not None
        assert hasattr(response, "choices")
        assert len(response.choices) > 0
        assert hasattr(response.choices[0], "message")
        assert hasattr(response.choices[0].message, "content")
        assert response.choices[0].message.content is not None

        # Check for specific content in the response
        assert "```python" in response.choices[0].message.content
        assert "Hi from LiteLLM" in response.choices[0].message.content

    def test_morph_apply_code_updates(self, respx_mock):
        """
        Test Morph's Apply Code Updates functionality which uses special tags
        for code and updates as per https://docs.morphllm.com/api-reference/endpoint/apply
        """
        from litellm import completion

        # Set up test parameters
        api_key = "fake-morph-key"
        api_base = "https://api.morphllm.com/v1"
        model = "morph/apply-v1"

        # Original code and update with Morph's special tags.
        # (Internal indentation of these snippets reconstructed — it does not
        # affect the assertion, which compares against the mocked response.)
        original_code = """def calculate_total(items):
    total = 0
    for item in items:
        total += item.price
    return total"""

        update_snippet = """def calculate_total(items):
    total = 0
    for item in items:
        total += item.price
    return total * 1.1  # Add 10% tax"""

        user_message = f"<code>{original_code}</code>\n<update>{update_snippet}</update>"

        # Expected response after applying the update
        expected_updated_code = """
def calculate_total(items):
    total = 0
    for item in items:
        total += item.price
    return total * 1.1  # Add 10% tax
"""

        # Mock the HTTP request to the Morph API
        respx_mock.post(f"{api_base}/chat/completions").respond(
            json={
                "id": "chatcmpl-123",
                "object": "chat.completion",
                "created": 1677652288,
                "model": "apply-v1",
                "choices": [
                    {
                        "index": 0,
                        "message": {
                            "role": "assistant",
                            "content": expected_updated_code,
                        },
                        "finish_reason": "stop",
                    }
                ],
                "usage": {"prompt_tokens": 25, "completion_tokens": 32, "total_tokens": 57},
            },
            status_code=200,
        )

        # Make the actual API call through LiteLLM
        response = completion(
            model=model,
            messages=[{"role": "user", "content": user_message}],
            api_key=api_key,
            api_base=api_base,
        )

        # Verify response structure
        assert response is not None
        assert hasattr(response, "choices")
        assert len(response.choices) > 0
        assert hasattr(response.choices[0], "message")
        assert hasattr(response.choices[0].message, "content")

        # Check that the response contains the expected updated code
        assert response.choices[0].message.content.strip() == expected_updated_code.strip()
Lines changed: 127 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,127 @@
1+
"""
2+
Unit tests for Morph embedding configuration.
3+
4+
These tests validate the MorphEmbeddingConfig class which extends OpenAILikeEmbeddingHandler.
5+
"""
6+
7+
import os
8+
import sys
9+
from typing import Dict, List, Optional
10+
from unittest.mock import patch
11+
12+
import pytest
13+
14+
sys.path.insert(
15+
0, os.path.abspath("../../../../..")
16+
) # Adds the parent directory to the system path
17+
18+
from litellm.llms.morph.embedding.transformation import MorphEmbeddingConfig
19+
20+
21+
class TestMorphEmbeddingConfig:
    """Unit tests for MorphEmbeddingConfig (OpenAI-compatible embedding config).

    Covers environment validation, provider info resolution, missing-key
    error handling, inheritance, and a mocked end-to-end embedding call
    via the ``respx_mock`` fixture (pytest-respx plugin).
    """

    def test_validate_environment(self):
        """validate_environment must inject Bearer auth and JSON content-type headers."""
        config = MorphEmbeddingConfig()
        headers = {}
        api_key = "fake-morph-key"

        result = config.validate_environment(
            headers=headers,
            model="morph/morph-embedding-v2",
            optional_params={},
            api_key=api_key,
            api_base="https://api.morphllm.com/v1",
        )

        # Verify headers
        assert result["Authorization"] == f"Bearer {api_key}"
        assert result["Content-Type"] == "application/json"

    def test_get_openai_compatible_provider_info(self):
        """With no explicit api_base, the default Morph endpoint must be returned."""
        config = MorphEmbeddingConfig()
        api_key = "fake-morph-key"

        result = config._get_openai_compatible_provider_info(
            api_base=None,
            api_key=api_key,
        )

        # Verify correct API base is returned, and the key passes through unchanged
        assert result[0] == "https://api.morphllm.com/v1"
        assert result[1] == api_key

    def test_missing_api_key(self):
        """A missing API key must raise a ValueError with a descriptive message."""
        config = MorphEmbeddingConfig()

        with pytest.raises(ValueError) as excinfo:
            config.validate_environment(
                headers={},
                model="morph/morph-embedding-v2",
                optional_params={},
                api_key=None,
                api_base="https://api.morphllm.com/v1",
            )

        assert "Morph API key is required" in str(excinfo.value)

    def test_inheritance(self):
        """MorphEmbeddingConfig must subclass OpenAILikeEmbeddingHandler."""
        config = MorphEmbeddingConfig()

        from litellm.llms.openai_like.embedding.handler import OpenAILikeEmbeddingHandler

        assert isinstance(config, OpenAILikeEmbeddingHandler)
        assert hasattr(config, "_get_openai_compatible_provider_info")

    def test_morph_embedding_mock(self, respx_mock):
        """
        Mock test for Morph embeddings API.
        This test mocks the actual HTTP request to test the integration properly.
        """
        # NOTE: `respx_mock` is the pytest-respx fixture; no direct `respx`
        # import is needed (the previous unused `import respx` was removed).
        from litellm import embedding

        # Set up test parameters
        api_key = "fake-morph-key"
        api_base = "https://api.morphllm.com/v1"
        model = "morph/morph-embedding-v2"

        # Mock the HTTP request to the Morph API
        respx_mock.post(f"{api_base}/embeddings").respond(
            json={
                "object": "list",
                "data": [
                    {
                        "object": "embedding",
                        "embedding": [0.1, 0.2, 0.3, 0.4, 0.5],
                        "index": 0,
                    }
                ],
                "model": "morph-embedding-v2",
                "usage": {
                    "prompt_tokens": 5,
                    "total_tokens": 5,
                },
            },
            status_code=200,
        )

        # Make the actual API call through LiteLLM
        response = embedding(
            model=model,
            input="Hello world",
            api_key=api_key,
            api_base=api_base,
        )

        # Verify response structure
        assert response is not None
        assert hasattr(response, "data")
        assert len(response.data) > 0
        assert hasattr(response.data[0], "embedding")
        assert isinstance(response.data[0].embedding, list)
        assert len(response.data[0].embedding) == 5

0 commit comments

Comments
 (0)