"""
Unit tests for Morph configuration.

These tests validate the MorphChatConfig class, which extends OpenAILikeChatConfig.
Morph is an OpenAI-compatible provider with a few customizations (a fixed default
API base of https://api.morphllm.com/v1 and a required API key).
"""

import os
import sys
from typing import Dict, List, Optional
from unittest.mock import patch

import pytest

sys.path.insert(
    0, os.path.abspath("../../../../..")
)  # Adds the parent directory to the system path

from litellm.llms.morph.chat.transformation import MorphChatConfig


class TestMorphChatConfig:
    """Test class for MorphChatConfig functionality"""

    def test_validate_environment(self):
        """Test that validate_environment adds the correct headers"""
        config = MorphChatConfig()
        headers = {}
        api_key = "fake-morph-key"

        result = config.validate_environment(
            headers=headers,
            model="morph/apply-v1",
            messages=[{"role": "user", "content": "Hello"}],
            optional_params={},
            litellm_params={},
            api_key=api_key,
            api_base="https://api.morphllm.com/v1",
        )

        # Verify headers
        assert result["Authorization"] == f"Bearer {api_key}"
        assert result["Content-Type"] == "application/json"

    def test_get_openai_compatible_provider_info(self):
        """Test the _get_openai_compatible_provider_info method"""
        config = MorphChatConfig()
        api_key = "fake-morph-key"

        result = config._get_openai_compatible_provider_info(
            api_base=None,
            api_key=api_key,
        )

        # Verify the correct API base and key are returned
        assert result[0] == "https://api.morphllm.com/v1"
        assert result[1] == api_key

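    # Companion sketch (assumption, not confirmed by this suite): OpenAI-compatible
    # configs in LiteLLM typically fall back to a provider-specific environment
    # variable when no key is passed explicitly. This test assumes MorphChatConfig
    # reads MORPH_API_KEY the same way; drop or adjust it if the implementation
    # differs.
    def test_api_key_from_environment(self):
        """Sketch: API key falls back to MORPH_API_KEY when none is passed"""
        config = MorphChatConfig()

        with patch.dict(os.environ, {"MORPH_API_KEY": "env-morph-key"}):
            _, resolved_key = config._get_openai_compatible_provider_info(
                api_base=None,
                api_key=None,
            )

        assert resolved_key == "env-morph-key"
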
    def test_missing_api_key(self):
        """Test error handling when API key is missing"""
        config = MorphChatConfig()

        with pytest.raises(ValueError) as excinfo:
            config.validate_environment(
                headers={},
                model="morph/apply-v1",
                messages=[{"role": "user", "content": "Hello"}],
                optional_params={},
                litellm_params={},
                api_key=None,
                api_base="https://api.morphllm.com/v1",
            )

        assert "Morph API key is required" in str(excinfo.value)

    def test_inheritance(self):
        """Test proper inheritance from OpenAILikeChatConfig"""
        config = MorphChatConfig()

        from litellm.llms.openai_like.chat.transformation import OpenAILikeChatConfig

        assert isinstance(config, OpenAILikeChatConfig)
        assert hasattr(config, "_get_openai_compatible_provider_info")

    def test_morph_completion_mock(self, respx_mock):
        """
        Mock test for Morph completion using the model format from docs.
        This test mocks the actual HTTP request to test the integration properly.
        """
        import respx
        from litellm import completion

        # Set up test credentials and endpoint
        api_key = "fake-morph-key"
        api_base = "https://api.morphllm.com/v1"
        model = "morph/apply-v1"

        # Mock the HTTP request to the Morph API
        respx_mock.post(f"{api_base}/chat/completions").respond(
            json={
                "id": "chatcmpl-123",
                "object": "chat.completion",
                "created": 1677652288,
                "model": "apply-v1",
                "choices": [
                    {
                        "index": 0,
                        "message": {
                            "role": "assistant",
                            "content": "```python\nprint(\"Hi from LiteLLM!\")\n```\n\nThis simple Python code prints a greeting message from LiteLLM.",
                        },
                        "finish_reason": "stop",
                    }
                ],
                "usage": {"prompt_tokens": 9, "completion_tokens": 12, "total_tokens": 21},
            },
            status_code=200,
        )

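        # respx intercepts the underlying httpx client that LiteLLM uses for this
        # request, so the completion call below is served by the mocked route above
        # and never reaches the real Morph API.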
        # Make the actual API call through LiteLLM
        response = completion(
            model=model,
            messages=[{"role": "user", "content": "write code for saying hi from LiteLLM"}],
            api_key=api_key,
            api_base=api_base,
        )

        # Verify response structure
        assert response is not None
        assert hasattr(response, "choices")
        assert len(response.choices) > 0
        assert hasattr(response.choices[0], "message")
        assert hasattr(response.choices[0].message, "content")
        assert response.choices[0].message.content is not None

        # Check for specific content in the response
        assert "```python" in response.choices[0].message.content
        assert "Hi from LiteLLM" in response.choices[0].message.content

    def test_morph_apply_code_updates(self, respx_mock):
        """
        Test Morph's Apply Code Updates functionality, which uses special
        <code> and <update> tags, as per
        https://docs.morphllm.com/api-reference/endpoint/apply
        """
        import respx
        from litellm import completion

        # Set up test credentials and endpoint
        api_key = "fake-morph-key"
        api_base = "https://api.morphllm.com/v1"
        model = "morph/apply-v1"

        # Original code and update with Morph's special tags
        original_code = """def calculate_total(items):
    total = 0
    for item in items:
        total += item.price
    return total"""

        update_snippet = """def calculate_total(items):
    total = 0
    for item in items:
        total += item.price
    return total * 1.1  # Add 10% tax"""

        user_message = f"<code>{original_code}</code>\n<update>{update_snippet}</update>"

        # Expected response after applying the update
        expected_updated_code = """
def calculate_total(items):
    total = 0
    for item in items:
        total += item.price
    return total * 1.1  # Add 10% tax
"""

        # Mock the HTTP request to the Morph API
        respx_mock.post(f"{api_base}/chat/completions").respond(
            json={
                "id": "chatcmpl-123",
                "object": "chat.completion",
                "created": 1677652288,
                "model": "apply-v1",
                "choices": [
                    {
                        "index": 0,
                        "message": {
                            "role": "assistant",
                            "content": expected_updated_code,
                        },
                        "finish_reason": "stop",
                    }
                ],
                "usage": {"prompt_tokens": 25, "completion_tokens": 32, "total_tokens": 57},
            },
            status_code=200,
        )

        # Make the actual API call through LiteLLM
        response = completion(
            model=model,
            messages=[{"role": "user", "content": user_message}],
            api_key=api_key,
            api_base=api_base,
        )

        # Verify response structure
        assert response is not None
        assert hasattr(response, "choices")
        assert len(response.choices) > 0
        assert hasattr(response.choices[0], "message")
        assert hasattr(response.choices[0].message, "content")

        # Check that the response contains the expected updated code
        assert response.choices[0].message.content.strip() == expected_updated_code.strip()