@@ -109,12 +109,14 @@ def test_stream(openai_client, model):
 
 def test_stream_empty(openai_client, model):
     mock_delta = unittest.mock.Mock(content=None, tool_calls=None)
+    mock_usage = unittest.mock.Mock(prompt_tokens=0, completion_tokens=0, total_tokens=0)
 
     mock_event_1 = unittest.mock.Mock(choices=[unittest.mock.Mock(finish_reason=None, delta=mock_delta)])
     mock_event_2 = unittest.mock.Mock(choices=[unittest.mock.Mock(finish_reason="stop", delta=mock_delta)])
-    mock_event_3 = unittest.mock.Mock(spec=[])
+    mock_event_3 = unittest.mock.Mock()
+    mock_event_4 = unittest.mock.Mock(usage=mock_usage)
 
-    openai_client.chat.completions.create.return_value = iter([mock_event_1, mock_event_2, mock_event_3])
+    openai_client.chat.completions.create.return_value = iter([mock_event_1, mock_event_2, mock_event_3, mock_event_4])
 
     request = {"model": "m1", "messages": [{"role": "user", "content": []}]}
     response = model.stream(request)
@@ -125,6 +127,7 @@ def test_stream_empty(openai_client, model):
         {"chunk_type": "content_start", "data_type": "text"},
         {"chunk_type": "content_stop", "data_type": "text"},
         {"chunk_type": "message_stop", "data": "stop"},
+        {"chunk_type": "metadata", "data": mock_usage},
     ]
 
     assert tru_events == exp_events
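
For reference, the behavioral difference behind the mock_event_3 change (spec=[] replaced by a plain Mock), shown as a minimal standalone illustration of unittest.mock semantics, not part of the test file:

import unittest.mock

# Mock(spec=[]) limits the mock to the attribute set of an empty list, so
# accessing stream-event attributes such as .choices or .usage raises
# AttributeError.
strict_event = unittest.mock.Mock(spec=[])
try:
    strict_event.usage
except AttributeError:
    pass  # raised: "usage" is not an attribute of the spec object

# A plain Mock() creates child mocks on attribute access, so .usage is simply
# another Mock and never raises, which lets the event flow through the stream
# loop without special handling.
loose_event = unittest.mock.Mock()
assert isinstance(loose_event.usage, unittest.mock.Mock)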