Skip to content

Commit 9e46aba

Browse files
AssemblyAI
mattgefen-aai authored and committed
Project import generated by Copybara.
GitOrigin-RevId: 2a553450940d7cb88913b0ebed9ca310eb254c8b
1 parent 909703f commit 9e46aba

File tree

3 files changed

+332
-15
lines changed

3 files changed

+332
-15
lines changed

assemblyai/types.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -525,6 +525,7 @@ def validate_max_speakers(cls, v, info):
525525
"max_speakers_expected must be greater than or equal to min_speakers_expected"
526526
)
527527
return v
528+
528529
else:
529530

530531
@validator("max_speakers_expected")
@@ -1607,6 +1608,7 @@ class Word(BaseModel):
16071608
@field_validator("start", mode="before")
16081609
def set_start_default(cls, v):
16091610
return 0 if v is None else v
1611+
16101612
else:
16111613

16121614
@validator("start", pre=True)
@@ -2317,13 +2319,30 @@ class LemurUsage(BaseModel):
23172319
"The number of output tokens generated by the model"
23182320

23192321

2322+
class LemurRequestDetails(BaseModel):
    """Details of the original request a LeMUR response was generated from.

    Populated only when a response is fetched via the retrieval (GET)
    endpoint; fields that do not apply to the original request's endpoint
    are left as ``None``.
    """

    request_endpoint: str
    "The LeMUR endpoint the original request was made to"

    temperature: float
    "The temperature value used for the model"

    final_model: str
    "The final model used for the request"

    max_output_size: int
    "The maximum output size requested"

    created_at: datetime
    "When the request was created"

    transcript_ids: Optional[List[str]] = None
    "The transcript IDs passed in the request, if any"

    input_text: Optional[str] = None
    "The custom input text passed in the request, if any"

    questions: Optional[List[LemurQuestion]] = None
    "The questions passed in the request, if any"

    prompt: Optional[str] = None
    "The prompt passed in the request, if any"

    context: Optional[Union[dict, str]] = None
    "The context passed in the request, if any"

    answer_format: Optional[str] = None
    "The answer format passed in the request, if any"
2334+
2335+
23202336
class BaseLemurResponse(BaseModel):
    """Common fields shared by every LeMUR response type."""

    request_id: str
    "The unique identifier of your LeMUR request"

    usage: LemurUsage
    "The usage numbers for the LeMUR request"

    request: Optional[LemurRequestDetails] = None
    "The request details the user passed into the POST request. Optional since this only exists on the GET request."
2345+
23272346

23282347
class LemurStringResponse(BaseLemurResponse):
23292348
"""

tests/unit/factories.py

Lines changed: 80 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -246,6 +246,49 @@ class Meta:
246246
)
247247

248248

249+
class LemurRequestDetails(factory.Factory):
    """Base factory for the request-details payload returned by the LeMUR GET endpoint.

    Provides only the fields common to every LeMUR endpoint; endpoint-specific
    subclasses add their own fields.
    """

    class Meta:
        model = types.LemurRequestDetails

    request_endpoint = factory.Faker("text")
    temperature = factory.Faker("pyfloat")
    final_model = factory.Faker("text")
    max_output_size = factory.Faker("pyint")
    # NOTE(review): "date" yields an ISO date string; the model field is a
    # datetime, so this relies on pydantic coercion — confirm intended.
    created_at = factory.Faker("date")
258+
259+
260+
class LemurTaskRequestDetails(LemurRequestDetails):
    """Request details specific to LeMUR task operations."""

    request_endpoint = "/lemur/v3/task"
    prompt = factory.Faker("text")
265+
266+
267+
class LemurSummaryRequestDetails(LemurRequestDetails):
    """Request details specific to LeMUR summary operations."""

    request_endpoint = "/lemur/v3/summary"
    # LazyFunction so each built instance gets its own dict.
    context = factory.LazyFunction(lambda: {"key": "value"})
    answer_format = factory.Faker("sentence")
273+
274+
275+
class LemurQuestionRequestDetails(LemurRequestDetails):
    """Request details specific to LeMUR question-answer operations."""

    request_endpoint = "/lemur/v3/question-answer"
    # Two representative question shapes: one with answer_format/context,
    # one with answer_options.
    questions = [
        {
            "question": "What is the main topic?",
            "answer_format": "short sentence",
            "context": "Meeting context",
        },
        {
            "question": "What is the sentiment?",
            "answer_options": ["positive", "negative", "neutral"],
        },
    ]
290+
291+
249292
class LemurUsage(factory.Factory):
250293
class Meta:
251294
model = types.LemurUsage
@@ -310,6 +353,43 @@ class Meta:
310353
request_id = factory.Faker("uuid4")
311354
usage = factory.SubFactory(LemurUsage)
312355
response = factory.Faker("text")
356+
request = factory.SubFactory(LemurRequestDetails)
357+
358+
359+
# Factories specifically for get_response endpoint tests (include request field)
360+
class LemurTaskResponseWithRequest(factory.Factory):
    """Task response factory for get_response endpoint tests (includes the request field)."""

    class Meta:
        model = types.LemurTaskResponse

    request_id = factory.Faker("uuid4")
    usage = factory.SubFactory(LemurUsage)
    response = factory.Faker("text")
    request = factory.SubFactory(LemurTaskRequestDetails)
368+
369+
370+
class LemurSummaryResponseWithRequest(factory.Factory):
    """Summary response factory for get_response endpoint tests (includes the request field)."""

    class Meta:
        model = types.LemurSummaryResponse

    request_id = factory.Faker("uuid4")
    usage = factory.SubFactory(LemurUsage)
    response = factory.Faker("text")
    request = factory.SubFactory(LemurSummaryRequestDetails)
378+
379+
380+
class LemurQuestionResponseWithRequest(factory.Factory):
    """Question-answer response factory for get_response endpoint tests (includes the request field)."""

    class Meta:
        model = types.LemurQuestionResponse

    request_id = factory.Faker("uuid4")
    usage = factory.SubFactory(LemurUsage)
    # Two generated answers per response.
    response = factory.List(
        [
            factory.SubFactory(LemurQuestionAnswer),
            factory.SubFactory(LemurQuestionAnswer),
        ]
    )
    request = factory.SubFactory(LemurQuestionRequestDetails)
313393

314394

315395
class LemurPurgeResponse(factory.Factory):

0 commit comments

Comments (0)