13 changes: 9 additions & 4 deletions examples/async_structured_outputs.py
@@ -5,9 +5,10 @@
from pydantic import BaseModel

from mistralai import Mistral
from typing import List

async def main():

async def main():
api_key = os.environ["MISTRAL_API_KEY"]
client = Mistral(api_key=api_key)

@@ -16,18 +17,22 @@ class Explanation(BaseModel):
output: str

class MathDemonstration(BaseModel):
steps: list[Explanation]
steps: List[Explanation]
final_answer: str

chat_response = await client.chat.parse_async(
model="mistral-large-2411",
messages=[
{"role": "system", "content": "You are a helpful math tutor. You will be provided with a math problem, and your goal will be to output a step by step solution, along with a final answer. For each step, just provide the output as an equation use the explanation field to detail the reasoning."},
{
"role": "system",
"content": "You are a helpful math tutor. You will be provided with a math problem, and your goal will be to output a step by step solution, along with a final answer. For each step, just provide the output as an equation use the explanation field to detail the reasoning.",
},
{"role": "user", "content": "How can I solve 8x + 7 = -23"},
],
response_format = MathDemonstration
response_format=MathDemonstration,
)
print(chat_response.choices[0].message.parsed)


if __name__ == "__main__":
asyncio.run(main())
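
Note on the typing change above: both example files swap the builtin generic steps: list[Explanation] for steps: List[Explanation] from the typing module, presumably so the examples still import on Python 3.8, where subscripting the builtin list raises a TypeError. A minimal sketch of the resulting models is below; the explanation field is assumed from the system prompt in the example (only output and final_answer are visible in the hunk):

    from typing import List

    from pydantic import BaseModel


    class Explanation(BaseModel):
        explanation: str  # reasoning for the step (field name assumed from the system prompt)
        output: str       # the step written as an equation


    class MathDemonstration(BaseModel):
        steps: List[Explanation]  # List[...] works on Python 3.8; list[...] requires 3.9+
        final_answer: str
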
26 changes: 19 additions & 7 deletions examples/structured_outputs.py
@@ -5,6 +5,9 @@

from mistralai import Mistral

from typing import List


def main():
api_key = os.environ["MISTRAL_API_KEY"]
client = Mistral(api_key=api_key)
@@ -14,32 +17,41 @@ class Explanation(BaseModel):
output: str

class MathDemonstration(BaseModel):
steps: list[Explanation]
steps: List[Explanation]
final_answer: str

print("Using the .parse method to parse the response into a Pydantic model:\n")
chat_response = client.chat.parse(
model="mistral-large-latest",
messages=[
{"role": "system", "content": "You are a helpful math tutor. You will be provided with a math problem, and your goal will be to output a step by step solution, along with a final answer. For each step, just provide the output as an equation use the explanation field to detail the reasoning."},
{
"role": "system",
"content": "You are a helpful math tutor. You will be provided with a math problem, and your goal will be to output a step by step solution, along with a final answer. For each step, just provide the output as an equation use the explanation field to detail the reasoning.",
},
{"role": "user", "content": "How can I solve 8x + 7 = -23"},
],
response_format = MathDemonstration
response_format=MathDemonstration,
)
print(chat_response.choices[0].message.parsed)

# Or with the streaming API
print("\nUsing the .parse_stream method to stream back the response into a JSON Schema:\n")
print(
"\nUsing the .parse_stream method to stream back the response into a JSON Schema:\n"
)
with client.chat.parse_stream(
model="mistral-large-latest",
messages=[
{"role": "system", "content": "You are a helpful math tutor. You will be provided with a math problem, and your goal will be to output a step by step solution, along with a final answer. For each step, just provide the output as an equation use the explanation field to detail the reasoning."},
{
"role": "system",
"content": "You are a helpful math tutor. You will be provided with a math problem, and your goal will be to output a step by step solution, along with a final answer. For each step, just provide the output as an equation use the explanation field to detail the reasoning.",
},
{"role": "user", "content": "How can I solve 8x + 7 = -23"},
],
response_format=MathDemonstration
response_format=MathDemonstration,
) as stream:
for chunk in stream:
print(chunk.data.choices[0].delta.content, end="")


if __name__ == "__main__":
main()
main()
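
For context, a hedged sketch of how the parsed result from these examples can be consumed. The call signature (model, messages, response_format) and the chat_response.choices[0].message.parsed access come straight from the diff; the loop below simply assumes parsed is a MathDemonstration instance as defined in the example and that the models are in scope:

    import os

    from mistralai import Mistral

    # Sketch only: requires MISTRAL_API_KEY in the environment and the
    # Explanation / MathDemonstration models from the example above.
    client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

    chat_response = client.chat.parse(
        model="mistral-large-latest",
        messages=[{"role": "user", "content": "How can I solve 8x + 7 = -23"}],
        response_format=MathDemonstration,
    )

    demo = chat_response.choices[0].message.parsed  # a MathDemonstration instance
    for step in demo.steps:
        print(f"{step.output}    # {step.explanation}")
    print("Final answer:", demo.final_answer)

The streaming variant works the same way, except the structured output arrives incrementally through chunk.data.choices[0].delta.content, as shown in the parse_stream block of the diff.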