Commit a1b8729

Author: Stainless Bot

feat: add tracer for Mistral AI

1 parent 24230df · commit a1b8729

File tree: 3 files changed (+479, -6 lines)
examples/tracing/mistral/mistral_tracing.ipynb

Lines changed: 154 additions & 0 deletions
@@ -0,0 +1,154 @@
{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "2722b419",
   "metadata": {},
   "source": [
    "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openlayer-ai/openlayer-python/blob/main/examples/tracing/mistral/mistral_tracing.ipynb)\n",
    "\n",
    "\n",
    "# <a id=\"top\">Mistral AI tracing</a>\n",
    "\n",
    "This notebook illustrates how to get started tracing Mistral LLMs with Openlayer."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "020c8f6a",
   "metadata": {},
   "outputs": [],
   "source": [
    "!pip install mistralai openlayer"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "75c2a473",
   "metadata": {},
   "source": [
    "## 1. Set the environment variables"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f3f4fa13",
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "\n",
    "# Openlayer env variables\n",
    "os.environ[\"OPENLAYER_API_KEY\"] = \"YOUR_OPENLAYER_API_KEY_HERE\"\n",
    "os.environ[\"OPENLAYER_INFERENCE_PIPELINE_ID\"] = \"YOUR_OPENLAYER_INFERENCE_PIPELINE_ID_HERE\""
   ]
  },
  {
   "cell_type": "markdown",
   "id": "9758533f",
   "metadata": {},
   "source": [
    "## 2. Import the `trace_mistral` function"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c35d9860-dc41-4f7c-8d69-cc2ac7e5e485",
   "metadata": {},
   "outputs": [],
   "source": [
    "import mistralai\n",
    "from openlayer.lib import trace_mistral\n",
    "\n",
    "mistral_client = trace_mistral(mistralai.Mistral(api_key=\"YOUR_MISTRAL_AI_API_KEY_HERE\"))"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "72a6b954",
   "metadata": {},
   "source": [
    "## 3. Use the traced Mistral AI client normally"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "76a350b4",
   "metadata": {},
   "source": [
    "That's it! Now you can continue using the traced Mistral AI client normally. The data is automatically published to Openlayer and you can start creating tests around it!"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e00c1c79",
   "metadata": {},
   "outputs": [],
   "source": [
    "response = mistral_client.chat.complete(\n",
    "    model=\"mistral-large-latest\",\n",
    "    messages=[\n",
    "        {\n",
    "            \"role\": \"user\",\n",
    "            \"content\": \"What is the best French cheese?\",\n",
    "        },\n",
    "    ]\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d5093b5b-539c-4119-b5d3-dda6524edaa9",
   "metadata": {},
   "outputs": [],
   "source": [
    "stream_response = mistral_client.chat.stream(\n",
    "    model=\"mistral-large-latest\",\n",
    "    messages=[\n",
    "        {\n",
    "            \"role\": \"user\",\n",
    "            \"content\": \"What's the meaning of life?\",\n",
    "        },\n",
    "    ]\n",
    ")\n",
    "\n",
    "for chunk in stream_response:\n",
    "    print(chunk.data.choices[0].delta.content)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2654f47f-fadd-4142-b185-4d992a30c46a",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.19"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
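
For readers who would rather not pick the workflow out of the notebook JSON above, the same flow condenses to a short plain-Python script. This simply restates the notebook cells (same placeholder credentials); run `pip install mistralai openlayer` first:

```python
import os

import mistralai
from openlayer.lib import trace_mistral

# Openlayer credentials (placeholders, as in the notebook).
os.environ["OPENLAYER_API_KEY"] = "YOUR_OPENLAYER_API_KEY_HERE"
os.environ["OPENLAYER_INFERENCE_PIPELINE_ID"] = "YOUR_OPENLAYER_INFERENCE_PIPELINE_ID_HERE"

# Wrap the Mistral client so its calls are traced and published to Openlayer.
mistral_client = trace_mistral(mistralai.Mistral(api_key="YOUR_MISTRAL_AI_API_KEY_HERE"))

# Blocking chat completion -- traced automatically.
response = mistral_client.chat.complete(
    model="mistral-large-latest",
    messages=[{"role": "user", "content": "What is the best French cheese?"}],
)

# Streaming chat completion -- also traced.
stream_response = mistral_client.chat.stream(
    model="mistral-large-latest",
    messages=[{"role": "user", "content": "What's the meaning of life?"}],
)
for chunk in stream_response:
    print(chunk.data.choices[0].delta.content)
```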

src/openlayer/lib/__init__.py

Lines changed: 13 additions & 6 deletions
@@ -1,12 +1,7 @@
 """Openlayer lib.
 """
 
-__all__ = [
-    "trace",
-    "trace_anthropic",
-    "trace_openai",
-    "trace_openai_assistant_thread_run",
-]
+__all__ = ["trace", "trace_anthropic", "trace_openai", "trace_openai_assistant_thread_run", "trace_mistral"]
 
 # ---------------------------------- Tracing --------------------------------- #
 from .tracing import tracer
@@ -44,3 +39,15 @@ def trace_openai_assistant_thread_run(client, run):
     from .integrations import openai_tracer
 
     return openai_tracer.trace_openai_assistant_thread_run(client, run)
+
+
+def trace_mistral(client):
+    """Trace Mistral chat completions."""
+    # pylint: disable=import-outside-toplevel
+    import mistralai
+
+    from .integrations import mistral_tracer
+
+    if not isinstance(client, mistralai.Mistral):
+        raise ValueError("Invalid client. Please provide a Mistral client.")
+    return mistral_tracer.trace_mistral(client)
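
The `mistral_tracer` module that `trace_mistral` delegates to is presumably the third file changed in this commit, but it is not shown in this view. Purely as an illustration of the general shape such a wrapper takes (not the library's actual code), a client-patching tracer can wrap `chat.complete` so each call is timed, forwarded unchanged, and its inputs and output handed to a publishing hook; `publish_trace` and `traced` below are hypothetical names introduced only for this sketch:

```python
# Illustrative sketch only -- NOT the actual openlayer.lib.integrations.mistral_tracer code.
import time
from typing import Any, Callable, Dict


def publish_trace(record: Dict[str, Any]) -> None:
    """Hypothetical stand-in for sending a trace record to Openlayer."""
    print("trace:", record)


def traced(complete_fn: Callable[..., Any]) -> Callable[..., Any]:
    """Wrap a chat-completion callable so every call is recorded."""

    def wrapper(*args: Any, **kwargs: Any) -> Any:
        start = time.time()
        response = complete_fn(*args, **kwargs)
        publish_trace(
            {
                "model": kwargs.get("model"),
                "messages": kwargs.get("messages"),
                "output": response.choices[0].message.content,
                "latency_s": round(time.time() - start, 3),
            }
        )
        return response

    return wrapper


# Conceptually, after validating the client type, the tracer does something like:
# client.chat.complete = traced(client.chat.complete)
```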
