
Commit 7970e82

Add Azure Provider (#1091)
1 parent bee95f5 commit 7970e82

12 files changed: +319 -13 lines changed


docs/api/providers.md

+2
@@ -11,3 +11,5 @@
 ::: pydantic_ai.providers.bedrock
 
 ::: pydantic_ai.providers.groq
+
+::: pydantic_ai.providers.azure

docs/models.md

+22
@@ -806,6 +806,28 @@ Usage(requests=1, request_tokens=57, response_tokens=8, total_tokens=65, details
 1. The name of the model running on the remote server
 2. The url of the remote server
 
+### Azure AI Foundry
+
+If you want to use [Azure AI Foundry](https://ai.azure.com/) as your provider, you can do so by using the
+[`AzureProvider`][pydantic_ai.providers.azure.AzureProvider] class.
+
+```python {title="azure_provider_example.py"}
+from pydantic_ai import Agent
+from pydantic_ai.models.openai import OpenAIModel
+from pydantic_ai.providers.azure import AzureProvider
+
+model = OpenAIModel(
+    'gpt-4o',
+    provider=AzureProvider(
+        azure_endpoint='your-azure-endpoint',
+        api_version='your-api-version',
+        api_key='your-api-key',
+    ),
+)
+agent = Agent(model)
+...
+```
+
 ### OpenRouter
 
 To use [OpenRouter](https://openrouter.ai), first create an API key at [openrouter.ai/keys](https://openrouter.ai/keys).
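The `AzureProvider` docstring added later in this commit documents environment-variable fallbacks for `azure_endpoint` (`AZURE_OPENAI_ENDPOINT`) and `api_version` (`OPENAI_API_VERSION`). A sketch relying on that fallback, with placeholder values that are not part of the committed docs:

```python
import os

from pydantic_ai import Agent
from pydantic_ai.models.openai import OpenAIModel
from pydantic_ai.providers.azure import AzureProvider

# Placeholder values; in practice these would be set outside the program.
os.environ['AZURE_OPENAI_ENDPOINT'] = 'https://your-resource.openai.azure.com'
os.environ['OPENAI_API_VERSION'] = '2024-12-01-preview'

# azure_endpoint and api_version fall back to the environment variables above;
# the API key is still passed explicitly.
model = OpenAIModel('gpt-4o', provider=AzureProvider(api_key='your-api-key'))
agent = Agent(model)
```

Passing `api_key` explicitly avoids depending on which API-key environment variable the constructor's validation checks.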

pydantic_ai_slim/pydantic_ai/models/openai.py

+2 -2

@@ -106,7 +106,7 @@ def __init__(
         self,
         model_name: OpenAIModelName,
         *,
-        provider: Literal['openai', 'deepseek'] | Provider[AsyncOpenAI] = 'openai',
+        provider: Literal['openai', 'deepseek', 'azure'] | Provider[AsyncOpenAI] = 'openai',
         system_prompt_role: OpenAISystemPromptRole | None = None,
         system: str | None = 'openai',
     ) -> None: ...
@@ -130,7 +130,7 @@ def __init__(
         self,
         model_name: OpenAIModelName,
         *,
-        provider: Literal['openai', 'deepseek'] | Provider[AsyncOpenAI] | None = None,
+        provider: Literal['openai', 'deepseek', 'azure'] | Provider[AsyncOpenAI] | None = None,
         base_url: str | None = None,
         api_key: str | None = None,
         openai_client: AsyncOpenAI | None = None,
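With `'azure'` added to the `Literal`, the provider can presumably also be selected by name. A minimal sketch, assuming the string resolves to an `AzureProvider` configured entirely from environment variables (endpoint, API key, and API version must already be set):

```python
from pydantic_ai import Agent
from pydantic_ai.models.openai import OpenAIModel

# Assumption: the 'azure' provider name is resolved to AzureProvider, which
# reads its endpoint, API key, and API version from the environment when no
# explicit arguments are given.
model = OpenAIModel('gpt-4o', provider='azure')
agent = Agent(model)
```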
pydantic_ai_slim/pydantic_ai/providers/azure.py

+108
@@ -0,0 +1,108 @@
+from __future__ import annotations as _annotations
+
+import os
+from typing import overload
+
+import httpx
+from openai import AsyncOpenAI
+
+from pydantic_ai.models import cached_async_http_client
+
+try:
+    from openai import AsyncAzureOpenAI
+except ImportError as _import_error:  # pragma: no cover
+    raise ImportError(
+        'Please install the `openai` package to use the Azure provider, '
+        "you can use the `openai` optional group — `pip install 'pydantic-ai-slim[openai]'`"
+    ) from _import_error
+
+
+from . import Provider
+
+
+class AzureProvider(Provider[AsyncOpenAI]):
+    """Provider for Azure OpenAI API.
+
+    See <https://azure.microsoft.com/en-us/products/ai-foundry> for more information.
+    """
+
+    @property
+    def name(self) -> str:
+        return 'azure'
+
+    @property
+    def base_url(self) -> str:
+        assert self._base_url is not None
+        return self._base_url
+
+    @property
+    def client(self) -> AsyncOpenAI:
+        return self._client
+
+    @overload
+    def __init__(self, *, openai_client: AsyncAzureOpenAI) -> None: ...
+
+    @overload
+    def __init__(
+        self,
+        *,
+        azure_endpoint: str | None = None,
+        api_version: str | None = None,
+        api_key: str | None = None,
+        http_client: httpx.AsyncClient | None = None,
+    ) -> None: ...
+
+    def __init__(
+        self,
+        *,
+        azure_endpoint: str | None = None,
+        api_version: str | None = None,
+        api_key: str | None = None,
+        openai_client: AsyncAzureOpenAI | None = None,
+        http_client: httpx.AsyncClient | None = None,
+    ) -> None:
+        """Create a new Azure provider.
+
+        Args:
+            azure_endpoint: The Azure endpoint to use for authentication, if not provided, the `AZURE_OPENAI_ENDPOINT`
+                environment variable will be used if available.
+            api_version: The API version to use for authentication, if not provided, the `OPENAI_API_VERSION`
+                environment variable will be used if available.
+            api_key: The API key to use for authentication, if not provided, the `AZURE_OPENAI_API_KEY` environment variable
+                will be used if available.
+            openai_client: An existing
+                [`AsyncAzureOpenAI`](https://github.com/openai/openai-python#microsoft-azure-openai)
+                client to use. If provided, `base_url`, `api_key`, and `http_client` must be `None`.
+            http_client: An existing `httpx.AsyncClient` to use for making HTTP requests.
+        """
+        if openai_client is not None:
+            assert azure_endpoint is None, 'Cannot provide both `openai_client` and `azure_endpoint`'
+            assert http_client is None, 'Cannot provide both `openai_client` and `http_client`'
+            assert api_key is None, 'Cannot provide both `openai_client` and `api_key`'
+            self._base_url = str(openai_client.base_url)
+            self._client = openai_client
+        else:
+            azure_endpoint = azure_endpoint or os.getenv('AZURE_OPENAI_ENDPOINT')
+            if azure_endpoint is None:  # pragma: no cover
+                raise ValueError(
+                    'Must provide one of the `azure_endpoint` argument or the `AZURE_OPENAI_ENDPOINT` environment variable'
+                )
+
+            if api_key is None and 'OPENAI_API_KEY' not in os.environ:  # pragma: no cover
+                raise ValueError(
+                    'Must provide one of the `api_key` argument or the `OPENAI_API_KEY` environment variable'
+                )
+
+            if api_version is None and 'OPENAI_API_VERSION' not in os.environ:  # pragma: no cover
+                raise ValueError(
+                    'Must provide one of the `api_version` argument or the `OPENAI_API_VERSION` environment variable'
+                )
+
+            http_client = http_client or cached_async_http_client()
+            self._client = AsyncAzureOpenAI(
+                azure_endpoint=azure_endpoint,
+                api_key=api_key,
+                api_version=api_version,
+                http_client=http_client,
+            )
+            self._base_url = str(self._client.base_url)
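The first `__init__` overload accepts an existing client instead of endpoint/version/key arguments. A short usage sketch; the endpoint, version, and key below are placeholders, not values from this commit:

```python
from openai import AsyncAzureOpenAI

from pydantic_ai import Agent
from pydantic_ai.models.openai import OpenAIModel
from pydantic_ai.providers.azure import AzureProvider

# Placeholder endpoint, version, and key; substitute your own Azure OpenAI resource.
client = AsyncAzureOpenAI(
    azure_endpoint='https://your-resource.openai.azure.com',
    api_version='2024-12-01-preview',
    api_key='your-api-key',
)

# Reuse the preconfigured client via the openai_client overload.
model = OpenAIModel('gpt-4o', provider=AzureProvider(openai_client=client))
agent = Agent(model)
```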

pydantic_ai_slim/pydantic_ai/providers/bedrock.py

+1 -1

@@ -10,7 +10,7 @@
     from botocore.exceptions import NoRegionError
 except ImportError as _import_error:
     raise ImportError(
-        'Please install `boto3` to use the Bedrock provider, '
+        'Please install the `boto3` package to use the Bedrock provider, '
         "you can use the `bedrock` optional group — `pip install 'pydantic-ai-slim[bedrock]'`"
     ) from _import_error
 

pydantic_ai_slim/pydantic_ai/providers/deepseek.py

+1 -1

@@ -12,7 +12,7 @@
     from openai import AsyncOpenAI
 except ImportError as _import_error:  # pragma: no cover
     raise ImportError(
-        'Please install `openai` to use the DeepSeek provider, '
+        'Please install the `openai` package to use the DeepSeek provider, '
         "you can use the `openai` optional group — `pip install 'pydantic-ai-slim[openai]'`"
     ) from _import_error
 
1818

pydantic_ai_slim/pydantic_ai/providers/google_vertex.py

+1 -1

@@ -21,7 +21,7 @@
     from google.oauth2.service_account import Credentials as ServiceAccountCredentials
 except ImportError as _import_error:
     raise ImportError(
-        'Please install `google-auth` to use the Google Vertex AI provider, '
+        'Please install the `google-auth` package to use the Google Vertex AI provider, '
         "you can use the `vertexai` optional group — `pip install 'pydantic-ai-slim[vertexai]'`"
     ) from _import_error
 

pydantic_ai_slim/pydantic_ai/providers/groq.py

+1 -3

@@ -11,7 +11,7 @@
     from groq import AsyncGroq
 except ImportError as _import_error:  # pragma: no cover
     raise ImportError(
-        'Please install `groq` to use the Groq provider, '
+        'Please install the `groq` package to use the Groq provider, '
         "you can use the `groq` optional group — `pip install 'pydantic-ai-slim[groq]'`"
     ) from _import_error
 
@@ -66,8 +66,6 @@ def __init__(
             )
 
         if groq_client is not None:
-            assert http_client is None, 'Cannot provide both `groq_client` and `http_client`'
-            assert api_key is None, 'Cannot provide both `groq_client` and `api_key`'
             self._client = groq_client
         elif http_client is not None:
             self._client = AsyncGroq(base_url=self.base_url, api_key=api_key, http_client=http_client)

pydantic_ai_slim/pydantic_ai/providers/openai.py

+1 -4

@@ -1,7 +1,6 @@
 from __future__ import annotations as _annotations
 
 import os
-from typing import TypeVar
 
 import httpx
 
@@ -11,15 +10,13 @@
     from openai import AsyncOpenAI
 except ImportError as _import_error:  # pragma: no cover
     raise ImportError(
-        'Please install `openai` to use the OpenAI provider, '
+        'Please install the `openai` package to use the OpenAI provider, '
         "you can use the `openai` optional group — `pip install 'pydantic-ai-slim[openai]'`"
     ) from _import_error
 
 
 from . import Provider
 
-InterfaceClient = TypeVar('InterfaceClient')
-
 
 class OpenAIProvider(Provider[AsyncOpenAI]):
     """Provider for OpenAI API."""

tests/json_body_serializer.py

+1 -1

@@ -14,7 +14,7 @@
 from yaml import Dumper, Loader
 
 FILTERED_HEADER_PREFIXES = ['anthropic-', 'cf-', 'x-']
-FILTERED_HEADERS = {'authorization', 'date', 'request-id', 'server', 'user-agent', 'via', 'set-cookie'}
+FILTERED_HEADERS = {'authorization', 'date', 'request-id', 'server', 'user-agent', 'via', 'set-cookie', 'api-key'}
 
 
 class LiteralDumper(Dumper):
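Azure OpenAI authenticates with an `api-key` request header rather than `Authorization`, which is presumably why it joins the deny-list. A hypothetical illustration (not the project's actual serializer code) of how such a deny-list is typically applied when scrubbing recorded headers:

```python
# Hypothetical helper for illustration; only the two constants come from the diff.
FILTERED_HEADER_PREFIXES = ['anthropic-', 'cf-', 'x-']
FILTERED_HEADERS = {'authorization', 'date', 'request-id', 'server', 'user-agent', 'via', 'set-cookie', 'api-key'}


def scrub_headers(headers: dict[str, list[str]]) -> dict[str, list[str]]:
    """Drop sensitive or noisy headers before a request/response is written to a cassette."""
    return {
        name: values
        for name, values in headers.items()
        if name.lower() not in FILTERED_HEADERS
        and not any(name.lower().startswith(prefix) for prefix in FILTERED_HEADER_PREFIXES)
    }


# Example: the Azure `api-key` header is removed, ordinary headers are kept.
print(scrub_headers({'api-key': ['secret'], 'content-type': ['application/json']}))
# -> {'content-type': ['application/json']}
```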
New VCR cassette (Azure OpenAI chat completion recording)

+107
@@ -0,0 +1,107 @@
+interactions:
+- request:
+    headers:
+      accept:
+      - application/json
+      accept-encoding:
+      - gzip, deflate
+      connection:
+      - keep-alive
+      content-length:
+      - '111'
+      content-type:
+      - application/json
+      host:
+      - pydanticai7521574644.openai.azure.com
+    method: POST
+    parsed_body:
+      messages:
+      - content: What is the capital of France?
+        role: user
+      model: gpt-4o
+      n: 1
+      stream: false
+    uri: https://pydanticai7521574644.openai.azure.com/openai/deployments/gpt-4o/chat/completions?api-version=2024-12-01-preview
+  response:
+    headers:
+      apim-request-id:
+      - 1d93fae1-cb8e-4789-8fb6-d26577a3cb77
+      azureml-model-session:
+      - v20250225-1-161802030
+      cmp-upstream-response-duration:
+      - '235'
+      content-length:
+      - '1223'
+      content-type:
+      - application/json
+      ms-azureml-model-time:
+      - '315'
+      strict-transport-security:
+      - max-age=31536000; includeSubDomains; preload
+    parsed_body:
+      choices:
+      - content_filter_results:
+          hate:
+            filtered: false
+            severity: safe
+          protected_material_code:
+            detected: false
+            filtered: false
+          protected_material_text:
+            detected: false
+            filtered: false
+          self_harm:
+            filtered: false
+            severity: safe
+          sexual:
+            filtered: false
+            severity: safe
+          violence:
+            filtered: false
+            severity: safe
+        finish_reason: stop
+        index: 0
+        logprobs: null
+        message:
+          content: The capital of France is **Paris**.
+          refusal: null
+          role: assistant
+      created: 1741880483
+      id: chatcmpl-BAeyRj7gU6aCNSSAskAFbupBWYMIT
+      model: gpt-4o-2024-11-20
+      object: chat.completion
+      prompt_filter_results:
+      - content_filter_results:
+          hate:
+            filtered: false
+            severity: safe
+          jailbreak:
+            detected: false
+            filtered: false
+          self_harm:
+            filtered: false
+            severity: safe
+          sexual:
+            filtered: false
+            severity: safe
+          violence:
+            filtered: false
+            severity: safe
+        prompt_index: 0
+      system_fingerprint: fp_ded0d14823
+      usage:
+        completion_tokens: 9
+        completion_tokens_details:
+          accepted_prediction_tokens: 0
+          audio_tokens: 0
+          reasoning_tokens: 0
+          rejected_prediction_tokens: 0
+        prompt_tokens: 14
+        prompt_tokens_details:
+          audio_tokens: 0
+          cached_tokens: 0
+        total_tokens: 23
+    status:
+      code: 200
+      message: OK
+version: 1
