feat(client): add ._request_id property to object responses
RobertCraigie committed Sep 17, 2024
1 parent 4d45eb5 commit e9780f7
Showing 5 changed files with 108 additions and 3 deletions.
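
For orientation, a minimal usage sketch of the new property, based on the docstring added in `src/openai/_models.py` below (the model name and message content are illustrative, not part of this commit):

```py
from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

completion = client.chat.completions.create(
    model="gpt-4o-mini",  # illustrative model name
    messages=[{"role": "user", "content": "Say hello"}],
)

# The ID comes from the X-Request-ID response header and is attached only to
# the top-level response object, not to nested objects.
print(completion._request_id)  # e.g. "req_id_xxx"

# Nested objects do not carry the property; per the docstring this raises
# AttributeError:
# completion.usage._request_id
```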
5 changes: 4 additions & 1 deletion src/openai/_legacy_response.py
@@ -25,7 +25,7 @@

from ._types import NoneType
from ._utils import is_given, extract_type_arg, is_annotated_type
from ._models import BaseModel, is_basemodel
from ._models import BaseModel, is_basemodel, add_request_id
from ._constants import RAW_RESPONSE_HEADER
from ._streaming import Stream, AsyncStream, is_stream_class_type, extract_stream_chunk_type
from ._exceptions import APIResponseValidationError
@@ -138,6 +138,9 @@ class MyModel(BaseModel):
if is_given(self._options.post_parser):
parsed = self._options.post_parser(parsed)

if isinstance(parsed, BaseModel):
add_request_id(parsed, self.request_id)

self._parsed_by_type[cache_key] = parsed
return parsed

34 changes: 33 additions & 1 deletion src/openai/_models.py
@@ -2,7 +2,7 @@

import os
import inspect
from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, cast
from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, Optional, cast
from datetime import date, datetime
from typing_extensions import (
Unpack,
@@ -94,6 +94,23 @@ def model_fields_set(self) -> set[str]:
class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated]
extra: Any = pydantic.Extra.allow # type: ignore

if TYPE_CHECKING:
_request_id: Optional[str] = None
"""The ID of the request, returned via the X-Request-ID header. Useful for debugging requests and reporting issues to OpenAI.
This will **only** be set for the top-level response object, it will not be defined for nested objects. For example:
```py
completion = await client.chat.completions.create(...)
completion._request_id # req_id_xxx
completion.usage._request_id # raises `AttributeError`
```
Note: unlike other properties that use an `_` prefix, this property
*is* public. Unless documented otherwise, all other `_` prefix properties,
methods and modules are *private*.
"""

def to_dict(
self,
*,
@@ -662,6 +679,21 @@ def set_pydantic_config(typ: Any, config: pydantic.ConfigDict) -> None:
setattr(typ, "__pydantic_config__", config) # noqa: B010


def add_request_id(obj: BaseModel, request_id: str | None) -> None:
obj._request_id = request_id

# in Pydantic v1, using setattr like we do above causes the attribute
# to be included when serializing the model which we don't want in this
# case so we need to explicitly exclude it
if not PYDANTIC_V2:
try:
exclude_fields = obj.__exclude_fields__ # type: ignore
except AttributeError:
cast(Any, obj).__exclude_fields__ = {"_request_id", "__exclude_fields__"}
else:
cast(Any, obj).__exclude_fields__ = {*(exclude_fields or {}), "_request_id", "__exclude_fields__"}


# our use of subclassing here causes weirdness for type checkers,
# so we just pretend that we don't subclass
if TYPE_CHECKING:
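
A minimal sketch of what `add_request_id` guarantees, mirroring the assertions in the tests below; `CustomModel` is defined inline here for illustration, and `add_request_id` is imported from the private `openai._models` module changed above:

```py
from openai._models import BaseModel, add_request_id


class CustomModel(BaseModel):
    foo: str
    bar: int


obj = CustomModel(foo="hello!", bar=2)
add_request_id(obj, "my-req-id")

# The ID is readable on the instance...
assert obj._request_id == "my-req-id"

# ...but excluded from serialization, on Pydantic v1 (via the
# __exclude_fields__ bookkeeping above) as well as Pydantic v2.
assert obj.to_dict() == {"foo": "hello!", "bar": 2}
```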
8 changes: 7 additions & 1 deletion src/openai/_response.py
@@ -26,7 +26,7 @@

from ._types import NoneType
from ._utils import is_given, extract_type_arg, is_annotated_type, extract_type_var_from_base
from ._models import BaseModel, is_basemodel
from ._models import BaseModel, is_basemodel, add_request_id
from ._constants import RAW_RESPONSE_HEADER, OVERRIDE_CAST_TO_HEADER
from ._streaming import Stream, AsyncStream, is_stream_class_type, extract_stream_chunk_type
from ._exceptions import OpenAIError, APIResponseValidationError
@@ -315,6 +315,9 @@ class MyModel(BaseModel):
if is_given(self._options.post_parser):
parsed = self._options.post_parser(parsed)

if isinstance(parsed, BaseModel):
add_request_id(parsed, self.request_id)

self._parsed_by_type[cache_key] = parsed
return parsed

@@ -419,6 +422,9 @@ class MyModel(BaseModel):
if is_given(self._options.post_parser):
parsed = self._options.post_parser(parsed)

if isinstance(parsed, BaseModel):
add_request_id(parsed, self.request_id)

self._parsed_by_type[cache_key] = parsed
return parsed

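
The same hook is applied in both the sync and async parse paths above, so the ID attached to the parsed model matches the one already exposed on the raw response wrapper. A hedged sketch of that relationship via the raw-response API (the model name and message are illustrative):

```py
from openai import OpenAI

client = OpenAI()

raw = client.chat.completions.with_raw_response.create(
    model="gpt-4o-mini",  # illustrative model name
    messages=[{"role": "user", "content": "hi"}],
)

completion = raw.parse()
# add_request_id() copies the wrapper's request_id onto the parsed model.
assert completion._request_id == raw.request_id
```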
21 changes: 21 additions & 0 deletions tests/test_legacy_response.py
@@ -66,6 +66,27 @@ def test_response_parse_custom_model(client: OpenAI) -> None:
assert obj.bar == 2


def test_response_basemodel_request_id(client: OpenAI) -> None:
response = LegacyAPIResponse(
raw=httpx.Response(
200,
headers={"x-request-id": "my-req-id"},
content=json.dumps({"foo": "hello!", "bar": 2}),
),
client=client,
stream=False,
stream_cls=None,
cast_to=str,
options=FinalRequestOptions.construct(method="get", url="/foo"),
)

obj = response.parse(to=CustomModel)
assert obj._request_id == "my-req-id"
assert obj.foo == "hello!"
assert obj.bar == 2
assert obj.to_dict() == {"foo": "hello!", "bar": 2}


def test_response_parse_annotated_type(client: OpenAI) -> None:
response = LegacyAPIResponse(
raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})),
43 changes: 43 additions & 0 deletions tests/test_response.py
@@ -156,6 +156,49 @@ async def test_async_response_parse_custom_model(async_client: AsyncOpenAI) -> None:
assert obj.bar == 2


def test_response_basemodel_request_id(client: OpenAI) -> None:
response = APIResponse(
raw=httpx.Response(
200,
headers={"x-request-id": "my-req-id"},
content=json.dumps({"foo": "hello!", "bar": 2}),
),
client=client,
stream=False,
stream_cls=None,
cast_to=str,
options=FinalRequestOptions.construct(method="get", url="/foo"),
)

obj = response.parse(to=CustomModel)
assert obj._request_id == "my-req-id"
assert obj.foo == "hello!"
assert obj.bar == 2
assert obj.to_dict() == {"foo": "hello!", "bar": 2}


@pytest.mark.asyncio
async def test_async_response_basemodel_request_id(client: OpenAI) -> None:
response = AsyncAPIResponse(
raw=httpx.Response(
200,
headers={"x-request-id": "my-req-id"},
content=json.dumps({"foo": "hello!", "bar": 2}),
),
client=client,
stream=False,
stream_cls=None,
cast_to=str,
options=FinalRequestOptions.construct(method="get", url="/foo"),
)

obj = await response.parse(to=CustomModel)
assert obj._request_id == "my-req-id"
assert obj.foo == "hello!"
assert obj.bar == 2
assert obj.to_dict() == {"foo": "hello!", "bar": 2}


def test_response_parse_annotated_type(client: OpenAI) -> None:
response = APIResponse(
raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})),
