diff --git a/cloudevents_pydantic/bindings/http.py b/cloudevents_pydantic/bindings/http.py
index 26780b5..eb65222 100644
--- a/cloudevents_pydantic/bindings/http.py
+++ b/cloudevents_pydantic/bindings/http.py
@@ -26,16 +26,16 @@
     List,
     NamedTuple,
     Type,
+    TypeVar,
     cast,
 )
 
 from pydantic import TypeAdapter
-from typing_extensions import TypeVar
 
 from cloudevents_pydantic.events import CloudEvent
 from cloudevents_pydantic.formats import json
 
-_T = TypeVar("_T", bound=CloudEvent, default=CloudEvent)
+_T = TypeVar("_T", bound=CloudEvent)
 
 
 class HTTPComponents(NamedTuple):
@@ -44,12 +44,12 @@ class HTTPComponents(NamedTuple):
 
 
 class HTTPHandler(Generic[_T]):
-    event_class: Type[_T]
+    event_adapter: TypeAdapter[_T]
     batch_adapter: TypeAdapter[List[_T]]
 
     def __init__(self, event_class: Type[_T] = cast(Type[_T], CloudEvent)) -> None:
         super().__init__()
-        self.event_class = event_class
+        self.event_adapter = TypeAdapter(event_class)
         self.batch_adapter = TypeAdapter(List[event_class])  # type: ignore[valid-type]
 
     def to_json(self, event: _T) -> HTTPComponents:
@@ -90,7 +90,7 @@ def from_json(
         :return: The deserialized event
         :rtype: CloudEvent
         """
-        return json.from_json(body, self.event_class)
+        return json.from_json(body, self.event_adapter)
 
     def from_json_batch(
         self,
diff --git a/cloudevents_pydantic/formats/json.py b/cloudevents_pydantic/formats/json.py
index 6823d0e..e7bac26 100644
--- a/cloudevents_pydantic/formats/json.py
+++ b/cloudevents_pydantic/formats/json.py
@@ -20,14 +20,13 @@
 # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER        =
 # DEALINGS IN THE SOFTWARE.                                                  =
 # ==============================================================================
-from typing import List, Type, overload
+from typing import List, TypeVar
 
 from pydantic import TypeAdapter
-from typing_extensions import TypeVar
 
 from ..events import CloudEvent
 
-_T = TypeVar("_T", bound=CloudEvent, default=CloudEvent)
+_T = TypeVar("_T", bound=CloudEvent)
 
 
 def to_json(event: CloudEvent) -> str:
@@ -42,22 +41,20 @@ def to_json(event: CloudEvent) -> str:
     return event.model_dump_json()
 
 
-@overload
-def from_json(data: str) -> CloudEvent: ...
-@overload
-def from_json(data: str, event_class: Type[_T]) -> _T: ...
-def from_json(data: str, event_class: Type[CloudEvent] = CloudEvent) -> CloudEvent:
+def from_json(
+    data: str, event_adapter: TypeAdapter[_T] = TypeAdapter(CloudEvent)
+) -> _T:
     """
     Deserializes an event from JSON format.
 
     :param data: the JSON representation of the event
     :type data: str
-    :param event_class: The event class to build
-    :type event_class: Type[CloudEvent]
+    :param event_adapter: The TypeAdapter to use to build the event
+    :type event_adapter: TypeAdapter[CloudEvent]
     :return: The deserialized event
     :rtype: CloudEvent
     """
-    return event_class.model_validate_json(data)
+    return event_adapter.validate_json(data)
 
 
 def to_json_batch(
diff --git a/docs/event_class.md b/docs/event_class.md
index 501cac9..f9cb5df 100644
--- a/docs/event_class.md
+++ b/docs/event_class.md
@@ -104,6 +104,7 @@ When you create event types in your app you will want to make sure to follow the
 be kept up to date and make sure their validation, serialization and deserialization rules will
 be compliant with the [CloudEvents spec](https://github.com/cloudevents/spec/tree/main).
 
+/// tab | Class syntax
 ```python
 from typing import TypedDict, Literal
 from cloudevents_pydantic.events import CloudEvent, field_types
@@ -121,6 +122,23 @@ event = OrderCreated.event_factory(
     data={"a_str": "a nice string", "an_int": 1},
 )
 ```
+///
+
+/// tab | Inline syntax
+```python
+from typing import TypedDict, Literal
+from cloudevents_pydantic.events import CloudEvent, field_types
+
+class OrderCreated(CloudEvent):
+    data: TypedDict("OrderCreatedData", {"a_str": field_types.String, "an_int": field_types.Integer})
+    type: Literal["order_created"] = "order_created"
+    source: field_types.String = "order_service"
+
+event = OrderCreated.event_factory(
+    data={"a_str": "a nice string", "an_int": 1},
+)
+```
+///
 
 /// admonition | Use subclasses
     type: warning
diff --git a/docs/protocol_bindings/http_binding.md b/docs/protocol_bindings/http_binding.md
index 48d650e..c6fd964 100644
--- a/docs/protocol_bindings/http_binding.md
+++ b/docs/protocol_bindings/http_binding.md
@@ -37,15 +37,14 @@ def do_something():
 /// admonition | Why you have to reuse the same object?
     type: tip
 
-When the HTTPHandler instance is created it creates internally a Pydantic `TypeAdapter`
-for the event class, to handle efficiently event batches. This is an expensive operation.
-Check the [Pydantic documentation](https://docs.pydantic.dev/latest/concepts/performance/#typeadapter-instantiated-once)
-about this.
+When the HTTPHandler instance is created, it internally creates Pydantic `TypeAdapter` instances
+for the event class, to handle event serialization and discriminated unions efficiently. This is
+an expensive operation. Check the [Pydantic documentation](https://docs.pydantic.dev/latest/concepts/performance/#typeadapter-instantiated-once) about this.
 ///
 
-## Serialize a JSON event
+## Deserialize a JSON event
 
-HTTP serialization returns header and body to be used in a HTTP request.
+HTTP deserialization parses the body to reconstruct the event.
 
 /// tab | Custom Event class
 ```python
@@ -55,20 +54,15 @@ from cloudevents_pydantic.bindings.http import HTTPHandler
 
 class OrderCreated(CloudEvent): ...
 
-minimal_attributes = {
-    "type": "order_created",
-    "source": "https://example.com/event-producer",
-    "id": "b96267e2-87be-4f7a-b87c-82f64360d954",
-    "specversion": "1.0",
-}
+single_event_json = '{"data":null,"source":"https://example.com/event-producer","id":"b96267e2-87be-4f7a-b87c-82f64360d954","type":"com.example.string","specversion":"1.0","time":"2022-07-16T12:03:20.519216+04:00","subject":null,"datacontenttype":null,"dataschema":null}'
+batch_event_json = '[{"data":null,"source":"https://example.com/event-producer","id":"b96267e2-87be-4f7a-b87c-82f64360d954","type":"com.example.string","specversion":"1.0","time":"2022-07-16T12:03:20.519216+04:00","subject":null,"datacontenttype":null,"dataschema":null}]'
 
 http_handler = HTTPHandler(OrderCreated)
-event = OrderCreated.event_factory(**minimal_attributes)
 
 # Single event
-headers, body = http_handler.to_json(event)
+event = http_handler.from_json(single_event_json)
 
 # Batch (list) of events
-headers, body = http_handler.to_json_batch([event])
+batch_of_events = http_handler.from_json_batch(batch_event_json)
 ```
 ///
@@ -88,15 +82,57 @@ http_handler = HTTPHandler()
 event = CloudEvent.event_factory(**minimal_attributes)
 # Single event
-json_string = http_handler.to_json(event)
+event = http_handler.to_json(event)
 
 # Batch (list) of events
-json_batch_string = http_handler.to_json_batch([event])
+batch_of_events = http_handler.to_json_batch([event])
 ```
 ///
 
-## Deserialize a JSON event
+/// details | Use discriminated unions to handle multiple Event classes
+    type: warning
 
-HTTP deserialization parses the body to reconstruct the event.
+You'll want to use a [discriminated union](https://docs.pydantic.dev/latest/concepts/unions/#discriminated-unions)
+as the event class, with a single `HTTPHandler` for all the Event classes, to make validation
+more efficient and to produce a correct schema.
+
+```python
+from typing import Annotated, Literal, Union
+
+from pydantic import Field
+from typing_extensions import TypedDict
+
+from cloudevents_pydantic.bindings.http import HTTPHandler
+from cloudevents_pydantic.events import CloudEvent
+
+
+class OrderCreatedEvent(CloudEvent):
+    data: TypedDict("OrderCreatedData", {"order_id": str})
+    type: Literal["order_created"]
+
+
+class CustomerCreatedEvent(CloudEvent):
+    data: TypedDict("CustomerCreatedData", {"customer_id": str})
+    type: Literal["customer_created"]
+
+
+Event = Annotated[
+    Union[OrderCreatedEvent, CustomerCreatedEvent],
+    Field(discriminator="type"),
+]
+
+http_handler = HTTPHandler(Event)
+
+customer_event_json = '{"data":{"customer_id":"123"},"source":"customer_service","id":"123","type":"customer_created","specversion":"1.0","time":null,"subject":null,"datacontenttype":null,"dataschema":null}'
+
+print(type(http_handler.from_json(customer_event_json)))
+# <class 'CustomerCreatedEvent'>
+```
+///
+
+
+## Serialize a JSON event
+
+HTTP serialization returns header and body to be used in a HTTP request.
 
 /// tab | Custom Event class
 ```python
@@ -106,15 +142,20 @@ from cloudevents_pydantic.bindings.http import HTTPHandler
 
 class OrderCreated(CloudEvent): ...
 
-single_event_json = '{"data":null,"source":"https://example.com/event-producer","id":"b96267e2-87be-4f7a-b87c-82f64360d954","type":"com.example.string","specversion":"1.0","time":"2022-07-16T12:03:20.519216+04:00","subject":null,"datacontenttype":null,"dataschema":null}'
-batch_event_json = '[{"data":null,"source":"https://example.com/event-producer","id":"b96267e2-87be-4f7a-b87c-82f64360d954","type":"com.example.string","specversion":"1.0","time":"2022-07-16T12:03:20.519216+04:00","subject":null,"datacontenttype":null,"dataschema":null}]'
+minimal_attributes = {
+    "type": "order_created",
+    "source": "https://example.com/event-producer",
+    "id": "b96267e2-87be-4f7a-b87c-82f64360d954",
+    "specversion": "1.0",
+}
 
 http_handler = HTTPHandler(OrderCreated)
+event = OrderCreated.event_factory(**minimal_attributes)
 
 # Single event
-event = http_handler.from_json(single_event_json)
+headers, body = http_handler.to_json(event)
 
 # Batch (list) of events
-batch_of_events = http_handler.from_json_batch(batch_event_json)
+headers, body = http_handler.to_json_batch([event])
 ```
 ///
@@ -134,8 +175,8 @@ http_handler = HTTPHandler()
 event = CloudEvent.event_factory(**minimal_attributes)
 # Single event
-event = http_handler.to_json(event)
+json_string = http_handler.to_json(event)
 
 # Batch (list) of events
-batch_of_events = http_handler.to_json_batch([event])
+json_batch_string = http_handler.to_json_batch([event])
 ```
 ///
diff --git a/tests/bindings/test_http.py b/tests/bindings/test_http.py
index da8c497..f1b0d62 100644
--- a/tests/bindings/test_http.py
+++ b/tests/bindings/test_http.py
@@ -21,7 +21,7 @@
 # DEALINGS IN THE SOFTWARE.                                                  =
 # ==============================================================================
 from typing import List
-from unittest.mock import patch
+from unittest.mock import MagicMock, call, patch
 
 import pytest
 from pydantic import Field, TypeAdapter
@@ -97,16 +97,24 @@ def from_json_batch_spy():
         yield mocked_function
 
 
-def test_initialization_defaults_to_cloudevents(type_adapter_init_mock):
-    handler = HTTPHandler()
-    assert handler.event_class is CloudEvent
-    type_adapter_init_mock.assert_called_once_with(List[CloudEvent])
+def test_initialization_defaults_to_cloudevents(type_adapter_init_mock: MagicMock):
+    HTTPHandler()
+    type_adapter_init_mock.assert_has_calls(
+        [
+            call(CloudEvent),
+            call(List[CloudEvent]),
+        ]
+    )
 
 
-def test_initialization_uses_provided_event_class(type_adapter_init_mock):
-    handler = HTTPHandler(event_class=SomeEvent)
-    assert handler.event_class is SomeEvent
-    type_adapter_init_mock.assert_called_once_with(List[SomeEvent])
+def test_initialization_uses_provided_event_class(type_adapter_init_mock: MagicMock):
+    HTTPHandler(event_class=SomeEvent)
+    type_adapter_init_mock.assert_has_calls(
+        [
+            call(SomeEvent),
+            call(List[SomeEvent]),
+        ]
+    )
 
 
 @pytest.mark.parametrize(
@@ -143,7 +151,7 @@ def test_from_json(from_json_spy):
     handler = HTTPHandler(event_class=SomeEvent)
     event = handler.from_json(valid_json)
 
-    from_json_spy.assert_called_once_with(valid_json, handler.event_class)
+    from_json_spy.assert_called_once_with(valid_json, handler.event_adapter)
     assert event == SomeEvent(**test_attributes)
     assert event != CloudEvent(**test_attributes)
     assert isinstance(event, SomeEvent)
diff --git a/tests/formats/test_json.py b/tests/formats/test_json.py
index 2aa2fed..96ca583 100644
--- a/tests/formats/test_json.py
+++ b/tests/formats/test_json.py
@@ -375,7 +375,7 @@ class BinaryDataEvent(CloudEvent):
             batch_adapter=TypeAdapter(Sequence[BinaryDataEvent]),
         )[0]
     else:
-        event = from_json(json_string, BinaryDataEvent)
+        event = from_json(json_string, TypeAdapter(BinaryDataEvent))
 
     assert event.data == expected_value
     assert isinstance(event, BinaryDataEvent)
@@ -412,5 +412,5 @@ class SomeData(TypedDict):
     class BinaryNestedEvent(CloudEvent):
         data: SomeData
 
-    event: BinaryNestedEvent = from_json(json_input, BinaryNestedEvent)
+    event: BinaryNestedEvent = from_json(json_input, TypeAdapter(BinaryNestedEvent))
     assert event.data["data"] == b"\x02\x03\x05\x07"
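
Usage sketch (not part of the diff): a minimal example of how calling code moves from passing an event class to passing a `TypeAdapter` after this change. The payload string is the `single_event_json` example reused from the docs above; everything else follows the signatures shown in the hunks.

```python
from pydantic import TypeAdapter

from cloudevents_pydantic.bindings.http import HTTPHandler
from cloudevents_pydantic.events import CloudEvent
from cloudevents_pydantic.formats import json as json_format

payload = '{"data":null,"source":"https://example.com/event-producer","id":"b96267e2-87be-4f7a-b87c-82f64360d954","type":"com.example.string","specversion":"1.0","time":"2022-07-16T12:03:20.519216+04:00","subject":null,"datacontenttype":null,"dataschema":null}'

# Build the TypeAdapter once and reuse it: instantiating it is the expensive part.
adapter = TypeAdapter(CloudEvent)

# Before this change: json_format.from_json(payload, CloudEvent)
# After this change: the second argument is a TypeAdapter, not the event class.
event = json_format.from_json(payload, adapter)

# HTTPHandler callers are unaffected; only the internals changed
# (handler.event_class is replaced by handler.event_adapter).
handler = HTTPHandler(CloudEvent)
assert handler.from_json(payload) == event
```

Building the adapter once at module level mirrors what `HTTPHandler.__init__` now does and matches the Pydantic performance guidance linked in the docs.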