diff --git a/.github/workflows/lint_and_test.yml b/.github/workflows/lint_and_test.yml
index 75125051..f53a2e0f 100644
--- a/.github/workflows/lint_and_test.yml
+++ b/.github/workflows/lint_and_test.yml
@@ -81,7 +81,7 @@ jobs:
just setup-runner
- name: Run tests with coverage
- if: ${{ matrix.python_version.cov }}
+ if: ${{ (matrix.python_version.cov && github.event_name == 'pull_request') }}
run:
just inv test-on-ci
--py-target ${{ matrix.python_version.tox }}
diff --git a/README.md b/README.md
index f7bed037..fbbff31d 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
@@ -25,7 +25,7 @@ An extremely flexible and configurable data model conversion library.
Install
```bash
-pip install adaptix==3.0.0b4
+pip install adaptix==3.0.0b5
```
Use for model loading and dumping.
@@ -114,7 +114,7 @@ assert (
* Support [dozens](https://adaptix.readthedocs.io/en/latest/loading-and-dumping/specific-types-behavior.html) of types,
including different model kinds:
``@dataclass``, ``TypedDict``, ``NamedTuple``,
- [``attrs``](https://www.attrs.org/en/stable/) and [``sqlalchemy``](https://docs.sqlalchemy.org/en/20/)
+ [``attrs``](https://www.attrs.org/en/stable/), [``sqlalchemy``](https://docs.sqlalchemy.org/en/20/) and [``pydantic``](https://docs.pydantic.dev/latest/).
* Working with self-referenced data types (such as linked lists or trees).
* Saving [path](https://adaptix.readthedocs.io/en/latest/loading-and-dumping/tutorial.html#error-handling)
where an exception is raised (including unexpected errors).
diff --git a/docs/changelog/changelog_body.rst b/docs/changelog/changelog_body.rst
index a9bbec32..ecbcb120 100644
--- a/docs/changelog/changelog_body.rst
+++ b/docs/changelog/changelog_body.rst
@@ -1,10 +1,43 @@
----------------------------------------------------
+.. _v3.0.0b5:
+
+`3.0.0b5
`__ -- 2024-04-20
+=============================================================================
+
+.. _v3.0.0b5-Features:
+
+Features
+--------
+
+- Add support for Pydantic models!
+
+ Now you can work with pydantic models like any other:
+ construct from dict, serialize to dict, convert to any other model, and convert any other model to it.
+
+ Also, you can use :func:`.integrations.pydantic.native_pydantic` to delegate loading and dumping to pydantic itself.
+
+- Add support for dumping ``Literal`` inside ``Union``. `#237 `__
+- Add support for ``BytesIO`` and ``IO[bytes]``. `#270 `__
+- Error messages are more obvious.
+
+.. _v3.0.0b5-Breaking Changes:
+
+Breaking Changes
+----------------
+
+- Forbid use of constructs like ``P[SomeClass].ANY`` because it is misleading (you have to use ``P.ANY`` directly).
+- Private fields (any field starting with underscore) are skipped at dumping.
+ See :ref:`private_fields_dumping` for details.
+
+----------------------------------------------------
+
+
.. _v3.0.0b4:
-`3.0.0b4 `_ -- 2024-03-30
-============================================================================
+`3.0.0b4 `__ -- 2024-03-30
+=============================================================================
.. _v3.0.0b4-Features:
@@ -43,18 +76,18 @@ Bug Fixes
.. _v3.0.0b3:
-`3.0.0b3 `_ -- 2024-03-08
-============================================================================
+`3.0.0b3 `__ -- 2024-03-08
+=============================================================================
.. _v3.0.0b3-Features:
Features
--------
-- :func:`.conversion.link` accepts ``coercer`` parameter. `#256 `_
-- Add :func:`.conversion.link_constant` to link constant values and constant factories. `#258 `_
-- Add coercer for case when source union is subset of destination union (simple ``==`` check is using). `#242 `_
-- No coercer error now contains type information. `#252 `_
+- :func:`.conversion.link` accepts ``coercer`` parameter. `#256 `__
+- Add :func:`.conversion.link_constant` to link constant values and constant factories. `#258 `__
+- Add coercer for case when source union is subset of destination union (simple ``==`` check is using). `#242 `__
+- No coercer error now contains type information. `#252 `__
- Add coercer for ``Optional[S] -> Optional[D]`` if ``S`` is coercible to ``D``. `#254 `_
.. _v3.0.0b3-Bug Fixes:
@@ -62,17 +95,17 @@ Features
Bug Fixes
---------
-- Fix ``SyntaxError`` with lambda in :func:`.coercer`. `#243 `_
-- Model dumping now trying to save the original order of fields inside the dict. `#247 `_
-- Fix introspection of sqlalchemy models with ``column_property`` (all ColumnElement is ignored excepting Column itself). `#250 `_
+- Fix ``SyntaxError`` with lambda in :func:`.coercer`. `#243 `__
+- Model dumping now trying to save the original order of fields inside the dict. `#247 `__
+- Fix introspection of sqlalchemy models with ``column_property`` (all ColumnElement is ignored excepting Column itself). `#250 `__
----------------------------------------------------
.. _v3.0.0b2:
-`3.0.0b2 `_ -- 2024-02-16
-============================================================================
+`3.0.0b2 `__ -- 2024-02-16
+=============================================================================
.. _v3.0.0b2-Features:
@@ -84,15 +117,15 @@ Features
Now, you can generate boilerplate converter function by adaptix.
See :ref:`conversion tutorial ` for details.
- Basic support for sqlalchemy models are added!
-- Added enum support inside Literal. `#178 `_
+- Added enum support inside Literal. `#178 `__
- Added flags support.
Now adaptix has two different ways to process flags: :func:`.flag_by_exact_value` (by default)
- and :func:`.flag_by_member_names`. `#197 `_
-- Added defaultdict support. `#216 `_
-- Added support of mapping for :func:`.enum_by_name` provider. `#223 `_
+ and :func:`.flag_by_member_names`. `#197 `__
+- Added defaultdict support. `#216 `__
+- Added support of mapping for :func:`.enum_by_name` provider. `#223 `__
- Created the correct path (fixing python bug) for processing ``Required`` and ``NotRequired`` with stringified annotations
- or ``from __future__ import annotations``. `#227 `_
+ or ``from __future__ import annotations``. `#227 `__
.. _v3.0.0b2-Breaking Changes:
@@ -139,14 +172,14 @@ Deprecations
Bug Fixes
---------
-- Fixed parameter shuffling on skipping optional field. `#229 `_
+- Fixed parameter shuffling on skipping optional field. `#229 `__
----------------------------------------------------
.. _v3.0.0b1:
-`3.0.0b1 `_ -- 2023-12-16
-============================================================================
+`3.0.0b1 `__ -- 2023-12-16
+=============================================================================
Start of changelog.
diff --git a/docs/changelog/template.rst.jinja2 b/docs/changelog/template.rst.jinja2
index 1382d121..6db52c92 100644
--- a/docs/changelog/template.rst.jinja2
+++ b/docs/changelog/template.rst.jinja2
@@ -4,8 +4,8 @@
.. _v{{ versiondata.version }}:
-`{{ versiondata.version }} `_ -- {{ versiondata.date }}
-{{ top_underline * ((versiondata.version * 2 + versiondata.date)|length + 52) }}
+`{{ versiondata.version }} `__ -- {{ versiondata.date }}
+{{ top_underline * ((versiondata.version * 2 + versiondata.date)|length + 53) }}
{% endif %}
{% for section, _ in sections.items() %}
{% set underline = underlines[0] %}{% if section %}{{section}}
diff --git a/docs/common/installation.rst b/docs/common/installation.rst
index 2045eba7..57cd2d63 100644
--- a/docs/common/installation.rst
+++ b/docs/common/installation.rst
@@ -2,30 +2,17 @@ Just use pip to install the library
.. code-block:: text
- pip install adaptix==3.0.0b4
+ pip install adaptix==3.0.0b5
Integrations with 3-rd party libraries are turned on automatically,
-but you can install adaptix with `extras `_
+but you can install adaptix with `extras `__
to check that versions are compatible.
There are two variants of extras. The first one checks that the version is the same or newer than the last supported,
the second (strict) additionally checks that the version same or older than the last tested version.
-.. list-table::
- :header-rows: 1
-
- * - Extras
- - Versions bound
- * - ``attrs``
- - ``attrs >= 21.3.0``
- * - ``attrs-strict``
- - ``attrs >= 21.3.0, <= 23.2.0``
- * - ``sqlalchemy``
- - ``sqlalchemy >= 2.0.0``
- * - ``sqlalchemy-strict``
- - ``sqlalchemy >= 2.0.0, <= 2.0.29``
-
+.. custom-adaptix-extras-table::
Extras are specified inside square brackets, separating by comma.
@@ -33,5 +20,5 @@ So, this is valid installation variants:
.. code-block:: text
- pip install adaptix[attrs-strict]==3.0.0b4
- pip install adaptix[attrs, sqlalchemy-strict]==3.0.0b4
+ pip install adaptix[attrs-strict]==3.0.0b5
+ pip install adaptix[attrs, sqlalchemy-strict]==3.0.0b5
diff --git a/docs/conversion/tutorial.rst b/docs/conversion/tutorial.rst
index 9ab618ed..77462185 100644
--- a/docs/conversion/tutorial.rst
+++ b/docs/conversion/tutorial.rst
@@ -57,7 +57,7 @@ Sometimes you need to add extra data to the source model. For this, you can use
``# mypy: disable-error-code="empty-body"`` on the top of the file is needed
because mypy forbids functions without body.
-Also, you can set this option at `mypy config `_
+Also, you can set this option at `mypy config `__
or supress each error individually via ``# type: ignore[empty-body]``.
.. _fields-linking:
diff --git a/docs/custom_ext/bench_tools.py b/docs/custom_ext/bench_tools.py
index f13fccc9..c0996cf9 100644
--- a/docs/custom_ext/bench_tools.py
+++ b/docs/custom_ext/bench_tools.py
@@ -1,16 +1,14 @@
import json
-from textwrap import dedent
from typing import Dict
from zipfile import ZipFile
import plotly
from docutils import nodes
-from docutils.statemachine import StringList
-from sphinx.util import docutils
from sphinx.util.docutils import SphinxDirective
from benchmarks.bench_nexus import BENCHMARK_HUBS, KEY_TO_HUB, RELEASE_DATA, Renderer, pyperf_bench_to_measure
+from .macros import SphinxMacroDirective, directive
from .utils import file_ascii_hash
@@ -43,10 +41,10 @@ def run(self):
]
-class CustomBenchUsedDistributions(SphinxDirective):
+class CustomBenchUsedDistributions(SphinxMacroDirective):
required_arguments = 0
- def get_list_table(self) -> str:
+ def generate_string(self) -> str:
distributions: Dict[str, str] = {}
for hub_description in BENCHMARK_HUBS:
@@ -58,7 +56,7 @@ def get_list_table(self) -> str:
pyperf_bench_to_measure(release_zip.read(file)).distributions,
)
- result = dedent(
+ return directive(
"""
.. list-table::
:header-rows: 1
@@ -67,29 +65,20 @@ def get_list_table(self) -> str:
- Used version
- Last version
""",
- )
- for dist in sorted(distributions.keys()):
- version = distributions[dist]
- result += dedent(
+ [
f"""
- * - `{dist} `_
- - ``{version}``
+ * - `{dist} `__
+ - ``{distributions[dist]}``
- .. image:: https://img.shields.io/pypi/v/{dist}?logo=pypi&label=%20&color=white&style=flat
:target: https://pypi.org/project/{dist}/
:class: only-light
.. image:: https://img.shields.io/pypi/v/{dist}?logo=pypi&label=%20&color=%23242424&style=flat
:target: https://pypi.org/project/{dist}/
:class: only-dark
- """,
- ).replace("\n", "\n ")
- return result
-
- def run(self):
- list_table = self.get_list_table()
- rst = StringList(list_table.split("\n"), source="fake.rst")
- node = docutils.nodes.paragraph()
- self.state.nested_parse(rst, 0, node)
- return node.children
+ """
+ for dist in sorted(distributions.keys())
+ ],
+ )
def setup(app):
diff --git a/docs/custom_ext/macros.py b/docs/custom_ext/macros.py
index 9fa8773b..e4ae11f2 100644
--- a/docs/custom_ext/macros.py
+++ b/docs/custom_ext/macros.py
@@ -1,5 +1,8 @@
+import tomllib
from abc import ABC, abstractmethod
+from pathlib import Path
from textwrap import dedent, indent
+from typing import Iterable
from docutils.statemachine import StringList
from sphinx.util import docutils
@@ -21,27 +24,56 @@ def run(self):
return node.children
+def directive(header: str, contents: Iterable[str] = ()) -> str:
+ return dedent(header) + "\n" + "\n".join(indent(dedent(content), " ") for content in contents)
+
+
class CustomNonGuaranteedBehavior(SphinxMacroDirective):
required_arguments = 0
has_content = True
def generate_string(self) -> str:
- result = dedent(
+ return directive(
"""
- .. admonition:: Non-guaranteed behavior
- :class: caution
-
+ .. admonition:: Non-guaranteed behavior
+ :class: caution
""",
+ self.content,
)
- content = indent(
- "\n".join(self.content),
- " ",
+
+
+ADAPTIX_PYPROJECT = tomllib.loads(Path(__file__).parent.parent.parent.joinpath("pyproject.toml").read_text())
+
+
+class CustomAdaptixExtrasTable(SphinxMacroDirective):
+ required_arguments = 0
+ has_content = False
+
+ def generate_string(self) -> str:
+ return directive(
+ """
+ .. list-table::
+ :header-rows: 1
+ """,
+ [
+ """
+ * - Extras
+ - Versions bound
+ """,
+ *[
+ f"""
+ * - ``{extras}``
+ - ``{'; '.join(deps)}``
+ """
+ for extras, deps in ADAPTIX_PYPROJECT["project"]["optional-dependencies"].items()
+ ],
+ ],
)
- return result + content
def setup(app):
app.add_directive("custom-non-guaranteed-behavior", CustomNonGuaranteedBehavior)
+ app.add_directive("custom-adaptix-extras-table", CustomAdaptixExtrasTable)
return {
"version": file_ascii_hash(__file__),
diff --git a/docs/examples/conversion/tutorial/putting_together.py b/docs/examples/conversion/tutorial/putting_together.py
index 3eca56dc..62886be5 100644
--- a/docs/examples/conversion/tutorial/putting_together.py
+++ b/docs/examples/conversion/tutorial/putting_together.py
@@ -4,8 +4,7 @@
from uuid import UUID
from adaptix import P
-from adaptix._internal.conversion.facade.provider import from_param
-from adaptix.conversion import coercer, impl_converter, link
+from adaptix.conversion import coercer, from_param, impl_converter, link
@dataclass
diff --git a/docs/examples/loading-and-dumping/extended_usage/private_fields_including_no_rename_pydantic.py b/docs/examples/loading-and-dumping/extended_usage/private_fields_including_no_rename_pydantic.py
new file mode 100644
index 00000000..2c0e4339
--- /dev/null
+++ b/docs/examples/loading-and-dumping/extended_usage/private_fields_including_no_rename_pydantic.py
@@ -0,0 +1,26 @@
+from pydantic import BaseModel
+
+from adaptix import Retort, name_mapping
+
+
+class Book(BaseModel):
+ title: str
+ price: int
+ _private: int
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self._private = 1
+
+
+retort = Retort(
+ recipe=[
+ name_mapping(Book, map={"_private": "_private"}),
+ ],
+)
+book = Book(title="Fahrenheit 451", price=100)
+assert retort.dump(book) == {
+ "title": "Fahrenheit 451",
+ "price": 100,
+ "_private": 1,
+}
diff --git a/docs/examples/loading-and-dumping/extended_usage/private_fields_including_pydantic.py b/docs/examples/loading-and-dumping/extended_usage/private_fields_including_pydantic.py
new file mode 100644
index 00000000..42cf1fc4
--- /dev/null
+++ b/docs/examples/loading-and-dumping/extended_usage/private_fields_including_pydantic.py
@@ -0,0 +1,26 @@
+from pydantic import BaseModel
+
+from adaptix import Retort, name_mapping
+
+
+class Book(BaseModel):
+ title: str
+ price: int
+ _private: int
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self._private = 1
+
+
+retort = Retort(
+ recipe=[
+ name_mapping(Book, map={"_private": "private_field"}),
+ ],
+)
+book = Book(title="Fahrenheit 451", price=100)
+assert retort.dump(book) == {
+ "title": "Fahrenheit 451",
+ "price": 100,
+ "private_field": 1,
+}
diff --git a/docs/examples/loading-and-dumping/extended_usage/private_fields_skipping_pydantic.py b/docs/examples/loading-and-dumping/extended_usage/private_fields_skipping_pydantic.py
new file mode 100644
index 00000000..1cc97043
--- /dev/null
+++ b/docs/examples/loading-and-dumping/extended_usage/private_fields_skipping_pydantic.py
@@ -0,0 +1,21 @@
+from pydantic import BaseModel
+
+from adaptix import Retort
+
+
+class Book(BaseModel):
+ title: str
+ price: int
+ _private: int
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self._private = 1
+
+
+retort = Retort()
+book = Book(title="Fahrenheit 451", price=100)
+assert retort.dump(book) == {
+ "title": "Fahrenheit 451",
+ "price": 100,
+}
diff --git a/tests/unit/morphing/model/shape_provider/__init__.py b/docs/examples/reference/integrations/__init__.py
similarity index 100%
rename from tests/unit/morphing/model/shape_provider/__init__.py
rename to docs/examples/reference/integrations/__init__.py
diff --git a/docs/examples/reference/integrations/native_pydantic.py b/docs/examples/reference/integrations/native_pydantic.py
new file mode 100644
index 00000000..6c1224c9
--- /dev/null
+++ b/docs/examples/reference/integrations/native_pydantic.py
@@ -0,0 +1,25 @@
+from pydantic import BaseModel, Field
+
+from adaptix import Retort
+from adaptix.integrations.pydantic import native_pydantic
+
+
+class Book(BaseModel):
+ title: str = Field(alias="name")
+ price: int
+
+
+data = {
+ "name": "Fahrenheit 451",
+ "price": 100,
+}
+
+retort = Retort(
+ recipe=[
+ native_pydantic(Book),
+ ],
+)
+
+book = retort.load(data, Book)
+assert book == Book(name="Fahrenheit 451", price=100)
+assert retort.dump(book) == data
diff --git a/docs/loading-and-dumping/extended-usage.rst b/docs/loading-and-dumping/extended-usage.rst
index a86ebb09..11a8b795 100644
--- a/docs/loading-and-dumping/extended-usage.rst
+++ b/docs/loading-and-dumping/extended-usage.rst
@@ -299,7 +299,7 @@ Mapping to list
Some APIs store structures as lists or arrays rather than dict for optimization purposes.
For example, Binance uses it to represent
-`historical market data `_.
+`historical market data `__.
There is :paramref:`.name_mapping.as_list` that converts the model to a list.
Position at the list is determined by order of field definition.
@@ -353,6 +353,26 @@ The first provider override parameters of next providers.
.. literalinclude:: /examples/loading-and-dumping/extended_usage/chaining_overriding.py
+.. _private_fields_dumping:
+
+Private fields dumping
+-----------------------------------
+
+By default, adaptix skips private fields (any field starting with underscore) at dumping.
+
+.. literalinclude:: /examples/loading-and-dumping/extended_usage/private_fields_skipping_pydantic.py
+
+You can include these fields by setting an alias.
+
+.. literalinclude:: /examples/loading-and-dumping/extended_usage/private_fields_including_pydantic.py
+
+The alias can be equal to the field name (field id), and the field will still be included.
+
+.. dropdown:: Including private field without renaming
+
+ .. literalinclude:: /examples/loading-and-dumping/extended_usage/private_fields_including_no_rename_pydantic.py
+
+
.. _advanced-mapping:
Advanced mapping
@@ -385,7 +405,7 @@ The mapping result is union of 5 types:
So the field will be skipped despite the match by :paramref:`.name_mapping.only`.
Name mapping reuses concepts of recipe inside retort and also implements
-`chain-of-responsibility `_ design pattern.
+`chain-of-responsibility `__ design pattern.
Only the first element matched by its predicate is used to determine the mapping result.
diff --git a/docs/loading-and-dumping/specific-types-behavior.rst b/docs/loading-and-dumping/specific-types-behavior.rst
index f74fea5b..e69eb846 100644
--- a/docs/loading-and-dumping/specific-types-behavior.rst
+++ b/docs/loading-and-dumping/specific-types-behavior.rst
@@ -67,6 +67,11 @@ Exact list: ``bytes``, ``bytearray``, ``ByteString``.
Value is represented as base64 encoded string.
+BytesIO and IO[bytes]
+'''''''''''''''''''''''''''''''''''''
+
+Value is represented as base64 encoded string.
+
re.Pattern
''''''''''''
@@ -168,7 +173,7 @@ they will be processed via the corresponding dumper.
Be careful when you use a ``0``, ``1``, ``False`` and ``True`` as ``Literal`` members.
Due to type hint caching ``Literal[0, 1]`` sometimes returns ``Literal[False, True]``.
-It was fixed only at `Python 3.9.1 `_.
+It was fixed only at `Python 3.9.1 `__.
Union
'''''''''''''''''
@@ -202,8 +207,8 @@ For objects of types that are not listed in the union,
but which are a subclass of some union case, the base class dumper is used.
If there are several parents, it will be the selected class that appears first in ``.mro()`` list.
-Also, builtin dumper can not work
-with union containing non-class type hints like ``Union[Literal['foo', 'bar'], int]``.
+Also, builtin dumper can work only with class type hints and ``Literal``.
+For example, type hints like ``LiteralString | int`` can not be dumped.
Iterable subclasses
'''''''''''''''''''''
diff --git a/docs/loading-and-dumping/tutorial.rst b/docs/loading-and-dumping/tutorial.rst
index bab53e92..d70e2b33 100644
--- a/docs/loading-and-dumping/tutorial.rst
+++ b/docs/loading-and-dumping/tutorial.rst
@@ -85,7 +85,7 @@ therefore disabling it can improve performance.
Retort recipe
----------------
Retort also supports a more powerful and more flexible configuration system via `recipe`.
-It implements `chain-of-responsibility `_
+It implements `chain-of-responsibility `__
design pattern.
The recipe consists of `providers`, each of which can precisely override one of the retort's behavior aspects.
@@ -132,7 +132,7 @@ Basic rules:
#. If you pass a class, the provider will be applied to all same types.
#. If you pass an abstract class, the provider will be applied to all subclasses.
-#. If you pass a `runtime checkable protocol `_,
+#. If you pass a `runtime checkable protocol `__,
the provider will be applied to all protocol implementations.
#. If you pass a string, it will be interpreted as a regex
and the provider will be applied to all fields with id matched by the regex.
@@ -218,6 +218,7 @@ that returns the actual exception or pass the string to raise :class:`~.load_err
.. literalinclude:: /examples/loading-and-dumping/tutorial/validators_2.pytb
+.. _Error handling:
Error handling
==================
@@ -236,13 +237,13 @@ and designed to produce machine-readable structured errors.
By default, all thrown errors are collected into :class:`~.load_error.AggregateLoadError`,
each exception has an additional note describing path of place where the error is caused.
This path is called a ``Struct trail`` and acts like
-`JSONPath `_
+`JSONPath `__
pointing to location inside the input data.
For Python versions less than 3.11, an extra package ``exceptiongroup`` is used.
This package patch some functions from ``traceback``
during import to backport ``ExceptionGroup`` rendering to early versions.
-More details at `documentation `_.
+More details at `documentation `__.
By default, all collection-like and model-like loaders wrap all errors into :class:`~.load_error.AggregateLoadError`.
Each sub-exception contains a trail relative to the parent exception.
@@ -265,7 +266,7 @@ Changing ``debug_trail`` to ``DebugTrail.DISABLE`` make the raised exception act
.. literalinclude:: /examples/loading-and-dumping/tutorial/load_error_dt_disable.pytb
If there is at least one unexpected error :class:`~.load_error.AggregateLoadError`
-is replaced by standard `ExceptionGroup `_.
+is replaced by standard `ExceptionGroup `__.
For the dumping process any exception is unexpected, so it always will be wrapped with ``ExceptionGroup``
.. literalinclude:: /examples/loading-and-dumping/tutorial/unexpected_error.py
diff --git a/docs/overview.rst b/docs/overview.rst
index 6ffcd8bb..c6544296 100644
--- a/docs/overview.rst
+++ b/docs/overview.rst
@@ -16,7 +16,7 @@ Installation
.. code-block:: text
- pip install adaptix==3.0.0b4
+ pip install adaptix==3.0.0b5
Example
diff --git a/docs/readme_advantages.md b/docs/readme_advantages.md
index da0ab711..feeccb33 100644
--- a/docs/readme_advantages.md
+++ b/docs/readme_advantages.md
@@ -7,7 +7,7 @@
* Support [dozens](https://adaptix.readthedocs.io/en/latest/loading-and-dumping/specific-types-behavior.html) of types,
including different model kinds:
``@dataclass``, ``TypedDict``, ``NamedTuple``,
- [``attrs``](https://www.attrs.org/en/stable/) and [``sqlalchemy``](https://docs.sqlalchemy.org/en/20/)
+ [``attrs``](https://www.attrs.org/en/stable/), [``sqlalchemy``](https://docs.sqlalchemy.org/en/20/) and [``pydantic``](https://docs.pydantic.dev/latest/).
* Working with self-referenced data types (such as linked lists or trees).
* Saving [path](https://adaptix.readthedocs.io/en/latest/loading-and-dumping/tutorial.html#error-handling)
where an exception is raised (including unexpected errors).
diff --git a/docs/reference/contributing.rst b/docs/reference/contributing.rst
index d61ca65c..a95bcf1e 100644
--- a/docs/reference/contributing.rst
+++ b/docs/reference/contributing.rst
@@ -10,7 +10,7 @@ How to setup the repository
You have to use WSL to develop the project on Windows.
-#. Install `Just `_
+#. Install `Just `__
Just is a command runner that is used here instead of ``make``.
@@ -35,7 +35,7 @@ How to setup the repository
directory ``benchmarks/release_data`` will be empty.
You can fix it executing ``git submodule update --init --recursive``.
-#. Create `venv `_ and run
+#. Create `venv `__ and run
.. code-block:: bash
@@ -77,7 +77,7 @@ Compile dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Compile raw dependencies (``requirements/raw/*``)
-into file with locked versions via `pip-tools `_.
+into file with locked versions via `pip-tools `__.
.. code-block:: bash
@@ -138,7 +138,7 @@ Produce coverage report
Create coverage report. All coverage reports will be merged into ``coverage.xml`` file at working directory.
You can import it to IDE. Instruction for
-`PyCharm `_.
+`PyCharm `__.
.. code-block:: bash
diff --git a/docs/reference/integrations.rst b/docs/reference/integrations.rst
index 621271ff..30d8373b 100644
--- a/docs/reference/integrations.rst
+++ b/docs/reference/integrations.rst
@@ -13,52 +13,97 @@ Models are classes that have a predefined set of fields. Adaptix process models
Models that are supported out of the box:
-- `dataclass `_
-- `NamedTuple `_
- (`namedtuple `_
+- `dataclass `__
+- `NamedTuple `__
+ (`namedtuple `__
also is supported, but types of all fields will be ``Any``)
-- `TypedDict `_
-- `attrs `_ (only from ``>=21.3.0``)
-- `sqlalchemy `_ (only from ``>=2.0.0``)
+- `TypedDict `__
+- `attrs `__ (only from ``>=21.3.0``)
+- `sqlalchemy `__ (only from ``>=2.0.0``)
+- `pydantic `__ (only from ``>=2.0.0``)
Arbitrary types also are supported to be loaded by introspection of ``__init__`` method,
but it can not be dumped.
You do not need to do anything to enable support for models from a third-party library.
-Everything just works. But you can install adaptix with certain `extras `_
+Everything just works. But you can install adaptix with certain `extras `__
to ensure version compatibility.
+Due to the way Python works with annotations, there is a `bug `__,
+when a field annotation of ``TypedDict`` is stringified or ``from __future__ import annotations`` is placed
+in the file, the ``Required`` and ``NotRequired`` specifiers are ignored
+when ``required_keys`` and ``optional_keys`` are calculated.
+Adaptix takes this into account and processes it properly.
-Known limitations:
-- dataclass
+Known peculiarities and limitations
+---------------------------------------
- - Signature of custom ``__init__`` method must be same as signature of generated by ``@dataclass``,
- because there is no way to distinguish them.
+dataclass
+^^^^^^^^^^^
+- Signature of custom ``__init__`` method must be same as signature of generated by ``@dataclass``,
+ because there is no way to distinguish them.
-- ``__init__`` introspection or using :func:`.constructor`
+``__init__`` introspection or using :func:`.constructor`
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- - Fields of unpacked typed dict (``**kwargs: Unpack[YourTypedDict]``) cannot collide with parameters of function.
+- Fields of unpacked typed dict (``**kwargs: Unpack[YourTypedDict]``) cannot collide with parameters of function.
-- sqlalchemy
+sqlalchemy
+^^^^^^^^^^^^^^^^
- - Only mapping to ``Table`` is supported,
- implementations for ``FromClause`` instances such as ``Subquery`` and ``Join`` are not provided.
+- Only mapping to ``Table`` is supported,
+ implementations for ``FromClause`` instances such as ``Subquery`` and ``Join`` are not provided.
- - ``dataclass`` and ``attrs`` mapped by sqlalchemy are not supported for introspection.
+- ``dataclass`` and ``attrs`` mapped by sqlalchemy are not supported for introspection.
- - It does not support registering order of mapped fields by design,
- so you should use manual mapping to list instead automatic ``as_list=True``.
+- It does not support registering order of mapped fields by design,
+ so you should use manual mapping to list instead of automatic ``as_list=True``.
- - Relationships with custom ``collection_class`` are not supported.
+- Relationships with custom ``collection_class`` are not supported.
- - All input fields of foreign keys and relationships are considered as optional
- due to user can pass only relationship instance or only foreign key value.
+- All input fields of foreign keys and relationships are considered as optional
+ because the user can pass only a relationship instance or only a foreign key value.
+pydantic
+^^^^^^^^^^^^^^^^^
-Due to the way Python works with annotations, there is a `bug `_,
-when field annotation of ``TypedDict`` is stringified or ``from __future__ import annotations`` is placed
-in file ``Required`` and ``NotRequired`` specifiers is ignored
-when ``required_keys`` and ``optional_keys`` is calculated.
-Adaptix takes this into account and processes it properly.
+- A custom ``__init__`` function must have only one parameter
+ accepting arbitrary keyword arguments (like ``**kwargs`` or ``**data``).
+
+- There are 3 categories of fields: regular fields, computed fields (marked properties) and private attributes.
+ Pydantic tracks order inside one category, but does not track it between categories.
+ Also, pydantic does not keep the right order inside private attributes.
+
+ Therefore, during the dumping of fields, regular fields will come first,
+ followed by computed fields, and then private attributes.
+ You can use manual mapping to list instead of automatic ``as_list=True`` to control the order.
+
+- Fields with constraints defined by parameters (like ``f1: int = Field(gt=1, ge=10)``)
+ are translated to ``Annotated`` with corresponding metadata.
+ Metadata is generated by Pydantic and consists of objects from
+ `annotated_types `__ package
+ (like ``Annotated[int, Gt(gt=1), Ge(ge=10)]``).
+
+- Parametrized generic pydantic models do not expose common type hints dunders
+ which prevents proper introspection of type hints.
+ This leads to incorrect generics resolving in some tricky cases.
+
+ Also, there are some bugs in generic resolving inside pydantic itself.
+
+- Pydantic does not support variadic generics.
+
+- ``pydantic.dataclasses`` is not supported.
+
+- ``pydantic.v1`` is not supported.
+
+
+Working with Pydantic
+=======================
+
+By default, any pydantic model is loaded and dumped like any other model.
+For example, any aliases or config parameters defined inside the model are ignored.
+You can override this behavior to use a native pydantic validation/serialization mechanism.
+
+.. literalinclude:: /examples/reference/integrations/native_pydantic.py
diff --git a/examples/api_division/retort.py b/examples/api_division/retort.py
index 1cf830f3..bbec6484 100644
--- a/examples/api_division/retort.py
+++ b/examples/api_division/retort.py
@@ -23,7 +23,7 @@ def string_cp866_mutator(data: str):
try:
t_data.encode("cp866", "strict")
except UnicodeEncodeError as e:
- bad_char = e.object[e.start: e.end] # pylint: disable=unsubscriptable-object
+ bad_char = e.object[e.start: e.end]
raise ValueLoadError(f"Char {bad_char!r} can not be represented at CP866", data)
return t_data
diff --git a/examples/sqlalchemy_json/adapter.py b/examples/sqlalchemy_json/adapter.py
index 36160b71..08c0b646 100644
--- a/examples/sqlalchemy_json/adapter.py
+++ b/examples/sqlalchemy_json/adapter.py
@@ -3,7 +3,6 @@
from adaptix import AdornedRetort, TypeHint
-# pylint: disable=abstract-method
# SQLAlchemy does not require to implement process_literal_param and python_type
class ModelJSON(TypeDecorator):
impl = JSON
diff --git a/examples/sqlalchemy_json/test_example.py b/examples/sqlalchemy_json/test_example.py
index 262eb441..c3ed4a49 100644
--- a/examples/sqlalchemy_json/test_example.py
+++ b/examples/sqlalchemy_json/test_example.py
@@ -1,4 +1,3 @@
-# pylint: disable=redefined-outer-name
import pytest
from sqlalchemy import insert, select, update
from sqlalchemy.orm import sessionmaker
diff --git a/pyproject.toml b/pyproject.toml
index 09962aeb..72f3bf85 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = 'setuptools.build_meta'
[project]
name = 'adaptix'
-version = '3.0.0b4'
+version = '3.0.0b5'
description = 'An extremely flexible and configurable data model conversion library'
readme = 'README.md'
requires-python = '>=3.8'
@@ -38,6 +38,8 @@ attrs = ['attrs >= 21.3.0']
attrs-strict = ['attrs >= 21.3.0, <= 23.2.0']
sqlalchemy = ['sqlalchemy >= 2.0.0']
sqlalchemy-strict = ['sqlalchemy >= 2.0.0, <= 2.0.29']
+pydantic = ['pydantic >= 2.0.0']
+pydantic-strict = ['pydantic >= 2.0.0, <= 2.7.0']
[project.urls]
'Homepage' = 'https://github.com/reagento/adaptix'
@@ -63,7 +65,13 @@ email = '17@itishka.org'
[tool.pytest.ini_options]
python_classes = 'WeDoNotUseClassTestCase'
-python_files = ['test_*.py', '*_test.py', 'bench_[!nexus]*.py', 'tests_helpers.py', 'local_helpers.py']
+python_files = [
+ 'test_*.py',
+ '*_test.py',
+ 'bench_[!nexus]*.py',
+ 'tests/tests_helpers/tests_helpers/*.py',
+ 'local_helpers.py',
+]
testpaths = ['tests', 'examples']
filterwarnings = ['ignore::adaptix.TypedDictAt38Warning']
@@ -189,7 +197,7 @@ parametrize-names-type = "list"
package = 'adaptix'
filename = 'docs/changelog/changelog_body.rst'
template = 'docs/changelog/template.rst.jinja2'
-issue_format = '`#{issue} `_'
+issue_format = '`#{issue} `__'
directory = 'docs/changelog/fragments'
type = [
diff --git a/requirements/bench.txt b/requirements/bench.txt
index aca014fe..54866fca 100644
--- a/requirements/bench.txt
+++ b/requirements/bench.txt
@@ -22,19 +22,19 @@ mashumaro==3.10
# via -r requirements/raw/bench.txt
msgspec==0.18.4 ; implementation_name != "pypy"
# via -r requirements/raw/bench.txt
-packaging==23.2
+packaging==24.0
# via
# marshmallow
# pytest
-pluggy==1.3.0
+pluggy==1.4.0
# via pytest
psutil==5.9.5
# via
# -r requirements/raw/bench.txt
# pyperf
-pydantic==2.4.2
+pydantic==2.7.0
# via -r requirements/raw/bench.txt
-pydantic-core==2.10.1
+pydantic-core==2.18.1
# via pydantic
pyperf==2.6.1
# via -r requirements/raw/bench.txt
@@ -42,7 +42,7 @@ pytest==7.4.2
# via -r requirements/raw/bench.txt
schematics==2.1.1
# via -r requirements/raw/bench.txt
-typing-extensions==4.9.0
+typing-extensions==4.11.0
# via
# mashumaro
# pydantic
diff --git a/requirements/bench_pypy38.txt b/requirements/bench_pypy38.txt
new file mode 100644
index 00000000..160c8343
--- /dev/null
+++ b/requirements/bench_pypy38.txt
@@ -0,0 +1,49 @@
+#
+# This file is autogenerated by pip-compile with Python 3.12
+# by the following command:
+#
+# pip-compile --allow-unsafe --output-file=requirements/bench_pypy38.txt --strip-extras requirements/raw/bench_pypy38.txt
+#
+-e ./benchmarks
+ # via -r requirements/raw/bench_pypy38.txt
+annotated-types==0.6.0
+ # via pydantic
+attrs==23.2.0
+ # via cattrs
+cattrs==23.1.2
+ # via -r requirements/raw/bench_pypy38.txt
+dataclass-factory==2.16
+ # via -r requirements/raw/bench_pypy38.txt
+iniconfig==2.0.0
+ # via pytest
+marshmallow==3.20.1
+ # via -r requirements/raw/bench_pypy38.txt
+mashumaro==3.10
+ # via -r requirements/raw/bench_pypy38.txt
+msgspec==0.18.4 ; implementation_name != "pypy"
+ # via -r requirements/raw/bench_pypy38.txt
+packaging==24.0
+ # via
+ # marshmallow
+ # pytest
+pluggy==1.4.0
+ # via pytest
+psutil==5.9.5
+ # via
+ # -r requirements/raw/bench_pypy38.txt
+ # pyperf
+pydantic==2.5.3
+ # via -r requirements/raw/bench_pypy38.txt
+pydantic-core==2.14.6
+ # via pydantic
+pyperf==2.6.1
+ # via -r requirements/raw/bench_pypy38.txt
+pytest==7.4.2
+ # via -r requirements/raw/bench_pypy38.txt
+schematics==2.1.1
+ # via -r requirements/raw/bench_pypy38.txt
+typing-extensions==4.11.0
+ # via
+ # mashumaro
+ # pydantic
+ # pydantic-core
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 75a4f979..d99fbd92 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -48,7 +48,7 @@ colorama==0.4.6
# tox
contourpy==1.2.0
# via matplotlib
-coverage==7.4.3
+coverage==7.4.4
# via
# -r requirements/raw/runner.txt
# -r requirements/raw/test_extra_none.txt
@@ -86,9 +86,7 @@ gitdb==4.0.11
gitpython==3.1.40
# via -r requirements/raw/doc.txt
greenlet==3.0.2
- # via
- # -r requirements/raw/test_extra_none.txt
- # sqlalchemy
+ # via sqlalchemy
identify==2.5.33
# via pre-commit
idna==3.6
@@ -182,9 +180,11 @@ psutil==5.9.5
# pyperf
pycodestyle==2.11.1
# via flake8
-pydantic==2.4.2
- # via -r requirements/raw/bench.txt
-pydantic-core==2.10.1
+pydantic==2.7.0
+ # via
+ # -r requirements/raw/bench.txt
+ # -r requirements/raw/test_extra_new.txt
+pydantic-core==2.18.1
# via pydantic
pyflakes==3.1.0
# via flake8
@@ -289,7 +289,6 @@ tox==4.11.4
# via -r requirements/raw/runner.txt
typing-extensions==4.9.0
# via
- # -r requirements/raw/test_extra_none.txt
# mashumaro
# mypy
# pydantic
diff --git a/requirements/doc.txt b/requirements/doc.txt
index 0f379fff..7434bb10 100644
--- a/requirements/doc.txt
+++ b/requirements/doc.txt
@@ -6,7 +6,7 @@
#
-e ./benchmarks
# via -r requirements/raw/bench.txt
-alabaster==0.7.13
+alabaster==0.7.16
# via sphinx
annotated-types==0.6.0
# via pydantic
@@ -14,11 +14,11 @@ attrs==23.2.0
# via cattrs
babel==2.14.0
# via sphinx
-beautifulsoup4==4.12.2
+beautifulsoup4==4.12.3
# via furo
cattrs==23.1.2
# via -r requirements/raw/bench.txt
-certifi==2023.11.17
+certifi==2024.2.2
# via requests
charset-normalizer==3.3.2
# via requests
@@ -39,13 +39,13 @@ gitdb==4.0.11
# via gitpython
gitpython==3.1.40
# via -r requirements/raw/doc.txt
-idna==3.6
+idna==3.7
# via requests
imagesize==1.4.1
# via sphinx
iniconfig==2.0.0
# via pytest
-jinja2==3.1.2
+jinja2==3.1.3
# via
# myst-parser
# sphinx
@@ -53,7 +53,7 @@ markdown-it-py==3.0.0
# via
# mdit-py-plugins
# myst-parser
-markupsafe==2.1.3
+markupsafe==2.1.5
# via jinja2
marshmallow==3.20.1
# via -r requirements/raw/bench.txt
@@ -67,7 +67,7 @@ msgspec==0.18.4 ; implementation_name != "pypy"
# via -r requirements/raw/bench.txt
myst-parser==2.0.0
# via -r requirements/raw/doc.txt
-packaging==23.2
+packaging==24.0
# via
# marshmallow
# plotly
@@ -77,15 +77,15 @@ pbr==6.0.0
# via sphinxcontrib-apidoc
plotly==5.18.0
# via -r requirements/raw/doc.txt
-pluggy==1.3.0
+pluggy==1.4.0
# via pytest
psutil==5.9.5
# via
# -r requirements/raw/bench.txt
# pyperf
-pydantic==2.4.2
+pydantic==2.7.0
# via -r requirements/raw/bench.txt
-pydantic-core==2.10.1
+pydantic-core==2.18.1
# via pydantic
pygments==2.17.2
# via
@@ -118,11 +118,6 @@ sphinx==7.2.6
# sphinx-paramlinks
# sphinx-reredirects
# sphinxcontrib-apidoc
- # sphinxcontrib-applehelp
- # sphinxcontrib-devhelp
- # sphinxcontrib-htmlhelp
- # sphinxcontrib-qthelp
- # sphinxcontrib-serializinghtml
# sphinxext-opengraph
sphinx-basic-ng==1.0.0b2
# via furo
@@ -138,26 +133,26 @@ sphinx-reredirects==0.1.3
# via -r requirements/raw/doc.txt
sphinxcontrib-apidoc==0.4.0
# via -r requirements/raw/doc.txt
-sphinxcontrib-applehelp==1.0.7
+sphinxcontrib-applehelp==1.0.8
# via sphinx
-sphinxcontrib-devhelp==1.0.5
+sphinxcontrib-devhelp==1.0.6
# via sphinx
-sphinxcontrib-htmlhelp==2.0.4
+sphinxcontrib-htmlhelp==2.0.5
# via sphinx
sphinxcontrib-jsmath==1.0.1
# via sphinx
-sphinxcontrib-qthelp==1.0.6
+sphinxcontrib-qthelp==1.0.7
# via sphinx
-sphinxcontrib-serializinghtml==1.1.9
+sphinxcontrib-serializinghtml==1.1.10
# via sphinx
sphinxext-opengraph==0.9.1
# via -r requirements/raw/doc.txt
tenacity==8.2.3
# via plotly
-typing-extensions==4.9.0
+typing-extensions==4.11.0
# via
# mashumaro
# pydantic
# pydantic-core
-urllib3==2.1.0
+urllib3==2.2.1
# via requests
diff --git a/requirements/lint.txt b/requirements/lint.txt
index d7f23e21..7786a9d2 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -36,7 +36,7 @@ colorama==0.4.6
# via radon
contourpy==1.2.0
# via matplotlib
-coverage==7.4.3
+coverage==7.4.4
# via -r requirements/raw/test_extra_none.txt
cycler==0.12.1
# via matplotlib
@@ -70,9 +70,7 @@ gitdb==4.0.11
gitpython==3.1.40
# via -r requirements/raw/doc.txt
greenlet==3.0.2
- # via
- # -r requirements/raw/test_extra_none.txt
- # sqlalchemy
+ # via sqlalchemy
identify==2.5.33
# via pre-commit
idna==3.6
@@ -152,9 +150,11 @@ psutil==5.9.5
# pyperf
pycodestyle==2.11.1
# via flake8
-pydantic==2.4.2
- # via -r requirements/raw/bench.txt
-pydantic-core==2.10.1
+pydantic==2.7.0
+ # via
+ # -r requirements/raw/bench.txt
+ # -r requirements/raw/test_extra_new.txt
+pydantic-core==2.18.1
# via pydantic
pyflakes==3.1.0
# via flake8
@@ -251,7 +251,6 @@ toml==0.10.2
# via vulture
typing-extensions==4.9.0
# via
- # -r requirements/raw/test_extra_none.txt
# mashumaro
# mypy
# pydantic
diff --git a/requirements/raw/bench.txt b/requirements/raw/bench.txt
index 61808c5f..5e41d4d3 100644
--- a/requirements/raw/bench.txt
+++ b/requirements/raw/bench.txt
@@ -6,7 +6,7 @@ pyperf==2.6.1
psutil==5.9.5
mashumaro==3.10
-pydantic==2.4.2
+pydantic==2.7.0
cattrs==23.1.2
schematics==2.1.1
dataclass-factory==2.16
diff --git a/requirements/raw/bench_pypy38.txt b/requirements/raw/bench_pypy38.txt
new file mode 100644
index 00000000..82febbd2
--- /dev/null
+++ b/requirements/raw/bench_pypy38.txt
@@ -0,0 +1,14 @@
+pytest==7.4.2
+
+-e ./benchmarks
+
+pyperf==2.6.1
+psutil==5.9.5
+
+mashumaro==3.10
+pydantic==2.5.3 # last version with pypy38 support
+cattrs==23.1.2
+schematics==2.1.1
+dataclass-factory==2.16
+marshmallow==3.20.1
+msgspec==0.18.4; implementation_name != "pypy"
diff --git a/requirements/raw/runner.txt b/requirements/raw/runner.txt
index 3f32b89e..a7528361 100644
--- a/requirements/raw/runner.txt
+++ b/requirements/raw/runner.txt
@@ -1,3 +1,3 @@
tox==4.11.4
invoke==2.2.0
-coverage==7.4.3
+coverage==7.4.4
diff --git a/requirements/raw/test_extra_new.txt b/requirements/raw/test_extra_new.txt
index 9d59fbfa..ff2fa457 100644
--- a/requirements/raw/test_extra_new.txt
+++ b/requirements/raw/test_extra_new.txt
@@ -1,3 +1,4 @@
-r test_extra_none.txt
attrs==23.2.0
sqlalchemy==2.0.29
+pydantic==2.7.0
diff --git a/requirements/raw/test_extra_new_pypy38.txt b/requirements/raw/test_extra_new_pypy38.txt
new file mode 100644
index 00000000..091b8f36
--- /dev/null
+++ b/requirements/raw/test_extra_new_pypy38.txt
@@ -0,0 +1,4 @@
+-r test_extra_none.txt
+attrs==23.2.0
+sqlalchemy==2.0.29
+pydantic==2.5.3 # last version with pypy38 support
diff --git a/requirements/raw/test_extra_none.txt b/requirements/raw/test_extra_none.txt
index 820dc5c3..0798217b 100644
--- a/requirements/raw/test_extra_none.txt
+++ b/requirements/raw/test_extra_none.txt
@@ -6,6 +6,4 @@ sqlalchemy>=2.0.0
dirty-equals==0.7.1.post0
-typing-extensions==4.9.0
-greenlet==3.0.2
-coverage==7.4.3
+coverage==7.4.4
diff --git a/requirements/raw/test_extra_old.txt b/requirements/raw/test_extra_old.txt
index b314752c..c49d6391 100644
--- a/requirements/raw/test_extra_old.txt
+++ b/requirements/raw/test_extra_old.txt
@@ -1,3 +1,10 @@
-r test_extra_none.txt
attrs==21.3.0
sqlalchemy==2.0.0
+pydantic==2.0.0
+
+# pydantic-core has dependency:
+# `typing-extensions >=4.6.0,<4.7.0; platform_python_implementation == "PyPy"`
+# The final requirements file is generated on CPython, ignoring this constraint.
+# This leads to a ResolutionImpossible error on PyPy runs.
+typing-extensions>=4.6.0, <4.7.0
diff --git a/requirements/runner.txt b/requirements/runner.txt
index 4feccc95..86e79038 100644
--- a/requirements/runner.txt
+++ b/requirements/runner.txt
@@ -10,7 +10,7 @@ chardet==5.2.0
# via tox
colorama==0.4.6
# via tox
-coverage==7.4.3
+coverage==7.4.4
# via -r requirements/raw/runner.txt
distlib==0.3.8
# via virtualenv
diff --git a/requirements/test_extra_new.txt b/requirements/test_extra_new.txt
index 84ae08ac..d163514b 100644
--- a/requirements/test_extra_new.txt
+++ b/requirements/test_extra_new.txt
@@ -6,16 +6,16 @@
#
-e ./tests/tests_helpers
# via -r requirements/raw/test_extra_none.txt
+annotated-types==0.6.0
+ # via pydantic
attrs==23.2.0
# via -r requirements/raw/test_extra_new.txt
-coverage==7.4.3
+coverage==7.4.4
# via -r requirements/raw/test_extra_none.txt
dirty-equals==0.7.1.post0
# via -r requirements/raw/test_extra_none.txt
greenlet==3.0.2
- # via
- # -r requirements/raw/test_extra_none.txt
- # sqlalchemy
+ # via sqlalchemy
iniconfig==2.0.0
# via pytest
packaging==23.2
@@ -24,6 +24,10 @@ phonenumberslite==8.13.26
# via -r requirements/raw/test_extra_none.txt
pluggy==1.3.0
# via pytest
+pydantic==2.7.0
+ # via -r requirements/raw/test_extra_new.txt
+pydantic-core==2.18.1
+ # via pydantic
pytest==7.4.2
# via -r requirements/raw/test_extra_none.txt
pytz==2023.3.post1
@@ -34,5 +38,6 @@ sqlalchemy==2.0.29
# -r requirements/raw/test_extra_none.txt
typing-extensions==4.9.0
# via
- # -r requirements/raw/test_extra_none.txt
+ # pydantic
+ # pydantic-core
# sqlalchemy
diff --git a/requirements/test_extra_new_pypy38.txt b/requirements/test_extra_new_pypy38.txt
new file mode 100644
index 00000000..eb5ddb32
--- /dev/null
+++ b/requirements/test_extra_new_pypy38.txt
@@ -0,0 +1,43 @@
+#
+# This file is autogenerated by pip-compile with Python 3.12
+# by the following command:
+#
+# pip-compile --allow-unsafe --output-file=requirements/test_extra_new_pypy38.txt --strip-extras requirements/raw/test_extra_new_pypy38.txt
+#
+-e ./tests/tests_helpers
+ # via -r requirements/raw/test_extra_none.txt
+annotated-types==0.6.0
+ # via pydantic
+attrs==23.2.0
+ # via -r requirements/raw/test_extra_new_pypy38.txt
+coverage==7.4.4
+ # via -r requirements/raw/test_extra_none.txt
+dirty-equals==0.7.1.post0
+ # via -r requirements/raw/test_extra_none.txt
+greenlet==3.0.3
+ # via sqlalchemy
+iniconfig==2.0.0
+ # via pytest
+packaging==24.0
+ # via pytest
+phonenumberslite==8.13.26
+ # via -r requirements/raw/test_extra_none.txt
+pluggy==1.4.0
+ # via pytest
+pydantic==2.5.3
+ # via -r requirements/raw/test_extra_new_pypy38.txt
+pydantic-core==2.14.6
+ # via pydantic
+pytest==7.4.2
+ # via -r requirements/raw/test_extra_none.txt
+pytz==2024.1
+ # via dirty-equals
+sqlalchemy==2.0.29
+ # via
+ # -r requirements/raw/test_extra_new_pypy38.txt
+ # -r requirements/raw/test_extra_none.txt
+typing-extensions==4.11.0
+ # via
+ # pydantic
+ # pydantic-core
+ # sqlalchemy
diff --git a/requirements/test_extra_none.txt b/requirements/test_extra_none.txt
index 034c665e..a50fa0f3 100644
--- a/requirements/test_extra_none.txt
+++ b/requirements/test_extra_none.txt
@@ -6,14 +6,12 @@
#
-e ./tests/tests_helpers
# via -r requirements/raw/test_extra_none.txt
-coverage==7.4.3
+coverage==7.4.4
# via -r requirements/raw/test_extra_none.txt
dirty-equals==0.7.1.post0
# via -r requirements/raw/test_extra_none.txt
greenlet==3.0.2
- # via
- # -r requirements/raw/test_extra_none.txt
- # sqlalchemy
+ # via sqlalchemy
iniconfig==2.0.0
# via pytest
packaging==23.2
@@ -29,6 +27,4 @@ pytz==2023.3.post1
sqlalchemy==2.0.23
# via -r requirements/raw/test_extra_none.txt
typing-extensions==4.9.0
- # via
- # -r requirements/raw/test_extra_none.txt
- # sqlalchemy
+ # via sqlalchemy
diff --git a/requirements/test_extra_old.txt b/requirements/test_extra_old.txt
index 058a5d0c..0a05bb50 100644
--- a/requirements/test_extra_old.txt
+++ b/requirements/test_extra_old.txt
@@ -6,33 +6,39 @@
#
-e ./tests/tests_helpers
# via -r requirements/raw/test_extra_none.txt
+annotated-types==0.6.0
+ # via pydantic
attrs==21.3.0
# via -r requirements/raw/test_extra_old.txt
-coverage==7.4.3
+coverage==7.4.4
# via -r requirements/raw/test_extra_none.txt
dirty-equals==0.7.1.post0
# via -r requirements/raw/test_extra_none.txt
-greenlet==3.0.2
- # via
- # -r requirements/raw/test_extra_none.txt
- # sqlalchemy
+greenlet==3.0.3
+ # via sqlalchemy
iniconfig==2.0.0
# via pytest
-packaging==23.2
+packaging==24.0
# via pytest
phonenumberslite==8.13.26
# via -r requirements/raw/test_extra_none.txt
-pluggy==1.3.0
+pluggy==1.4.0
# via pytest
+pydantic==2.0
+ # via -r requirements/raw/test_extra_old.txt
+pydantic-core==2.0.1
+ # via pydantic
pytest==7.4.2
# via -r requirements/raw/test_extra_none.txt
-pytz==2023.3.post1
+pytz==2024.1
# via dirty-equals
sqlalchemy==2.0.0
# via
# -r requirements/raw/test_extra_none.txt
# -r requirements/raw/test_extra_old.txt
-typing-extensions==4.9.0
+typing-extensions==4.6.3
# via
- # -r requirements/raw/test_extra_none.txt
+ # -r requirements/raw/test_extra_old.txt
+ # pydantic
+ # pydantic-core
# sqlalchemy
diff --git a/scripts/astpath_lint.py b/scripts/astpath_lint.py
index d808eabb..9e4a9bff 100644
--- a/scripts/astpath_lint.py
+++ b/scripts/astpath_lint.py
@@ -64,7 +64,7 @@ class RuleMatch:
module="typing",
variable="get_type_hints",
error_msg="Use type_tools.get_all_type_hints() instead of typing.get_type_hints()",
- exclude=["src/adaptix/_internal/type_tools/basic_utils.py"],
+ exclude=["src/adaptix/_internal/type_tools/fundamentals.py"],
),
ImportRule(
module="_decimal",
@@ -72,6 +72,18 @@ class RuleMatch:
error_msg='Import Decimal from public module "decimal"',
exclude=[],
),
+ ImportRule(
+ module="typing",
+ variable="get_args",
+ error_msg="Use type_tools.get_generic_args() instead of typing.get_args()",
+ exclude=["src/adaptix/_internal/type_tools/fundamentals.py"],
+ ),
+ ImportRule(
+ module="typing",
+ variable="get_origin",
+ error_msg="Use type_tools.strip_alias() instead of typing.get_origin()",
+ exclude=["src/adaptix/_internal/type_tools/fundamentals.py"],
+ ),
]
diff --git a/src/adaptix/_internal/feature_requirement.py b/src/adaptix/_internal/feature_requirement.py
index 23f02b50..ea10c9cb 100644
--- a/src/adaptix/_internal/feature_requirement.py
+++ b/src/adaptix/_internal/feature_requirement.py
@@ -174,5 +174,8 @@ def fail_reason(self) -> str:
HAS_SUPPORTED_SQLALCHEMY_PKG = DistributionVersionRequirement("sqlalchemy", "2.0.0")
HAS_SQLALCHEMY_PKG = DistributionRequirement("sqlalchemy")
+HAS_SUPPORTED_PYDANTIC_PKG = DistributionVersionRequirement("pydantic", "2.0.0")
+HAS_PYDANTIC_PKG = DistributionRequirement("pydantic")
+
IS_CPYTHON = PythonImplementationRequirement("cpython")
IS_PYPY = PythonImplementationRequirement("pypy")
diff --git a/src/adaptix/_internal/integrations/__init__.py b/src/adaptix/_internal/integrations/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/adaptix/_internal/integrations/pydantic/__init__.py b/src/adaptix/_internal/integrations/pydantic/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/adaptix/_internal/integrations/pydantic/native.py b/src/adaptix/_internal/integrations/pydantic/native.py
new file mode 100644
index 00000000..5f2c3dd7
--- /dev/null
+++ b/src/adaptix/_internal/integrations/pydantic/native.py
@@ -0,0 +1,144 @@
+from typing import Any, Callable, Dict, Literal, Mapping, Optional, TypeVar, Union
+
+from ...common import Dumper, Loader
+from ...morphing.load_error import LoadError
+from ...morphing.provider_template import DumperProvider, LoaderProvider
+from ...morphing.request_cls import DumperRequest, LoaderRequest
+from ...provider.essential import Mediator, Provider
+from ...provider.facade.provider import bound_by_any
+from ...provider.loc_stack_filtering import Pred
+from ...utils import Omittable, Omitted
+
+try:
+ from pydantic import ConfigDict, TypeAdapter, ValidationError
+ from pydantic.main import IncEx
+except ImportError:
+ pass
+
+
+T = TypeVar("T")
+
+
+class NativePydanticProvider(LoaderProvider, DumperProvider):
+ def __init__(
+ self,
+ config: Optional["ConfigDict"],
+ validation_params: Mapping[str, Omittable[Any]],
+ serialization_params: Mapping[str, Omittable[Any]],
+ ):
+ self._config = config
+ self._validation_params = validation_params
+ self._serialization_params = serialization_params
+
+ def _skip_omitted(self, mapping: Mapping[str, T]) -> Mapping[str, T]:
+ return {k: v for k, v in mapping.items() if v != Omitted()}
+
+ def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader:
+ validation_params = self._skip_omitted(self._validation_params)
+ validator = TypeAdapter(request.last_loc.type, config=self._config).validator.validate_python
+
+ if not validation_params:
+ def native_pydantic_loader_no_params(data):
+ try:
+ return validator(data)
+ except ValidationError as e:
+ raise LoadError from e
+
+ return native_pydantic_loader_no_params
+
+ def native_pydantic_loader(data):
+ try:
+ return validator(data, **validation_params)
+ except ValidationError as e:
+ raise LoadError from e
+
+ return native_pydantic_loader
+
+ def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper:
+ serialization_params = self._skip_omitted(self._serialization_params)
+ serializer = TypeAdapter(request.last_loc.type, config=self._config).serializer.to_python
+
+ if not serialization_params:
+ return serializer
+
+ def native_pydantic_dumper(data):
+ return serializer(data, **serialization_params)
+
+ return native_pydantic_dumper
+
+
+def native_pydantic(
+ *preds: Pred,
+ # loading (validation) parameters
+ strict: Omittable[Optional[bool]] = Omitted(),
+ from_attributes: Omittable[Optional[bool]] = Omitted(),
+ # dumping (serialization) parameters
+ mode: Omittable[Union[Literal["json", "python"], str]] = Omitted(), # noqa: PYI051
+ include: Omittable["IncEx"] = Omitted(),
+ exclude: Omittable["IncEx"] = Omitted(),
+ by_alias: Omittable[bool] = Omitted(),
+ exclude_unset: Omittable[bool] = Omitted(),
+ exclude_defaults: Omittable[bool] = Omitted(),
+ exclude_none: Omittable[bool] = Omitted(),
+ round_trip: Omittable[bool] = Omitted(),
+ warnings: Omittable[Union[bool, Literal["none", "warn", "error"]]] = Omitted(),
+ fallback: Omittable[Callable[[Any], Any]] = Omitted(),
+ serialize_as_any: Omittable[bool] = Omitted(),
+ # common parameters
+ context: Omittable[Optional[Dict[str, Any]]] = Omitted(),
+ config: Optional["ConfigDict"] = None,
+) -> Provider:
+ """Provider that represents value via pydantic.
+ You can use this function to validate or serialize pydantic models via pydantic itself.
+ Provider constructs ``TypeAdapter`` for a type to load and dump data.
+
+ :param preds: Predicates specifying where the provider should be used.
+ The provider will be applied if any predicates meet the conditions,
+        if no predicates are passed, the provider will be used for all types.
+ See :ref:`predicate-system` for details.
+
+ :param strict: Parameter passed directly to ``.validate_python()`` method
+ :param from_attributes: Parameter passed directly to ``.validate_python()`` method
+
+ :param mode: Parameter passed directly to ``.to_python()`` method
+ :param include: Parameter passed directly to ``.to_python()`` method
+ :param exclude: Parameter passed directly to ``.to_python()`` method
+ :param by_alias: Parameter passed directly to ``.to_python()`` method
+ :param exclude_unset: Parameter passed directly to ``.to_python()`` method
+ :param exclude_defaults: Parameter passed directly to ``.to_python()`` method
+ :param exclude_none: Parameter passed directly to ``.to_python()`` method
+ :param round_trip: Parameter passed directly to ``.to_python()`` method
+ :param warnings: Parameter passed directly to ``.to_python()`` method
+ :param fallback: Parameter passed directly to ``.to_python()`` method
+ :param serialize_as_any: Parameter passed directly to ``.to_python()`` method
+
+ :param context: Parameter passed directly to ``.validate_python()`` and ``.to_python()`` methods
+ :param config: Parameter passed directly to ``config`` parameter of ``TypeAdapter`` constructor
+
+ :return: Desired provider
+ """
+ return bound_by_any(
+ preds,
+ NativePydanticProvider(
+ config=config,
+ validation_params={
+ "strict": strict,
+ "from_attributes": from_attributes,
+ "context": context,
+ },
+ serialization_params={
+ "mode": mode,
+ "include": include,
+ "exclude": exclude,
+ "by_alias": by_alias,
+ "context": context,
+ "exclude_unset": exclude_unset,
+ "exclude_defaults": exclude_defaults,
+ "exclude_none": exclude_none,
+ "round_trip": round_trip,
+ "warnings": warnings,
+ "fallback": fallback,
+ "serialize_as_any": serialize_as_any,
+ },
+ ),
+ )
diff --git a/src/adaptix/_internal/model_tools/definitions.py b/src/adaptix/_internal/model_tools/definitions.py
index 7799c286..fca1492e 100644
--- a/src/adaptix/_internal/model_tools/definitions.py
+++ b/src/adaptix/_internal/model_tools/definitions.py
@@ -1,7 +1,6 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from enum import Enum
-from keyword import iskeyword
from typing import Any, Callable, FrozenSet, Generic, Hashable, Mapping, Optional, TypeVar, Union
from ..common import Catchable, TypeHint, VarTuple
@@ -147,7 +146,7 @@ def create_key_accessor(key: Union[str, int], access_error: Optional[Catchable])
def is_valid_field_id(value: str) -> bool:
- return value.isidentifier() or iskeyword(value)
+ return value.isidentifier()
@dataclass(frozen=True)
diff --git a/src/adaptix/_internal/model_tools/introspection/pydantic.py b/src/adaptix/_internal/model_tools/introspection/pydantic.py
new file mode 100644
index 00000000..0a599c48
--- /dev/null
+++ b/src/adaptix/_internal/model_tools/introspection/pydantic.py
@@ -0,0 +1,236 @@
+import inspect
+import itertools
+import typing
+from functools import cached_property
+from inspect import Parameter, Signature
+from typing import Any, Callable, Optional, Protocol, Sequence, Type
+
+try:
+ from pydantic import AliasChoices, BaseModel
+ from pydantic.fields import ComputedFieldInfo, FieldInfo
+ from pydantic_core import PydanticUndefined
+except ImportError:
+ pass
+
+from adaptix import TypeHint
+
+from ...feature_requirement import HAS_ANNOTATED, HAS_PYDANTIC_PKG, HAS_SUPPORTED_PYDANTIC_PKG
+from ...type_tools import get_all_type_hints, is_pydantic_class
+from ..definitions import (
+ ClarifiedIntrospectionError,
+ Default,
+ DefaultFactory,
+ DefaultValue,
+ FullShape,
+ InputField,
+ InputShape,
+ IntrospectionError,
+ NoDefault,
+ NoTargetPackageError,
+ OutputField,
+ OutputShape,
+ Param,
+ ParamKind,
+ ParamKwargs,
+ Shape,
+ TooOldPackageError,
+ create_attr_accessor,
+)
+
+
+class WithDefaults(Protocol):
+ default: Any
+ default_factory: Optional[Callable[[], Any]]
+
+
+def _get_default(field: WithDefaults) -> Default:
+ if field.default_factory is not None:
+ return DefaultFactory(field.default_factory)
+ if field.default is PydanticUndefined:
+ return NoDefault()
+ return DefaultValue(field.default)
+
+
+_config_defaults = {
+ "populate_by_name": False,
+ "extra": "ignore",
+}
+
+
+def _get_config_value(tp: "Type[BaseModel]", key: str) -> Any:
+ try:
+ return tp.model_config[key] # type: ignore[literal-required]
+ except KeyError:
+ pass
+
+ return _config_defaults[key]
+
+
+def _get_field_parameters(tp: "Type[BaseModel]", field_name: str, field_info: "FieldInfo") -> Sequence[str]:
+ # AliasPath is ignored
+ if field_info.validation_alias is None:
+ parameters = [field_name]
+ else:
+ parameters = [field_name] if _get_config_value(tp, "populate_by_name") else []
+ if isinstance(field_info.validation_alias, str):
+ parameters.append(field_info.validation_alias)
+ elif isinstance(field_info.validation_alias, AliasChoices):
+ parameters.extend(alias for alias in field_info.validation_alias.choices if isinstance(alias, str))
+ return [param for param in parameters if param.isidentifier()]
+
+
+def _get_field_parameter_name(tp: "Type[BaseModel]", field_name: str, field_info: "FieldInfo") -> str:
+ parameters = _get_field_parameters(tp, field_name, field_info)
+ if not parameters:
+ raise ClarifiedIntrospectionError(
+ f"Can not fetch parameter name for field {field_name!r}."
+ f" This means that field has only AliasPath aliases or non-python-identifier aliases"
+ f" and populate_by_name is disabled",
+ )
+ return parameters[0]
+
+
+def _signature_is_self_with_kwargs_only(init_signature: Signature) -> bool:
+ try:
+ self, kwargs = init_signature.parameters.values()
+ except ValueError:
+ return False
+ return (
+ self.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)
+ and kwargs.kind == Parameter.VAR_KEYWORD
+ )
+
+
+def _get_field_type(field_info: "FieldInfo") -> TypeHint:
+ if field_info.metadata and HAS_ANNOTATED:
+ return typing.Annotated[(field_info.annotation, *field_info.metadata)]
+ return field_info.annotation
+
+
+def _get_input_shape(tp: "Type[BaseModel]") -> InputShape:
+ if not _signature_is_self_with_kwargs_only(inspect.signature(tp.__init__)):
+ raise ClarifiedIntrospectionError(
+ "Pydantic model `__init__` must takes only self and one variable keyword parameter",
+ )
+
+ return InputShape(
+ constructor=tp,
+ fields=tuple(
+ InputField(
+ id=field_id,
+ type=_get_field_type(field_info),
+ default=_get_default(field_info),
+ metadata={}, # pydantic metadata is the list
+ original=field_info,
+ is_required=_get_default(field_info) == NoDefault(),
+ )
+ for field_id, field_info in tp.model_fields.items()
+ ),
+ overriden_types=frozenset(
+ field_id for field_id in tp.model_fields
+ if field_id in tp.__annotations__
+ ),
+ params=tuple(
+ Param(
+ field_id=field_id,
+ kind=ParamKind.KW_ONLY,
+ name=_get_field_parameter_name(tp, field_id, field_info),
+ )
+ for field_id, field_info in tp.model_fields.items()
+ ),
+ kwargs=None if _get_config_value(tp, "extra") == "forbid" else ParamKwargs(Any),
+ )
+
+
+def _unwrap_getter_function(descriptor, field_id: str):
+ if isinstance(descriptor, property):
+ if descriptor.fget is None:
+ raise ClarifiedIntrospectionError(f"Computed field {field_id!r} has no getter")
+ return descriptor.fget
+ if isinstance(descriptor, cached_property):
+ return descriptor.func
+ raise ClarifiedIntrospectionError(f"Computed field {field_id!r} has unknown descriptor {descriptor}")
+
+
+def _get_computed_field_type(field_id: str, computed_field_info: "ComputedFieldInfo") -> TypeHint:
+ if computed_field_info.return_type is not PydanticUndefined:
+ return computed_field_info.return_type
+
+ getter_function = _unwrap_getter_function(computed_field_info.wrapped_property, field_id)
+ signature = inspect.signature(getter_function)
+ if signature.return_annotation is inspect.Signature.empty:
+ return Any
+ return signature.return_annotation
+
+
+def _get_output_shape(tp: "Type[BaseModel]") -> OutputShape:
+ type_hints = get_all_type_hints(tp)
+ fields = itertools.chain(
+ (
+ OutputField(
+ id=field_id,
+ type=_get_field_type(field_info),
+ default=_get_default(field_info),
+ metadata={}, # pydantic metadata is the list
+ original=field_info,
+ accessor=create_attr_accessor(field_id, is_required=True),
+ )
+ for field_id, field_info in tp.model_fields.items()
+ ),
+ (
+ OutputField(
+ id=field_id,
+ type=_get_computed_field_type(field_id, computed_field_dec.info),
+ default=NoDefault(),
+ metadata={},
+ original=computed_field_dec.info,
+ accessor=create_attr_accessor(field_id, is_required=True),
+ )
+ for field_id, computed_field_dec in tp.__pydantic_decorators__.computed_fields.items()
+ ),
+ (
+ OutputField(
+ id=field_id,
+ type=type_hints.get(field_id, Any),
+ default=_get_default(private_attr),
+ metadata={},
+ original=private_attr,
+ accessor=create_attr_accessor(field_id, is_required=True),
+ )
+ for field_id, private_attr in tp.__private_attributes__.items()
+ ),
+ )
+ return OutputShape(
+ fields=tuple(fields),
+ overriden_types=frozenset(
+ itertools.chain(
+ (
+ field_id for field_id in tp.model_fields
+ if field_id in tp.__annotations__
+ ),
+ (
+ field_id for field_id in tp.__pydantic_decorators__.computed_fields
+ if field_id in tp.__dict__
+ ),
+ (
+ field_id for field_id in tp.__private_attributes__
+ if field_id in tp.__annotations__ or field_id not in type_hints
+ ),
+ ),
+ ),
+ )
+
+
+def get_pydantic_shape(tp) -> FullShape:
+ if not HAS_SUPPORTED_PYDANTIC_PKG:
+ if not HAS_PYDANTIC_PKG:
+ raise NoTargetPackageError(HAS_PYDANTIC_PKG)
+ raise TooOldPackageError(HAS_SUPPORTED_PYDANTIC_PKG)
+
+ if not is_pydantic_class(tp):
+ raise IntrospectionError
+
+ return Shape(
+ input=_get_input_shape(tp),
+ output=_get_output_shape(tp),
+ )
diff --git a/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py b/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py
index bb8b1610..cad62453 100644
--- a/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py
+++ b/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py
@@ -1,5 +1,5 @@
import inspect
-from typing import Any, Generic, List, Mapping, Optional, TypeVar, get_args, get_origin
+from typing import Any, Generic, List, Mapping, Optional, TypeVar
from ...common import TypeHint
@@ -20,7 +20,7 @@
pass
from ...feature_requirement import HAS_SQLALCHEMY_PKG, HAS_SUPPORTED_SQLALCHEMY_PKG
-from ...type_tools import get_all_type_hints
+from ...type_tools import get_all_type_hints, get_generic_args, strip_alias
from ..definitions import (
ClarifiedIntrospectionError,
DefaultFactory,
@@ -67,8 +67,8 @@ def _is_context_sensitive(default: "CallableColumnDefault"):
def _unwrap_mapped_annotation(type_hint: TypeHint) -> TypeHint:
- if get_origin(type_hint) == Mapped:
- return get_args(type_hint)[0]
+ if strip_alias(type_hint) == Mapped:
+ return get_generic_args(type_hint)[0]
return type_hint
diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py
index af692b28..2726a67b 100644
--- a/src/adaptix/_internal/morphing/concrete_provider.py
+++ b/src/adaptix/_internal/morphing/concrete_provider.py
@@ -6,6 +6,7 @@
from datetime import date, datetime, time, timedelta
from decimal import Decimal, InvalidOperation
from fractions import Fraction
+from io import BytesIO
from typing import Generic, Type, TypeVar, Union
from ..common import Dumper, Loader
@@ -138,6 +139,35 @@ def bytes_base64_loader(data):
return bytes_base64_loader
+@for_predicate(BytesIO)
+class BytesIOBase64Provider(BytesBase64Provider):
+ def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader:
+ bytes_base64_loader = super()._provide_loader(mediator, request)
+
+ def bytes_io_base64_loader(data):
+ return BytesIO(bytes_base64_loader(data))
+
+ return bytes_io_base64_loader
+
+ def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper:
+ def bytes_io_base64_dumper(data: BytesIO):
+ return b2a_base64(data.getvalue(), newline=False).decode("ascii")
+
+ return bytes_io_base64_dumper
+
+
+@for_predicate(typing.IO[bytes])
+class IOBytesBase64Provider(BytesIOBase64Provider):
+ def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper:
+ def io_bytes_base64_dumper(data: typing.IO[bytes]):
+ if data.seekable():
+ data.seek(0)
+
+ return b2a_base64(data.read(), newline=False).decode("ascii")
+
+ return io_bytes_base64_dumper
+
+
@for_predicate(bytearray)
class BytearrayBase64Provider(LoaderProvider, Base64DumperMixin):
_BYTES_PROVIDER = BytesBase64Provider()
diff --git a/src/adaptix/_internal/morphing/facade/retort.py b/src/adaptix/_internal/morphing/facade/retort.py
index 3134fd3c..f4983613 100644
--- a/src/adaptix/_internal/morphing/facade/retort.py
+++ b/src/adaptix/_internal/morphing/facade/retort.py
@@ -28,6 +28,8 @@
STR_LOADER_PROVIDER,
BytearrayBase64Provider,
BytesBase64Provider,
+ BytesIOBase64Provider,
+ IOBytesBase64Provider,
IsoFormatProvider,
LiteralStringProvider,
NoneProvider,
@@ -50,6 +52,7 @@
from ..model.dumper_provider import ModelDumperProvider
from ..model.loader_provider import ModelLoaderProvider
from ..name_layout.component import BuiltinExtraMoveAndPoliciesMaker, BuiltinSievesMaker, BuiltinStructureMaker
+from ..name_layout.name_mapping import SkipPrivateFieldsNameMappingProvider
from ..name_layout.provider import BuiltinNameLayoutProvider
from ..provider_template import ABCProxy
from ..request_cls import DumperRequest, LoaderRequest
@@ -97,6 +100,8 @@ class FilledRetort(OperatingRetort, ABC):
dumper(complex, complex.__str__),
BytesBase64Provider(),
+ BytesIOBase64Provider(),
+ IOBytesBase64Provider(),
BytearrayBase64Provider(),
*chain.from_iterable(
@@ -142,7 +147,9 @@ class FilledRetort(OperatingRetort, ABC):
chain=None,
skip=(),
only=P.ANY,
- map={},
+ map=[
+ SkipPrivateFieldsNameMappingProvider(),
+ ],
trim_trailing_underscore=True,
name_style=None,
as_list=False,
diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py
index 100d459e..e1c40cad 100644
--- a/src/adaptix/_internal/morphing/generic_provider.py
+++ b/src/adaptix/_internal/morphing/generic_provider.py
@@ -3,7 +3,7 @@
from enum import Enum
from os import PathLike
from pathlib import Path
-from typing import Any, Collection, Dict, Iterable, Literal, Sequence, Set, Type, Union
+from typing import Any, Collection, Dict, Iterable, Literal, Optional, Sequence, Set, Type, Union
from ..common import Dumper, Loader
from ..compat import CompatExceptionGroup
@@ -382,10 +382,15 @@ def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper:
return as_is_stub
return self._get_single_optional_dumper(not_none_dumper)
- non_class_origins = [case.source for case in norm.args if not self._is_class_origin(case.origin)]
- if non_class_origins:
+ forbidden_origins = [
+ case.source
+ for case in norm.args
+ if not self._is_class_origin(case.origin) and case.origin != Literal
+ ]
+
+ if forbidden_origins:
raise CannotProvide(
- f"All cases of union must be class, but found {non_class_origins}",
+ f"All cases of union must be class or Literal, but found {forbidden_origins}",
is_terminal=True,
is_demonstrative=True,
)
@@ -410,14 +415,51 @@ def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper:
dumper_type_dispatcher = ClassDispatcher(
{type(None) if case.origin is None else case.origin: dumper for case, dumper in zip(norm.args, dumpers)},
)
- return self._get_dumper(dumper_type_dispatcher)
- def _get_dumper(self, dumper_type_dispatcher: ClassDispatcher[Any, Dumper]) -> Dumper:
+ literal_dumper = self._get_dumper_for_literal(norm, dumpers, dumper_type_dispatcher)
+
+ if literal_dumper:
+ return literal_dumper
+
+ return self._produce_dumper(dumper_type_dispatcher)
+
+ def _produce_dumper(self, dumper_type_dispatcher: ClassDispatcher[Any, Dumper]) -> Dumper:
def union_dumper(data):
return dumper_type_dispatcher.dispatch(type(data))(data)
return union_dumper
+ def _produce_dumper_for_literal(
+ self,
+ dumper_type_dispatcher: ClassDispatcher[Any, Dumper],
+ literal_dumper: Dumper,
+ literal_cases: Sequence[Any],
+ ) -> Dumper:
+ def union_dumper_with_literal(data):
+ if data in literal_cases:
+ return literal_dumper(data)
+ return dumper_type_dispatcher.dispatch(type(data))(data)
+
+ return union_dumper_with_literal
+
+ def _get_dumper_for_literal(
+ self,
+ norm: BaseNormType,
+ dumpers: Iterable[Any],
+ dumper_type_dispatcher: ClassDispatcher[Any, Dumper],
+ ) -> Optional[Dumper]:
+ try:
+ literal_type, literal_dumper = next(
+ (union_case, dumper) for union_case, dumper
+ in zip(norm.args, dumpers)
+ if union_case.origin is Literal
+ )
+ except StopIteration:
+ return None
+
+ literal_cases = [strip_annotated(arg) for arg in literal_type.args]
+ return self._produce_dumper_for_literal(dumper_type_dispatcher, literal_dumper, literal_cases)
+
def _get_single_optional_dumper(self, dumper: Dumper) -> Dumper:
def optional_dumper(data):
if data is None:
diff --git a/src/adaptix/_internal/morphing/model/basic_gen.py b/src/adaptix/_internal/morphing/model/basic_gen.py
index 24324b1d..a5dd30af 100644
--- a/src/adaptix/_internal/morphing/model/basic_gen.py
+++ b/src/adaptix/_internal/morphing/model/basic_gen.py
@@ -85,9 +85,8 @@ def hook(data: CodeGenHookData):
@property
def code_pairs(self):
return [
- (request.loc_stack[-2].loc.type, hook_data.source)
+ (request.last_loc.type, hook_data.source)
for request, hook_data in self.list
- if len(request.loc_stack) >= 2 # noqa: PLR2004
]
@property
diff --git a/src/adaptix/_internal/morphing/name_layout/name_mapping.py b/src/adaptix/_internal/morphing/name_layout/name_mapping.py
index 6caa3408..0cf6d12f 100644
--- a/src/adaptix/_internal/morphing/name_layout/name_mapping.py
+++ b/src/adaptix/_internal/morphing/name_layout/name_mapping.py
@@ -4,7 +4,7 @@
from typing import Callable, Iterable, Mapping, Optional, Tuple, Union
from ...common import EllipsisType
-from ...model_tools.definitions import BaseField, BaseShape, is_valid_field_id
+from ...model_tools.definitions import BaseField, BaseShape, OutputField, is_valid_field_id
from ...provider.essential import CannotProvide, Mediator, Provider
from ...provider.loc_stack_filtering import LocStackChecker, Pred
from ...provider.provider_wrapper import ProviderWithLSC
@@ -59,7 +59,7 @@ def _validate(self) -> None:
)
@static_provision_action
- def _provide_input_name_layout(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]:
+ def _provide_name_mapping(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]:
try:
map_result = self._name_map[request.field.id]
except KeyError:
@@ -76,7 +76,7 @@ def get_loc_stack_checker(self) -> Optional[LocStackChecker]:
return self._loc_stack_checker
@static_provision_action
- def _provide_input_name_layout(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]:
+ def _provide_name_mapping(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]:
self._apply_loc_stack_checker(mediator, request)
return resolve_map_result(request.generated_key, self._result)
@@ -90,7 +90,17 @@ def get_loc_stack_checker(self) -> Optional[LocStackChecker]:
return self._loc_stack_checker
@static_provision_action
- def _provide_input_name_layout(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]:
+ def _provide_name_mapping(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]:
self._apply_loc_stack_checker(mediator, request)
result = self._func(request.shape, request.field)
return resolve_map_result(request.generated_key, result)
+
+
+class SkipPrivateFieldsNameMappingProvider(StaticProvider):
+ @static_provision_action
+ def _provide_name_mapping(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]:
+ if not isinstance(request.field, OutputField):
+ raise CannotProvide
+ if request.field.id.startswith("_"):
+ return None
+ raise CannotProvide
diff --git a/src/adaptix/_internal/provider/loc_stack_filtering.py b/src/adaptix/_internal/provider/loc_stack_filtering.py
index 753e42b4..f480d0c7 100644
--- a/src/adaptix/_internal/provider/loc_stack_filtering.py
+++ b/src/adaptix/_internal/provider/loc_stack_filtering.py
@@ -283,12 +283,19 @@ def create_loc_stack_checker(pred: Pred) -> LocStackChecker:
Pat = TypeVar("Pat", bound="LocStackPattern")
-class LocStackPattern:
- ANY = AnyLocStackChecker()
+_ANY = AnyLocStackChecker()
+
+class LocStackPattern:
def __init__(self, stack: VarTuple[LocStackChecker]):
self._stack = stack
+ @property
+ def ANY(self) -> AnyLocStackChecker: # noqa: N802
+ if self._stack:
+ raise AttributeError("You must access to ANY only via `P.ANY`, other usage is misleading")
+ return _ANY
+
@classmethod
def _from_lsc(cls: Type[Pat], lsc: LocStackChecker) -> Pat:
return cls((lsc, ))
diff --git a/src/adaptix/_internal/provider/shape_provider.py b/src/adaptix/_internal/provider/shape_provider.py
index dff50769..f32a970a 100644
--- a/src/adaptix/_internal/provider/shape_provider.py
+++ b/src/adaptix/_internal/provider/shape_provider.py
@@ -18,6 +18,7 @@
from ..model_tools.introspection.class_init import get_class_init_shape
from ..model_tools.introspection.dataclass import get_dataclass_shape
from ..model_tools.introspection.named_tuple import get_named_tuple_shape
+from ..model_tools.introspection.pydantic import get_pydantic_shape
from ..model_tools.introspection.sqlalchemy import get_sqlalchemy_shape
from ..model_tools.introspection.typed_dict import get_typed_dict_shape
from ..provider.essential import CannotProvide, Mediator
@@ -79,6 +80,7 @@ def _provide_output_shape(self, mediator: Mediator, request: OutputShapeRequest)
ShapeProvider(get_dataclass_shape),
ShapeProvider(get_attrs_shape),
ShapeProvider(get_sqlalchemy_shape),
+ ShapeProvider(get_pydantic_shape),
# class init introspection must be the last
ShapeProvider(get_class_init_shape),
)
diff --git a/src/adaptix/_internal/retort/operating_retort.py b/src/adaptix/_internal/retort/operating_retort.py
index 7aacc71b..3dcfe343 100644
--- a/src/adaptix/_internal/retort/operating_retort.py
+++ b/src/adaptix/_internal/retort/operating_retort.py
@@ -8,7 +8,7 @@
from ..provider.location import AnyLoc, FieldLoc
from ..provider.request_cls import LocatedRequest, LocStack
from ..type_tools import is_parametrized
-from ..utils import copy_exception_dunders, with_module
+from ..utils import add_note, copy_exception_dunders, with_module
from .base_retort import BaseRetort
from .mediator import ErrorRepresentor, RecursionResolver, T
@@ -129,7 +129,10 @@ def _facade_provide(self, request: Request[T], *, error_message: str) -> T:
return self._provide_from_recipe(request)
except CannotProvide as e:
cause = self._get_exception_cause(e)
- raise NoSuitableProvider(error_message) from cause
+ exception = NoSuitableProvider(error_message)
+ if cause is not None:
+ add_note(exception, "Note: The attached exception above contains verbose description of the problem")
+ raise exception from cause
def _get_exception_cause(self, exc: CannotProvide) -> Optional[CannotProvide]:
if isinstance(exc, AggregateCannotProvide):
diff --git a/src/adaptix/_internal/type_tools/__init__.py b/src/adaptix/_internal/type_tools/__init__.py
index 63df61d6..79d25e62 100644
--- a/src/adaptix/_internal/type_tools/__init__.py
+++ b/src/adaptix/_internal/type_tools/__init__.py
@@ -1,6 +1,5 @@
from .basic_utils import (
create_union,
- get_all_type_hints,
is_bare_generic,
is_generic,
is_generic_class,
@@ -11,8 +10,8 @@
is_subclass_soft,
is_typed_dict_class,
is_user_defined_generic,
- strip_alias,
)
+from .fundamentals import get_all_type_hints, get_generic_args, get_type_vars, is_pydantic_class, strip_alias
from .norm_utils import is_class_var, strip_tags
from .normalize_type import (
AnyNormTypeVarLike,
diff --git a/src/adaptix/_internal/type_tools/basic_utils.py b/src/adaptix/_internal/type_tools/basic_utils.py
index 5e853d9d..198b8aca 100644
--- a/src/adaptix/_internal/type_tools/basic_utils.py
+++ b/src/adaptix/_internal/type_tools/basic_utils.py
@@ -1,65 +1,45 @@
import types
import typing
-from typing import (
- Any,
- Dict,
- ForwardRef,
- Generic,
- Iterable,
- Protocol,
- TypedDict,
- TypeVar,
- Union,
- get_args,
- get_origin,
- get_type_hints,
-)
+from typing import Any, Dict, ForwardRef, Generic, NewType, Protocol, TypedDict, TypeVar, Union
from ..common import TypeHint, VarTuple
from ..feature_requirement import HAS_ANNOTATED, HAS_PY_39, HAS_PY_312, HAS_STD_CLASSES_GENERICS
from .constants import BUILTIN_ORIGIN_TO_TYPEVARS
-
-TYPED_DICT_MCS = type(types.new_class("_TypedDictSample", (TypedDict,), {}))
-
-
-def strip_alias(type_hint: TypeHint) -> TypeHint:
- origin = get_origin(type_hint)
- return type_hint if origin is None else origin
+from .fundamentals import get_generic_args, get_type_vars, strip_alias
def is_subclass_soft(cls, classinfo) -> bool:
- """Acts like builtin issubclass,
- but returns False instead of rising TypeError
- """
+ """Acts like builtin issubclass, but returns False instead of raising TypeError"""
try:
return issubclass(cls, classinfo)
except TypeError:
return False
-def has_attrs(obj, attrs: Iterable[str]) -> bool:
- return all(
- hasattr(obj, attr_name)
- for attr_name in attrs
- )
+_NEW_TYPE_CLS = type(NewType("", None))
def is_new_type(tp) -> bool:
- return has_attrs(tp, ["__supertype__", "__name__"])
+ return isinstance(tp, _NEW_TYPE_CLS)
+
+
+_TYPED_DICT_MCS = type(types.new_class("_TypedDictSample", (TypedDict,), {}))
def is_typed_dict_class(tp) -> bool:
- return isinstance(tp, TYPED_DICT_MCS)
+ return isinstance(tp, _TYPED_DICT_MCS)
-NAMED_TUPLE_METHODS = ("_fields", "_field_defaults", "_make", "_replace", "_asdict")
+_NAMED_TUPLE_METHODS = ("_fields", "_field_defaults", "_make", "_replace", "_asdict")
def is_named_tuple_class(tp) -> bool:
return (
is_subclass_soft(tp, tuple)
- and
- has_attrs(tp, NAMED_TUPLE_METHODS)
+ and all(
+ hasattr(tp, attr_name)
+ for attr_name in _NAMED_TUPLE_METHODS
+ )
)
@@ -74,23 +54,8 @@ def create_union(args: tuple):
return Union[args]
-if HAS_ANNOTATED:
- def get_all_type_hints(obj, globalns=None, localns=None):
- return get_type_hints(obj, globalns, localns, include_extras=True)
-else:
- get_all_type_hints = get_type_hints
-
-
def is_parametrized(tp: TypeHint) -> bool:
- return bool(get_args(tp))
-
-
-def get_type_vars(tp: TypeHint) -> VarTuple[TypeVar]:
- type_vars = getattr(tp, "__parameters__", ())
- # UnionType object contains descriptor inside `__parameters__`
- if not isinstance(type_vars, tuple):
- return ()
- return type_vars
+ return bool(get_generic_args(tp))
if HAS_PY_312:
@@ -124,7 +89,8 @@ def is_generic(tp: TypeHint) -> bool:
)
or (
bool(HAS_ANNOTATED)
- and get_origin(tp) == typing.Annotated
+ and strip_alias(tp) == typing.Annotated
+ and tp != typing.Annotated
and is_generic(tp.__origin__)
)
)
@@ -159,16 +125,14 @@ def is_generic_class(cls: type) -> bool:
def get_type_vars_of_parametrized(tp: TypeHint) -> VarTuple[TypeVar]:
- try:
- params = tp.__parameters__
- except AttributeError:
+ params = get_type_vars(tp)
+ if not params:
return ()
-
if isinstance(tp, type):
if HAS_STD_CLASSES_GENERICS and isinstance(tp, types.GenericAlias):
return params
return ()
- if get_origin(tp) is not None and get_args(tp) == ():
+ if strip_alias(tp) != tp and get_generic_args(tp) == ():
return ()
return params
diff --git a/src/adaptix/_internal/type_tools/fundamentals.py b/src/adaptix/_internal/type_tools/fundamentals.py
new file mode 100644
index 00000000..f83bfc3b
--- /dev/null
+++ b/src/adaptix/_internal/type_tools/fundamentals.py
@@ -0,0 +1,48 @@
+import types
+from typing import TypeVar, get_args, get_origin, get_type_hints
+
+from ..common import TypeHint, VarTuple
+from ..feature_requirement import HAS_ANNOTATED, HAS_SUPPORTED_PYDANTIC_PKG
+
+__all__ = ("is_pydantic_class", "strip_alias", "get_type_vars", "get_generic_args", "get_all_type_hints")
+
+
+if HAS_SUPPORTED_PYDANTIC_PKG:
+ from pydantic import BaseModel
+
+ _PYDANTIC_MCS = type(types.new_class("_PydanticSample", (BaseModel,), {}))
+
+ def is_pydantic_class(tp) -> bool:
+ return isinstance(tp, _PYDANTIC_MCS) and tp != BaseModel
+else:
+ def is_pydantic_class(tp) -> bool:
+ return False
+
+
+def strip_alias(tp: TypeHint) -> TypeHint:
+ origin = tp.__pydantic_generic_metadata__["origin"] if is_pydantic_class(tp) else get_origin(tp)
+ return tp if origin is None else origin
+
+
+def get_type_vars(tp: TypeHint) -> VarTuple[TypeVar]:
+ if is_pydantic_class(tp):
+ return tp.__pydantic_generic_metadata__["parameters"]
+
+ type_vars = getattr(tp, "__parameters__", ())
+ # UnionType object contains descriptor inside `__parameters__`
+ if not isinstance(type_vars, tuple):
+ return ()
+ return type_vars
+
+
+def get_generic_args(tp: TypeHint) -> VarTuple[TypeHint]:
+ if is_pydantic_class(tp):
+ return tp.__pydantic_generic_metadata__["args"]
+ return get_args(tp)
+
+
+if HAS_ANNOTATED:
+ def get_all_type_hints(obj, globalns=None, localns=None):
+ return get_type_hints(obj, globalns, localns, include_extras=True)
+else:
+ get_all_type_hints = get_type_hints
diff --git a/src/adaptix/_internal/type_tools/generic_resolver.py b/src/adaptix/_internal/type_tools/generic_resolver.py
index 7ff8e47a..90233a31 100644
--- a/src/adaptix/_internal/type_tools/generic_resolver.py
+++ b/src/adaptix/_internal/type_tools/generic_resolver.py
@@ -1,10 +1,11 @@
import typing
from dataclasses import dataclass, replace
from itertools import chain
-from typing import Callable, Collection, Dict, Generic, Hashable, Mapping, TypeVar, get_args
+from typing import Callable, Collection, Dict, Generic, Hashable, Mapping, TypeVar
from ..common import TypeHint
from ..feature_requirement import HAS_TV_TUPLE, HAS_UNPACK
+from . import get_generic_args
from .basic_utils import get_type_vars, get_type_vars_of_parametrized, is_generic, is_parametrized, strip_alias
from .implicit_params import fill_implicit_params
from .normalize_type import normalize_type
@@ -36,7 +37,7 @@ def _get_members_of_parametrized_generic(self, parametrized_generic) -> MembersS
members_storage = self._get_members_by_parents(origin)
type_var_to_actual = self._get_type_var_to_actual(
get_type_vars(origin),
- self._unpack_args(get_args(parametrized_generic)),
+ self._unpack_args(get_generic_args(parametrized_generic)),
)
return replace(
members_storage,
diff --git a/src/adaptix/_internal/type_tools/normalize_type.py b/src/adaptix/_internal/type_tools/normalize_type.py
index b0fa0b23..34e8b142 100644
--- a/src/adaptix/_internal/type_tools/normalize_type.py
+++ b/src/adaptix/_internal/type_tools/normalize_type.py
@@ -29,7 +29,6 @@
Type,
TypeVar,
Union,
- get_args,
overload,
)
@@ -49,6 +48,7 @@
HAS_UNPACK,
)
from .basic_utils import create_union, eval_forward_ref, is_new_type, is_subclass_soft, strip_alias
+from .fundamentals import get_generic_args
from .implicit_params import ImplicitParamsGetter
@@ -527,7 +527,7 @@ def normalize(self, tp: TypeHint) -> BaseNormType:
def normalize(self, tp: TypeHint) -> BaseNormType:
origin = strip_alias(tp)
- args = get_args(tp)
+ args = get_generic_args(tp)
result = self._norm_forward_ref(tp)
if result is not None:
diff --git a/src/adaptix/integrations/__init__.py b/src/adaptix/integrations/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/adaptix/integrations/pydantic/__init__.py b/src/adaptix/integrations/pydantic/__init__.py
new file mode 100644
index 00000000..9a1293af
--- /dev/null
+++ b/src/adaptix/integrations/pydantic/__init__.py
@@ -0,0 +1,5 @@
+from adaptix._internal.integrations.pydantic.native import native_pydantic
+
+__all__ = (
+ "native_pydantic",
+)
diff --git a/tests/conftest.py b/tests/conftest.py
index a9dbd7e0..afed1904 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,8 +1,8 @@
import pytest
-from tests_helpers import ByTrailSelector, ModelSpecSchema, parametrize_model_spec
+from tests_helpers import ByTrailSelector, ModelSpecSchema, cond_list, parametrize_model_spec
from adaptix import DebugTrail
-from adaptix._internal.feature_requirement import HAS_PY_312
+from adaptix._internal.feature_requirement import HAS_ATTRS_PKG, HAS_PY_312, HAS_PYDANTIC_PKG
@pytest.fixture(params=[False, True], ids=lambda x: f"strict_coercion={x}")
@@ -41,4 +41,8 @@ def pytest_generate_tests(metafunc):
parametrize_model_spec("dst_model_spec", metafunc)
-collect_ignore_glob = [] if HAS_PY_312 else ["*_312.py"]
+collect_ignore_glob = [
+ *cond_list(not HAS_PY_312, ["*_312.py"]),
+ *cond_list(not HAS_ATTRS_PKG, ["*_attrs.py", "*_attrs_*.py"]),
+ *cond_list(not HAS_PYDANTIC_PKG, ["*_pydantic.py", "*_pydantic_*.py", "**/pydantic/**"]),
+]
diff --git a/tests/integration/conversion/test_coercer.py b/tests/integration/conversion/test_coercer.py
index c06ee968..be0c2a8c 100644
--- a/tests/integration/conversion/test_coercer.py
+++ b/tests/integration/conversion/test_coercer.py
@@ -3,6 +3,7 @@
import pytest
from tests_helpers import cond_list
+from tests_helpers.model_spec import ModelSpec
from adaptix import P
from adaptix._internal.conversion.facade.provider import from_param, link
@@ -266,7 +267,18 @@ class DestModel(*model_spec.bases):
def convert(a: SourceModel) -> DestModel:
...
- assert convert(SourceModel(field1=1, field2=src_value)) == DestModel(field1=1, field2=dst_value)
+ if model_spec.kind == ModelSpec.PYDANTIC:
+ def mutate_iterable_field(model):
+ model.field2 = type(dst_value)(model.field2)
+ return model
+
+ assert (
+ mutate_iterable_field(convert(SourceModel(field1=1, field2=src_value)))
+ ==
+ mutate_iterable_field(DestModel(field1=1, field2=dst_value))
+ )
+ else:
+ assert convert(SourceModel(field1=1, field2=src_value)) == DestModel(field1=1, field2=dst_value)
def test_iterable_with_model(model_spec):
diff --git a/tests/integration/morphing/test_attrs.py b/tests/integration/morphing/test_attrs.py
index 196dafdf..f9e9e1bb 100644
--- a/tests/integration/morphing/test_attrs.py
+++ b/tests/integration/morphing/test_attrs.py
@@ -1,11 +1,7 @@
-import pytest
+from attr import Factory, define, field
from adaptix import Retort, name_mapping
-pytest.importorskip("attrs")
-
-from attr import Factory, define, field # noqa: E402
-
@define
class Coordinates:
diff --git a/tests/integration/morphing/test_dump_order.py b/tests/integration/morphing/test_dump_order.py
index ae3bbbf7..1b760a61 100644
--- a/tests/integration/morphing/test_dump_order.py
+++ b/tests/integration/morphing/test_dump_order.py
@@ -16,7 +16,7 @@ class Example:
assert list(dumper(Example(c=1, a=2, b=3)).items()) == [("c", 1), ("a", 2), ("b", 3)]
-def test_name_flatenning(accum):
+def test_name_flattening(accum):
@dataclass
class Example:
c: int
diff --git a/tests/integration/morphing/test_pydantic.py b/tests/integration/morphing/test_pydantic.py
new file mode 100644
index 00000000..8d955101
--- /dev/null
+++ b/tests/integration/morphing/test_pydantic.py
@@ -0,0 +1,33 @@
+from pydantic import BaseModel, computed_field
+
+from adaptix import Retort
+
+
+def test_basic(accum):
+ class MyModel(BaseModel):
+ f1: int
+ f2: str
+
+ retort = Retort(recipe=[accum])
+ assert retort.load({"f1": 0, "f2": "a"}, MyModel) == MyModel(f1=0, f2="a")
+ assert retort.dump(MyModel(f1=0, f2="a")) == {"f1": 0, "f2": "a"}
+
+
+def test_all_field_kinds(accum):
+ class MyModel(BaseModel):
+ a: int
+
+ @computed_field
+ @property
+ def b(self) -> str:
+ return "b_value"
+
+ _c: int
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self._c = 2
+
+ retort = Retort(recipe=[accum])
+ assert retort.load({"a": 0}, MyModel) == MyModel(a=0)
+ assert retort.dump(MyModel(a=0)) == {"a": 0, "b": "b_value"}
diff --git a/tests/test_doc.py b/tests/test_doc.py
index 1d8caeab..2f076329 100644
--- a/tests/test_doc.py
+++ b/tests/test_doc.py
@@ -3,7 +3,7 @@
import pytest
-from adaptix._internal.feature_requirement import HAS_PY_311
+from adaptix._internal.feature_requirement import HAS_PY_311, HAS_SUPPORTED_PYDANTIC_PKG
REPO_ROOT = Path(__file__).parent.parent
DOCS_EXAMPLES_ROOT = REPO_ROOT / "docs" / "examples"
@@ -33,9 +33,20 @@ def pytest_generate_tests(metafunc):
)
+CASES_REQUIREMENTS = {
+ "loading-and-dumping/tutorial/unexpected_error": HAS_PY_311,
+ "reference/integrations/native_pydantic": HAS_SUPPORTED_PYDANTIC_PKG,
+ "loading-and-dumping/extended_usage/private_fields_including_no_rename_pydantic": HAS_SUPPORTED_PYDANTIC_PKG,
+ "loading-and-dumping/extended_usage/private_fields_including_pydantic": HAS_SUPPORTED_PYDANTIC_PKG,
+ "loading-and-dumping/extended_usage/private_fields_skipping_pydantic": HAS_SUPPORTED_PYDANTIC_PKG,
+}
+
+
def test_example(import_path: str, case_id: str):
- if case_id == "loading-and-dumping/tutorial/unexpected_error" and not HAS_PY_311:
- pytest.skip("Need Python >= 3.11")
+ if case_id in CASES_REQUIREMENTS:
+ requirement = CASES_REQUIREMENTS[case_id]
+ if not requirement:
+ pytest.skip(requirement.fail_reason)
pytest.register_assert_rewrite(import_path)
importlib.import_module(import_path)
diff --git a/tests/test_meta.py b/tests/test_meta.py
new file mode 100644
index 00000000..aee895a8
--- /dev/null
+++ b/tests/test_meta.py
@@ -0,0 +1,9 @@
+import importlib
+from pkgutil import walk_packages
+
+import adaptix
+
+
+def test_all_modules_is_importable():
+ for module_info in walk_packages(adaptix.__path__, f"{adaptix.__name__}."):
+ importlib.import_module(module_info.name)
diff --git a/tests/tests_helpers/tests_helpers/__init__.py b/tests/tests_helpers/tests_helpers/__init__.py
index 1ffd89a9..5f3f5cca 100644
--- a/tests/tests_helpers/tests_helpers/__init__.py
+++ b/tests/tests_helpers/tests_helpers/__init__.py
@@ -4,7 +4,6 @@
DebugCtx,
FailedRequirement,
PlaceholderProvider,
- TestRetort,
cond_list,
create_sa_engine,
full_match,
@@ -23,6 +22,7 @@
ModelSpecSchema,
exclude_model_spec,
only_generic_models,
+ only_model_spec,
parametrize_model_spec,
sqlalchemy_equals,
)
diff --git a/tests/tests_helpers/tests_helpers/misc.py b/tests/tests_helpers/tests_helpers/misc.py
index a0c62ec5..64baa044 100644
--- a/tests/tests_helpers/tests_helpers/misc.py
+++ b/tests/tests_helpers/tests_helpers/misc.py
@@ -13,7 +13,7 @@
import pytest
from sqlalchemy import Engine, create_engine
-from adaptix import AdornedRetort, CannotProvide, DebugTrail, Mediator, NoSuitableProvider, Provider, Request
+from adaptix import CannotProvide, DebugTrail, Mediator, NoSuitableProvider, Provider, Request
from adaptix._internal.compat import CompatExceptionGroup
from adaptix._internal.feature_requirement import DistributionVersionRequirement, Requirement
from adaptix._internal.morphing.model.basic_gen import CodeGenAccumulator
@@ -37,11 +37,6 @@ def wrapper(func):
return wrapper
-class TestRetort(AdornedRetort):
- def provide(self, request: Request[T]) -> T:
- return self._facade_provide(request, error_message=f"cannot provide {request}")
-
-
E = TypeVar("E", bound=Exception)
diff --git a/tests/tests_helpers/tests_helpers/model_spec.py b/tests/tests_helpers/tests_helpers/model_spec.py
index 68a222b4..5caf493e 100644
--- a/tests/tests_helpers/tests_helpers/model_spec.py
+++ b/tests/tests_helpers/tests_helpers/model_spec.py
@@ -7,7 +7,13 @@
import pytest
from _pytest.python import Metafunc
-from adaptix._internal.feature_requirement import HAS_ATTRS_PKG, HAS_PY_311, HAS_SQLALCHEMY_PKG, Requirement
+from adaptix._internal.feature_requirement import (
+ HAS_ATTRS_PKG,
+ HAS_PY_311,
+ HAS_PYDANTIC_PKG,
+ HAS_SQLALCHEMY_PKG,
+ Requirement,
+)
from adaptix._internal.type_tools import get_all_type_hints
from .misc import FailedRequirement
@@ -30,12 +36,14 @@ class ModelSpec(Enum):
NAMED_TUPLE = "named_tuple"
ATTRS = "attrs"
SQLALCHEMY = "sqlalchemy"
+ PYDANTIC = "pydantic"
@classmethod
def default_requirements(cls):
return {
cls.ATTRS: HAS_ATTRS_PKG,
cls.SQLALCHEMY: HAS_SQLALCHEMY_PKG,
+ cls.PYDANTIC: HAS_PYDANTIC_PKG,
}
@@ -89,6 +97,13 @@ def model_spec_to_schema(spec: ModelSpec):
return ModelSpecSchema(decorator=define, bases=(), get_field=getattr, kind=spec)
if spec == ModelSpec.SQLALCHEMY:
return ModelSpecSchema(decorator=create_sqlalchemy_decorator(), bases=(), get_field=getattr, kind=spec)
+ if spec == ModelSpec.PYDANTIC:
+ from pydantic import BaseModel, ConfigDict
+
+ class CustomBaseModel(BaseModel):
+ model_config = ConfigDict(arbitrary_types_allowed=True)
+
+ return ModelSpecSchema(decorator=lambda x: x, bases=(CustomBaseModel, ), get_field=getattr, kind=spec)
raise ValueError
@@ -102,6 +117,19 @@ def decorator(func):
return decorator
+def only_model_spec(first_spec: ModelSpec, *other_specs: ModelSpec):
+ specs = [first_spec, *other_specs]
+ return exclude_model_spec(*[model_spec for model_spec in ModelSpec if model_spec not in specs])
+
+
+def with_model_spec_requirement(requirements: Mapping[ModelSpec, Requirement]):
+ def decorator(func):
+ func.adaptix_model_spec_requirements = requirements
+ return func
+
+ return decorator
+
+
GENERIC_MODELS_REQUIREMENTS: Mapping[ModelSpec, Requirement] = {
ModelSpec.TYPED_DICT: HAS_PY_311,
ModelSpec.NAMED_TUPLE: HAS_PY_311,
diff --git a/tests/unit/integrations/__init__.py b/tests/unit/integrations/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/integrations/pydantic/__init__.py b/tests/unit/integrations/pydantic/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/integrations/pydantic/test_native.py b/tests/unit/integrations/pydantic/test_native.py
new file mode 100644
index 00000000..affb29e4
--- /dev/null
+++ b/tests/unit/integrations/pydantic/test_native.py
@@ -0,0 +1,67 @@
+from pydantic import BaseModel, ValidationError
+from tests_helpers import raises_exc, with_cause
+
+from adaptix import Retort
+from adaptix._internal.integrations.pydantic.native import native_pydantic
+from adaptix._internal.morphing.load_error import LoadError
+
+
+def create_stub_validation_error():
+ return ValidationError.from_exception_data(title="", line_errors=[])
+
+
+def test_validation_without_params():
+ class MyModel(BaseModel):
+ a: int
+ b: str
+
+ retort = Retort(
+ recipe=[native_pydantic(MyModel)],
+ )
+
+ loader_ = retort.get_loader(MyModel)
+ assert loader_({"a": 1, "b": "value"}) == MyModel(a=1, b="value")
+ raises_exc(
+ with_cause(LoadError(), create_stub_validation_error()),
+ lambda: loader_({"a": "abc", "b": "value"}),
+ )
+
+
+def test_with_params():
+ class MyModel(BaseModel):
+ a: int
+ b: str
+
+ retort = Retort(
+ recipe=[native_pydantic(MyModel, strict=True)],
+ )
+
+ loader_ = retort.get_loader(MyModel)
+ assert loader_({"a": 1, "b": "value"}) == MyModel(a=1, b="value")
+ raises_exc(
+ with_cause(LoadError(), create_stub_validation_error()),
+ lambda: loader_({"a": "1", "b": "value"}),
+ )
+
+ dumper_ = retort.get_dumper(MyModel)
+ assert dumper_(MyModel(a=1, b="value")) == {"a": 1, "b": "value"}
+
+
+def test_serialization_with_params():
+ class MyModel(BaseModel):
+ a: int
+ b: str
+
+ retort = Retort(
+ recipe=[native_pydantic(MyModel, strict=True)],
+ )
+
+ loader_ = retort.get_loader(MyModel)
+ assert loader_({"a": 1, "b": "value"}) == MyModel(a=1, b="value")
+ raises_exc(
+ with_cause(LoadError(), create_stub_validation_error()),
+ lambda: loader_({"a": "1", "b": "value"}),
+ )
+
+ dumper_ = retort.get_dumper(MyModel)
+ assert dumper_(MyModel(a=1, b="value")) == {"a": 1, "b": "value"}
diff --git a/tests/unit/model_tools/introspection/test_attrs.py b/tests/unit/model_tools/introspection/test_attrs.py
index c81a3b83..10bdcd3b 100644
--- a/tests/unit/model_tools/introspection/test_attrs.py
+++ b/tests/unit/model_tools/introspection/test_attrs.py
@@ -4,7 +4,9 @@
from typing import Any, Tuple
from unittest.mock import ANY
+import attr
import pytest
+from attrs import Factory, define, field
from tests_helpers import ATTRS_WITH_ALIAS, requires
from adaptix._internal.feature_requirement import HAS_ANNOTATED
@@ -26,11 +28,6 @@
)
from adaptix._internal.model_tools.introspection.attrs import get_attrs_shape
-pytest.importorskip("attrs")
-
-import attr # noqa: E402
-from attrs import Factory, define, field # noqa: E402
-
def int_factory_with_self(x):
return 0
diff --git a/tests/unit/model_tools/introspection/test_pydantic.py b/tests/unit/model_tools/introspection/test_pydantic.py
new file mode 100644
index 00000000..09b0de22
--- /dev/null
+++ b/tests/unit/model_tools/introspection/test_pydantic.py
@@ -0,0 +1,1036 @@
+from functools import cached_property
+from typing import Any
+from unittest.mock import ANY
+
+import pytest
+from annotated_types import Ge, Gt
+from pydantic import BaseModel, ConfigDict, Field, PrivateAttr, computed_field
+from pydantic.fields import AliasChoices, AliasPath, ModelPrivateAttr
+from pydantic_core import PydanticUndefined
+from tests_helpers import parametrize_bool, raises_exc, requires
+
+from adaptix._internal.feature_requirement import HAS_ANNOTATED
+from adaptix._internal.model_tools.definitions import (
+ ClarifiedIntrospectionError,
+ DefaultFactory,
+ DefaultValue,
+ InputField,
+ InputShape,
+ NoDefault,
+ OutputField,
+ OutputShape,
+ Param,
+ ParamKind,
+ ParamKwargs,
+ Shape,
+ create_attr_accessor,
+)
+from adaptix._internal.model_tools.introspection.pydantic import get_pydantic_shape
+
+
+def test_basic():
+ class MyModel(BaseModel):
+ a: str
+ b: str = Field()
+ c: str = "foo"
+ d: str = Field("foo")
+ e: list = Field(default_factory=list)
+
+ assert get_pydantic_shape(MyModel) == Shape(
+ input=InputShape(
+ kwargs=ParamKwargs(type=Any),
+ constructor=MyModel,
+ fields=(
+ InputField(
+ id="a",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ InputField(id="b", type=str, default=NoDefault(), metadata={}, original=ANY, is_required=True),
+ InputField(
+ id="c",
+ type=str,
+ default=DefaultValue(value="foo"),
+ metadata={},
+ original=ANY,
+ is_required=False,
+ ),
+ InputField(
+ id="d",
+ type=str,
+ default=DefaultValue(value="foo"),
+ metadata={},
+ original=ANY,
+ is_required=False,
+ ),
+ InputField(
+ id="e",
+ type=list,
+ default=DefaultFactory(factory=list),
+ metadata={},
+ original=ANY,
+ is_required=False,
+ ),
+ ),
+ overriden_types=frozenset({"c", "e", "d", "a", "b"}),
+ params=(
+ Param(field_id="a", name="a", kind=ParamKind.KW_ONLY),
+ Param(field_id="b", name="b", kind=ParamKind.KW_ONLY),
+ Param(field_id="c", name="c", kind=ParamKind.KW_ONLY),
+ Param(field_id="d", name="d", kind=ParamKind.KW_ONLY),
+ Param(field_id="e", name="e", kind=ParamKind.KW_ONLY),
+ ),
+ ),
+ output=OutputShape(
+ fields=(
+ OutputField(
+ id="a",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="a", is_required=True),
+ ),
+ OutputField(
+ id="b",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="b", is_required=True),
+ ),
+ OutputField(
+ id="c",
+ type=str,
+ default=DefaultValue(value="foo"),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="c", is_required=True),
+ ),
+ OutputField(
+ id="d",
+ type=str,
+ default=DefaultValue(value="foo"),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="d", is_required=True),
+ ),
+ OutputField(
+ id="e",
+ type=list,
+ default=DefaultFactory(factory=list),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="e", is_required=True),
+ ),
+ ),
+ overriden_types=frozenset({"c", "e", "d", "a", "b"}),
+ ),
+ )
+
+
+def test_fields_with_ellipsis_default():
+ class MyModel(BaseModel):
+ a: int
+ b: int = ...
+ c: int = Field(...)
+
+ assert get_pydantic_shape(MyModel) == Shape(
+ input=InputShape(
+ fields=(
+ InputField(
+ id="a",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ InputField(
+ id="b",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ InputField(
+ id="c",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ ),
+ overriden_types=frozenset({"a", "b", "c"}),
+ params=(
+ Param(field_id="a", name="a", kind=ParamKind.KW_ONLY),
+ Param(field_id="b", name="b", kind=ParamKind.KW_ONLY),
+ Param(field_id="c", name="c", kind=ParamKind.KW_ONLY),
+ ),
+ kwargs=ParamKwargs(type=Any),
+ constructor=MyModel,
+ ),
+ output=OutputShape(
+ fields=(
+ OutputField(
+ id="a",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="a", is_required=True),
+ ),
+ OutputField(
+ id="b",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="b", is_required=True),
+ ),
+ OutputField(
+ id="c",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="c", is_required=True),
+ ),
+ ),
+ overriden_types=frozenset({"a", "b", "c"}),
+ ),
+ )
+
+
+def test_private_attrs():
+ class MyModel(BaseModel):
+ a: str
+ _b: int
+ _c: int = PrivateAttr()
+ _d: int = 1
+ _e: int = PrivateAttr(2)
+ _f: list = PrivateAttr(default_factory=list)
+ _g = PrivateAttr(default_factory=list)
+
+ assert get_pydantic_shape(MyModel) == Shape(
+ input=InputShape(
+ fields=(
+ InputField(
+ id="a",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ ),
+ overriden_types=frozenset({"a"}),
+ params=(Param(field_id="a", name="a", kind=ParamKind.KW_ONLY),),
+ kwargs=ParamKwargs(type=Any),
+ constructor=MyModel,
+ ),
+ output=OutputShape(
+ fields=(
+ OutputField(
+ id="a",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="a", is_required=True),
+ ),
+ OutputField(
+ id="_c",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ModelPrivateAttr(),
+ accessor=create_attr_accessor(attr_name="_c", is_required=True),
+ ),
+ OutputField(
+ id="_d",
+ type=int,
+ default=DefaultValue(value=1),
+ metadata={},
+ original=ModelPrivateAttr(default=1),
+ accessor=create_attr_accessor(attr_name="_d", is_required=True),
+ ),
+ OutputField(
+ id="_e",
+ type=int,
+ default=DefaultValue(value=2),
+ metadata={},
+ original=ModelPrivateAttr(default=2),
+ accessor=create_attr_accessor(attr_name="_e", is_required=True),
+ ),
+ OutputField(
+ id="_f",
+ type=list,
+ default=DefaultFactory(factory=list),
+ metadata={},
+ original=ModelPrivateAttr(default_factory=list),
+ accessor=create_attr_accessor(attr_name="_f", is_required=True),
+ ),
+ OutputField(
+ id="_g",
+ type=Any,
+ default=DefaultFactory(factory=list),
+ metadata={},
+ original=ModelPrivateAttr(default_factory=list),
+ accessor=create_attr_accessor(attr_name="_g", is_required=True),
+ ),
+ OutputField(
+ id="_b",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ModelPrivateAttr(),
+ accessor=create_attr_accessor(attr_name="_b", is_required=True),
+ ),
+ ),
+ overriden_types=frozenset({"_b", "_c", "_d", "_e", "_f", "a", "_g"}),
+ ),
+ )
+
+
+def test_computed_fields():
+ class MyModel(BaseModel):
+ a: str
+ _b: int
+
+ @computed_field
+ @property
+ def simple(self) -> int:
+ return 1
+
+ @computed_field
+ def simple_no_prop(self) -> int:
+ return 1
+
+ @computed_field
+ @cached_property
+ def simple_cached_prop(self) -> int:
+ return 1
+
+ @computed_field
+ @property
+ def _private(self) -> int:
+ return 1
+
+ @computed_field(return_type=str)
+ @property
+ def override_return_type(self) -> int:
+ return 1
+
+ @computed_field(return_type=str)
+ @property
+ def no_type_return_type(self):
+ return 1
+
+ assert get_pydantic_shape(MyModel) == Shape(
+ input=InputShape(
+ fields=(InputField(id="a", type=str, default=NoDefault(), metadata={}, original=ANY, is_required=True),),
+ overriden_types=frozenset({"a"}),
+ params=(Param(field_id="a", name="a", kind=ParamKind.KW_ONLY),),
+ kwargs=ParamKwargs(type=Any),
+ constructor=MyModel,
+ ),
+ output=OutputShape(
+ fields=(
+ OutputField(
+ id="a",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="a", is_required=True),
+ ),
+ OutputField(
+ id="simple",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="simple", is_required=True),
+ ),
+ OutputField(
+ id="simple_no_prop",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="simple_no_prop", is_required=True),
+ ),
+ OutputField(
+ id="simple_cached_prop",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="simple_cached_prop", is_required=True),
+ ),
+ OutputField(
+ id="_private",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="_private", is_required=True),
+ ),
+ OutputField(
+ id="override_return_type",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="override_return_type", is_required=True),
+ ),
+ OutputField(
+ id="no_type_return_type",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="no_type_return_type", is_required=True),
+ ),
+ OutputField(
+ id="_b",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ModelPrivateAttr(default=PydanticUndefined),
+ accessor=create_attr_accessor(attr_name="_b", is_required=True),
+ ),
+ ),
+ overriden_types=frozenset(
+ {
+ "_b",
+ "_private",
+ "a",
+ "no_type_return_type",
+ "simple",
+ "simple_cached_prop",
+ "simple_no_prop",
+ "override_return_type",
+ },
+ ),
+ ),
+ )
+
+
+def test_order():
+ class MyModel(BaseModel):
+ f1: str
+
+ @computed_field
+ @property
+ def f2(self) -> str:
+ return ""
+
+ _f3: str
+
+ f4: str
+
+ @computed_field
+ @property
+ def f5(self) -> str:
+ return ""
+
+ _f6: str
+
+ assert get_pydantic_shape(MyModel) == Shape(
+ input=InputShape(
+ fields=(
+ InputField(
+ id="f1",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ InputField(
+ id="f4",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ ),
+ overriden_types=frozenset({"f1", "f4"}),
+ params=(
+ Param(field_id="f1", name="f1", kind=ParamKind.KW_ONLY),
+ Param(field_id="f4", name="f4", kind=ParamKind.KW_ONLY),
+ ),
+ kwargs=ParamKwargs(type=Any),
+ constructor=MyModel,
+ ),
+ output=OutputShape(
+ fields=(
+ OutputField(
+ id="f1",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f1", is_required=True),
+ ),
+ OutputField(
+ id="f4",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f4", is_required=True),
+ ),
+ OutputField(
+ id="f2",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f2", is_required=True),
+ ),
+ OutputField(
+ id="f5",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f5", is_required=True),
+ ),
+ OutputField(
+ id="_f3",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ModelPrivateAttr(),
+ accessor=create_attr_accessor(attr_name="_f3", is_required=True),
+ ),
+ OutputField(
+ id="_f6",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ModelPrivateAttr(),
+ accessor=create_attr_accessor(attr_name="_f6", is_required=True),
+ ),
+ ),
+ overriden_types=frozenset({"_f3", "_f6", "f1", "f2", "f4", "f5"}),
+ ),
+ )
+
+
+@pytest.mark.parametrize(
+ ["extra", "param_kwargs"],
+ [
+ ("allow", ParamKwargs(Any)),
+ ("ignore", ParamKwargs(Any)),
+ ("forbid", None),
+ ],
+)
+def test_kwargs(extra, param_kwargs):
+ class MyModel(BaseModel):
+ f1: str
+
+ model_config = ConfigDict(extra=extra)
+
+ assert get_pydantic_shape(MyModel) == Shape(
+ input=InputShape(
+ fields=(
+ InputField(
+ id="f1",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ ),
+ overriden_types=frozenset({"f1"}),
+ params=(Param(field_id="f1", name="f1", kind=ParamKind.KW_ONLY),),
+ kwargs=param_kwargs,
+ constructor=MyModel,
+ ),
+ output=OutputShape(
+ fields=(
+ OutputField(
+ id="f1",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f1", is_required=True),
+ ),
+ ),
+ overriden_types=frozenset({"f1"}),
+ ),
+ )
+
+
+def test_allowed_custom_init():
+ class MyModel(BaseModel):
+ f1: str
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ assert get_pydantic_shape(MyModel) == Shape(
+ input=InputShape(
+ fields=(InputField(id="f1", type=str, default=NoDefault(), metadata={}, original=ANY, is_required=True),),
+ overriden_types=frozenset({"f1"}),
+ params=(Param(field_id="f1", name="f1", kind=ParamKind.KW_ONLY),),
+ kwargs=ParamKwargs(type=Any),
+ constructor=MyModel,
+ ),
+ output=OutputShape(
+ fields=(
+ OutputField(
+ id="f1",
+ type=str,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f1", is_required=True),
+ ),
+ ),
+ overriden_types=frozenset({"f1"}),
+ ),
+ )
+
+
+def test_forbidden_custom_init():
+ class MyModel(BaseModel):
+ f1: str
+
+ def __init__(self, f1: int):
+ super().__init__(f1=str(f1))
+
+ raises_exc(
+ ClarifiedIntrospectionError(
+ "Pydantic model `__init__` must takes only self and one variable keyword parameter",
+ ),
+ lambda: get_pydantic_shape(MyModel),
+ )
+
+
+def test_forbidden_custom_init_with_extra_arg():
+ class MyModel(BaseModel):
+ f1: str
+
+ def __init__(self, f1: int, **kwargs):
+ super().__init__(f1=str(f1), **kwargs)
+
+ raises_exc(
+ ClarifiedIntrospectionError(
+ "Pydantic model `__init__` must takes only self and one variable keyword parameter",
+ ),
+ lambda: get_pydantic_shape(MyModel),
+ )
+
+
+@requires(HAS_ANNOTATED)
+def test_annotated():
+ from typing import Annotated
+
+ class MyModel(BaseModel):
+ f1: Annotated[str, "meta"]
+
+ @computed_field
+ @property
+ def f2(self) -> Annotated[str, "meta"]:
+ return ""
+
+ @computed_field(return_type=Annotated[str, "meta"])
+ @property
+ def f3(self) -> str:
+ return ""
+
+ _f4: Annotated[str, "meta"]
+
+ assert get_pydantic_shape(MyModel) == Shape(
+ input=InputShape(
+ fields=(
+ InputField(
+ id="f1",
+ type=Annotated[str, "meta"],
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ ),
+ overriden_types=frozenset({"f1"}),
+ params=(Param(field_id="f1", name="f1", kind=ParamKind.KW_ONLY),),
+ kwargs=ParamKwargs(type=Any),
+ constructor=MyModel,
+ ),
+ output=OutputShape(
+ fields=(
+ OutputField(
+ id="f1",
+ type=Annotated[str, "meta"],
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f1", is_required=True),
+ ),
+ OutputField(
+ id="f2",
+ type=Annotated[str, "meta"],
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f2", is_required=True),
+ ),
+ OutputField(
+ id="f3",
+ type=Annotated[str, "meta"],
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f3", is_required=True),
+ ),
+ OutputField(
+ id="_f4",
+ type=Annotated[str, "meta"],
+ default=NoDefault(),
+ metadata={},
+ original=ModelPrivateAttr(),
+ accessor=create_attr_accessor(attr_name="_f4", is_required=True),
+ ),
+ ),
+ overriden_types=frozenset({"f3", "_f4", "f1", "f2"}),
+ ),
+ )
+
+
+@requires(HAS_ANNOTATED)
+def test_field_constraints():
+ from typing import Annotated
+
+ class MyModel(BaseModel):
+ f1: int = Field(gt=1, ge=10)
+
+ assert get_pydantic_shape(MyModel) == Shape(
+ input=InputShape(
+ fields=(
+ InputField(
+ id="f1",
+ type=Annotated[int, Gt(gt=1), Ge(ge=10)],
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ ),
+ overriden_types=frozenset({"f1"}),
+ params=(Param(field_id="f1", name="f1", kind=ParamKind.KW_ONLY),),
+ kwargs=ParamKwargs(type=Any),
+ constructor=MyModel,
+ ),
+ output=OutputShape(
+ fields=(
+ OutputField(
+ id="f1",
+ type=Annotated[int, Gt(gt=1), Ge(ge=10)],
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f1", is_required=True),
+ ),
+ ),
+ overriden_types=frozenset({"f1"}),
+ ),
+ )
+
+
+@parametrize_bool("populate_by_name")
+def test_simple_aliases(populate_by_name):
+ class MyModel(BaseModel):
+ f1: int
+ f2: int = Field(alias="a2")
+ f3: int = Field(validation_alias="a3")
+ f4: int = Field(serialization_alias="a4")
+
+ model_config = ConfigDict(populate_by_name=populate_by_name)
+
+ assert get_pydantic_shape(MyModel) == Shape(
+ input=InputShape(
+ fields=(
+ InputField(
+ id="f1",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ InputField(
+ id="f2",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ InputField(
+ id="f3",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ InputField(
+ id="f4",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ ),
+ overriden_types=frozenset({"f3", "f2", "f4", "f1"}),
+ params=(
+ Param(field_id="f1", name="f1", kind=ParamKind.KW_ONLY),
+ Param(field_id="f2", name="f2" if populate_by_name else "a2", kind=ParamKind.KW_ONLY),
+ Param(field_id="f3", name="f3" if populate_by_name else "a3", kind=ParamKind.KW_ONLY),
+ Param(field_id="f4", name="f4", kind=ParamKind.KW_ONLY),
+ ),
+ kwargs=ParamKwargs(type=Any),
+ constructor=MyModel,
+ ),
+ output=OutputShape(
+ fields=(
+ OutputField(
+ id="f1",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f1", is_required=True),
+ ),
+ OutputField(
+ id="f2",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f2", is_required=True),
+ ),
+ OutputField(
+ id="f3",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f3", is_required=True),
+ ),
+ OutputField(
+ id="f4",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f4", is_required=True),
+ ),
+ ),
+ overriden_types=frozenset({"f3", "f2", "f4", "f1"}),
+ ),
+ )
+
+
+def test_alias_choices():
+ class MyModel(BaseModel):
+ f1: int
+ f2: int = Field(validation_alias=AliasChoices("a2", "a3"))
+ f3: int = Field(validation_alias=AliasChoices("a3", "a2"))
+
+ assert get_pydantic_shape(MyModel) == Shape(
+ input=InputShape(
+ fields=(
+ InputField(id="f1", type=int, default=NoDefault(), metadata={}, original=ANY, is_required=True),
+ InputField(id="f2", type=int, default=NoDefault(), metadata={}, original=ANY, is_required=True),
+ InputField(id="f3", type=int, default=NoDefault(), metadata={}, original=ANY, is_required=True),
+ ),
+ overriden_types=frozenset({"f1", "f3", "f2"}),
+ params=(
+ Param(field_id="f1", name="f1", kind=ParamKind.KW_ONLY),
+ Param(field_id="f2", name="a2", kind=ParamKind.KW_ONLY),
+ Param(field_id="f3", name="a3", kind=ParamKind.KW_ONLY),
+ ),
+ kwargs=ParamKwargs(type=Any),
+ constructor=MyModel,
+ ),
+ output=OutputShape(
+ fields=(
+ OutputField(
+ id="f1",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f1", is_required=True),
+ ),
+ OutputField(
+ id="f2",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f2", is_required=True),
+ ),
+ OutputField(
+ id="f3",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f3", is_required=True),
+ ),
+ ),
+ overriden_types=frozenset({"f1", "f3", "f2"}),
+ ),
+ )
+
+
+def test_alias_choices_with_alias_path():
+ class MyModel(BaseModel):
+ f1: int
+ f2: int = Field(validation_alias=AliasChoices(AliasPath("b", 0), "a2"))
+
+ assert get_pydantic_shape(MyModel) == Shape(
+ input=InputShape(
+ fields=(
+ InputField(id="f1", type=int, default=NoDefault(), metadata={}, original=ANY, is_required=True),
+ InputField(id="f2", type=int, default=NoDefault(), metadata={}, original=ANY, is_required=True),
+ ),
+ overriden_types=frozenset({"f1", "f2"}),
+ params=(
+ Param(field_id="f1", name="f1", kind=ParamKind.KW_ONLY),
+ Param(field_id="f2", name="a2", kind=ParamKind.KW_ONLY),
+ ),
+ kwargs=ParamKwargs(type=Any),
+ constructor=MyModel,
+ ),
+ output=OutputShape(
+ fields=(
+ OutputField(
+ id="f1",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f1", is_required=True),
+ ),
+ OutputField(
+ id="f2",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f2", is_required=True),
+ ),
+ ),
+ overriden_types=frozenset({"f1", "f2"}),
+ ),
+ )
+
+
+def test_no_parameter_name_for_field():
+ class MyModel(BaseModel):
+ f1: int
+ f2: int = Field(validation_alias=AliasPath("b", 0))
+
+ raises_exc(
+ ClarifiedIntrospectionError(
+ "Can not fetch parameter name for field 'f2'."
+ " This means that field has only AliasPath aliases or non-python-identifier aliases"
+ " and populate_by_name is disabled",
+ ),
+ lambda: get_pydantic_shape(MyModel),
+ )
+
+
+def test_alias_generator_is_resolved_by_pydantic():
+ class MyModel(BaseModel):
+ f1: int
+ f2: int = Field(alias="a2", validation_alias="va2")
+ f3: int = Field(alias="a3", serialization_alias="sa3")
+
+ model_config = ConfigDict(alias_generator=lambda x: f"{x}_gen")
+
+ assert get_pydantic_shape(MyModel) == Shape(
+ input=InputShape(
+ fields=(
+ InputField(
+ id="f1",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ InputField(
+ id="f2",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ InputField(
+ id="f3",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ is_required=True,
+ ),
+ ),
+ overriden_types=frozenset({"f3", "f1", "f2"}),
+ params=(
+ Param(field_id="f1", name="f1_gen", kind=ParamKind.KW_ONLY),
+ Param(field_id="f2", name="va2", kind=ParamKind.KW_ONLY),
+ Param(field_id="f3", name="a3", kind=ParamKind.KW_ONLY),
+ ),
+ kwargs=ParamKwargs(type=Any),
+ constructor=MyModel,
+ ),
+ output=OutputShape(
+ fields=(
+ OutputField(
+ id="f1",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f1", is_required=True),
+ ),
+ OutputField(
+ id="f2",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f2", is_required=True),
+ ),
+ OutputField(
+ id="f3",
+ type=int,
+ default=NoDefault(),
+ metadata={},
+ original=ANY,
+ accessor=create_attr_accessor(attr_name="f3", is_required=True),
+ ),
+ ),
+ overriden_types=frozenset({"f3", "f1", "f2"}),
+ ),
+ )
diff --git a/tests/unit/morphing/generic_provider/test_literal_provider.py b/tests/unit/morphing/generic_provider/test_literal_provider.py
index c05a1b4f..3c132577 100644
--- a/tests/unit/morphing/generic_provider/test_literal_provider.py
+++ b/tests/unit/morphing/generic_provider/test_literal_provider.py
@@ -3,26 +3,14 @@
from typing import Literal
from uuid import uuid4
-import pytest
-from tests_helpers import TestRetort, raises_exc
+from tests_helpers import raises_exc
-from adaptix._internal.morphing.enum_provider import EnumExactValueProvider
-from adaptix._internal.morphing.generic_provider import LiteralProvider, UnionProvider
+from adaptix import Retort
from adaptix._internal.morphing.load_error import BadVariantLoadError
-@pytest.fixture()
-def retort():
- return TestRetort(
- recipe=[
- LiteralProvider(),
- EnumExactValueProvider(),
- UnionProvider(),
- ],
- )
-
-
-def test_loader_base(retort, strict_coercion, debug_trail):
+def test_loader_base(strict_coercion, debug_trail):
+ retort = Retort()
loader = retort.replace(
strict_coercion=strict_coercion,
debug_trail=debug_trail,
@@ -48,12 +36,14 @@ def _is_exact_one(arg):
return type(arg) is int and arg == 1 # noqa: E721
-def test_strict_coercion(retort, debug_trail):
+def test_strict_coercion(debug_trail):
# Literal definition could have very strange behavior
# due to type cache and 0 == False, 1 == True,
# so Literal[0, 1] sometimes returns Literal[False, True]
# and vice versa.
# We add a random string at the end to suppress caching
+
+ retort = Retort()
rnd_val1 = uuid4().hex
literal_loader = retort.replace(
strict_coercion=True,
@@ -95,7 +85,9 @@ def test_strict_coercion(retort, debug_trail):
)
-def test_loader_with_enums(retort, strict_coercion, debug_trail):
+def test_loader_with_enums(strict_coercion, debug_trail):
+ retort = Retort()
+
class Enum1(Enum):
CASE1 = 1
CASE2 = 2
@@ -132,7 +124,9 @@ class Enum2(Enum):
)
-def test_dumper_with_enums(retort, strict_coercion, debug_trail):
+def test_dumper_with_enums(strict_coercion, debug_trail):
+ retort = Retort()
+
class Enum1(Enum):
CASE1 = 1
CASE2 = 2
diff --git a/tests/unit/morphing/generic_provider/test_union_provider.py b/tests/unit/morphing/generic_provider/test_union_provider.py
index 286fac14..5ada26cb 100644
--- a/tests/unit/morphing/generic_provider/test_union_provider.py
+++ b/tests/unit/morphing/generic_provider/test_union_provider.py
@@ -1,12 +1,12 @@
from dataclasses import dataclass
+from decimal import Decimal
from typing import Callable, List, Literal, Optional, Union
import pytest
-from tests_helpers import TestRetort, raises_exc, with_cause, with_notes
+from tests_helpers import raises_exc, with_cause, with_notes
-from adaptix import CannotProvide, DebugTrail, NoSuitableProvider, Retort, dumper, loader
+from adaptix import CannotProvide, DebugTrail, NoSuitableProvider, Retort, loader
from adaptix._internal.compat import CompatExceptionGroup
-from adaptix._internal.morphing.generic_provider import LiteralProvider, UnionProvider
from adaptix._internal.morphing.load_error import BadVariantLoadError, LoadError, TypeLoadError, UnionLoadError
@@ -16,39 +16,8 @@ class Book:
author: Union[str, List[str]]
-def make_loader(tp: type):
- def tp_loader(data):
- if isinstance(data, tp):
- return data
- raise TypeLoadError(tp, data)
-
- return loader(tp, tp_loader)
-
-
-def make_dumper(tp: type):
- def tp_dumper(data):
- if isinstance(data, tp):
- return data
- raise TypeError(type(data))
-
- return dumper(tp, tp_dumper)
-
-
-@pytest.fixture()
-def retort():
- return TestRetort(
- recipe=[
- UnionProvider(),
- make_loader(str),
- make_loader(int),
- make_dumper(str),
- make_dumper(int),
- make_dumper(type(None)),
- ],
- )
-
-
-def test_loading(retort, strict_coercion, debug_trail):
+def test_loading(strict_coercion, debug_trail):
+ retort = Retort()
loader_ = retort.replace(
strict_coercion=strict_coercion,
debug_trail=debug_trail,
@@ -59,6 +28,9 @@ def test_loading(retort, strict_coercion, debug_trail):
assert loader_(1) == 1
assert loader_("a") == "a"
+ if not strict_coercion:
+ return
+
if debug_trail == DebugTrail.DISABLE:
raises_exc(
LoadError(),
@@ -89,7 +61,8 @@ def bad_int_loader(data):
raise TypeError # must raise LoadError instance (TypeLoadError)
-def test_loading_unexpected_error(retort, strict_coercion, debug_trail):
+def test_loading_unexpected_error(strict_coercion, debug_trail):
+ retort = Retort()
loader_ = retort.replace(
strict_coercion=strict_coercion,
debug_trail=debug_trail,
@@ -120,7 +93,8 @@ def test_loading_unexpected_error(retort, strict_coercion, debug_trail):
)
-def test_dumping(retort, debug_trail):
+def test_dumping(debug_trail):
+ retort = Retort()
dumper_ = retort.replace(
debug_trail=debug_trail,
).get_dumper(
@@ -130,13 +104,9 @@ def test_dumping(retort, debug_trail):
assert dumper_(1) == 1
assert dumper_("a") == "a"
- raises_exc(
- KeyError(list),
- lambda: dumper_([]),
- )
-
-def test_dumping_of_none(retort, debug_trail):
+def test_dumping_of_none(debug_trail):
+ retort = Retort()
dumper_ = retort.replace(
debug_trail=debug_trail,
).get_dumper(
@@ -147,13 +117,8 @@ def test_dumping_of_none(retort, debug_trail):
assert dumper_("a") == "a"
assert dumper_(None) is None
- raises_exc(
- KeyError(list),
- lambda: dumper_([]),
- )
-
-def test_dumping_subclass(retort, debug_trail):
+def test_dumping_subclass(debug_trail):
@dataclass
class Parent:
foo: int
@@ -178,7 +143,8 @@ class Child(Parent):
)
-def test_optional_dumping(retort, debug_trail):
+def test_optional_dumping(debug_trail):
+ retort = Retort()
opt_dumper = retort.replace(
debug_trail=debug_trail,
).get_dumper(
@@ -188,21 +154,20 @@ def test_optional_dumping(retort, debug_trail):
assert opt_dumper("a") == "a"
assert opt_dumper(None) is None
- raises_exc(
- TypeError(list),
- lambda: opt_dumper([]),
- )
-
-def test_bad_optional_dumping(retort, debug_trail):
+def test_bad_optional_dumping(debug_trail):
+ retort = Retort()
raises_exc(
with_cause(
- NoSuitableProvider(
- f"Cannot produce dumper for type {Union[int, Callable[[int], str]]}",
+ with_notes(
+ NoSuitableProvider(
+ f"Cannot produce dumper for type {Union[int, Callable[[int], str]]}",
+ ),
+ "Note: The attached exception above contains verbose description of the problem",
),
with_notes(
CannotProvide(
- message=f"All cases of union must be class, but found {[Callable[[int], str]]}",
+ message=f"All cases of union must be class or Literal, but found {[Callable[[int], str]]}",
is_demonstrative=True,
is_terminal=True,
),
@@ -220,14 +185,7 @@ def test_bad_optional_dumping(retort, debug_trail):
def test_literal(strict_coercion, debug_trail):
- retort = TestRetort(
- recipe=[
- LiteralProvider(),
- UnionProvider(),
- make_loader(type(None)),
- make_dumper(type(None)),
- ],
- )
+ retort = Retort()
loader_ = retort.replace(
strict_coercion=strict_coercion,
@@ -262,3 +220,38 @@ def test_literal(strict_coercion, debug_trail):
assert dumper_("a") == "a"
assert dumper_(None) is None
assert dumper_("b") == "b"
+
+
+@pytest.mark.parametrize(
+ ["other_type", "value", "expected", "wrong_value"],
+ [
+ (
+ Decimal, Decimal(200.5), "200.5", [1, 2, 3],
+ ),
+ (
+ Union[str, Decimal], "some string", "some string", [1, 2, 3],
+ ),
+ ],
+)
+def test_dump_literal_in_union(
+ strict_coercion,
+ debug_trail,
+ other_type,
+ value,
+ expected,
+ wrong_value,
+):
+ retort = Retort()
+
+ dumper_ = retort.replace(
+ debug_trail=debug_trail,
+ ).get_dumper(
+ Union[Literal[200, 300], other_type],
+ )
+
+ assert dumper_(200) == 200
+ assert dumper_(300) == 300
+ assert dumper_(value) == expected
+
+ with pytest.raises(KeyError):
+ dumper_(wrong_value)
diff --git a/tests/unit/morphing/model/shape_provider/data_gen_models_312.py b/tests/unit/morphing/model/shape_provider/data_gen_models_312.py
deleted file mode 100644
index 2add0ab9..00000000
--- a/tests/unit/morphing/model/shape_provider/data_gen_models_312.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from typing import Tuple, Unpack
-
-
-@model_spec.decorator
-class WithTVField[_T](*model_spec.bases):
- a: int
- b: _T
-
-
-@model_spec.decorator
-class WithTVTupleBegin[*ShapeT, T](*model_spec.bases):
- a: Tuple[Unpack[ShapeT]]
- b: T
-
-
-@model_spec.decorator
-class WithTVTupleEnd[T, *ShapeT](*model_spec.bases):
- a: T
- b: Tuple[Unpack[ShapeT]]
-
-
-@model_spec.decorator
-class WithTVTupleMiddle[T1, *ShapeT, T2](*model_spec.bases):
- a: T1
- b: Tuple[Unpack[ShapeT]]
- c: T2
diff --git a/tests/unit/morphing/model/test_dumper_provider.py b/tests/unit/morphing/model/test_dumper_provider.py
index 065f44dd..7b1d6023 100644
--- a/tests/unit/morphing/model/test_dumper_provider.py
+++ b/tests/unit/morphing/model/test_dumper_provider.py
@@ -5,9 +5,9 @@
from unittest.mock import ANY
import pytest
-from tests_helpers import DebugCtx, TestRetort, full_match, parametrize_bool, raises_exc, with_trail
+from tests_helpers import DebugCtx, full_match, parametrize_bool, raises_exc, with_trail
-from adaptix import DebugTrail, Dumper, bound
+from adaptix import DebugTrail, Dumper, Retort, bound
from adaptix._internal.common import Catchable
from adaptix._internal.compat import CompatExceptionGroup
from adaptix._internal.model_tools.definitions import (
@@ -92,7 +92,7 @@ def make_dumper_getter(
debug_ctx: DebugCtx,
) -> Callable[[], Dumper]:
def getter():
- retort = TestRetort(
+ retort = Retort(
recipe=[
ValueProvider(OutputShapeRequest, shape),
ValueProvider(OutputNameLayoutRequest, name_layout),
diff --git a/tests/unit/morphing/model/test_loader_provider.py b/tests/unit/morphing/model/test_loader_provider.py
index ba7cfe00..70aa17da 100644
--- a/tests/unit/morphing/model/test_loader_provider.py
+++ b/tests/unit/morphing/model/test_loader_provider.py
@@ -5,9 +5,9 @@
from typing import Any, Callable, Dict, Optional
import pytest
-from tests_helpers import DebugCtx, TestRetort, full_match, parametrize_bool, raises_exc, with_trail
+from tests_helpers import DebugCtx, full_match, parametrize_bool, raises_exc, with_trail
-from adaptix import DebugTrail, ExtraKwargs, Loader, bound
+from adaptix import DebugTrail, ExtraKwargs, Loader, Retort, bound
from adaptix._internal.common import VarTuple
from adaptix._internal.model_tools.definitions import (
Default,
@@ -33,7 +33,6 @@
InputNameLayout,
InputNameLayoutRequest,
)
-from adaptix._internal.morphing.model.loader_provider import ModelLoaderProvider
from adaptix._internal.morphing.request_cls import LoaderRequest
from adaptix._internal.provider.provider_template import ValueProvider
from adaptix._internal.provider.shape_provider import InputShapeRequest
@@ -115,13 +114,11 @@ def make_loader_getter(
debug_ctx: DebugCtx,
) -> Callable[[], Loader]:
def getter():
- retort = TestRetort(
+ retort = Retort(
recipe=[
ValueProvider(InputShapeRequest, shape),
ValueProvider(InputNameLayoutRequest, name_layout),
bound(int, ValueProvider(LoaderRequest, int_loader)),
- ModelLoaderProvider(),
- debug_ctx.accum,
],
)
return retort.replace(
diff --git a/tests/unit/morphing/name_layout/test_provider.py b/tests/unit/morphing/name_layout/test_provider.py
index b25591f8..20888eb5 100644
--- a/tests/unit/morphing/name_layout/test_provider.py
+++ b/tests/unit/morphing/name_layout/test_provider.py
@@ -4,7 +4,7 @@
import pytest
from dirty_equals import IsInstance
-from tests_helpers import TestRetort, raises_exc, with_cause, with_notes
+from tests_helpers import raises_exc, with_cause, with_notes
from adaptix import (
AggregateCannotProvide,
@@ -13,7 +13,7 @@
NameStyle,
NoSuitableProvider,
Provider,
- bound,
+ Retort,
name_mapping,
)
from adaptix._internal.model_tools.definitions import (
@@ -53,14 +53,6 @@
OutputNameLayout,
OutputNameLayoutRequest,
)
-from adaptix._internal.morphing.model.dumper_provider import ModelDumperProvider
-from adaptix._internal.morphing.model.loader_provider import ModelLoaderProvider
-from adaptix._internal.morphing.name_layout.component import (
- BuiltinExtraMoveAndPoliciesMaker,
- BuiltinSievesMaker,
- BuiltinStructureMaker,
-)
-from adaptix._internal.morphing.name_layout.provider import BuiltinNameLayoutProvider
from adaptix._internal.morphing.request_cls import DumperRequest, LoaderRequest
from adaptix._internal.provider.loc_stack_filtering import P
from adaptix._internal.provider.provider_template import ValueProvider
@@ -136,21 +128,11 @@ def make_layouts(
),
overriden_types=frozenset(fld.id for fld in fields),
)
- retort = TestRetort(
+ retort = Retort(
recipe=[
*providers,
- BuiltinNameLayoutProvider(
- structure_maker=BuiltinStructureMaker(),
- sieves_maker=BuiltinSievesMaker(),
- extra_move_maker=BuiltinExtraMoveAndPoliciesMaker(),
- extra_policies_maker=BuiltinExtraMoveAndPoliciesMaker(),
- ),
- bound(Any, ValueProvider(DumperRequest, stub)),
- bound(Any, ValueProvider(LoaderRequest, stub)),
ValueProvider(InputShapeRequest, input_shape),
ValueProvider(OutputShapeRequest, output_shape),
- ModelLoaderProvider(),
- ModelDumperProvider(),
],
).replace(
strict_coercion=True,
@@ -169,18 +151,16 @@ def make_layouts(
loc_stack=LocStack(loc),
shape=output_shape,
)
- inp_name_layout = retort.provide(inp_request)
- out_name_layout = retort.provide(out_request)
- retort.provide(
- LoaderRequest(
- loc_stack=LocStack(loc),
- ),
- )
- retort.provide(
- DumperRequest(
- loc_stack=LocStack(loc),
- ),
- )
+
+ cannot_provide_text = "cannot provide {}"
+ inp_name_layout = retort._facade_provide(inp_request, error_message=cannot_provide_text.format(inp_request))
+ out_name_layout = retort._facade_provide(out_request, error_message=cannot_provide_text.format(out_request))
+
+ loader_request = LoaderRequest(loc_stack=LocStack(loc))
+ retort._facade_provide(loader_request, error_message=cannot_provide_text.format(loader_request))
+
+ dumper_request = DumperRequest(loc_stack=LocStack(loc))
+ retort._facade_provide(dumper_request, error_message=cannot_provide_text.format(dumper_request))
return Layouts(inp_name_layout, out_name_layout)
@@ -902,7 +882,10 @@ def test_extra_at_list():
raises_exc(
with_cause(
- NoSuitableProvider(f"Cannot produce loader for type {Stub}"),
+ with_notes(
+ NoSuitableProvider(f"Cannot produce loader for type {Stub}"),
+ "Note: The attached exception above contains verbose description of the problem",
+ ),
with_notes(
AggregateCannotProvide(
"Cannot create loader for model. Cannot fetch InputNameLayout",
@@ -940,7 +923,10 @@ def test_extra_at_list():
def test_required_field_skip():
raises_exc(
with_cause(
- NoSuitableProvider(f"Cannot produce loader for type {Stub}"),
+ with_notes(
+ NoSuitableProvider(f"Cannot produce loader for type {Stub}"),
+ "Note: The attached exception above contains verbose description of the problem",
+ ),
with_notes(
AggregateCannotProvide(
"Cannot create loader for model. Cannot fetch InputNameLayout",
@@ -974,7 +960,10 @@ def test_required_field_skip():
def test_inconsistent_path_elements():
raises_exc(
with_cause(
- NoSuitableProvider(f"Cannot produce loader for type {Stub}"),
+ with_notes(
+ NoSuitableProvider(f"Cannot produce loader for type {Stub}"),
+ "Note: The attached exception above contains verbose description of the problem",
+ ),
with_notes(
AggregateCannotProvide(
"Cannot create loader for model. Cannot fetch InputNameLayout",
@@ -1011,7 +1000,10 @@ def test_inconsistent_path_elements():
def test_duplicated_path():
raises_exc(
with_cause(
- NoSuitableProvider(f"Cannot produce loader for type {Stub}"),
+ with_notes(
+ NoSuitableProvider(f"Cannot produce loader for type {Stub}"),
+ "Note: The attached exception above contains verbose description of the problem",
+ ),
with_notes(
AggregateCannotProvide(
"Cannot create loader for model. Cannot fetch InputNameLayout",
@@ -1048,7 +1040,10 @@ def test_duplicated_path():
def test_optional_field_at_list():
raises_exc(
with_cause(
- NoSuitableProvider(f"Cannot produce loader for type {Stub}"),
+ with_notes(
+ NoSuitableProvider(f"Cannot produce loader for type {Stub}"),
+ "Note: The attached exception above contains verbose description of the problem",
+ ),
with_notes(
AggregateCannotProvide(
"Cannot create loader for model. Cannot fetch InputNameLayout",
@@ -1085,7 +1080,10 @@ def test_optional_field_at_list():
def test_one_path_is_prefix_of_another():
raises_exc(
with_cause(
- NoSuitableProvider(f"Cannot produce loader for type {Stub}"),
+ with_notes(
+ NoSuitableProvider(f"Cannot produce loader for type {Stub}"),
+ "Note: The attached exception above contains verbose description of the problem",
+ ),
with_notes(
AggregateCannotProvide(
"Cannot create loader for model. Cannot fetch InputNameLayout",
diff --git a/tests/unit/morphing/test_concrete_provider.py b/tests/unit/morphing/test_concrete_provider.py
index c960f9de..17d195e9 100644
--- a/tests/unit/morphing/test_concrete_provider.py
+++ b/tests/unit/morphing/test_concrete_provider.py
@@ -4,6 +4,7 @@
from datetime import date, datetime, time, timedelta, timezone
from decimal import Decimal
from fractions import Fraction
+from io import BytesIO
from typing import Union
import pytest
@@ -162,15 +163,32 @@ def test_none_provider(strict_coercion, debug_trail):
assert dumper(None) is None
-def test_bytes_provider(strict_coercion, debug_trail):
+@pytest.mark.parametrize(
+ ["provider_type", "get_string", "get_bytes"],
+ [
+ (bytes, lambda x: x.decode(), lambda x: x.encode()),
+ (bytearray, lambda x: x.decode(), lambda x: bytearray(x.encode())),
+ (BytesIO, lambda x: x.getvalue().decode(), lambda x: BytesIO(x.encode())),
+ (typing.IO[bytes], lambda x: x.read().decode(), lambda x: BytesIO(x.encode())),
+ ],
+)
+def test_bytes_like_provider(
+ strict_coercion,
+ debug_trail,
+ provider_type,
+ get_string,
+ get_bytes,
+):
retort = Retort(
strict_coercion=strict_coercion,
debug_trail=debug_trail,
)
- loader = retort.get_loader(bytes)
+ loader = retort.get_loader(provider_type)
+ string = "abcd"
+ b64_string = b"YWJjZA=="
- assert loader("YWJjZA==") == b"abcd"
+ assert get_string(loader(b64_string.decode())) == string
raises_exc(
ValueLoadError("Bad base64 string", "Hello, world"),
@@ -180,7 +198,7 @@ def test_bytes_provider(strict_coercion, debug_trail):
raises_exc(
ValueLoadError(
msg="Invalid base64-encoded string: number of data characters (5)"
- " cannot be 1 more than a multiple of 4",
+ " cannot be 1 more than a multiple of 4",
input_value="aaaaa=",
),
lambda: loader("aaaaa="),
@@ -196,46 +214,8 @@ def test_bytes_provider(strict_coercion, debug_trail):
lambda: loader(108),
)
- dumper = retort.get_dumper(bytes)
- assert dumper(b"abcd") == "YWJjZA=="
-
-
-def test_bytearray_provider(strict_coercion, debug_trail):
- retort = Retort(
- strict_coercion=strict_coercion,
- debug_trail=debug_trail,
- )
-
- loader = retort.get_loader(bytearray)
-
- assert loader("YWJjZA==") == bytearray(b"abcd")
-
- raises_exc(
- ValueLoadError("Bad base64 string", "Hello, world"),
- lambda: loader("Hello, world"),
- )
-
- raises_exc(
- ValueLoadError(
- msg="Invalid base64-encoded string: number of data characters (5)"
- " cannot be 1 more than a multiple of 4",
- input_value="aaaaa=",
- ),
- lambda: loader("aaaaa="),
- )
-
- raises_exc(
- ValueLoadError("Incorrect padding", "YWJjZA"),
- lambda: loader("YWJjZA"),
- )
-
- raises_exc(
- TypeLoadError(str, 108),
- lambda: loader(108),
- )
-
- dumper = retort.get_dumper(bytearray)
- assert dumper(bytearray(b"abcd")) == "YWJjZA=="
+ dumper = retort.get_dumper(provider_type)
+ assert dumper(get_bytes(string)) == b64_string.decode()
def test_regex_provider(strict_coercion, debug_trail):
diff --git a/tests/unit/morphing/test_constant_length_tuple_provider.py b/tests/unit/morphing/test_constant_length_tuple_provider.py
index 4bb1ed18..1746f77c 100644
--- a/tests/unit/morphing/test_constant_length_tuple_provider.py
+++ b/tests/unit/morphing/test_constant_length_tuple_provider.py
@@ -4,9 +4,9 @@
from typing import Mapping, Tuple
import pytest
-from tests_helpers import TestRetort, raises_exc, requires, with_trail
+from tests_helpers import raises_exc, requires, with_trail
-from adaptix import DebugTrail, NoSuitableProvider, dumper, loader
+from adaptix import AdornedRetort, DebugTrail, NoSuitableProvider, dumper, loader
from adaptix._internal.compat import CompatExceptionGroup
from adaptix._internal.feature_requirement import HAS_UNPACK
from adaptix._internal.morphing.concrete_provider import INT_LOADER_PROVIDER, STR_LOADER_PROVIDER
@@ -29,7 +29,7 @@ def int_dumper(data):
@pytest.fixture()
def retort():
- return TestRetort(
+ return AdornedRetort(
recipe=[
ConstantLengthTupleProvider(),
STR_LOADER_PROVIDER,
diff --git a/tests/unit/morphing/test_dict_provider.py b/tests/unit/morphing/test_dict_provider.py
index 2d6853b5..049d7e80 100644
--- a/tests/unit/morphing/test_dict_provider.py
+++ b/tests/unit/morphing/test_dict_provider.py
@@ -3,13 +3,10 @@
from typing import DefaultDict, Dict, List
import pytest
-from tests_helpers import TestRetort, raises_exc, with_trail
+from tests_helpers import raises_exc, with_trail
-from adaptix import DebugTrail, default_dict, dumper, loader
+from adaptix import DebugTrail, Retort, default_dict, dumper, loader
from adaptix._internal.compat import CompatExceptionGroup
-from adaptix._internal.morphing.concrete_provider import STR_LOADER_PROVIDER
-from adaptix._internal.morphing.dict_provider import DefaultDictProvider, DictProvider
-from adaptix._internal.morphing.iterable_provider import IterableProvider
from adaptix._internal.morphing.load_error import AggregateLoadError
from adaptix._internal.struct_trail import ItemKey
from adaptix.load_error import TypeLoadError
@@ -23,13 +20,9 @@ def string_dumper(data):
@pytest.fixture()
def retort():
- return TestRetort(
+ return Retort(
recipe=[
- DictProvider(),
- DefaultDictProvider(),
- STR_LOADER_PROVIDER,
dumper(str, string_dumper),
- IterableProvider(),
],
)
@@ -161,16 +154,7 @@ def test_dumping(retort, debug_trail):
assert dumper_({"a": "b", "c": "d"}) == {"a": "b", "c": "d"}
- if debug_trail == DebugTrail.DISABLE:
- raises_exc(
- TypeError(),
- lambda: dumper_({"a": "b", "c": 0}),
- )
- raises_exc(
- TypeError(),
- lambda: dumper_({"a": "b", 0: "d"}),
- )
- elif debug_trail == DebugTrail.FIRST:
+ if debug_trail == DebugTrail.FIRST:
raises_exc(
with_trail(TypeError(), ["c"]),
lambda: dumper_({"a": "b", "c": 0}),
diff --git a/tests/unit/morphing/test_enum_provider.py b/tests/unit/morphing/test_enum_provider.py
index 04c42379..e9e96dd4 100644
--- a/tests/unit/morphing/test_enum_provider.py
+++ b/tests/unit/morphing/test_enum_provider.py
@@ -2,7 +2,7 @@
from typing import Iterable, Mapping, Union
import pytest
-from tests_helpers import TestRetort, parametrize_bool, raises_exc, with_cause, with_notes
+from tests_helpers import parametrize_bool, raises_exc, with_cause, with_notes
from adaptix import (
CannotProvide,
@@ -13,7 +13,6 @@
enum_by_name,
enum_by_value,
flag_by_member_names,
- loader,
)
from adaptix._internal.morphing.enum_provider import EnumExactValueProvider
from adaptix._internal.morphing.load_error import (
@@ -115,12 +114,9 @@ def test_name_provider_with_mapping(strict_coercion, debug_trail, mapping_option
@pytest.mark.parametrize("enum_cls", [MyEnum, MyEnumWithMissingHook])
def test_exact_value_provider(strict_coercion, debug_trail, enum_cls):
- retort = TestRetort(
+ retort = Retort(
strict_coercion=strict_coercion,
debug_trail=debug_trail,
- recipe=[
- EnumExactValueProvider(),
- ],
)
loader = retort.get_loader(enum_cls)
@@ -148,12 +144,9 @@ def test_exact_value_provider(strict_coercion, debug_trail, enum_cls):
def test_exact_value_provider_int_enum(strict_coercion, debug_trail):
- retort = TestRetort(
+ retort = Retort(
strict_coercion=strict_coercion,
debug_trail=debug_trail,
- recipe=[
- EnumExactValueProvider(),
- ],
)
int_enum_loader = retort.get_loader(MyIntEnum)
@@ -180,12 +173,11 @@ def custom_string_dumper(value: str):
def test_value_provider(strict_coercion, debug_trail):
- retort = TestRetort(
+ retort = Retort(
strict_coercion=strict_coercion,
debug_trail=debug_trail,
recipe=[
enum_by_value(MyEnum, tp=str),
- loader(str, str),
dumper(str, custom_string_dumper),
],
)
@@ -193,18 +185,20 @@ def test_value_provider(strict_coercion, debug_trail):
enum_loader = retort.get_loader(MyEnum)
assert enum_loader("1") == MyEnum.V1
- assert enum_loader(1) == MyEnum.V1
+
+ if not strict_coercion:
+ assert enum_loader(1) == MyEnum.V1
+
+ raises_exc(
+ MsgLoadError("Bad enum value", MyEnum.V1),
+ lambda: enum_loader(MyEnum.V1),
+ )
raises_exc(
MsgLoadError("Bad enum value", "V1"),
lambda: enum_loader("V1"),
)
- raises_exc(
- MsgLoadError("Bad enum value", MyEnum.V1),
- lambda: enum_loader(MyEnum.V1),
- )
-
enum_dumper = retort.get_dumper(MyEnum)
assert enum_dumper(MyEnum.V1) == "PREFIX 1"
@@ -251,32 +245,34 @@ def test_flag_by_exact_value_loader_creation_fail(strict_coercion, debug_trail):
raises_exc(
with_cause(
- NoSuitableProvider(f"Cannot produce loader for type {FlagEnumWithSkippedBit}"),
with_notes(
- with_notes(
- CannotProvide(
- "Cannot create a loader for flag with skipped bits",
- is_terminal=True,
- is_demonstrative=True,
- ),
- f"Location: `{FlagEnumWithSkippedBit.__name__}`",
+ NoSuitableProvider(f"Cannot produce loader for type {FlagEnumWithSkippedBit}"),
+ "Note: The attached exception above contains verbose description of the problem",
+ ),
+ with_notes(
+ CannotProvide(
+ "Cannot create a loader for flag with skipped bits",
+ is_terminal=True,
+ is_demonstrative=True,
),
+ f"Location: `{FlagEnumWithSkippedBit.__name__}`",
),
),
lambda: retort.get_loader(FlagEnumWithSkippedBit),
)
raises_exc(
with_cause(
- NoSuitableProvider(f"Cannot produce loader for type {FlagEnumWithNegativeValue}"),
with_notes(
- with_notes(
- CannotProvide(
- "Cannot create a loader for flag with negative values",
- is_terminal=True,
- is_demonstrative=True,
- ),
- f"Location: `{FlagEnumWithNegativeValue.__name__}`",
+ NoSuitableProvider(f"Cannot produce loader for type {FlagEnumWithNegativeValue}"),
+ "Note: The attached exception above contains verbose description of the problem",
+ ),
+ with_notes(
+ CannotProvide(
+ "Cannot create a loader for flag with negative values",
+ is_terminal=True,
+ is_demonstrative=True,
),
+ f"Location: `{FlagEnumWithNegativeValue.__name__}`",
),
),
lambda: retort.get_loader(FlagEnumWithNegativeValue),
diff --git a/tests/unit/morphing/test_iterable_provider.py b/tests/unit/morphing/test_iterable_provider.py
index 0be2d561..fa5552a4 100644
--- a/tests/unit/morphing/test_iterable_provider.py
+++ b/tests/unit/morphing/test_iterable_provider.py
@@ -19,11 +19,10 @@
)
import pytest
-from tests_helpers import TestRetort, raises_exc, with_trail
+from tests_helpers import raises_exc, with_trail
-from adaptix import DebugTrail, NoSuitableProvider, dumper, loader
+from adaptix import AdornedRetort, DebugTrail, NoSuitableProvider, Retort, dumper, loader
from adaptix._internal.compat import CompatExceptionGroup
-from adaptix._internal.morphing.concrete_provider import STR_LOADER_PROVIDER
from adaptix._internal.morphing.iterable_provider import IterableProvider
from adaptix._internal.morphing.load_error import AggregateLoadError
from adaptix.load_error import ExcludedTypeLoadError, TypeLoadError
@@ -37,32 +36,26 @@ def string_dumper(data):
@pytest.fixture()
def retort():
- return TestRetort(
+ return Retort(
recipe=[
- IterableProvider(),
- STR_LOADER_PROVIDER,
dumper(str, string_dumper),
],
)
-def test_mapping_providing(retort, strict_coercion, debug_trail):
- retort = retort.replace(
+@pytest.mark.parametrize("mapping_type", [dict, Dict, Mapping, collections.Counter])
+def test_mapping_providing(strict_coercion, debug_trail, mapping_type):
+ retort = AdornedRetort(
+ recipe=[
+ IterableProvider(),
+ ],
+ ).replace(
strict_coercion=strict_coercion,
debug_trail=debug_trail,
)
with pytest.raises(NoSuitableProvider):
- retort.get_loader(dict)
-
- with pytest.raises(NoSuitableProvider):
- retort.get_loader(Dict)
-
- with pytest.raises(NoSuitableProvider):
- retort.get_loader(Mapping)
-
- with pytest.raises(NoSuitableProvider):
- retort.get_loader(collections.Counter)
+ retort.get_loader(mapping_type)
def test_loading(retort, strict_coercion, debug_trail):
diff --git a/tests/unit/provider/shape_provider/__init__.py b/tests/unit/provider/shape_provider/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/morphing/model/shape_provider/conftest.py b/tests/unit/provider/shape_provider/conftest.py
similarity index 100%
rename from tests/unit/morphing/model/shape_provider/conftest.py
rename to tests/unit/provider/shape_provider/conftest.py
diff --git a/tests/unit/morphing/model/shape_provider/data_gen_models.py b/tests/unit/provider/shape_provider/data_gen_models.py
similarity index 90%
rename from tests/unit/morphing/model/shape_provider/data_gen_models.py
rename to tests/unit/provider/shape_provider/data_gen_models.py
index 3a6e5153..b1710a90 100644
--- a/tests/unit/morphing/model/shape_provider/data_gen_models.py
+++ b/tests/unit/provider/shape_provider/data_gen_models.py
@@ -1,6 +1,8 @@
# mypy: disable-error-code="name-defined, misc"
from typing import Generic, Tuple, TypeVar
+from tests_helpers import ModelSpec
+
from adaptix._internal.feature_requirement import HAS_TV_TUPLE
_T = TypeVar("_T")
@@ -12,7 +14,7 @@ class WithTVField(*model_spec.bases, Generic[_T]):
b: _T
-if HAS_TV_TUPLE:
+if HAS_TV_TUPLE and model_spec.kind != ModelSpec.PYDANTIC:
from typing import TypeVarTuple, Unpack
ShapeT = TypeVarTuple("ShapeT")
diff --git a/tests/unit/provider/shape_provider/data_gen_models_312.py b/tests/unit/provider/shape_provider/data_gen_models_312.py
new file mode 100644
index 00000000..08575b74
--- /dev/null
+++ b/tests/unit/provider/shape_provider/data_gen_models_312.py
@@ -0,0 +1,29 @@
+from typing import Tuple, Unpack
+
+from tests_helpers import ModelSpec
+
+
+@model_spec.decorator
+class WithTVField[_T](*model_spec.bases):
+ a: int
+ b: _T
+
+
+if model_spec.kind != ModelSpec.PYDANTIC:
+ @model_spec.decorator
+ class WithTVTupleBegin[*ShapeT, T](*model_spec.bases):
+ a: Tuple[Unpack[ShapeT]]
+ b: T
+
+
+ @model_spec.decorator
+ class WithTVTupleEnd[T, *ShapeT](*model_spec.bases):
+ a: T
+ b: Tuple[Unpack[ShapeT]]
+
+
+ @model_spec.decorator
+ class WithTVTupleMiddle[T1, *ShapeT, T2](*model_spec.bases):
+ a: T1
+ b: Tuple[Unpack[ShapeT]]
+ c: T2
diff --git a/tests/unit/morphing/model/shape_provider/local_helpers.py b/tests/unit/provider/shape_provider/local_helpers.py
similarity index 52%
rename from tests/unit/morphing/model/shape_provider/local_helpers.py
rename to tests/unit/provider/shape_provider/local_helpers.py
index a9182045..7a68c768 100644
--- a/tests/unit/morphing/model/shape_provider/local_helpers.py
+++ b/tests/unit/provider/shape_provider/local_helpers.py
@@ -1,4 +1,4 @@
-from typing import Mapping
+from typing import Mapping, Optional
from adaptix import Retort, TypeHint
from adaptix._internal.provider.request_cls import LocStack, TypeHintLoc
@@ -7,9 +7,15 @@
OutputShapeRequest,
provide_generic_resolved_shape,
)
+from adaptix._internal.type_tools import is_pydantic_class
-def assert_fields_types(tp: TypeHint, expected: Mapping[str, TypeHint]) -> None:
+def assert_distinct_fields_types(
+ tp: TypeHint,
+ *,
+ input: Mapping[str, TypeHint], # noqa: A002
+ output: Mapping[str, TypeHint],
+) -> None:
retort = Retort()
mediator = retort._create_mediator()
@@ -18,11 +24,25 @@ def assert_fields_types(tp: TypeHint, expected: Mapping[str, TypeHint]) -> None:
InputShapeRequest(loc_stack=LocStack(TypeHintLoc(type=tp))),
)
input_field_types = {field.id: field.type for field in input_shape.fields}
- assert input_field_types == expected
+ assert input_field_types == input
output_shape = provide_generic_resolved_shape(
mediator,
OutputShapeRequest(loc_stack=LocStack(TypeHintLoc(type=tp))),
)
output_field_types = {field.id: field.type for field in output_shape.fields}
- assert output_field_types == expected
+ assert output_field_types == output
+
+
+def assert_fields_types(
+ tp: TypeHint,
+ expected: Mapping[str, TypeHint],
+ *,
+ pydantic: Optional[Mapping[str, TypeHint]] = None,
+) -> None:
+ final_expected = pydantic if pydantic is not None and is_pydantic_class(tp) else expected
+ assert_distinct_fields_types(
+ tp,
+ input=final_expected,
+ output=final_expected,
+ )
diff --git a/tests/unit/morphing/model/shape_provider/test_generic_resolving.py b/tests/unit/provider/shape_provider/test_generic_resolving.py
similarity index 86%
rename from tests/unit/morphing/model/shape_provider/test_generic_resolving.py
rename to tests/unit/provider/shape_provider/test_generic_resolving.py
index f6741d7c..92f9c120 100644
--- a/tests/unit/morphing/model/shape_provider/test_generic_resolving.py
+++ b/tests/unit/provider/shape_provider/test_generic_resolving.py
@@ -3,7 +3,7 @@
import pytest
from tests_helpers import ModelSpec, cond_list, exclude_model_spec, load_namespace_keeping_module, requires
-from tests_helpers.model_spec import only_generic_models
+from tests_helpers.model_spec import only_generic_models, with_model_spec_requirement
from adaptix import CannotProvide, Retort
from adaptix._internal.feature_requirement import (
@@ -12,8 +12,10 @@
HAS_PY_312,
HAS_SELF_TYPE,
HAS_STD_CLASSES_GENERICS,
+ HAS_SUPPORTED_PYDANTIC_PKG,
HAS_TV_TUPLE,
IS_PYPY,
+ DistributionVersionRequirement,
)
from adaptix._internal.provider.request_cls import LocStack, TypeHintLoc
from adaptix._internal.provider.shape_provider import (
@@ -22,12 +24,13 @@
provide_generic_resolved_shape,
)
-from .local_helpers import assert_fields_types
+from .local_helpers import assert_distinct_fields_types, assert_fields_types
T = TypeVar("T")
K = TypeVar("K")
V = TypeVar("V")
-
+K2 = TypeVar("K2")
+V2 = TypeVar("V2")
only_generic_models(sys.modules[__name__])
@@ -73,7 +76,8 @@ class WithGenField(*model_spec.bases, Generic[T]):
assert_fields_types(WithGenField, {"a": int, "b": tp[Any]})
assert_fields_types(WithGenField[str], {"a": int, "b": tp[str]})
- assert_fields_types(WithGenField[T], {"a": int, "b": tp[T]})
+ assert_fields_types(WithGenField[K], {"a": int, "b": tp[K]})
+ assert_fields_types(WithGenField[T], {"a": int, "b": tp[T]}, pydantic={"a": int, "b": tp[Any]})
@pytest.mark.parametrize("tp1", [List, list] if HAS_STD_CLASSES_GENERICS else [List])
@@ -93,9 +97,14 @@ class WithStdGenField(*model_spec.bases, Generic[K, V]):
WithStdGenField[str, int],
{"a": int, "b": tp1[str], "c": tp2[str, int]},
)
+ assert_fields_types(
+ WithStdGenField[K2, V2],
+ {"a": int, "b": tp1[K2], "c": tp2[K2, V2]},
+ )
assert_fields_types(
WithStdGenField[K, V],
{"a": int, "b": tp1[K], "c": tp2[K, V]},
+ pydantic={"a": int, "b": tp1[Any], "c": tp2[Any, Any]},
)
@@ -159,9 +168,14 @@ class Child(Parent[int], Generic[T]):
Child,
{"a": int, "b": str, "c": Any},
)
+ assert_fields_types(
+ Child[K],
+ {"a": int, "b": str, "c": K},
+ )
assert_fields_types(
Child[T],
{"a": int, "b": str, "c": T},
+ pydantic={"a": int, "b": str, "c": Any},
)
@@ -184,14 +198,19 @@ class Child(Parent1[int], Parent2[bool], Generic[T]):
Child[bool],
{"a": int, "b": str, "c": bool},
)
- assert_fields_types(
- Child[T],
- {"a": int, "b": str, "c": T},
- )
assert_fields_types(
Child,
{"a": int, "b": str, "c": Any},
)
+ assert_fields_types(
+ Child[K],
+ {"a": int, "b": str, "c": K},
+ )
+ assert_fields_types(
+ Child[T],
+ {"a": int, "b": str, "c": T},
+ pydantic={"a": int, "b": str, "c": Any},
+ )
T1 = TypeVar("T1")
@@ -335,17 +354,23 @@ class Child(Parent[int], Generic[T]):
Child,
{"a": bool, "b": Any},
)
- assert_fields_types(
- Child[T],
- {"a": bool, "b": T},
- )
assert_fields_types(
Child[str],
{"a": bool, "b": str},
)
+ assert_fields_types(
+ Child[K],
+ {"a": bool, "b": K},
+ )
+ assert_fields_types(
+ Child[T],
+ {"a": bool, "b": T},
+ pydantic={"a": bool, "b": Any},
+ )
@requires(HAS_SELF_TYPE)
+@with_model_spec_requirement({ModelSpec.PYDANTIC: DistributionVersionRequirement("pydantic", "2.0.3")})
def test_self_type(model_spec):
from typing import Self
@@ -360,6 +385,7 @@ class WithSelf(*model_spec.bases):
@requires(HAS_TV_TUPLE)
+@exclude_model_spec(ModelSpec.PYDANTIC)
def test_type_var_tuple_begin(model_spec, gen_models_ns):
from typing import Unpack
@@ -403,6 +429,7 @@ def test_type_var_tuple_begin(model_spec, gen_models_ns):
@requires(HAS_TV_TUPLE)
+@exclude_model_spec(ModelSpec.PYDANTIC)
def test_type_var_tuple_end(model_spec, gen_models_ns):
from typing import Unpack
@@ -446,6 +473,7 @@ def test_type_var_tuple_end(model_spec, gen_models_ns):
@requires(HAS_TV_TUPLE)
+@exclude_model_spec(ModelSpec.PYDANTIC)
def test_type_var_tuple_middle(model_spec, gen_models_ns):
from typing import Unpack
@@ -515,3 +543,25 @@ def test_type_var_tuple_middle(model_spec, gen_models_ns):
"c": int,
},
)
+
+
+@requires(HAS_SUPPORTED_PYDANTIC_PKG)
+def test_pydantic():
+ from pydantic import BaseModel, computed_field
+
+ class MyModel(BaseModel, Generic[T]):
+ a: T
+
+ @computed_field
+ @property
+ def b(self) -> T:
+ return ""
+
+ _c: T
+
+ assert_distinct_fields_types(MyModel, input={"a": Any}, output={"a": Any, "b": Any, "_c": Any})
+ assert_distinct_fields_types(MyModel[str], input={"a": str}, output={"a": str, "b": str, "_c": str})
+ assert_distinct_fields_types(MyModel[K], input={"a": K}, output={"a": K, "b": K, "_c": K})
+
+ # a limitation of pydantic implementation
+ assert_distinct_fields_types(MyModel[T], input={"a": Any}, output={"a": Any, "b": Any, "_c": Any})
diff --git a/tests/unit/provider/test_overlay_schema.py b/tests/unit/provider/test_overlay_schema.py
index ff3ad08a..17a919fa 100644
--- a/tests/unit/provider/test_overlay_schema.py
+++ b/tests/unit/provider/test_overlay_schema.py
@@ -2,9 +2,9 @@
from typing import Callable, Iterable
import pytest
-from tests_helpers import TestRetort, full_match
+from tests_helpers import full_match
-from adaptix import Chain, Mediator, Omittable, Omitted, Provider, Request, bound
+from adaptix import AdornedRetort, Chain, Mediator, Omittable, Omitted, Provider, Request, bound
from adaptix._internal.common import VarTuple
from adaptix._internal.provider.overlay_schema import Overlay, OverlayProvider, Schema, provide_schema
from adaptix._internal.provider.request_cls import LocStack, TypeHintLoc
@@ -41,13 +41,15 @@ def _provide_overlay(self, mediator: Mediator, request: SampleRequest):
def provide_overlay_schema(recipe: Iterable[Provider], provide_action: Callable[[Mediator], MySchema]) -> MySchema:
- retort = TestRetort(
+ retort = AdornedRetort(
recipe=[
*recipe,
SampleRequestProvider(provide_action),
],
)
- return retort.provide(SampleRequest())
+
+ request = SampleRequest()
+ return retort._facade_provide(request, error_message=f"cannot provide {request}")
class MyClass1:
diff --git a/tests/unit/retort/test_operating_retort.py b/tests/unit/retort/test_operating_retort.py
index d33b72cf..f64d0eac 100644
--- a/tests/unit/retort/test_operating_retort.py
+++ b/tests/unit/retort/test_operating_retort.py
@@ -20,7 +20,10 @@ class Stub:
raises_exc(
with_cause(
- NoSuitableProvider(f"Cannot produce loader for type {Stub}"),
+ with_notes(
+ NoSuitableProvider(f"Cannot produce loader for type {Stub}"),
+ "Note: The attached exception above contains verbose description of the problem",
+ ),
with_notes(
AggregateCannotProvide(
"Cannot create loader for model. Loaders for some fields cannot be created",
@@ -63,7 +66,10 @@ class Stub:
raises_exc(
with_cause(
- NoSuitableProvider(f"Cannot produce dumper for type {Stub}"),
+ with_notes(
+ NoSuitableProvider(f"Cannot produce dumper for type {Stub}"),
+ "Note: The attached exception above contains verbose description of the problem",
+ ),
with_notes(
AggregateCannotProvide(
"Cannot create dumper for model. Dumpers for some fields cannot be created",
@@ -109,10 +115,13 @@ class BookDTO:
raises_exc(
with_cause(
- NoSuitableProvider(
- f"Cannot produce converter for"
- f" {BookDTO.__module__}.{BookDTO.__qualname__}>",
+ with_notes(
+ NoSuitableProvider(
+ f"Cannot produce converter for"
+ f" {BookDTO.__module__}.{BookDTO.__qualname__}>",
+ ),
+ "Note: The attached exception above contains verbose description of the problem",
),
AggregateCannotProvide(
"Cannot create top-level coercer",
@@ -158,10 +167,13 @@ class BookDTO:
raises_exc(
with_cause(
- NoSuitableProvider(
- f"Cannot produce converter for"
- f" {BookDTO.__module__}.{BookDTO.__qualname__}>",
+ with_notes(
+ NoSuitableProvider(
+ f"Cannot produce converter for"
+ f" {BookDTO.__module__}.{BookDTO.__qualname__}>",
+ ),
+ "Note: The attached exception above contains verbose description of the problem",
),
AggregateCannotProvide(
"Cannot create top-level coercer",
@@ -210,10 +222,13 @@ class BookDTO:
raises_exc(
with_cause(
- NoSuitableProvider(
- f"Cannot produce converter for"
- f" {BookDTO.__module__}.{BookDTO.__qualname__}>",
+ with_notes(
+ NoSuitableProvider(
+ f"Cannot produce converter for"
+ f" {BookDTO.__module__}.{BookDTO.__qualname__}>",
+ ),
+ "Note: The attached exception above contains verbose description of the problem",
),
AggregateCannotProvide(
"Cannot create top-level coercer",
@@ -267,10 +282,13 @@ class BookDTO:
raises_exc(
with_cause(
- NoSuitableProvider(
- f"Cannot produce converter for"
- f" {BookDTO.__module__}.{BookDTO.__qualname__}>",
+ with_notes(
+ NoSuitableProvider(
+ f"Cannot produce converter for"
+ f" {BookDTO.__module__}.{BookDTO.__qualname__}>",
+ ),
+ "Note: The attached exception above contains verbose description of the problem",
),
AggregateCannotProvide(
"Cannot create top-level coercer",
diff --git a/tox.ini b/tox.ini
index 68c012f1..ec6dd32d 100644
--- a/tox.ini
+++ b/tox.ini
@@ -10,9 +10,11 @@ env_list = {py38, py39, py310, py311, py312, pypy38, pypy39, pypy310}-extra_{non
[testenv]
deps =
- extra_none: -r requirements/test_extra_none.txt
- extra_old: -r requirements/test_extra_old.txt
- extra_new: -r requirements/test_extra_new.txt
+ extra_none: -r requirements/test_extra_none.txt
+ extra_old: -r requirements/test_extra_old.txt
+
+ {py38, py39, py310, py311, py312, pypy39, pypy310}-extra_new: -r requirements/test_extra_new.txt
+ pypy38-extra_new: -r requirements/test_extra_new_pypy38.txt
use_develop = true
@@ -21,7 +23,9 @@ commands =
[testenv:{py38, py39, py310, py311, py312, pypy38, pypy39, pypy310}-bench]
-deps = -r requirements/bench.txt
+deps =
+ {py38, py39, py310, py311, py312, pypy39, pypy310}: -r requirements/bench.txt
+ pypy38: -r requirements/bench_pypy38.txt
use_develop = true