Skip to content

Commit

Permalink
Allow deep-copying a pickled schema (#173)
Browse files Browse the repository at this point in the history
  • Loading branch information
Cito committed Sep 25, 2022
1 parent 9aca33c commit 39b8db4
Show file tree
Hide file tree
Showing 3 changed files with 101 additions and 49 deletions.
22 changes: 16 additions & 6 deletions src/graphql/type/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -208,8 +208,8 @@ def __init__(
# Provide specified directives (e.g. @include and @skip) by default
self.directives = specified_directives if directives is None else directives

# To preserve order of user-provided types, we add first to add them to
# the set of "collected" types, so `collect_referenced_types` ignore them.
# To preserve order of user-provided types, we first add them to the set
# of "collected" types, so `collect_referenced_types` ignores them.
if types:
all_referenced_types = TypeSet.with_initial_types(types)
collect_referenced_types = all_referenced_types.collect_referenced_types
Expand Down Expand Up @@ -258,10 +258,20 @@ def __init__(
" is missing a name.",
)
if type_name in type_map:
raise TypeError(
"Schema must contain uniquely named types"
f" but contains multiple types named '{type_name}'."
)
from ..type import specified_scalar_types

if (
type_name in specified_scalar_types
and type_map[type_name] is not specified_scalar_types[type_name]
):
# allow replacing a copy of a specified scalar type
named_type = specified_scalar_types[type_name]
else:
raise TypeError(
"Schema must contain uniquely named types"
f" but contains multiple types named '{type_name}'."
)

type_map[type_name] = named_type

if is_interface_type(named_type):
Expand Down
51 changes: 35 additions & 16 deletions tests/language/test_schema_parser.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import pickle
from copy import deepcopy
from textwrap import dedent
from typing import List, Optional, Tuple

Expand Down Expand Up @@ -797,19 +799,36 @@ def directive_with_incorrect_locations():
def parses_kitchen_sink_schema(kitchen_sink_sdl):  # noqa: F811
    # Parsing the kitchen sink SDL must yield a truthy document AST.
    document = parse(kitchen_sink_sdl)
    assert document

def can_pickle_and_unpickle_kitchen_sink_schema_ast(kitchen_sink_sdl):  # noqa: F811
    import pickle

    # Build the schema AST from the kitchen sink SDL.
    document = parse(kitchen_sink_sdl)

    # Pickling must succeed (in particular, there must be no
    # recursion error) and the payload must stay reasonably small.
    pickled = pickle.dumps(document)
    assert len(pickled) < 50 * len(kitchen_sink_sdl)

    # Round-tripping through pickle must preserve equality.
    restored = pickle.loads(pickled)
    assert restored == document

    # Pickling must be deterministic: a second dump is byte-identical.
    pickled_again = pickle.dumps(document)
    assert pickled_again == pickled
def describe_deepcopy_and_pickle():
    def can_deep_copy_ast(kitchen_sink_sdl):  # noqa: F811
        # Build the schema AST from the kitchen sink SDL, then verify
        # that a deep copy of it compares equal to the original.
        document = parse(kitchen_sink_sdl)
        duplicate = deepcopy(document)
        assert duplicate == document

    def can_pickle_and_unpickle_ast(kitchen_sink_sdl):  # noqa: F811
        # Build the schema AST from the kitchen sink SDL.
        document = parse(kitchen_sink_sdl)
        # Pickling must succeed (in particular, there must be no
        # recursion error) and the payload must stay reasonably small.
        pickled = pickle.dumps(document)
        assert len(pickled) < 50 * len(kitchen_sink_sdl)
        # Round-tripping through pickle must preserve equality.
        restored = pickle.loads(pickled)
        assert restored == document
        # Pickling must be deterministic: a second dump is identical.
        pickled_again = pickle.dumps(document)
        assert pickled_again == pickled

    def can_deep_copy_pickled_ast(kitchen_sink_sdl):  # noqa: F811
        # Build the schema AST, round-trip it through pickle, and
        # check that a deep copy of the result still equals the original.
        document = parse(kitchen_sink_sdl)
        round_tripped = pickle.loads(pickle.dumps(document))
        duplicate = deepcopy(round_tripped)
        assert duplicate == document
77 changes: 50 additions & 27 deletions tests/utilities/test_build_ast_schema.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
import pickle
from collections import namedtuple
from copy import deepcopy
from typing import Union

from pytest import raises
Expand Down Expand Up @@ -1186,30 +1188,51 @@ def rejects_invalid_ast():
build_ast_schema({}) # type: ignore
assert str(exc_info.value) == "Must provide valid Document AST."

def can_pickle_and_unpickle_big_schema(
    big_schema_sdl,  # noqa: F811
):  # pragma: no cover
    import pickle

    # Normalize the SDL to our own printing conventions first.
    big_schema_sdl = cycle_sdl(big_schema_sdl)

    # Build a schema from the big schema SDL.
    schema = build_schema(big_schema_sdl, assume_valid_sdl=True)

    # Pickling must succeed (in particular, there must be no recursion
    # error, and no failure from trying to pickle lambdas or local
    # functions), and the payload must stay reasonably small.
    pickled = pickle.dumps(schema)
    assert len(pickled) < 25 * len(big_schema_sdl)

    # Printing the round-tripped schema must reproduce the SDL.
    restored = pickle.loads(pickled)
    assert print_schema(restored) == big_schema_sdl

    # A second pickle round trip must behave the same way.
    pickled = pickle.dumps(schema)
    assert len(pickled) < 25 * len(big_schema_sdl)
    restored = pickle.loads(pickled)
    assert print_schema(restored) == big_schema_sdl
def describe_deepcopy_and_pickle():
    def can_deep_copy_big_schema(big_schema_sdl):  # noqa: F811
        # Normalize the SDL to our own printing conventions first.
        big_schema_sdl = cycle_sdl(big_schema_sdl)
        # Build a schema from the big schema SDL.
        schema = build_schema(big_schema_sdl, assume_valid_sdl=True)
        # Printing a deep copy of the schema must reproduce the SDL.
        duplicate = deepcopy(schema)
        assert print_schema(duplicate) == big_schema_sdl

    def can_pickle_and_unpickle_big_schema(big_schema_sdl):  # noqa: F811
        # Normalize the SDL to our own printing conventions first.
        big_schema_sdl = cycle_sdl(big_schema_sdl)
        # Build a schema from the big schema SDL.
        schema = build_schema(big_schema_sdl, assume_valid_sdl=True)
        # Pickling must succeed (in particular, there must be no
        # recursion error, and no failure from trying to pickle lambdas
        # or local functions), and the payload must stay reasonably small.
        pickled = pickle.dumps(schema)
        assert len(pickled) < 25 * len(big_schema_sdl)
        # Printing the round-tripped schema must reproduce the SDL.
        restored = pickle.loads(pickled)
        assert print_schema(restored) == big_schema_sdl
        # A second pickle round trip must behave the same way.
        pickled = pickle.dumps(schema)
        assert len(pickled) < 25 * len(big_schema_sdl)
        restored = pickle.loads(pickled)
        assert print_schema(restored) == big_schema_sdl

    def can_deep_copy_pickled_big_schema(big_schema_sdl):  # noqa: F811
        # Normalize the SDL to our own printing conventions first.
        big_schema_sdl = cycle_sdl(big_schema_sdl)
        # Build a schema, round-trip it through pickle, then verify that
        # printing a deep copy of the result reproduces the SDL.
        schema = build_schema(big_schema_sdl, assume_valid_sdl=True)
        round_tripped = pickle.loads(pickle.dumps(schema))
        duplicate = deepcopy(round_tripped)
        assert print_schema(duplicate) == big_schema_sdl

0 comments on commit 39b8db4

Please sign in to comment.