diff --git a/.binder/environment.yml b/.binder/environment.yml
index daa1a5cae..68a69a0cd 100644
--- a/.binder/environment.yml
+++ b/.binder/environment.yml
@@ -5,7 +5,7 @@ dependencies:
 - coverage
 - bidict =0.23.1
 - cloudpickle =3.1.1
-- executorlib =0.4.1
+- executorlib =0.4.2
 - graphviz =9.0.0
 - pandas =2.2.3
 - pint =0.24.4
diff --git a/.ci_support/environment.yml b/.ci_support/environment.yml
index daa1a5cae..68a69a0cd 100644
--- a/.ci_support/environment.yml
+++ b/.ci_support/environment.yml
@@ -5,7 +5,7 @@ dependencies:
 - coverage
 - bidict =0.23.1
 - cloudpickle =3.1.1
-- executorlib =0.4.1
+- executorlib =0.4.2
 - graphviz =9.0.0
 - pandas =2.2.3
 - pint =0.24.4
diff --git a/docs/environment.yml b/docs/environment.yml
index 1dcab15c2..bd4e641d7 100644
--- a/docs/environment.yml
+++ b/docs/environment.yml
@@ -11,7 +11,7 @@ dependencies:
 - coverage
 - bidict =0.23.1
 - cloudpickle =3.1.1
-- executorlib =0.4.1
+- executorlib =0.4.2
 - graphviz =9.0.0
 - pandas =2.2.3
 - pint =0.24.4
diff --git a/pyiron_workflow/__init__.py b/pyiron_workflow/__init__.py
index d98b5bcc8..505bd4807 100644
--- a/pyiron_workflow/__init__.py
+++ b/pyiron_workflow/__init__.py
@@ -45,7 +45,12 @@
 from pyiron_workflow.nodes import standard as standard_nodes
 from pyiron_workflow.nodes.composite import FailedChildError
 from pyiron_workflow.nodes.for_loop import For, for_node, for_node_factory
-from pyiron_workflow.nodes.function import Function, as_function_node, function_node
+from pyiron_workflow.nodes.function import (
+    Function,
+    as_function_node,
+    function_node,
+    to_function_node,
+)
 from pyiron_workflow.nodes.macro import Macro, as_macro_node, macro_node
 from pyiron_workflow.nodes.transform import (
     as_dataclass_node,
diff --git a/pyiron_workflow/mixin/preview.py b/pyiron_workflow/mixin/preview.py
index 4d27bf079..7b6aae175 100644
--- a/pyiron_workflow/mixin/preview.py
+++ b/pyiron_workflow/mixin/preview.py
@@ -111,6 +111,8 @@ class ScrapesIO(HasIOPreview, ABC):
     thus be left static from the time of class definition onwards.
""" + _extra_type_hint_scope: ClassVar[dict[str, type] | None] = None + @classmethod @abstractmethod def _io_defining_function(cls) -> Callable: @@ -202,7 +204,11 @@ def _get_function_signature(cls) -> inspect.Signature: """ The result of :func:`inspect.signature` on the io-defining function """ - return inspect.signature(cls._io_defining_function(), eval_str=True) + return inspect.signature( + cls._io_defining_function(), + eval_str=True, + locals=cls._extra_type_hint_scope, + ) @classmethod @lru_cache(maxsize=1) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index bed659ba0..cc5dab445 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -522,20 +522,30 @@ def _before_run( self.inputs.fetch() if self.use_cache and self.cache_hit: # Read and use cache - if self.parent is None and emit_ran_signal: + self._on_cache_hit() + if (self.parent is None or not self.parent.running) and emit_ran_signal: self.emit() - elif self.parent is not None: + elif self.parent is not None and self.parent.running: self.parent.register_child_starting(self) self.parent.register_child_finished(self) if emit_ran_signal: self.parent.register_child_emitting(self) - return True, self._outputs_to_run_return() - elif self.use_cache: # Write cache and continue - self._cached_inputs = self.inputs.to_value_dict() + else: + self._on_cache_miss() + if self.use_cache: # Write cache and continue + self._cached_inputs = self.inputs.to_value_dict() return super()._before_run(check_readiness=check_readiness) + def _on_cache_hit(self) -> None: + """A hook for subclasses to act on cache hits""" + return + + def _on_cache_miss(self) -> None: + """A hook for subclasses to act on cache misses""" + return + def _run( self, executor: Executor | None, @@ -544,7 +554,7 @@ def _run( run_finally_kwargs: dict, finish_run_kwargs: dict, ) -> Any | tuple | Future: - if self.parent is not None: + if self.parent is not None and self.parent.running: self.parent.register_child_starting(self) return super()._run( executor=executor, @@ -556,13 +566,13 @@ def _run( def _run_finally(self, /, emit_ran_signal: bool, raise_run_exceptions: bool): super()._run_finally() - if self.parent is not None: + if self.parent is not None and self.parent.running: self.parent.register_child_finished(self) if self.checkpoint is not None: self.save_checkpoint(self.checkpoint) if emit_ran_signal: - if self.parent is None: + if self.parent is None or not self.parent.running: self.emit() else: self.parent.register_child_emitting(self) @@ -760,6 +770,34 @@ def pull(self, *args, run_parent_trees_too=False, **kwargs): **kwargs, ) + def push(self, *args, **kwargs): + """ + Exactly like :meth:`run` with all the same flags, _except_ it handles an edge + case where you are trying to directly run the child node of a + :class:`pyiron_workflow.workflow.Workflow` before it has had any chance to + configure its execution signals. + _If_ the parent is a workflow set up to automate execution flow, does that + _first_ then runs as usual. + """ + # Alright, time for more egregious hacking + # Normally, running will work in a push-like mode _BUT_, because Workflow's are + # a flexible dynamic thing, they normally construct their execution signals on + # the fly at each run invocation. This is not the case for Macros, where the + # run configuration, if automated at all, happens right at macro instantiation. 
+        # So there's this horrible edge case where you build a workflow, then
+        # immediately try to run one of its children directly, naively expecting
+        # that the run will push downstream executions like it does in a macro
+        # -- except it _doesn't_, because there are _no signal connections at
+        # all yet!_
+        # Building these on _every_ run would be needlessly expensive, so this
+        # method exists as a hacky but guaranteed way to secure push-like run
+        # behaviour regardless of the context you're calling from.
+        from pyiron_workflow.workflow import Workflow
+
+        if isinstance(self.parent, Workflow) and self.parent.automate_execution:
+            self.parent.set_run_signals_to_dag_execution()
+
+        return self.run(*args, **kwargs)
+
     def __call__(self, *args, **kwargs) -> None:
         """
         A shortcut for :meth:`pull` that automatically runs the entire set of upstream data
diff --git a/pyiron_workflow/nodes/for_loop.py b/pyiron_workflow/nodes/for_loop.py
index 7ce94e273..bdc53f94e 100644
--- a/pyiron_workflow/nodes/for_loop.py
+++ b/pyiron_workflow/nodes/for_loop.py
@@ -91,7 +91,7 @@ def zipped_generator():
             return range(n_zip)

         def zipped_index_map(zipped_index):
-            return {key: zipped_index for key in zipped_keys}
+            return dict.fromkeys(zipped_keys, zipped_index)

         def merge(d1, d2):
             d1.update(d2)
@@ -232,9 +232,9 @@ def _setup_node(self) -> None:
         self.starting_nodes = input_nodes
         self._input_node_labels = tuple(n.label for n in input_nodes)

-    def _on_run(self):
-        self._build_body()
-        return super()._on_run()
+    def _on_cache_miss(self) -> None:
+        if self.ready:
+            self._build_body()

     def _build_body(self):
         """
@@ -271,6 +271,7 @@ def _clean_existing_subgraph(self):
                 run_data_tree=False,
                 run_parent_trees_too=False,
                 fetch_input=False,
+                emit_ran_signal=False,
                 # Data should simply be coming from the value link
                 # We just want to refresh the output
             )
diff --git a/pyiron_workflow/nodes/function.py b/pyiron_workflow/nodes/function.py
index 75fb31b14..95438e24e 100644
--- a/pyiron_workflow/nodes/function.py
+++ b/pyiron_workflow/nodes/function.py
@@ -1,8 +1,8 @@
 from __future__ import annotations

+import inspect
 from abc import ABC, abstractmethod
 from collections.abc import Callable
-from inspect import getsource
 from typing import Any

 from pyiron_snippets.colors import SeabornColors
@@ -347,11 +347,13 @@ def color(self) -> str:

     @classmethod
     def _extra_info(cls) -> str:
-        return getsource(cls.node_function)
+        return inspect.getsource(cls.node_function)


 @classfactory
 def function_node_factory(
+    node_class_qualname: str,
+    node_class_module_name: str,
     node_function: Callable,
     validate_output_labels: bool,
     use_cache: bool = True,
@@ -373,13 +375,14 @@ def function_node_factory(
     Returns:
         type[Node]: A new node class.
     """
+    node_class_name = node_class_qualname.rsplit(".", 1)[-1]
     return (  # type: ignore[return-value]
-        node_function.__name__,
+        node_class_name,
         (Function,),  # Define parentage
         {
             "node_function": staticmethod(node_function),
-            "__module__": node_function.__module__,
-            "__qualname__": node_function.__qualname__,
+            "__module__": node_class_module_name,
+            "__qualname__": node_class_qualname,
             "_output_labels": None if len(output_labels) == 0 else output_labels,
             "_validate_output_labels": validate_output_labels,
             "__doc__": Function._io_defining_documentation(
@@ -414,10 +417,15 @@ def as_function_node(
         subclass.
""" - def decorator(node_function): + def decorator(node_function) -> type[Function]: function_node_factory.clear(node_function.__name__) # Force a fresh class factory_made = function_node_factory( - node_function, validate_output_labels, use_cache, *output_labels + node_function.__qualname__, + node_function.__module__, + node_function, + validate_output_labels, + use_cache, + *output_labels, ) factory_made._reduce_imports_as = ( node_function.__module__, @@ -429,6 +437,93 @@ def decorator(node_function): return decorator +def to_function_node( + node_class_name, + node_function, + *output_labels, + validate_output_labels: bool = True, + use_cache: bool = True, + scope: dict[str, type] | None = None, +) -> type[Function]: + """ + Create a new :class:`Function` node class from an existing function. + Useful when the function does not exist in a context where you are free to + decorate it, e.g. + + >>> import numpy as np + >>> from pyiron_workflow.nodes.function import to_function_node + >>> + >>> Trapz = to_function_node("Trapz", np.trapz, "trapz") + >>> Trapz.preview_io() + {'inputs': {'y': (None, NOT_DATA), 'x': (None, None), 'dx': (None, 1.0), 'axis': (None, -1)}, 'outputs': {'trapz': None}} + + We still have two requirements on functions converted in this way: + - The function must be inspectable + - e.g. :func:`numpy.arange` fails this requirement + - The function must not use protected or argument names (as with decorated + functions) + - e.g. variadics `*args` and `**kwargs` + - The function must have a single return value (as with decorated functions) + + Otherwise you will need to explicitly write a decorated function that wraps your + desired function. + + Because nodes convert type hints to actual python objects for strict type checking, + we also need to provide non-builting type hints in the scope of the new node class + (for the benefit of an underlying `inspect.signature(..., eval_str=True)` call). + E.g., this function hints that it returns `set[Node]`, so while the new class is + being created it will need to know how to parse the `"Node"` string type hint + into an object. We do this by providing the `Node` class in its `scope` dictionary + (it already knows what a `set` is because this is just a python built-in type): + + >>> from pyiron_workflow.topology import get_nodes_in_data_tree + >>> from pyiron_workflow.node import Node + >>> + >>> GetNodesInDataTree = to_function_node( + ... "GetNodesInDataTree", + ... get_nodes_in_data_tree, + ... "nodes_set", # Just a nice label for the output + ... scope={"Node": Node}, + ... ) + >>> + >>> print(GetNodesInDataTree.preview_io()) + {'inputs': {'node': (, NOT_DATA)}, 'outputs': {'nodes_set': set[pyiron_workflow.node.Node]}} + + + Args: + node_class_name (str): The name of the new class -- MUST be manually matched to + the variable name to which the class is being assigned, or the class won't + be importable. + node_function (Callable): The function to be wrapped by the node. + *output_labels (str): Optional labels for the function's output channels. + validate_output_labels (bool): Flag to indicate if output labels should be + validated against the return values in the function node source code. + Defaults to True. + use_cache (bool): Whether nodes of this type should default to caching their + values. (Default is True.) + + Returns: + type[Function]: A new node class subclassing :class:`Function`. 
+ """ + # Inspect the caller's frame in order to extract the module where this is being used + frame = inspect.stack()[1] + module = inspect.getmodule(frame[0]) + node_class_module_name = module.__name__ if module else None + + function_node_factory.clear(node_class_name) # Force a fresh class + factory_made = function_node_factory( + node_class_name, + node_class_module_name, + node_function, + validate_output_labels, + use_cache, + *output_labels, + ) + factory_made._extra_type_hint_scope = scope + factory_made.preview_io() + return factory_made + + def function_node( node_function: Callable, *node_args, @@ -436,7 +531,7 @@ def function_node( validate_output_labels: bool = True, use_cache: bool = True, **node_kwargs, -): +) -> Function: """ Create and initialize a new instance of a :class:`Function` node. @@ -465,7 +560,12 @@ def function_node( output_labels = (output_labels,) function_node_factory.clear(node_function.__name__) # Force a fresh class factory_made = function_node_factory( - node_function, validate_output_labels, use_cache, *output_labels + node_function.__qualname__, + node_function.__module__, + node_function, + validate_output_labels, + use_cache, + *output_labels, ) factory_made.preview_io() return factory_made(*node_args, **node_kwargs) diff --git a/pyiron_workflow/nodes/transform.py b/pyiron_workflow/nodes/transform.py index 0b3de1167..28b886132 100644 --- a/pyiron_workflow/nodes/transform.py +++ b/pyiron_workflow/nodes/transform.py @@ -190,7 +190,7 @@ def _on_run(self, **inputs_to_value_dict): @classmethod def _build_inputs_preview(cls) -> dict[str, tuple[Any | None, Any | NotData]]: if isinstance(cls._input_specification, list): - return {key: (None, NOT_DATA) for key in cls._input_specification} + return dict.fromkeys(cls._input_specification, (None, NOT_DATA)) else: return cls._input_specification diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index 40cee293c..51b172247 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -231,14 +231,16 @@ def _save( if self._fallback(cloudpickle_fallback): attacks += [(self._CLOUDPICKLE, cloudpickle.dump)] - e = None + e: Exception | None = None for suffix, save_method in attacks: + e = None p = filename.with_suffix(suffix) try: with open(p, "wb") as filehandle: save_method(node, filehandle) return - except Exception: + except Exception as ee: + e = ee p.unlink(missing_ok=True) if e is not None: raise e diff --git a/pyproject.toml b/pyproject.toml index b6433fe29..b81bb6a4e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,6 +9,8 @@ requires = [ "typeguard", "setuptools", "versioneer[toml]==0.29", + "pyiron_snippets", + "pint", ] build-backend = "setuptools.build_meta" @@ -31,7 +33,7 @@ classifiers = [ dependencies = [ "bidict==0.23.1", "cloudpickle==3.1.1", - "executorlib==0.4.1", + "executorlib==0.4.2", "graphviz==0.20.3", "pandas==2.2.3", "pint==0.24.4", diff --git a/tests/integration/test_workflow.py b/tests/integration/test_workflow.py index fda6946e2..3339c6876 100644 --- a/tests/integration/test_workflow.py +++ b/tests/integration/test_workflow.py @@ -35,6 +35,17 @@ def Bar(x): return x * x +HISTORY: str = "" + + +@Workflow.wrap.as_function_node(use_cache=False) +def SideEffect(x): + y = x + 1 + global HISTORY # noqa: PLW0603 + HISTORY += f"{y}" + return y + + class TestWorkflow(unittest.TestCase): @classmethod def setUpClass(cls) -> None: @@ -332,6 +343,140 @@ def test_failure(self): f"{list(wf.as_path().iterdir()) if wf.as_path().is_dir() else None}", ) + def 
+        global HISTORY  # noqa: PLW0603
+
+        wf = Workflow("push_pull")
+        wf.n1 = SideEffect(0)
+        wf.n2 = SideEffect(wf.n1)
+        wf.n3 = SideEffect(wf.n2)
+
+        # Note that here we _FIRST RUN THE WORKFLOW_
+        # This triggers the automatic construction of DAG signal connections
+        with self.subTest("Run parent"):
+            HISTORY = ""
+            wf()
+            self.assertEqual(
+                HISTORY,
+                "".join(
+                    map(
+                        str,
+                        [
+                            wf.n1.outputs.y.value,
+                            wf.n2.outputs.y.value,
+                            wf.n3.outputs.y.value,
+                        ],
+                    )
+                ),
+                msg="Expected all three to run",
+            )
+
+        with self.subTest("Pull"):
+            HISTORY = ""
+            wf.n2.pull()
+            self.assertEqual(
+                HISTORY,
+                "".join(
+                    map(
+                        str,
+                        [
+                            wf.n1.outputs.y.value,
+                            wf.n2.outputs.y.value,
+                        ],
+                    )
+                ),
+                msg="Expected only upstream and this",
+            )
+
+        with self.subTest("Call"):
+            HISTORY = ""
+            wf.n2.__call__()
+            self.assertEqual(
+                HISTORY,
+                "".join(
+                    map(
+                        str,
+                        [
+                            wf.n2.outputs.y.value,
+                        ],
+                    )
+                ),
+                msg="Calling maps to a pull (+parent data tree)",  # BUT IT DOESN'T?!
+            )
+
+        with self.subTest("Push"):
+            wf.n1.pull()
+            HISTORY = ""
+            wf.n2.run()
+            self.assertEqual(
+                HISTORY,
+                "".join(
+                    map(
+                        str,
+                        [
+                            wf.n2.outputs.y.value,
+                            wf.n3.outputs.y.value,
+                        ],
+                    )
+                ),
+                msg="Expected only this and downstream",
+            )
+
+    def test_push_pull_with_unconfigured_workflows(self):
+        global HISTORY  # noqa: PLW0603
+
+        wf = Workflow("push_pull")
+        wf.n1 = SideEffect(0)
+        wf.n2 = SideEffect(wf.n1)
+        wf.n3 = SideEffect(wf.n2)
+
+        with self.subTest("Just run"):
+            self.assertListEqual(
+                [],
+                wf.n2.signals.output.ran.connections,
+                msg="Sanity check -- we have never run the workflow, so the parent "
+                "workflow has never had a chance to automatically configure its "
+                "execution flow.",
+            )
+            wf.n1.pull()
+            HISTORY = ""
+            wf.n2.run()
+            self.assertEqual(
+                HISTORY,
+                "".join(
+                    map(
+                        str,
+                        [
+                            wf.n2.outputs.y.value,
+                        ],
+                    )
+                ),
+                msg="With no signals configured, we expect the run to go nowhere",
+            )
+
+        with self.subTest("Push"):
+            self.assertListEqual(
+                [],
+                wf.n2.signals.output.ran.connections,
+                msg="Sanity check -- we have never run the workflow, so the parent "
+                "workflow has never had a chance to automatically configure its "
+                "execution flow.",
+            )
+            wf.n1.pull()
+            HISTORY = ""
+            wf.n2.push()
+            self.assertEqual(
+                HISTORY,
+                "".join(
+                    map(
+                        str,
+                        [wf.n2.outputs.y.value, wf.n3.outputs.y.value],
+                    )
+                ),
+                msg="Explicitly pushing should guarantee push-like behaviour even for "
+                "un-configured workflows.",
+            )
+

 if __name__ == "__main__":
     unittest.main()
diff --git a/tests/unit/nodes/test_for_loop.py b/tests/unit/nodes/test_for_loop.py
index 2efb5e659..b4da1d677 100644
--- a/tests/unit/nodes/test_for_loop.py
+++ b/tests/unit/nodes/test_for_loop.py
@@ -8,6 +8,7 @@
 from pyiron_snippets.dotdict import DotDict

 from pyiron_workflow._tests import ensure_tests_in_python_path
+from pyiron_workflow.mixin.run import ReadinessError
 from pyiron_workflow.nodes.for_loop import (
     MapsToNonexistentOutputError,
     UnmappedConflictError,
@@ -62,7 +63,7 @@ def test_valid_data_zipped_only(self):
         data = {"key1": [1, 2, 3], "key2": [4, 5]}
         zipped_keys = ("key1", "key2")
         expected_maps = tuple(
-            {key: idx for key in zipped_keys}
+            dict.fromkeys(zipped_keys, idx)
             for idx in range(min(len(data["key1"]), len(data["key2"])))
         )
         self.assertEqual(
@@ -613,6 +614,38 @@ def test_executor_deserialization(self):
         finally:
             n.delete_storage()

+    def test_caching(self):
+        side_effect_counter = 0
+
+        @as_function_node("m")
+        def SideEffectNode(n: int):
+            nonlocal side_effect_counter
+            side_effect_counter += 1
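+            # i.e. one tick per actual execution of the body -- cache hits
+            # leave the counter untouched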
+            return n**2
+
+        n = [1, 2, 3, 4]
+        s = SideEffectNode.for_node(iter_on="n")
+        with self.assertRaises(
+            ReadinessError,
+            msg="Without input, we should raise a readiness error before we get to "
+            "building the body node",
+        ):
+            s()
+
+        s.run(n=n)
+        self.assertEqual(
+            side_effect_counter,
+            len(n),
+            msg="Sanity check, it should have run once for each child node",
+        )
+        s.run()
+        self.assertEqual(
+            side_effect_counter,
+            len(n),
+            msg="With identical input, children should only actually get run the "
+            "first time",
+        )
+

 if __name__ == "__main__":
     unittest.main()
diff --git a/tests/unit/nodes/test_function.py b/tests/unit/nodes/test_function.py
index 62e02cccc..8b536c9e1 100644
--- a/tests/unit/nodes/test_function.py
+++ b/tests/unit/nodes/test_function.py
@@ -2,10 +2,18 @@
 import unittest
 from pathlib import Path

+import numpy as np
+
 from pyiron_workflow.channels import NOT_DATA
 from pyiron_workflow.io import ConnectionCopyError, ValueCopyError
-from pyiron_workflow.nodes.function import Function, as_function_node, function_node
+from pyiron_workflow.nodes.function import (
+    Function,
+    as_function_node,
+    function_node,
+    to_function_node,
+)
 from pyiron_workflow.nodes.multiple_distpatch import MultipleDispatchError
+from pyiron_workflow.topology import get_nodes_in_data_tree


 def throw_error(x: int | None = None):
@@ -539,6 +547,65 @@ def test_decoration(self):
         ):
             as_function_node(plus_one, "z")

+    def test_inline_creation(self):
+        with self.assertRaises(
+            ValueError,
+            msg="Known limitation: Can't have a bad signature, in this case '*args'",
+        ):
+            to_function_node("Sin", np.sin, "sinx")
+
+        with self.assertRaises(
+            ValueError,
+            msg="Known limitation: Must be inspectable "
+            "https://github.com/numpy/numpy/issues/16384, "
+            "https://github.com/numpy/numpy/issues/8734",
+        ):
+            to_function_node("ARange", np.arange, "arange")
+
+        with self.subTest("Non-builtin type hints should be ok via a scoping kwarg"):
+            from pyiron_workflow.node import Node as NonBuiltinTypeHint
+
+            GetNodes = to_function_node(
+                "GetNodes",
+                get_nodes_in_data_tree,
+                "node_set",
+                scope={"Node": NonBuiltinTypeHint},
+            )
+            self.assertEqual(
+                set[NonBuiltinTypeHint],
+                GetNodes.preview_outputs()["node_set"],
+                msg="Although non-builtin type hints are not normally accessible to "
+                "the signature inspection, we should be able to provide them",
+            )
+            self.assertEqual(
+                "Node",
+                GetNodes.preview_outputs()["node_set"].__args__[0].__name__,
+                msg="Sanity check: it shouldn't matter what we import it under, we "
+                "are providing the right class to the new node subclass.",
+            )
+
+        output_label = "trapz"
+        Trapz = to_function_node("Trapz", np.trapz, output_label)
+        self.assertIs(
+            np.trapz,
+            Trapz.node_function,
+            msg="We should be wrapping the requested function",
+        )
+        self.assertEqual(
+            output_label,
+            list(Trapz.preview_outputs().keys())[0],
+            msg="We should be able to name the output however we want",
+        )
+
+        n = Trapz()
+        out = n(np.linspace(0, 1, 10))
+        reloaded = pickle.loads(pickle.dumps(n))
+        self.assertAlmostEqual(
+            out,
+            reloaded.outputs.trapz.value,
+            msg="Save load cycle should work fine like for any other node",
+        )
+

 if __name__ == "__main__":
     unittest.main()
diff --git a/tests/unit/nodes/test_transform.py b/tests/unit/nodes/test_transform.py
index d3401ee9c..d7a762b37 100644
--- a/tests/unit/nodes/test_transform.py
+++ b/tests/unit/nodes/test_transform.py
@@ -65,7 +65,7 @@ def test_inputs_to_dict(self):
         d = {"c1": 4, "c2": 5}
         default = 42
         hint = int
-        spec = {k: (int, default) for k in d}
+        spec = dict.fromkeys(d, (int, default))
         n = inputs_to_dict(spec, autorun=True)
         self.assertIs(
             n.inputs[list(d.keys())[0]].type_hint,
@@ -73,7 +73,7 @@
             hint,
             msg="Spot check hint recognition",
         )
         self.assertDictEqual(
-            {k: default for k in d},
+            dict.fromkeys(d, default),
             n.outputs.dict.value,
             msg="Verify structure and ability to pass defaults",
         )
diff --git a/tests/unit/test_storage.py b/tests/unit/test_storage.py
index 0883bd3e0..9c396bbe3 100644
--- a/tests/unit/test_storage.py
+++ b/tests/unit/test_storage.py
@@ -2,6 +2,9 @@
 from pathlib import Path
 from tempfile import TemporaryDirectory

+import cloudpickle
+from pint import UnitRegistry
+
 from pyiron_workflow.nodes.function import as_function_node
 from pyiron_workflow.nodes.standard import UserInput
 from pyiron_workflow.storage import PickleStorage, TypeNotFoundError, available_backends
@@ -134,6 +137,20 @@ def Unimportable(x):
         finally:
             interface.delete(node=u, cloudpickle_fallback=True)

+    def test_uncloudpickleable(self):
+        ureg = UnitRegistry()
+        with self.assertRaises(
+            TypeError, msg="Sanity check that this can't even be cloudpickled"
+        ):
+            cloudpickle.dumps(ureg)
+
+        interface = PickleStorage(cloudpickle_fallback=True)
+        n = UserInput(ureg, label="uncloudpicklable_node")
+        with self.assertRaises(
+            TypeError, msg="Exception should be caught and saving should fail"
+        ):
+            interface.save(n)
+

 if __name__ == "__main__":
     unittest.main()
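
Taken together, the new public pieces compose like this -- an untested sketch
against this branch, where the "demo", "integral", and "downstream" labels are
illustrative and `UserInput` (already used in the storage tests above) simply
passes its input through:

    import numpy as np

    from pyiron_workflow import Workflow, to_function_node
    from pyiron_workflow.nodes.standard import UserInput

    # Wrap an existing, un-decoratable callable as a node class
    Trapz = to_function_node("Trapz", np.trapz, "trapz")

    wf = Workflow("demo")
    wf.integral = Trapz(y=np.linspace(0.0, 1.0, 10))
    wf.downstream = UserInput(wf.integral)

    # The workflow has never been run, so no DAG signals exist yet;
    # push() builds them first and then runs, so wf.downstream runs too
    wf.integral.push()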