Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion airflow/serialization/serialized_objects.py
Original file line number Diff line number Diff line change
Expand Up @@ -1392,7 +1392,10 @@ def populate_operator(cls, op: Operator, encoded_op: dict[str, Any]) -> None:
elif k == "downstream_task_ids":
v = set(v)
elif k in {"retry_delay", "execution_timeout", "max_retry_delay"}:
v = cls._deserialize_timedelta(v)
# If operator's execution_timeout is None and core.default_task_execution_timeout is not None,
# v will be None so do not deserialize into timedelta
if v is not None:
v = cls._deserialize_timedelta(v)
elif k in encoded_op["template_fields"]:
pass
elif k == "resources":
Expand Down
32 changes: 32 additions & 0 deletions tests/serialization/test_dag_serialization.py
Original file line number Diff line number Diff line change
Expand Up @@ -2665,6 +2665,38 @@ def test_task_resources_serde():
}


@pytest.fixture(params=[None, timedelta(hours=1)])
def default_task_execution_timeout(request):
    """Simulate the ``core.default_task_execution_timeout`` airflow.cfg setting.

    Patches ``SerializedBaseOperator._CONSTRUCTOR_PARAMS`` so the serializer
    treats the parametrized value as the default ``execution_timeout``, then
    yields that value so the test can compare against it.
    """
    # Imported lazily so patching happens against the already-loaded class.
    from airflow.serialization.serialized_objects import SerializedBaseOperator

    timeout_default = request.param
    overrides = {"execution_timeout": timeout_default}
    with mock.patch.dict(SerializedBaseOperator._CONSTRUCTOR_PARAMS, overrides):
        yield timeout_default


@pytest.mark.parametrize("execution_timeout", [None, timedelta(hours=1)])
def test_task_execution_timeout_serde(execution_timeout, default_task_execution_timeout):
    """Round-trip a task's ``execution_timeout`` through serialization.

    Verifies that the field survives serialize/deserialize for every
    combination of the operator's own timeout and the configured default.
    """
    from airflow.providers.standard.operators.empty import EmptyOperator

    dag = DAG("test_task_execution_timeout", schedule=None, start_date=datetime(2020, 1, 1))
    with dag:
        op = EmptyOperator(task_id="task1", execution_timeout=execution_timeout)

    encoded = BaseSerialization.serialize(op)
    # The field is only written out when it differs from the (patched) default.
    if execution_timeout != default_task_execution_timeout:
        assert "execution_timeout" in encoded["__var"]

    round_tripped = BaseSerialization.deserialize(encoded)
    assert round_tripped.execution_timeout == op.execution_timeout


def test_taskflow_expand_serde():
from airflow.decorators import task
from airflow.models.xcom_arg import XComArg
Expand Down
Loading