6 changes: 5 additions & 1 deletion airflow-core/tests/unit/cli/commands/test_dag_command.py
@@ -38,7 +38,6 @@
from airflow.cli.commands import dag_command
from airflow.exceptions import AirflowException
from airflow.models import DagBag, DagModel, DagRun
from airflow.models.baseoperator import BaseOperator
from airflow.models.serialized_dag import SerializedDagModel
from airflow.providers.standard.triggers.temporal import DateTimeTrigger, TimeDeltaTrigger
from airflow.sdk import task
@@ -57,6 +56,11 @@
)
from unit.models import TEST_DAGS_FOLDER

try:
from airflow.sdk import BaseOperator
except ImportError:
from airflow.models.baseoperator import BaseOperator # type: ignore[no-redef]

DEFAULT_DATE = timezone.make_aware(datetime(2015, 1, 1), timezone=timezone.utc)
if pendulum.__version__.startswith("3"):
DEFAULT_DATE_REPR = DEFAULT_DATE.isoformat(sep=" ")
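The hunk above shows the version-agnostic import this PR rolls out across the test suite: try the Task SDK export first and fall back to the legacy `airflow.models` location on older Airflow versions. A minimal sketch of the same idiom, assuming only that each path exists on its respective Airflow version (the `NoopOperator` subclass is hypothetical, added purely to show the import resolves either way):

```python
# Cross-version import fallback, mirroring the pattern added in this PR.
try:
    from airflow.sdk import BaseOperator  # Airflow 3.x Task SDK export
except ImportError:
    # Older Airflow: the Task SDK is unavailable, use the legacy location.
    from airflow.models.baseoperator import BaseOperator  # type: ignore[no-redef]


class NoopOperator(BaseOperator):
    """Hypothetical operator; exists only to show the import works on both versions."""

    def execute(self, context):
        return None
```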
2 changes: 1 addition & 1 deletion airflow-core/tests/unit/cluster_policies/__init__.py
@@ -24,7 +24,7 @@

from airflow.configuration import conf
from airflow.exceptions import AirflowClusterPolicySkipDag, AirflowClusterPolicyViolation
from airflow.sdk.bases.operator import BaseOperator
from airflow.sdk import BaseOperator

if TYPE_CHECKING:
from airflow.models.dag import DAG
8 changes: 4 additions & 4 deletions devel-common/src/tests_common/test_utils/mock_operators.py
@@ -21,17 +21,17 @@

import attr

from airflow.models.baseoperator import BaseOperator

from tests_common.test_utils.compat import BaseOperatorLink
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS

if TYPE_CHECKING:
from airflow.sdk.definitions.context import Context

if AIRFLOW_V_3_0_PLUS:
try:
from airflow.models.xcom import XComModel as XCom
else:
from airflow.sdk import BaseOperator
except ImportError:
from airflow.models.baseoperator import BaseOperator # type: ignore[no-redef]
from airflow.models.xcom import XCom # type: ignore[no-redef]


6 changes: 6 additions & 0 deletions devel-common/src/tests_common/test_utils/version_compat.py
@@ -37,12 +37,16 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
AIRFLOW_V_3_0_3_PLUS = get_base_airflow_version_tuple() >= (3, 0, 3)
AIRFLOW_V_3_1_PLUS = get_base_airflow_version_tuple() >= (3, 1, 0)


if AIRFLOW_V_3_1_PLUS:
from airflow.sdk import PokeReturnValue, timezone
from airflow.sdk.bases.xcom import BaseXCom
from airflow.sdk.definitions._internal.decorators import remove_task_decorator

XCOM_RETURN_KEY = BaseXCom.XCOM_RETURN_KEY
else:
from airflow.sensors.base import PokeReturnValue # type: ignore[no-redef]
from airflow.utils import timezone # type: ignore[attr-defined,no-redef]
from airflow.utils.decorators import remove_task_decorator # type: ignore[no-redef]
from airflow.utils.xcom import XCOM_RETURN_KEY # type: ignore[no-redef]

@@ -66,5 +70,7 @@ def get_sqlalchemy_version_tuple() -> tuple[int, int, int]:
"SQLALCHEMY_V_1_4",
"SQLALCHEMY_V_2_0",
"XCOM_RETURN_KEY",
"PokeReturnValue",
"remove_task_decorator",
"timezone",
]
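With `timezone` and `PokeReturnValue` re-exported here, provider tests can import them from `tests_common.test_utils.version_compat` instead of reaching into `airflow.utils` or `airflow.sdk` directly, which is exactly what the later hunks in this PR switch to. A minimal sketch of a consumer, assuming a hypothetical test module:

```python
# Hypothetical consumer: the shim resolves to airflow.sdk.timezone on Airflow 3.1+
# and to airflow.utils.timezone on earlier versions.
from tests_common.test_utils.version_compat import timezone

DEFAULT_DATE = timezone.datetime(2021, 1, 1)


def test_default_date_is_aware():
    # timezone.datetime() returns a tz-aware datetime regardless of Airflow version.
    assert DEFAULT_DATE.tzinfo is not None
```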
3 changes: 2 additions & 1 deletion performance/tests/test_performance_dag.py
@@ -22,9 +22,10 @@

import json
import os
import re

import pytest
import re

from airflow.configuration import conf
from airflow.models import DagBag
from airflow.utils.trigger_rule import TriggerRule
@@ -70,8 +70,7 @@

from airflow.models import TaskInstance
from airflow.providers.common.compat.assets import Asset
from airflow.sdk import DAG
from airflow.sdk.bases.operator import BaseOperator
from airflow.sdk import DAG, BaseOperator
from airflow.sdk.definitions.mappedoperator import MappedOperator
from airflow.sdk.execution_time.secrets_masker import (
Redactable,
@@ -83,8 +82,7 @@
from airflow.utils.state import DagRunState, TaskInstanceState
else:
try:
from airflow.sdk import DAG
from airflow.sdk.bases.operator import BaseOperator
from airflow.sdk import DAG, BaseOperator
from airflow.sdk.definitions.mappedoperator import MappedOperator
except ImportError:
from airflow.models import DAG, BaseOperator, MappedOperator
@@ -69,9 +69,8 @@ def hook_lineage_collector():


if AIRFLOW_V_3_0_PLUS:
from airflow.sdk import ObjectStoragePath
from airflow.sdk import BaseOperator, ObjectStoragePath
from airflow.sdk.api.datamodels._generated import TaskInstance as SDKTaskInstance
from airflow.sdk.bases.operator import BaseOperator
from airflow.sdk.execution_time import task_runner
from airflow.sdk.execution_time.comms import StartupDetails
from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance, parse
@@ -114,7 +114,7 @@ def test_lineage_parent_id(mock_run_id):
@pytest.mark.skipif(not AIRFLOW_V_3_0_PLUS, reason="Test only for Airflow 3.0+")
def test_lineage_root_run_id_with_runtime_task_instance(create_runtime_ti):
"""Test lineage_root_run_id with real RuntimeTaskInstance object doesn't throw AttributeError."""
from airflow.sdk.bases.operator import BaseOperator
from airflow.sdk import BaseOperator

task = BaseOperator(task_id="test_task")

@@ -20,7 +20,11 @@
from airflow.providers.standard.operators.empty import EmptyOperator
from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator
from airflow.providers.standard.sensors.external_task import ExternalTaskSensor
from airflow.utils.timezone import datetime

try:
from airflow.sdk.timezone import datetime
except ImportError:
from airflow.utils.timezone import datetime # type: ignore[no-redef]

with DAG(
dag_id="example_external_task",
@@ -42,7 +42,10 @@ class StartTriggerArgs: # type: ignore[no-redef]
timeout: datetime.timedelta | None = None


from airflow.utils import timezone
try:
from airflow.sdk import timezone
except ImportError:
from airflow.utils import timezone # type: ignore[attr-defined,no-redef]

if TYPE_CHECKING:
try:
@@ -25,7 +25,8 @@
from airflow import macros
from airflow.models.dag import DAG
from airflow.providers.standard.sensors.date_time import DateTimeSensor
from airflow.utils import timezone

from tests_common.test_utils.version_compat import timezone

DEFAULT_DATE = timezone.datetime(2015, 1, 1)

@@ -24,8 +24,8 @@

from airflow.exceptions import AirflowSensorTimeout
from airflow.providers.standard.sensors.python import PythonSensor
from airflow.sensors.base import PokeReturnValue

from tests_common.test_utils.version_compat import PokeReturnValue
from unit.standard.operators.test_python import BasePythonTest

pytestmark = pytest.mark.db_test
27 changes: 12 additions & 15 deletions providers/standard/tests/unit/standard/sensors/test_time_delta.py
@@ -19,7 +19,6 @@

from datetime import timedelta
from typing import Any
from unittest import mock

import pendulum
import pytest
@@ -34,16 +33,14 @@
WaitSensor,
)
from airflow.providers.standard.triggers.temporal import DateTimeTrigger
from airflow.utils import timezone
from airflow.utils.timezone import datetime
from airflow.utils.types import DagRunType

from tests_common.test_utils import db
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS, timezone

pytestmark = pytest.mark.db_test

DEFAULT_DATE = datetime(2015, 1, 1)
DEFAULT_DATE = timezone.datetime(2015, 1, 1)
DEV_NULL = "/dev/null"
TEST_DAG_ID = "unit_tests"

@@ -64,13 +61,13 @@ def setup_method(self):
self.dagbag = DagBag(dag_folder=DEV_NULL, include_examples=False)
self.dag = DAG(TEST_DAG_ID, schedule=timedelta(days=1), start_date=DEFAULT_DATE)

def test_timedelta_sensor(self):
def test_timedelta_sensor(self, mocker):
op = TimeDeltaSensor(task_id="timedelta_sensor_check", delta=timedelta(seconds=2), dag=self.dag)
op.execute({"dag_run": mock.MagicMock(run_after=DEFAULT_DATE), "data_interval_end": DEFAULT_DATE})
op.execute({"dag_run": mocker.MagicMock(run_after=DEFAULT_DATE), "data_interval_end": DEFAULT_DATE})


@pytest.mark.parametrize(
"run_after, interval_end",
("run_after", "interval_end"),
[
(timezone.utcnow() + timedelta(days=1), timezone.utcnow() + timedelta(days=2)),
(timezone.utcnow() + timedelta(days=1), None),
@@ -108,7 +105,7 @@ def test_timedelta_sensor_run_after_vs_interval(run_after, interval_end, dag_mak


@pytest.mark.parametrize(
"run_after, interval_end",
("run_after", "interval_end"),
[
(timezone.utcnow() + timedelta(days=1), timezone.utcnow() + timedelta(days=2)),
(timezone.utcnow() + timedelta(days=1), None),
@@ -168,8 +165,8 @@ def setup_method(self):
"should_defer",
[False, True],
)
@mock.patch(DEFER_PATH)
def test_timedelta_sensor(self, defer_mock, should_defer):
def test_timedelta_sensor(self, mocker, should_defer):
defer_mock = mocker.patch(DEFER_PATH)
delta = timedelta(hours=1)
with pytest.warns(AirflowProviderDeprecationWarning):
op = TimeDeltaSensorAsync(task_id="timedelta_sensor_check", delta=delta, dag=self.dag)
@@ -187,9 +184,9 @@ def test_timedelta_sensor(self, defer_mock, should_defer):
"should_defer",
[False, True],
)
@mock.patch(DEFER_PATH)
@mock.patch("airflow.providers.standard.sensors.time_delta.sleep")
def test_wait_sensor(self, sleep_mock, defer_mock, should_defer):
def test_wait_sensor(self, mocker, should_defer):
defer_mock = mocker.patch(DEFER_PATH)
sleep_mock = mocker.patch("airflow.providers.standard.sensors.time_delta.sleep")
wait_time = timedelta(seconds=30)
op = WaitSensor(
task_id="wait_sensor_check", time_to_wait=wait_time, dag=self.dag, deferrable=should_defer
@@ -203,7 +200,7 @@ def test_wait_sensor(self, sleep_mock, defer_mock, should_defer):
sleep_mock.assert_called_once_with(30)

@pytest.mark.parametrize(
"run_after, interval_end",
("run_after", "interval_end"),
[
(timezone.utcnow() + timedelta(days=1), timezone.utcnow() + timedelta(days=2)),
(timezone.utcnow() + timedelta(days=1), None),
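Besides the import cleanup, the hunks above replace the `@mock.patch` decorators and `mock.MagicMock` with the `pytest-mock` `mocker` fixture, which patches inside the test body and restores the original automatically at teardown. A minimal sketch of that idiom, assuming `pytest-mock` is installed and using `os.getcwd` as a stand-in patch target:

```python
import os


def test_uses_mocker(mocker):
    # mocker.patch replaces the target for the duration of this test only.
    fake_cwd = mocker.patch("os.getcwd", return_value="/tmp")

    assert os.getcwd() == "/tmp"
    fake_cwd.assert_called_once()
    # No decorator stacking and no manual cleanup: mocker restores os.getcwd
    # when the test finishes.
```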
10 changes: 4 additions & 6 deletions providers/standard/tests/unit/standard/sensors/test_weekday.py
@@ -26,18 +26,16 @@
from airflow.models.dag import DAG
from airflow.providers.standard.sensors.weekday import DayOfWeekSensor
from airflow.providers.standard.utils.weekday import WeekDay
from airflow.utils import timezone
from airflow.utils.timezone import datetime

from tests_common.test_utils import db
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS, timezone

pytestmark = pytest.mark.db_test


DEFAULT_DATE = datetime(2018, 12, 10)
WEEKDAY_DATE = datetime(2018, 12, 20)
WEEKEND_DATE = datetime(2018, 12, 22)
DEFAULT_DATE = timezone.datetime(2018, 12, 10)
WEEKDAY_DATE = timezone.datetime(2018, 12, 20)
WEEKEND_DATE = timezone.datetime(2018, 12, 22)
TEST_DAG_ID = "weekday_sensor_dag"
DEV_NULL = "/dev/null"
TEST_CASE_WEEKDAY_SENSOR_TRUE = {
1 change: 1 addition & 0 deletions pyproject.toml
@@ -754,6 +754,7 @@ testing = ["dev", "providers.tests", "tests_common", "tests", "system", "unit",
"kubernetes-tests/*" = ["D", "TID253", "S101", "TRY002"]
"helm-tests/*" = ["D", "TID253", "S101", "TRY002"]
"providers/**/tests/*" = ["D", "TID253", "S101", "TRY002"]
"performance/tests/*" = ["S101"]

# All of the modules which have an extra license header (i.e. that we copy from another project) need to
# ignore E402 -- module level import not at top level