airflow/models/backfill.py (4 additions, 2 deletions)
@@ -32,9 +32,7 @@
 
 from airflow.api_connexion.exceptions import Conflict, NotFound
 from airflow.exceptions import AirflowException
-from airflow.models import DagRun
 from airflow.models.base import Base, StringID
-from airflow.models.serialized_dag import SerializedDagModel
 from airflow.settings import json
 from airflow.utils import timezone
 from airflow.utils.session import create_session
@@ -129,6 +127,8 @@ def _create_backfill(
     reverse: bool,
     dag_run_conf: dict | None,
 ) -> Backfill | None:
+    from airflow.models.serialized_dag import SerializedDagModel
+
     with create_session() as session:
         serdag = session.get(SerializedDagModel, dag_id)
         if not serdag:
@@ -215,6 +215,8 @@ def _cancel_backfill(backfill_id) -> Backfill:
 
         session.commit()
 
+        from airflow.models import DagRun
+
         # now, let's mark all queued dag runs as failed
         query = (
             update(DagRun)
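
Both hunks make the same change: the SerializedDagModel and DagRun imports move from module level into the functions that use them, so merely importing airflow.models.backfill no longer requires those modules (or anything that imports backfill back) to be loaded first. Below is a standalone sketch of the deferred-import pattern; the module and function names are made up, and two tiny modules with a cycle are written to a temp directory so the example runs as a single script.

# deferred_import_sketch.py -- illustration only, not Airflow code; the module
# and function names below are made up. Two modules that would otherwise form
# an import cycle are written to a temp directory so this runs as one script.
import sys
import tempfile
from pathlib import Path

pkg = Path(tempfile.mkdtemp())

# backfill_like imports dagrun_like only inside the function that needs it,
# mirroring the local imports added in the hunks above.
(pkg / "backfill_like.py").write_text(
    "def cancel():\n"
    "    from dagrun_like import DagRunLike  # deferred until call time\n"
    "    return DagRunLike()\n"
)

# dagrun_like is free to import backfill_like at module level; by the time this
# executes, backfill_like is already present in sys.modules.
(pkg / "dagrun_like.py").write_text(
    "import backfill_like  # noqa: F401\n"
    "\n"
    "class DagRunLike:\n"
    "    pass\n"
)

sys.path.insert(0, str(pkg))
import backfill_like  # noqa: E402  (loads cleanly: nothing heavy at module level)

print(type(backfill_like.cancel()).__name__)  # -> DagRunLike

The only cost is that the deferred import runs on the first call instead of at module load; Python caches the module in sys.modules afterward.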
airflow/models/dagrun.py (2 additions, 1 deletion)
@@ -57,6 +57,7 @@
 from airflow.listeners.listener import get_listener_manager
 from airflow.models import Log
 from airflow.models.abstractoperator import NotMapped
+from airflow.models.backfill import Backfill
 from airflow.models.base import Base, StringID
 from airflow.models.expandinput import NotFullyPopulated
 from airflow.models.taskinstance import TaskInstance as TI
@@ -207,7 +208,7 @@ class DagRun(Base, LoggingMixin):
         uselist=False,
         cascade="all, delete, delete-orphan",
     )
-    backfill = relationship("Backfill", uselist=False)
+    backfill = relationship(Backfill, uselist=False)
     backfill_max_active_runs = association_proxy("backfill", "max_active_runs")
     max_active_runs = association_proxy("dag_model", "max_active_runs")
 
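
Because Backfill is now imported at the top of dagrun.py, the relationship can take the class itself rather than the string "Backfill". The string form is resolved against the declarative class registry only when mappers are configured, so it quietly requires airflow.models.backfill to have been imported by that point; passing the class removes that ordering requirement. A minimal standalone SQLAlchemy 2.0 sketch of the two forms follows; the models are simplified stand-ins, not Airflow's real tables.

# relationship_target_sketch.py -- standalone, simplified models for illustration.
from typing import Optional

from sqlalchemy import ForeignKey
from sqlalchemy.orm import DeclarativeBase, Mapped, configure_mappers, mapped_column, relationship


class Base(DeclarativeBase):
    pass


class Backfill(Base):
    __tablename__ = "backfill"
    id: Mapped[int] = mapped_column(primary_key=True)


class DagRun(Base):
    __tablename__ = "dag_run"
    id: Mapped[int] = mapped_column(primary_key=True)
    backfill_id: Mapped[Optional[int]] = mapped_column(ForeignKey("backfill.id"))

    # String form: looked up in the class registry at mapper-configuration time,
    # so the module defining Backfill must already have been imported by then.
    # backfill = relationship("Backfill", uselist=False)

    # Class form: the import guarantees the target exists; no name lookup needed.
    backfill = relationship(Backfill, uselist=False)


configure_mappers()  # configures cleanly: nothing left to resolve by name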
tests/cli/conftest.py (2 additions, 2 deletions)
@@ -21,9 +21,9 @@
 
 import pytest
 
-from airflow import models
 from airflow.cli import cli_parser
 from airflow.executors import local_executor
+from airflow.models.dagbag import DagBag
 from airflow.providers.celery.executors import celery_executor, celery_kubernetes_executor
 from airflow.providers.cncf.kubernetes.executors import kubernetes_executor, local_kubernetes_executor
 from tests.test_utils.config import conf_vars
@@ -56,7 +56,7 @@ def load_examples():
 
 @pytest.fixture(scope="session")
 def dagbag():
-    return models.DagBag(include_examples=True)
+    return DagBag(include_examples=True)
 
 
 @pytest.fixture(scope="session")
tests/conftest.py (0 additions, 6 deletions)
@@ -412,12 +412,6 @@ def initialize_airflow_tests(request):
                 "Skipping initializing of the DB as it was initialized already.\n"
                 "You can re-initialize the database by adding --with-db-init flag when running tests."
             )
-    else:
-        # if we are not initializing the database (due to skip db tests)
-        # we need to ensure Backfill is defined before DagRun
-        # otherwise we get this error:
-        # "sqlalchemy.exc.InvalidRequestError: When initializing mapper mapped class..."
-        from airflow.models.backfill import Backfill  # noqa: F401
     integration_kerberos = os.environ.get("INTEGRATION_KERBEROS")
     if integration_kerberos == "true":
         # Initialize kerberos
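
The deleted block worked around the same ordering problem: with relationship("Backfill") given as a string, SQLAlchemy could only resolve the name if airflow.models.backfill had already been imported when mappers were configured, so the test setup pre-imported it. With the class now referenced directly in dagrun.py, the workaround has nothing left to do. Below is a standalone sketch of the failure mode it guarded against; the class names are made up, and only the mechanism matches the error quoted in the deleted comment.

# missing_target_sketch.py -- standalone illustration, not Airflow code.
from sqlalchemy.exc import InvalidRequestError
from sqlalchemy.orm import DeclarativeBase, Mapped, configure_mappers, mapped_column, relationship


class Base(DeclarativeBase):
    pass


class DagRunLike(Base):
    __tablename__ = "dag_run_like"
    id: Mapped[int] = mapped_column(primary_key=True)
    # "MissingModel" is never defined in this registry, standing in for the case
    # where airflow.models.backfill was not imported before mapper configuration.
    missing = relationship("MissingModel", uselist=False)


try:
    configure_mappers()
except InvalidRequestError as err:
    # Roughly: "When initializing mapper Mapper[DagRunLike(dag_run_like)],
    # expression 'MissingModel' failed to locate a name ..."
    print(err)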