Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst
Original file line number Diff line number Diff line change
Expand Up @@ -454,7 +454,7 @@ If you have pre-commit installed, pre-commit will be run automatically on commit
manually after commit, you can run it via ``breeze static-checks --last-commit`` some of the tests might fail
because suspension of the provider might cause changes in the dependencies, so if you see errors about
missing dependencies imports, non-usable classes etc., you will need to build the CI image locally
via ``breeze build-image --python 3.8 --upgrade-to-newer-dependencies`` after the first pre-commit run
via ``breeze build-image --python 3.9 --upgrade-to-newer-dependencies`` after the first pre-commit run
and then run the static checks again.

If you want to be absolutely sure to run all static checks you can always do this via
Expand Down
3 changes: 1 addition & 2 deletions airflow/providers/amazon/aws/utils/mixins.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,11 @@

from __future__ import annotations

from functools import cached_property
from functools import cache, cached_property
from typing import Any, Generic, NamedTuple, TypeVar

from typing_extensions import final

from airflow.compat.functools import cache
from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook

AwsHookType = TypeVar("AwsHookType", bound=AwsGenericHook)
Expand Down
3 changes: 1 addition & 2 deletions airflow/providers/cloudant/provider.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -51,10 +51,9 @@ dependencies:

excluded-python-versions:
# ibmcloudant transitively brings in urllib3 2.x, but the snowflake provider has a dependency that pins
# urllib3 to 1.x on Python 3.8 and 3.9; thus we exclude those Python versions from taking the update
# urllib3 to 1.x on Python 3.9; thus we exclude that Python version from taking the update
# to ibmcloudant.
# See #21004, #41555, and https://github.com/snowflakedb/snowflake-connector-python/issues/2016
- "3.8"
- "3.9"

integrations:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,14 +19,14 @@
import logging
import secrets
import string
from functools import cache
from typing import TYPE_CHECKING

import pendulum
from deprecated import deprecated
from kubernetes.client.rest import ApiException
from slugify import slugify

from airflow.compat.functools import cache
from airflow.configuration import conf
from airflow.exceptions import AirflowProviderDeprecationWarning

Expand Down
21 changes: 3 additions & 18 deletions airflow/providers/common/io/xcom/backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,13 @@
import contextlib
import json
import uuid
from functools import cache
from pathlib import Path
from typing import TYPE_CHECKING, Any, TypeVar
from urllib.parse import urlsplit

import fsspec.utils

from airflow.compat.functools import cache
from airflow.configuration import conf
from airflow.io.path import ObjectStoragePath
from airflow.models.xcom import BaseXCom
Expand All @@ -40,22 +41,6 @@
SECTION = "common.io"


def _is_relative_to(o: ObjectStoragePath, other: ObjectStoragePath) -> bool:
"""
Return whether or not this path is relative to the other path.

This is a port of the pathlib.Path.is_relative_to method. It is not available in python 3.8.
"""
if hasattr(o, "is_relative_to"):
return o.is_relative_to(other)

try:
o.relative_to(other)
return True
except ValueError:
return False


def _get_compression_suffix(compression: str) -> str:
"""
Return the compression suffix for the given compression.
Expand Down Expand Up @@ -111,7 +96,7 @@ def _get_full_path(data: str) -> ObjectStoragePath:
raise TypeError(f"Not a valid url: {data}") from None

if url.scheme:
if not _is_relative_to(ObjectStoragePath(data), p):
if not Path.is_relative_to(ObjectStoragePath(data), p):
raise ValueError(f"Invalid key: {data}")
return p / data.replace(str(p), "", 1).lstrip("/")

Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/openlineage/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ def decorator(func):

cache = decorator
else:
from airflow.compat.functools import cache
from functools import cache
from airflow.configuration import conf

_CONFIG_SECTION = "openlineage"
Expand Down
6 changes: 3 additions & 3 deletions docs/apache-airflow-providers-amazon/executors/general.rst
Original file line number Diff line number Diff line change
Expand Up @@ -138,9 +138,9 @@ which is running the Airflow scheduler process (and thus, the |executorName|
executor.) Apache Airflow images with specific python versions can be
downloaded from the Dockerhub registry, and filtering tags by the
`python
version <https://hub.docker.com/r/apache/airflow/tags?page=1&name=3.8>`__.
For example, the tag ``latest-python3.8`` specifies that the image will
have python 3.8 installed.
version <https://hub.docker.com/r/apache/airflow/tags?page=1&name=3.9>`__.
For example, the tag ``latest-python3.9`` specifies that the image will
have python 3.9 installed.


Loading DAGs
Expand Down
1 change: 0 additions & 1 deletion generated/provider_dependencies.json
Original file line number Diff line number Diff line change
Expand Up @@ -359,7 +359,6 @@
"plugins": [],
"cross-providers-deps": [],
"excluded-python-versions": [
"3.8",
"3.9"
],
"state": "ready"
Expand Down