diff --git a/Dockerfile b/Dockerfile
index a9bc0c9a006b..11aad19a0747 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1406,7 +1406,7 @@ COPY --from=scripts install_from_docker_context_files.sh install_airflow.sh \
 # an incorrect architecture.
 ARG TARGETARCH
 # Value to be able to easily change cache id and therefore use a bare new cache
-ARG PIP_CACHE_EPOCH="0"
+ARG PIP_CACHE_EPOCH="9"
 # hadolint ignore=SC2086, SC2010, DL3042
 RUN --mount=type=cache,id=$PYTHON_BASE_IMAGE-$AIRFLOW_PIP_VERSION-$TARGETARCH-$PIP_CACHE_EPOCH,target=/tmp/.cache/pip,uid=${AIRFLOW_UID} \
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 05f438a8f068..a7a3396925e0 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -1072,7 +1072,7 @@ ARG AIRFLOW_CONSTRAINTS_LOCATION=""
 ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-main"
 # By changing the epoch we can force reinstalling Airflow and pip all dependencies
 # It can also be overwritten manually by setting the AIRFLOW_CI_BUILD_EPOCH environment variable.
-ARG AIRFLOW_CI_BUILD_EPOCH="6"
+ARG AIRFLOW_CI_BUILD_EPOCH="7"
 ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
 ARG AIRFLOW_PIP_VERSION=24.0
 # Setup PIP
diff --git a/airflow/providers/databricks/provider.yaml b/airflow/providers/databricks/provider.yaml
index 60a4320ed970..a9c4deca9976 100644
--- a/airflow/providers/databricks/provider.yaml
+++ b/airflow/providers/databricks/provider.yaml
@@ -61,7 +61,7 @@ versions:
 dependencies:
   - apache-airflow>=2.6.0
   - apache-airflow-providers-common-sql>=1.10.0
-  - requests>=2.27,<3
+  - requests>=2.27.0,<3
   # The connector 2.9.0 released on Aug 10, 2023 has a bug that it does not properly declare urllib3 and
   # it needs to be excluded. See https://github.com/databricks/databricks-sql-python/issues/190
   # The 2.9.1 (to be released soon) already contains the fix
diff --git a/airflow/providers/http/provider.yaml b/airflow/providers/http/provider.yaml
index 6b823aaa035e..d10e0abb19c5 100644
--- a/airflow/providers/http/provider.yaml
+++ b/airflow/providers/http/provider.yaml
@@ -55,7 +55,7 @@ dependencies:
   - apache-airflow>=2.6.0
   # The 2.26.0 release of requests got rid of the chardet LGPL mandatory dependency, allowing us to
   # release it as a requirement for airflow
-  - requests>=2.26.0
+  - requests>=2.27.0,<3
   - requests_toolbelt
   - aiohttp>=3.9.2
   - asgiref
diff --git a/airflow/providers/influxdb/provider.yaml b/airflow/providers/influxdb/provider.yaml
index 6820b4cfeb5c..097bed145f1b 100644
--- a/airflow/providers/influxdb/provider.yaml
+++ b/airflow/providers/influxdb/provider.yaml
@@ -26,7 +26,7 @@ description: |
 dependencies:
   - apache-airflow>=2.6.0
   - influxdb-client>=1.19.0
-  - requests>=2.26.0
+  - requests>=2.27.0,<3

 state: ready
 source-date-epoch: 1703288143
diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json
index e893ac591b64..158832b0abb8 100644
--- a/generated/provider_dependencies.json
+++ b/generated/provider_dependencies.json
@@ -362,7 +362,7 @@
       "apache-airflow-providers-common-sql>=1.10.0",
       "apache-airflow>=2.6.0",
       "databricks-sql-connector>=2.0.0, <3.0.0, !=2.9.0",
-      "requests>=2.27,<3"
+      "requests>=2.27.0,<3"
     ],
     "devel-deps": [
       "deltalake>=0.12.0"
@@ -618,7 +618,7 @@
       "aiohttp>=3.9.2",
       "apache-airflow>=2.6.0",
       "asgiref",
-      "requests>=2.26.0",
+      "requests>=2.27.0,<3",
       "requests_toolbelt"
     ],
     "devel-deps": [],
@@ -639,7 +639,7 @@
     "deps": [
       "apache-airflow>=2.6.0",
       "influxdb-client>=1.19.0",
-      "requests>=2.26.0"
+      "requests>=2.27.0,<3"
     ],
     "devel-deps": [],
     "cross-providers-deps": [],
diff --git a/pyproject.toml b/pyproject.toml
index 2173176b5855..eb6f6b8322a3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -129,6 +129,8 @@ dependencies = [
     "python-dateutil>=2.3",
     "python-nvd3>=0.15.0",
     "python-slugify>=5.0",
+    # Requests 3 if it will be released, will be heavily breaking.
+    "requests>=2.27.0,<3",
     "rfc3339-validator>=0.1.4",
     "rich-argparse>=1.0.0",
     "rich>=12.4.4",
@@ -185,7 +187,7 @@ cgroups = [
     "cgroupspy>=0.2.2",
 ]
 deprecated-api = [
-    "requests>=2.26.0",
+    "requests>=2.27.0,<3",
 ]
 github-enterprise = [
     "apache-airflow[fab]",
@@ -340,7 +342,6 @@ devel-tests = [
     "coverage>=7.2",
     "pytest-asyncio>=0.23.3",
     "pytest-cov>=4.1.0",
-    "pytest-httpx>=0.21.3",
     "pytest-icdiff>=0.9",
     "pytest-instafail>=0.5.0",
     "pytest-mock>=3.12.0",
@@ -654,7 +655,7 @@ databricks = [ # source: airflow/providers/databricks/provider.yaml
     "aiohttp>=3.9.2, <4",
     "apache-airflow[common_sql]",
     "databricks-sql-connector>=2.0.0, <3.0.0, !=2.9.0",
-    "requests>=2.27,<3",
+    "requests>=2.27.0,<3",
     # Devel dependencies for the databricks provider
     "deltalake>=0.12.0",
 ]
@@ -769,13 +770,13 @@ hashicorp = [ # source: airflow/providers/hashicorp/provider.yaml
 http = [ # source: airflow/providers/http/provider.yaml
     "aiohttp>=3.9.2",
     "asgiref",
-    "requests>=2.26.0",
+    "requests>=2.27.0,<3",
     "requests_toolbelt",
 ]
 imap = [] # source: airflow/providers/imap/provider.yaml
 influxdb = [ # source: airflow/providers/influxdb/provider.yaml
     "influxdb-client>=1.19.0",
-    "requests>=2.26.0",
+    "requests>=2.27.0,<3",
 ]
 jdbc = [ # source: airflow/providers/jdbc/provider.yaml
     "apache-airflow[common_sql]",
diff --git a/tests/cli/commands/test_info_command.py b/tests/cli/commands/test_info_command.py
index 74426f1225d2..9670f3de6a37 100644
--- a/tests/cli/commands/test_info_command.py
+++ b/tests/cli/commands/test_info_command.py
@@ -21,7 +21,9 @@
 import logging
 import os
 from io import StringIO
+from unittest import mock

+import httpx
 import pytest
 from rich.console import Console

@@ -181,18 +183,17 @@ class TestInfoCommandMockHttpx:
             ("database", "sql_alchemy_conn"): "postgresql+psycopg2://postgres:airflow@postgres/airflow",
         }
     )
-    def test_show_info_anonymize_fileio(self, httpx_mock, setup_parser):
-        httpx_mock.add_response(
-            url="https://file.io",
-            method="post",
-            json={
-                "success": True,
-                "key": "f9U3zs3I",
-                "link": "https://file.io/TEST",
-                "expiry": "14 days",
-            },
-            status_code=200,
-        )
-        with contextlib.redirect_stdout(StringIO()) as stdout:
-            info_command.show_info(setup_parser.parse_args(["info", "--file-io"]))
-        assert "https://file.io/TEST" in stdout.getvalue()
+    def test_show_info_anonymize_fileio(self, setup_parser):
+        with mock.patch("airflow.cli.commands.info_command.httpx.post") as post:
+            post.return_value = httpx.Response(
+                status_code=200,
+                json={
+                    "success": True,
+                    "key": "f9U3zs3I",
+                    "link": "https://file.io/TEST",
+                    "expiry": "14 days",
+                },
+            )
+            with contextlib.redirect_stdout(StringIO()) as stdout:
+                info_command.show_info(setup_parser.parse_args(["info", "--file-io", "--anonymize"]))
+            assert "https://file.io/TEST" in stdout.getvalue()
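
Note on the test change above: with pytest-httpx dropped from devel-tests, the rewritten test stubs httpx.post directly via unittest.mock and hands back a prebuilt httpx.Response. The self-contained sketch below illustrates that pattern outside the Airflow tree; the upload_text helper and its URL handling are hypothetical stand-ins for illustration, not code from this PR.

# Sketch of the mocking pattern used in the updated test: patch httpx.post
# and return a prebuilt httpx.Response instead of relying on the
# pytest-httpx fixture. upload_text() is a hypothetical helper.
from unittest import mock

import httpx


def upload_text(content: str) -> str:
    """Post text to file.io and return the share link from the JSON body."""
    response = httpx.post("https://file.io", data={"text": content})
    response.raise_for_status()
    return response.json()["link"]


def test_upload_text_returns_link():
    with mock.patch("httpx.post") as post:
        post.return_value = httpx.Response(
            status_code=200,
            json={"success": True, "link": "https://file.io/TEST"},
            # raise_for_status() needs the originating request attached
            request=httpx.Request("POST", "https://file.io"),
        )
        assert upload_text("hello") == "https://file.io/TEST"
        post.assert_called_once()

In the real test the patch target is "airflow.cli.commands.info_command.httpx.post", which scopes the stub to the module under test; attaching a Request to the Response keeps raise_for_status() usable if the code under test calls it.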