Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion airflow-core/docs/howto/docker-compose/docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ x-airflow-common:
AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
AIRFLOW__CORE__FERNET_KEY: ''
AIRFLOW__CORE__FERNET_KEY: ${FERNET_KEY}
AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
AIRFLOW__CORE__LOAD_EXAMPLES: 'true'
AIRFLOW__CORE__EXECUTION_API_SERVER_URL: 'http://airflow-apiserver:8080/execution/'
Expand Down
9 changes: 8 additions & 1 deletion airflow-ctl-tests/tests/airflowctl_tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,13 +30,15 @@
DOCKER_IMAGE,
)

from tests_common.test_utils.fernet import generate_fernet_key_string

docker_client = None


# Pytest hook to run at the start of the session
def pytest_sessionstart(session):
"""Install airflowctl at the very start of the pytest session."""
airflow_ctl_version = os.environ.get("AIRFLOW_CTL_VERSION", "1.0.0")
airflow_ctl_version = os.environ.get("AIRFLOW_CTL_VERSION", "0.1.0")
console.print(f"[yellow]Installing apache-airflow-ctl=={airflow_ctl_version} via pytest_sessionstart...")

airflow_ctl_path = AIRFLOW_ROOT_PATH / "airflow-ctl"
Expand Down Expand Up @@ -165,6 +167,11 @@ def docker_compose_up(tmp_path_factory):
os.environ["AIRFLOW_IMAGE_NAME"] = DOCKER_IMAGE
os.environ["AIRFLOW_CTL_VERSION"] = os.environ.get("AIRFLOW_CTL_VERSION", "1.0.0")
os.environ["ENV_FILE_PATH"] = str(tmp_dir / ".env")
#
# Please do not use this Fernet key in any deployment! Generate your own key.
# This key is generated specifically for integration tests and is not a default.
#
os.environ["FERNET_KEY"] = generate_fernet_key_string()

# Initialize Docker client
docker_client = DockerClient(compose_files=[str(tmp_docker_compose_file)])
Expand Down
8 changes: 8 additions & 0 deletions airflow-e2e-tests/tests/airflow_e2e_tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,8 @@
TEST_REPORT_FILE,
)

from tests_common.test_utils.fernet import generate_fernet_key_string

console = Console(width=400, color_system="standard")
compose_instance = None
airflow_logs_path = None
Expand Down Expand Up @@ -94,6 +96,12 @@ def spin_up_airflow_environment(tmp_path_factory):
compose_file_names.append("localstack.yml")
_setup_s3_integration(dot_env_file, tmp_dir)

#
# Please do not use this Fernet key in any deployment! Generate your own key.
# This key is generated specifically for integration tests and is not a default.
#
os.environ["FERNET_KEY"] = generate_fernet_key_string()

# If we are using the image from ghcr.io/apache/airflow/main we do not pull
# as it is already available and loaded using prepare_breeze_and_image step in workflow
pull = False if DOCKER_IMAGE.startswith("ghcr.io/apache/airflow/main/") else True
Expand Down
27 changes: 27 additions & 0 deletions devel-common/src/tests_common/test_utils/fernet.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import base64
import hashlib


def generate_fernet_key_string(string_key: str = "AIRFLOW_INTEGRATION_TEST") -> str:
    """Derive a deterministic Fernet key from *string_key*.

    The key is the SHA-256 digest of *string_key* — exactly the 32 bytes
    Fernet requires — encoded as URL-safe base64. The same input always
    produces the same key, which is the point: integration-test containers
    started separately must agree on the key without sharing state.
    """
    sha256_digest = hashlib.sha256(string_key.encode()).digest()
    encoded_key = base64.urlsafe_b64encode(sha256_digest)
    return encoded_key.decode()
31 changes: 31 additions & 0 deletions devel-common/tests/unit/tests_common/test_utils/test_fernet.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

from tests_common.test_utils.fernet import generate_fernet_key_string


class TestFernetUtils:
    """Tests for the deterministic Fernet key helper."""

    def test_generate_fernet_key_string(self):
        """Same input must always yield the same, known key value."""
        assert generate_fernet_key_string("TEST_KEY") == "NBJC_zYX6NWNek9v7tVv64YZz4K5sAgpoC4WGkQYv6I="
        # Default argument path ("AIRFLOW_INTEGRATION_TEST") is pinned too.
        assert generate_fernet_key_string() == "BMsag_V7iplH1SIxzrTIbhLRZYOAYd6p0_nPtGdmuxo="
4 changes: 3 additions & 1 deletion task-sdk-integration-tests/docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -19,14 +19,16 @@
x-airflow-common:
&airflow-common
image: ${AIRFLOW_IMAGE_NAME}
env_file:
- ${ENV_FILE_PATH:-.env}
environment:
&airflow-common-env
AIRFLOW__CORE__EXECUTOR: LocalExecutor
# yamllint disable rule:line-length
AIRFLOW__CORE__AUTH_MANAGER: 'airflow.api_fastapi.auth.managers.simple.simple_auth_manager.SimpleAuthManager'
AIRFLOW__CORE__SIMPLE_AUTH_MANAGER_ALL_ADMINS: 'true'
AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
AIRFLOW__CORE__FERNET_KEY: ''
AIRFLOW__CORE__FERNET_KEY: ${FERNET_KEY}
AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
AIRFLOW__CORE__DAGS_FOLDER: '/opt/airflow/dags'
AIRFLOW__CORE__EXECUTION_API_SERVER_URL: 'http://airflow-apiserver:8080/execution/'
Expand Down
8 changes: 8 additions & 0 deletions task-sdk-integration-tests/tests/task_sdk_tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,8 @@
TASK_SDK_INTEGRATION_LOCAL_DOCKER_COMPOSE_FILE_PATH,
)

from tests_common.test_utils.fernet import generate_fernet_key_string


def print_diagnostics(compose, compose_version, docker_version):
"""Print diagnostic information when test fails."""
Expand Down Expand Up @@ -126,6 +128,12 @@ def docker_compose_setup(tmp_path_factory):
print(f"AIRFLOW_IMAGE_NAME={DOCKER_IMAGE}", file=f)
print(f"AIRFLOW_UID={os.getuid()}", file=f)
print(f"HOST_OS={platform.system().lower()}", file=f)
#
# Please do not use this Fernet key in any deployment! Generate your own key.
# This key is generated specifically for integration tests and is not a default.
#
print(f"FERNET_KEY={generate_fernet_key_string()}", file=f)

docker_compose_files = [TASK_SDK_INTEGRATION_DOCKER_COMPOSE_FILE_PATH.as_posix()]
log_level = "debug" if debugging_on else "info"
if mount_volumes:
Expand Down
Loading