Revert "Print configuration on scheduler startup. (#22588)" (#22851) #297

Workflow file for this run
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
---
name: Tests
on: # yamllint disable-line rule:truthy
schedule:
- cron: '28 0 * * *'
push:
pull_request:
branches: ['main', 'v[0-9]+-[0-9]+-test', 'v[0-9]+-[0-9]+-stable']
permissions:
# All other permissions are set to none
contents: read
env:
MOUNT_SELECTED_LOCAL_SOURCES: "false"
FORCE_ANSWER_TO_QUESTIONS: "yes"
CHECK_IMAGE_FOR_REBUILD: "true"
SKIP_CHECK_REMOTE_IMAGE: "true"
DEBIAN_VERSION: "bullseye"
DB_RESET: "true"
VERBOSE: "true"
GITHUB_REPOSITORY: ${{ github.repository }}
GITHUB_USERNAME: ${{ github.actor }}
# You can override CONSTRAINTS_GITHUB_REPOSITORY by setting a secret in your repo, but by default
# the Airflow one is used
CONSTRAINTS_GITHUB_REPOSITORY: >-
${{ secrets.CONSTRAINTS_GITHUB_REPOSITORY != '' &&
secrets.CONSTRAINTS_GITHUB_REPOSITORY || 'apache/airflow' }}
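# (Illustrative note: GitHub expressions have no ternary operator, so the `cond && a || b` pattern
# above acts as one - when the CONSTRAINTS_GITHUB_REPOSITORY secret is set, `&&` yields it,
# otherwise the left-hand side is falsy and `||` falls back to 'apache/airflow'.)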
# In builds from forks, this token is read-only. For scheduled runs and direct pushes it is a WRITE one
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AIRFLOW_LOGIN_TO_GITHUB_REGISTRY: "true"
ENABLE_TEST_COVERAGE: "${{ github.event_name == 'push' }}"
IMAGE_TAG_FOR_THE_BUILD: "${{ github.event.pull_request.head.sha || github.sha }}"
concurrency:
group: ci-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
build-info:
name: "Build info"
# The runs-on cannot refer to the env. or secrets. context, so we have no
# option but to specify a hard-coded list here. This is "safe", as the list
# is checked again by the runner using its own list, so a PR author cannot
# change this and get access to our self-hosted runners
#
# When changing this list, ensure that it is kept in sync with the
# /runners/apache/airflow/configOverlay
# parameter in AWS SSM ParameterStore (which is what the runner uses)
# and restart the self-hosted runners.
#
# This list of committers can be generated with:
# https://github.com/apache/airflow-ci-infra/blob/main/scripts/list_committers
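# For illustration (based on the expression below): a push/schedule run in apache/airflow, or a PR
# opened against apache/airflow by one of the committers listed below, evaluates to 'self-hosted';
# anything else (for example a PR from a fork by a non-committer, or any run in a forked repository)
# falls back to 'ubuntu-20.04'.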
runs-on: >-
${{ (
(
github.event_name == 'push' ||
github.event_name == 'schedule' ||
contains(fromJSON('[
"BasPH",
"Fokko",
"KevinYang21",
"XD-DENG",
"aijamalnk",
"alexvanboxel",
"aoen",
"artwr",
"ashb",
"bbovenzi",
"bolkedebruin",
"criccomini",
"dimberman",
"dstandish",
"eladkal",
"ephraimbuddy",
"feluelle",
"feng-tao",
"houqp",
"jedcunningham",
"jgao54",
"jghoman",
"jhtimmins",
"jmcarp",
"kaxil",
"leahecole",
"mik-laj",
"milton0825",
"mistercrunch",
"msumit",
"potiuk",
"r39132",
"ryanahamilton",
"ryw",
"saguziel",
"sekikn",
"turbaszek",
"uranusjr",
"vikramkoka",
"xinbinhuang",
"yuqian90",
"zhongjiajie"
]'), github.event.pull_request.user.login)
) && github.repository == 'apache/airflow'
) && 'self-hosted' || 'ubuntu-20.04' }}
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
outputs:
defaultBranch: ${{ steps.selective-checks.outputs.default-branch }}
cacheDirective: ${{ steps.dynamic-outputs.outputs.cacheDirective }}
waitForImage: ${{ steps.wait-for-image.outputs.wait-for-image }}
allPythonVersions: ${{ steps.selective-checks.outputs.all-python-versions }}
upgradeToNewerDependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }}
pythonVersions: ${{ steps.selective-checks.outputs.python-versions }}
pythonVersionsListAsString: ${{ steps.selective-checks.outputs.python-versions-list-as-string }}
defaultPythonVersion: ${{ steps.selective-checks.outputs.default-python-version }}
kubernetesVersions: ${{ steps.selective-checks.outputs.kubernetes-versions }}
kubernetesVersionsListAsString: ${{ steps.selective-checks.outputs.kubernetes-versions-list-as-string }}
defaultKubernetesVersion: ${{ steps.selective-checks.outputs.default-kubernetes-version }}
kubernetesModes: ${{ steps.selective-checks.outputs.kubernetes-modes }}
defaultKubernetesMode: ${{ steps.selective-checks.outputs.default-kubernetes-mode }}
postgresVersions: ${{ steps.selective-checks.outputs.postgres-versions }}
defaultPostgresVersion: ${{ steps.selective-checks.outputs.default-postgres-version }}
mysqlVersions: ${{ steps.selective-checks.outputs.mysql-versions }}
mssqlVersions: ${{ steps.selective-checks.outputs.mssql-versions }}
defaultMySQLVersion: ${{ steps.selective-checks.outputs.default-mysql-version }}
helmVersions: ${{ steps.selective-checks.outputs.helm-versions }}
defaultHelmVersion: ${{ steps.selective-checks.outputs.default-helm-version }}
kindVersions: ${{ steps.selective-checks.outputs.kind-versions }}
defaultKindVersion: ${{ steps.selective-checks.outputs.default-kind-version }}
testTypes: ${{ steps.selective-checks.outputs.test-types }}
postgresExclude: ${{ steps.selective-checks.outputs.postgres-exclude }}
mysqlExclude: ${{ steps.selective-checks.outputs.mysql-exclude }}
mssqlExclude: ${{ steps.selective-checks.outputs.mssql-exclude }}
sqliteExclude: ${{ steps.selective-checks.outputs.sqlite-exclude }}
run-tests: ${{ steps.selective-checks.outputs.run-tests }}
run-ui-tests: ${{ steps.selective-checks.outputs.run-ui-tests }}
run-www-tests: ${{ steps.selective-checks.outputs.run-www-tests }}
run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }}
basic-checks-only: ${{ steps.selective-checks.outputs.basic-checks-only }}
image-build: ${{ steps.selective-checks.outputs.image-build }}
docs-build: ${{ steps.selective-checks.outputs.docs-build }}
needs-helm-tests: ${{ steps.selective-checks.outputs.needs-helm-tests }}
needs-api-tests: ${{ steps.selective-checks.outputs.needs-api-tests }}
needs-api-codegen: ${{ steps.selective-checks.outputs.needs-api-codegen }}
default-branch: ${{ steps.selective-checks.outputs.default-branch }}
sourceHeadRepo: ${{ steps.source-run-info.outputs.sourceHeadRepo }}
pullRequestNumber: ${{ steps.source-run-info.outputs.pullRequestNumber }}
pullRequestLabels: ${{ steps.source-run-info.outputs.pullRequestLabels }}
runsOn: ${{ steps.set-runs-on.outputs.runsOn }}
runCoverage: ${{ steps.set-run-coverage.outputs.runCoverage }}
inWorkflowBuild: ${{ steps.set-in-workflow-build.outputs.inWorkflowBuild }}
buildJobDescription: ${{ steps.set-in-workflow-build.outputs.buildJobDescription }}
mergeRun: ${{ steps.set-merge-run.outputs.merge-run }}
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
submodules: recursive
- name: "Get information about the PR"
uses: ./.github/actions/get-workflow-origin
id: source-run-info
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Fetch incoming commit ${{ github.sha }} with its parent
uses: actions/checkout@v2
with:
ref: ${{ github.sha }}
fetch-depth: 2
persist-credentials: false
if: github.event_name == 'pull_request'
- name: Selective checks
id: selective-checks
env:
PR_LABELS: "${{ steps.source-run-info.outputs.pullRequestLabels }}"
run: |
if [[ ${GITHUB_EVENT_NAME} == "pull_request" ]]; then
# Run selective checks
./scripts/ci/selective_ci_checks.sh "${GITHUB_SHA}"
else
# Run all checks
./scripts/ci/selective_ci_checks.sh
fi
# Avoid having to specify the runs-on logic every time. We use the custom
# env var AIRFLOW_SELF_HOSTED_RUNNER set only on our runners, but never
# on the public runners
- name: Set runs-on
id: set-runs-on
env:
PR_LABELS: "${{ steps.source-run-info.outputs.pullRequestLabels }}"
run: |
if [[ ${PR_LABELS=} == *"use public runners"* ]]; then
echo "Forcing running on Public Runners via `use public runners` label"
echo "::set-output name=runsOn::\"ubuntu-20.04\""
elif [[ ${AIRFLOW_SELF_HOSTED_RUNNER} == "" ]]; then
echo "Regular PR running with Public Runner"
echo "::set-output name=runsOn::\"ubuntu-20.04\""
else
echo "Maintainer or main run running with self-hosted runner"
echo "::set-output name=runsOn::\"self-hosted\""
fi
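# Note: runsOn is emitted as a JSON-quoted value on purpose - downstream jobs consume it via
# `runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}`, so the output has to parse as JSON
# (a quoted label as here, or potentially a JSON array of labels).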
# Avoid having to specify the coverage logic every time.
- name: Set run coverage
id: set-run-coverage
run: echo "::set-output name=runCoverage::true"
if: >
github.ref == 'refs/heads/main' && github.repository == 'apache/airflow' &&
github.event_name == 'push' &&
steps.selective-checks.outputs.default-branch == 'main'
- name: Determine where to run image builds
id: set-in-workflow-build
# Run the in-workflow image build when:
# * a direct push is run
# * a scheduled build is run
# * a pull request is run from the same repository (not from a fork)
run: |
set -x
if [[ ${GITHUB_EVENT_NAME} == "push" || ${GITHUB_EVENT_NAME} == "push" || \
${{steps.source-run-info.outputs.sourceHeadRepo}} == "apache/airflow" ]]; then
echo "Images will be built in current workflow"
echo "::set-output name=inWorkflowBuild::true"
echo "::set-output name=buildJobDescription::Build"
else
echo "Images will be built in pull_request_target workflow"
echo "::set-output name=inWorkflowBuild::false"
echo "::set-output name=buildJobDescription::Skip Build (pull_request_target)"
fi
- name: Determine if this is merge run
id: set-merge-run
run: echo "::set-output name=merge-run::true"
# Only in Apache Airflow repo, when there is a merge run to main or any of v2*test branches
if: |
github.repository == 'apache/airflow' && github.event_name == 'push' &&
(
github.ref_name == 'main' ||
startsWith(github.ref_name, 'v2') && endsWith(github.ref_name, 'test')
)
- name: Compute dynamic outputs
id: dynamic-outputs
run: |
set -x
if [[ "${{ github.event_name }}" == 'schedule' ]]; then
echo "::set-output name=cacheDirective::disabled"
else
echo "::set-output name=cacheDirective::pulled"
fi
if [[ "$SELECTIVE_CHECKS_IMAGE_BUILD" == "true" ]]; then
echo "::set-output name=image-build::true"
else
echo "::set-output name=image-build::false"
fi
env:
SELECTIVE_CHECKS_IMAGE_BUILD: ${{ steps.selective-checks.outputs.image-build }}
- name: env
run: printenv
env:
dynamicOutputs: ${{ toJSON(steps.dynamic-outputs.outputs) }}
PR_LABELS: ${{ steps.get-latest-pr-labels.outputs.pullRequestLabels }}
GITHUB_CONTEXT: ${{ toJson(github) }}
build-ci-images:
permissions:
packages: write
timeout-minutes: 80
name: "${{needs.build-info.outputs.buildJobDescription}} CI image ${{matrix.python-version}}"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info]
strategy:
matrix:
python-version: ${{ fromJson(needs.build-info.outputs.allPythonVersions) }}
fail-fast: true
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }}
BACKEND: sqlite
PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
outputs: ${{toJSON(needs.build-info.outputs) }}
steps:
- uses: actions/checkout@v2
with:
ref: ${{ needs.build-info.outputs.targetCommitSha }}
persist-credentials: false
submodules: recursive
if: needs.build-info.outputs.inWorkflowBuild == 'true'
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
if: needs.build-info.outputs.inWorkflowBuild == 'true'
- run: python -m pip install --editable ./dev/breeze/
if: needs.build-info.outputs.inWorkflowBuild == 'true'
- name: "Retrieve DEFAULTS from the _initialization.sh"
# We cannot "source" the script here because that would be a security problem (we cannot run
# any code that comes from the sources coming from the PR. Therefore we extract the
# DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands
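# A minimal sketch of the extraction below, assuming _initialization.sh contains a line shaped like
# `export DEFAULT_BRANCH=${DEFAULT_BRANCH:="main"}` (the value "main" is only illustrative):
#   awk 'BEGIN{FS="="} {print $3}' splits the line on "=" and prints the third field -> "main"}
#   sed s'/["}]//g' then strips the quotes and brace -> main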
id: defaults
run: |
DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \
awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g')
echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV
DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \
scripts/ci/libraries/_initialization.sh | \
awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g')
echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV
DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \
awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g')
echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV
if: needs.build-info.outputs.inWorkflowBuild == 'true'
- name: "Free space"
run: airflow-freespace
if: needs.build-info.outputs.inWorkflowBuild == 'true'
- name: "Build CI image ${{ matrix.python-version }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: Breeze2 build-image
if: needs.build-info.outputs.inWorkflowBuild == 'true'
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest"
GITHUB_REGISTRY_PUSH_IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }}
- name: "Push CI image ${{ matrix.python-version }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_push_ci_images.sh
if: needs.build-info.outputs.inWorkflowBuild == 'true'
env:
GITHUB_REGISTRY_PUSH_IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
- name: "Candidates for pip resolver backtrack triggers: ${{ matrix.python-version }}"
if: failure() || cancelled()
run: airflow-find-newer-dependencies --max-age 1 --python "${{ matrix.python-version }}"
build-prod-images:
permissions:
packages: write
timeout-minutes: 80
name: "${{needs.build-info.outputs.buildJobDescription}} PROD image ${{matrix.python-version}}"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info, build-ci-images]
strategy:
matrix:
python-version: ${{ fromJson(needs.build-info.outputs.allPythonVersions) }}
fail-fast: true
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }}
BACKEND: sqlite
PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }}
VERSION_SUFFIX_FOR_PYPI: ".dev0"
steps:
- uses: actions/checkout@v2
with:
ref: ${{ needs.build-info.outputs.targetCommitSha }}
persist-credentials: false
submodules: recursive
if: needs.build-info.outputs.inWorkflowBuild == 'true'
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
if: needs.build-info.outputs.inWorkflowBuild == 'true'
- name: "Retrieve DEFAULTS from the _initialization.sh"
# We cannot "source" the script here because that would be a security problem (we cannot run
# any code that comes from the sources coming from the PR. Therefore we extract the
# DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands
id: defaults
run: |
DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \
awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g')
echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV
DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \
scripts/ci/libraries/_initialization.sh | \
awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g')
echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV
DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \
cut -d "=" -f 3 | sed s'/["}]//g')
echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV
if: needs.build-info.outputs.inWorkflowBuild == 'true'
- run: python -m pip install --editable ./dev/breeze/
if: needs.build-info.outputs.inWorkflowBuild == 'true'
- name: "Free space"
run: airflow-freespace
if: needs.build-info.outputs.inWorkflowBuild == 'true'
- name: "Pull CI image for PROD build: ${{ matrix.python-version }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_pull_ci_image_on_ci.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
if: needs.build-info.outputs.inWorkflowBuild == 'true'
- name: "Build PROD image ${{ matrix.python-version }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_build_prod_image_on_ci.sh
if: needs.build-info.outputs.inWorkflowBuild == 'true'
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest"
GITHUB_REGISTRY_PUSH_IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }}
- name: "Push PROD image ${{ matrix.python-version }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_push_production_images.sh
if: needs.build-info.outputs.inWorkflowBuild == 'true'
env:
GITHUB_REGISTRY_PUSH_IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
run-new-breeze-tests:
timeout-minutes: 10
name: Breeze2 tests
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info]
steps:
- uses: actions/checkout@v2
with:
persist-credentials: false
- uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- run: python -m pytest ./dev/breeze/ -n auto --color=yes
tests-ui:
timeout-minutes: 10
name: React UI tests
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info]
if: needs.build-info.outputs.run-ui-tests == 'true'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
- name: "Setup node"
uses: actions/setup-node@v2
with:
node-version: 14
- name: "Cache eslint"
uses: actions/cache@v2
with:
path: 'airflow/ui/node_modules'
key: ${{ runner.os }}-ui-node-modules-${{ hashFiles('airflow/ui/**/yarn.lock') }}
- run: yarn --cwd airflow/ui/ install --frozen-lockfile --non-interactive
- run: yarn --cwd airflow/ui/ run test
tests-www:
timeout-minutes: 10
name: React WWW tests
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info]
if: needs.build-info.outputs.run-www-tests == 'true'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
- name: "Setup node"
uses: actions/setup-node@v2
with:
node-version: 14
- name: "Cache eslint"
uses: actions/cache@v2
with:
path: 'airflow/www/node_modules'
key: ${{ runner.os }}-www-node-modules-${{ hashFiles('airflow/www/**/yarn.lock') }}
- run: yarn --cwd airflow/www/ install --frozen-lockfile --non-interactive
- run: yarn --cwd airflow/www/ run test
test-openapi-client-generation:
timeout-minutes: 10
name: "Test OpenAPI client generation"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info]
if: needs.build-info.outputs.needs-api-codegen == 'true'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
fetch-depth: 2
persist-credentials: false
- name: "Generate client codegen diff"
run: ./scripts/ci/openapi/client_codegen_diff.sh
test-examples-of-prod-image-building:
timeout-minutes: 60
name: "Test examples of production image building"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info]
if: needs.build-info.outputs.image-build == 'true'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
fetch-depth: 2
persist-credentials: false
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Cache virtualenv environment"
uses: actions/cache@v2
with:
path: '.build/.docker_venv'
key: ${{ runner.os }}-docker-venv-${{ hashFiles('scripts/ci/images/ci_run_docker_tests.py') }}
- name: "Test examples of PROD image building"
working-directory: docs/docker-stack/docker-examples
run: >
python -m pytest ../../../docker_tests/test_examples_of_prod_image_building.py
-n auto --color=yes
test-docker-compose-quick-start:
timeout-minutes: 60
name: "Test docker-compose quick start"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info, wait-for-prod-images]
if: needs.build-info.outputs.image-build == 'true'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
fetch-depth: 2
persist-credentials: false
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Cache virtualenv environment"
uses: actions/cache@v2
with:
path: '.build/.docker_venv'
key: ${{ runner.os }}-docker-venv-${{ hashFiles('scripts/ci/images/ci_run_docker_tests.py') }}
- name: "Test docker-compose quick start"
run: ./scripts/ci/images/ci_run_docker_compose_quick_start_test.sh
wait-for-ci-images:
timeout-minutes: 120
name: "Wait for CI images"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info, build-ci-images]
if: needs.build-info.outputs.image-build == 'true'
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
BACKEND: sqlite
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Cache virtualenv environment"
uses: actions/cache@v2
with:
path: '.build/.docker_venv'
key: ${{ runner.os }}-docker-venv-${{ hashFiles('scripts/ci/images/ci_run_docker_tests.py') }}
- name: >
Wait for CI images
${{ needs.build-info.outputs.pythonVersions }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}
id: wait-for-images
# We wait for the images to be available either from the "build-images.yml" workflow or
# from the build-ci-images job above.
# We are utilising a single job to wait for all images because this job merely waits
# for the images to be available so that downstream jobs can run tests with them.
run: ./scripts/ci/images/ci_wait_for_and_verify_all_ci_images.sh
env:
CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: >
${{needs.build-info.outputs.pythonVersionsListAsString}}
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
VERIFY_IMAGE: "true"
PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}"
static-checks:
timeout-minutes: 30
name: "Static checks"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info, wait-for-ci-images]
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
MOUNT_SELECTED_LOCAL_SOURCES: "true"
PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
if: needs.build-info.outputs.basic-checks-only == 'false'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Pull CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_pull_ci_image_on_ci.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
- name: "Get Python version"
run: "echo \"::set-output name=host-python-version::$(python -c
'import platform; print(platform.python_version())')\""
id: host-python-version
- name: "Cache pre-commit local-installation"
uses: actions/cache@v2
with:
path: ~/.local
key: "pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-\
${{ hashFiles('setup.py', 'setup.cfg') }}"
restore-keys: "\
pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-"
- name: "Cache pre-commit envs"
uses: actions/cache@v2
with:
path: ~/.cache/pre-commit
key: "pre-commit-${{steps.host-python-version.outputs.host-python-version}}-\
${{ hashFiles('.pre-commit-config.yaml') }}"
restore-keys: pre-commit-${{steps.host-python-version.outputs.host-python-version}}
- name: "Cache eslint"
uses: actions/cache@v2
with:
path: 'airflow/ui/node_modules'
key: ${{ runner.os }}-ui-node-modules-${{ hashFiles('airflow/ui/**/yarn.lock') }}
- name: "Static checks"
run: ./scripts/ci/static_checks/run_static_checks.sh
env:
VERBOSE: false
SKIP: "identity"
COLUMNS: 250
# Those checks are run if no image needs to be built for checks. This is for simple changes that
# do not touch any of the python code or any of the important files that might require building
# the CI Docker image, so they can be run entirely using the pre-commit virtual environments on host
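# (Hence the SKIP list below excludes the pre-commit hooks that would need the CI image - build,
# mypy and flake8 - so that only host-runnable hooks are executed.)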
static-checks-basic-checks-only:
timeout-minutes: 30
name: "Static checks: basic checks only"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info]
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
SKIP: "build,mypy,flake8,identity"
MOUNT_SELECTED_LOCAL_SOURCES: "true"
PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
if: needs.build-info.outputs.basic-checks-only == 'true'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
- name: >
Fetch incoming commit ${{ github.sha }} with its parent
uses: actions/checkout@v2
with:
ref: ${{ github.sha }}
fetch-depth: 2
persist-credentials: false
- name: "Get Python version"
run: "echo \"::set-output name=host-python-version::$(python -c
'import platform; print(platform.python_version())')\""
id: host-python-version
- name: "Cache pre-commit local-installation"
uses: actions/cache@v2
with:
path: ~/.local
key: "pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-\
${{ hashFiles('setup.py', 'setup.cfg') }}"
restore-keys: "\
pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-"
- name: "Cache pre-commit envs"
uses: actions/cache@v2
with:
path: ~/.cache/pre-commit
key: "pre-commit-basic-${{steps.host-python-version.outputs.host-python-version}}-\
${{ hashFiles('.pre-commit-config.yaml') }}"
restore-keys: pre-commit-basic-${{steps.host-python-version.outputs.host-python-version}}
- name: "Static checks: basic checks only"
run: ./scripts/ci/static_checks/run_basic_static_checks.sh "${{ github.sha }}"
env:
VERBOSE: false
docs:
timeout-minutes: 45
name: "Build docs"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info, wait-for-ci-images]
if: needs.build-info.outputs.docs-build == 'true'
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
submodules: recursive
- uses: actions/setup-python@v2
with:
python-version: ${{needs.build-info.outputs.defaultPythonVersion}}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Pull CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_pull_ci_image_on_ci.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
- name: "Fetch inventory versions"
run: ./scripts/ci/docs/ci_docs_prepare.sh
- uses: actions/cache@v2
id: cache-doc-inventories
with:
path: ./docs/_inventory_cache/
key: docs-inventory-v1-${{ hashFiles('constraints.txt') }}
restore-keys: |
docs-inventory-${{ hashFiles('constraints.txt') }}
docs-inventory-
- name: "Build docs"
run: ./scripts/ci/docs/ci_docs.sh
- name: Configure AWS credentials
uses: ./.github/actions/configure-aws-credentials
if: >
github.ref == 'refs/heads/main' && github.repository == 'apache/airflow' &&
github.event_name == 'push'
with:
aws-access-key-id: ${{ secrets.DOCS_AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.DOCS_AWS_SECRET_ACCESS_KEY }}
aws-region: eu-central-1
- name: "Upload documentation to AWS S3"
if: >
github.ref == 'refs/heads/main' && github.repository == 'apache/airflow' &&
github.event_name == 'push'
run: aws s3 sync --delete ./files/documentation s3://apache-airflow-docs
prepare-test-provider-packages-wheel:
timeout-minutes: 40
name: "Build and test provider packages wheel"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info, wait-for-ci-images]
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
AIRFLOW_EXTRAS: "all"
PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
VERSION_SUFFIX_FOR_PYPI: ".dev0"
NON_INTERACTIVE: "true"
GENERATE_PROVIDERS_ISSUE: "true"
if: needs.build-info.outputs.image-build == 'true' && needs.build-info.outputs.default-branch == 'main'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
if: needs.build-info.outputs.default-branch == 'main'
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Pull CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_pull_ci_image_on_ci.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
- name: "Prepare provider documentation"
run: ./scripts/ci/provider_packages/ci_prepare_provider_documentation.sh
- name: "Prepare provider packages: wheel"
run: ./scripts/ci/provider_packages/ci_prepare_provider_packages.sh
env:
PACKAGE_FORMAT: "wheel"
- name: "Prepare airflow package: wheel"
run: ./scripts/ci/build_airflow/ci_build_airflow_packages.sh
env:
PACKAGE_FORMAT: "wheel"
- name: "Install and test provider packages and airflow via wheel files"
run: ./scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh
env:
USE_AIRFLOW_VERSION: "wheel"
PACKAGE_FORMAT: "wheel"
- name: "Replace non-compliant providers with their 2.1-compliant versions"
run: ./scripts/ci/provider_packages/ci_make_providers_2_1_compliant.sh
- name: "Install and test provider packages and airflow on Airflow 2.1 files"
run: ./scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh
env:
USE_AIRFLOW_VERSION: "2.1.0"
SKIP_TWINE_CHECK: "true"
PACKAGE_FORMAT: "wheel"
prepare-test-provider-packages-sdist:
timeout-minutes: 40
name: "Build and test provider packages sdist"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info, wait-for-ci-images]
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
AIRFLOW_EXTRAS: "all"
PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
VERSION_SUFFIX_FOR_PYPI: ".dev0"
NON_INTERACTIVE: "true"
GENERATE_PROVIDERS_ISSUE: "true"
if: needs.build-info.outputs.image-build == 'true' && needs.build-info.outputs.default-branch == 'main'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
if: needs.build-info.outputs.default-branch == 'main'
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Pull CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_pull_ci_image_on_ci.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
- name: "Prepare provider packages: sdist"
run: ./scripts/ci/provider_packages/ci_prepare_provider_packages.sh
env:
PACKAGE_FORMAT: "sdist"
- name: "Prepare airflow package: sdist"
run: ./scripts/ci/build_airflow/ci_build_airflow_packages.sh
env:
PACKAGE_FORMAT: "sdist"
- name: "Upload provider distribution artifacts"
uses: actions/upload-artifact@v2
with:
name: airflow-provider-packages
path: "./dist/apache-airflow-providers-*.tar.gz"
retention-days: 1
- name: "Install and test provider packages and airflow via sdist files"
run: ./scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh
env:
USE_AIRFLOW_VERSION: "sdist"
PACKAGE_FORMAT: "sdist"
tests-helm:
timeout-minutes: 80
name: "Python unit tests for helm chart"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info, wait-for-ci-images]
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
MOUNT_SELECTED_LOCAL_SOURCES: "true"
TEST_TYPES: "Helm"
BACKEND: ""
DB_RESET: "false"
PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
if: >
needs.build-info.outputs.needs-helm-tests == 'true' &&
(github.repository == 'apache/airflow' || github.event_name != 'schedule') &&
needs.build-info.outputs.default-branch == 'main'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Pull CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_pull_ci_image_on_ci.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
- name: "Tests: Helm"
run: ./scripts/ci/testing/ci_run_airflow_testing.sh
env:
PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}"
- name: "Upload airflow logs"
uses: actions/upload-artifact@v2
if: failure()
with:
name: airflow-logs-helm
path: "./files/airflow_logs*"
retention-days: 7
- name: "Upload container logs"
uses: actions/upload-artifact@v2
if: failure()
with:
name: container-logs-helm
path: "./files/container_logs*"
retention-days: 7
- name: "Upload artifact for coverage"
uses: actions/upload-artifact@v2
if: needs.build-info.outputs.runCoverage == 'true'
with:
name: >
coverage-helm
path: "./files/coverage*.xml"
retention-days: 7
tests-postgres:
timeout-minutes: 130
name: >
Postgres${{matrix.postgres-version}},Py${{matrix.python-version}}:
${{needs.build-info.outputs.testTypes}}
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info, wait-for-ci-images]
strategy:
matrix:
python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }}
postgres-version: ${{ fromJson(needs.build-info.outputs.postgresVersions) }}
exclude: ${{ fromJson(needs.build-info.outputs.postgresExclude) }}
fail-fast: false
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
BACKEND: postgres
PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
POSTGRES_VERSION: ${{ matrix.postgres-version }}
TEST_TYPES: "${{needs.build-info.outputs.testTypes}}"
if: needs.build-info.outputs.run-tests == 'true'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Pull CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_pull_ci_image_on_ci.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
- name: "Test downgrade"
run: ./scripts/ci/testing/run_downgrade_test.sh
- name: "Test Offline SQL generation"
run: ./scripts/ci/testing/run_offline_sql_test.sh
- name: "Tests: ${{needs.build-info.outputs.testTypes}}"
run: ./scripts/ci/testing/ci_run_airflow_testing.sh
env:
PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}"
- name: "Upload airflow logs"
uses: actions/upload-artifact@v2
if: failure()
with:
name: airflow-logs-${{matrix.python-version}}-${{matrix.postgres-version}}
path: "./files/airflow_logs*"
retention-days: 7
- name: "Upload container logs"
uses: actions/upload-artifact@v2
if: failure()
with:
name: container-logs-postgres-${{matrix.python-version}}-${{matrix.postgres-version}}
path: "./files/container_logs*"
retention-days: 7
- name: "Upload artifact for coverage"
uses: actions/upload-artifact@v2
if: needs.build-info.outputs.runCoverage == 'true'
with:
name: >
coverage-postgres-${{matrix.python-version}}-${{matrix.postgres-version}}
path: "./files/coverage*.xml"
retention-days: 7
tests-mysql:
timeout-minutes: 130
name: >
MySQL${{matrix.mysql-version}}, Py${{matrix.python-version}}: ${{needs.build-info.outputs.testTypes}}
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info, wait-for-ci-images]
strategy:
matrix:
python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }}
mysql-version: ${{ fromJson(needs.build-info.outputs.mysqlVersions) }}
exclude: ${{ fromJson(needs.build-info.outputs.mysqlExclude) }}
fail-fast: false
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
BACKEND: mysql
PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
MYSQL_VERSION: ${{ matrix.mysql-version }}
TEST_TYPES: "${{needs.build-info.outputs.testTypes}}"
if: needs.build-info.outputs.run-tests == 'true'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Pull CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_pull_ci_image_on_ci.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
- name: "Test downgrade"
run: ./scripts/ci/testing/run_downgrade_test.sh
- name: "Test Offline SQL generation"
run: ./scripts/ci/testing/run_offline_sql_test.sh
- name: "Tests: ${{needs.build-info.outputs.testTypes}}"
run: ./scripts/ci/testing/ci_run_airflow_testing.sh
env:
PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}"
- name: "Upload airflow logs"
uses: actions/upload-artifact@v2
if: failure()
with:
name: airflow-logs-${{matrix.python-version}}-${{matrix.mysql-version}}
path: "./files/airflow_logs*"
retention-days: 7
- name: "Upload container logs"
uses: actions/upload-artifact@v2
if: failure()
with:
name: container-logs-mysql-${{matrix.python-version}}-${{matrix.mysql-version}}
path: "./files/container_logs*"
retention-days: 7
- name: "Upload artifact for coverage"
uses: actions/upload-artifact@v2
if: needs.build-info.outputs.runCoverage == 'true'
with:
name: coverage-mysql-${{matrix.python-version}}-${{matrix.mysql-version}}
path: "./files/coverage*.xml"
retention-days: 7
tests-mssql:
timeout-minutes: 130
name: >
MSSQL${{matrix.mssql-version}}, Py${{matrix.python-version}}: ${{needs.build-info.outputs.testTypes}}
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info, wait-for-ci-images]
strategy:
matrix:
python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }}
mssql-version: ${{ fromJson(needs.build-info.outputs.mssqlVersions) }}
exclude: ${{ fromJson(needs.build-info.outputs.mssqlExclude) }}
fail-fast: false
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
BACKEND: mssql
PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
MSSQL_VERSION: ${{ matrix.mssql-version }}
TEST_TYPES: "${{needs.build-info.outputs.testTypes}}"
if: needs.build-info.outputs.run-tests == 'true'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Pull CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_pull_ci_image_on_ci.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
- name: "Test downgrade"
run: ./scripts/ci/testing/run_downgrade_test.sh
- name: "Tests: ${{needs.build-info.outputs.testTypes}}"
run: ./scripts/ci/testing/ci_run_airflow_testing.sh
env:
PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}"
- name: "Upload airflow logs"
uses: actions/upload-artifact@v2
if: failure()
with:
name: airflow-logs-${{matrix.python-version}}-${{matrix.mssql-version}}
path: "./files/airflow_logs*"
retention-days: 7
- name: "Upload container logs"
uses: actions/upload-artifact@v2
if: failure()
with:
name: container-logs-mssql-${{matrix.python-version}}-${{matrix.mssql-version}}
path: "./files/container_logs*"
retention-days: 7
- name: "Upload artifact for coverage"
uses: actions/upload-artifact@v2
if: needs.build-info.outputs.runCoverage == 'true'
with:
name: coverage-mssql-${{matrix.python-version}}-${{matrix.mssql-version}}
path: "./files/coverage*.xml"
retention-days: 7
tests-sqlite:
timeout-minutes: 130
name: >
Sqlite Py${{matrix.python-version}}: ${{needs.build-info.outputs.testTypes}}
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info, wait-for-ci-images]
strategy:
matrix:
python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }}
exclude: ${{ fromJson(needs.build-info.outputs.sqliteExclude) }}
fail-fast: false
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
BACKEND: sqlite
PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
TEST_TYPES: "${{needs.build-info.outputs.testTypes}}"
if: needs.build-info.outputs.run-tests == 'true'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Pull CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_pull_ci_image_on_ci.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
- name: "Test downgrade"
run: ./scripts/ci/testing/run_downgrade_test.sh
- name: "Tests: ${{needs.build-info.outputs.testTypes}}"
run: ./scripts/ci/testing/ci_run_airflow_testing.sh
env:
PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}"
- name: "Upload airflow logs"
uses: actions/upload-artifact@v2
if: failure()
with:
name: airflow-logs-${{matrix.python-version}}
path: './files/airflow_logs*'
retention-days: 7
- name: "Upload container logs"
uses: actions/upload-artifact@v2
if: failure()
with:
name: container-logs-sqlite-${{matrix.python-version}}
path: "./files/container_logs*"
retention-days: 7
- name: "Upload artifact for coverage"
uses: actions/upload-artifact@v2
if: needs.build-info.outputs.runCoverage == 'true'
with:
name: coverage-sqlite-${{matrix.python-version}}
path: ./files/coverage*.xml
retention-days: 7
tests-quarantined:
timeout-minutes: 60
name: "Quarantined tests"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
continue-on-error: true
needs: [build-info, wait-for-ci-images]
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }}
MYSQL_VERSION: ${{needs.build-info.outputs.defaultMySQLVersion}}
POSTGRES_VERSION: ${{needs.build-info.outputs.defaultPostgresVersion}}
TEST_TYPES: "Quarantined"
NUM_RUNS: 10
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
if: needs.build-info.outputs.run-tests == 'true'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- name: "Set issue id for main"
if: github.ref == 'refs/heads/main'
run: |
echo "ISSUE_ID=10118" >> $GITHUB_ENV
- name: "Set issue id for v1-10-stable"
if: github.ref == 'refs/heads/v1-10-stable'
run: |
echo "ISSUE_ID=10127" >> $GITHUB_ENV
- name: "Set issue id for v1-10-test"
if: github.ref == 'refs/heads/v1-10-test'
run: |
echo "ISSUE_ID=10128" >> $GITHUB_ENV
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Pull CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_pull_ci_image_on_ci.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
- name: "Tests: Quarantined"
run: ./scripts/ci/testing/ci_run_quarantined_tests.sh
env:
PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}"
- name: "Upload Quarantine test results"
uses: actions/upload-artifact@v2
if: always()
with:
name: quarantined-tests
path: "files/test_result-*.xml"
retention-days: 7
- name: "Upload airflow logs"
uses: actions/upload-artifact@v2
if: failure()
with:
name: airflow-logs-quarantined-${{ matrix.backend }}
path: "./files/airflow_logs*"
retention-days: 7
- name: "Upload container logs"
uses: actions/upload-artifact@v2
if: failure()
with:
name: container-logs-quarantined-${{ matrix.backend }}
path: "./files/container_logs*"
retention-days: 7
- name: "Upload artifact for coverage"
uses: actions/upload-artifact@v2
if: needs.build-info.outputs.runCoverage == 'true'
with:
name: coverage-quarantined-${{ matrix.backend }}
path: "./files/coverage*.xml"
retention-days: 7
upload-coverage:
timeout-minutes: 15
name: "Upload coverage"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
continue-on-error: true
needs:
- build-info
- tests-postgres
- tests-sqlite
- tests-mysql
- tests-mssql
- tests-quarantined
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
# Only upload coverage on merges to main
if: needs.build-info.outputs.runCoverage == 'true'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
submodules: recursive
- name: "Download all artifacts from the current build"
uses: actions/download-artifact@v2
with:
path: ./coverage-files
- name: "Removes unnecessary artifacts"
run: ls ./coverage-files | grep -v coverage | xargs rm -rf
- name: "Upload all coverage reports to codecov"
uses: ./.github/actions/codecov-action
with:
directory: "./coverage-files"
wait-for-prod-images:
timeout-minutes: 120
name: "Wait for PROD images"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info, wait-for-ci-images, build-prod-images]
if: needs.build-info.outputs.image-build == 'true'
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
BACKEND: sqlite
PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }}
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Cache virtualenv environment"
uses: actions/cache@v2
with:
path: '.build/.docker_venv'
key: ${{ runner.os }}-docker-venv-${{ hashFiles('scripts/ci/images/ci_run_docker_tests.py') }}
- name: >
Wait for PROD images
${{ needs.build-info.outputs.pythonVersions }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}
# We wait for the images to be available either from the "build-images.yml" run as pull_request_target
# or from the build-prod-images job above.
# We are utilising a single job to wait for all images because this job merely waits
# for the images to be available and verifies them.
id: wait-for-images
run: ./scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh
env:
CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: >
${{needs.build-info.outputs.pythonVersionsListAsString}}
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
VERIFY_IMAGE: "true"
PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}"
tests-kubernetes:
timeout-minutes: 70
name: Helm Chart; ${{matrix.executor}}
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info, wait-for-prod-images]
strategy:
matrix:
executor: [KubernetesExecutor, CeleryExecutor, LocalExecutor]
fail-fast: false
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
BACKEND: postgres
RUN_TESTS: "true"
RUNTIME: "kubernetes"
KUBERNETES_MODE: "image"
EXECUTOR: ${{matrix.executor}}
KIND_VERSION: "${{ needs.build-info.outputs.defaultKindVersion }}"
HELM_VERSION: "${{ needs.build-info.outputs.defaultHelmVersion }}"
CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: >
${{needs.build-info.outputs.pythonVersionsListAsString}}
CURRENT_KUBERNETES_VERSIONS_AS_STRING: >
${{needs.build-info.outputs.kubernetesVersionsListAsString}}
if: >
( needs.build-info.outputs.run-kubernetes-tests == 'true' ||
needs.build-info.outputs.needs-helm-tests == 'true' ) &&
needs.build-info.outputs.default-branch == 'main'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Get all PROD images: ${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
VERIFY_IMAGE: "false"
- name: "Cache bin folder with tools for kubernetes testing"
uses: actions/cache@v2
with:
path: ".build/kubernetes-bin"
key: "kubernetes-binaries
-${{ needs.build-info.outputs.defaultKindVersion }}\
-${{ needs.build-info.outputs.defaultHelmVersion }}"
restore-keys: "kubernetes-binaries"
- name: "Kubernetes Tests"
run: ./scripts/ci/kubernetes/ci_setup_clusters_and_run_kubernetes_tests_in_parallel.sh
env:
PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}"
- name: "Upload KinD logs"
uses: actions/upload-artifact@v2
if: failure() || cancelled()
with:
name: >
kind-logs-${{matrix.executor}}
path: /tmp/kind_logs_*
retention-days: 7
tests-helm-executor-upgrade:
timeout-minutes: 100
name: Helm Chart Executor Upgrade
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs: [build-info, wait-for-prod-images]
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
BACKEND: postgres
RUN_TESTS: "true"
RUNTIME: "kubernetes"
KUBERNETES_MODE: "image"
EXECUTOR: "KubernetesExecutor"
KIND_VERSION: "${{ needs.build-info.outputs.defaultKindVersion }}"
HELM_VERSION: "${{ needs.build-info.outputs.defaultHelmVersion }}"
CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: >
${{needs.build-info.outputs.pythonVersionsListAsString}}
CURRENT_KUBERNETES_VERSIONS_AS_STRING: >
${{needs.build-info.outputs.kubernetesVersionsListAsString}}
if: >
needs.build-info.outputs.run-kubernetes-tests == 'true' &&
needs.build-info.outputs.default-branch == 'main'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Get all PROD images: ${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
VERIFY_IMAGE: "false"
- name: "Cache virtualenv for kubernetes testing"
uses: actions/cache@v2
with:
path: ".build/.kubernetes_venv"
key: "kubernetes-${{ needs.build-info.outputs.defaultPythonVersion }}\
-${{needs.build-info.outputs.kubernetesVersionsListAsString}}
-${{needs.build-info.outputs.pythonVersionsListAsString}}
-${{ hashFiles('setup.py','setup.cfg') }}"
restore-keys: "kubernetes-${{ needs.build-info.outputs.defaultPythonVersion }}-\
-${{needs.build-info.outputs.kubernetesVersionsListAsString}}
-${{needs.build-info.outputs.pythonVersionsListAsString}}"
- name: "Cache bin folder with tools for kubernetes testing"
uses: actions/cache@v2
with:
path: ".build/kubernetes-bin"
key: "kubernetes-binaries
-${{ needs.build-info.outputs.defaultKindVersion }}\
-${{ needs.build-info.outputs.defaultHelmVersion }}"
restore-keys: "kubernetes-binaries"
- name: "Kubernetes Helm Chart Executor Upgrade Tests"
run: ./scripts/ci/kubernetes/ci_upgrade_cluster_with_different_executors_in_parallel.sh
env:
PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}"
- name: "Upload KinD logs"
uses: actions/upload-artifact@v2
if: failure() || cancelled()
with:
name: >
kind-logs-KubernetesExecutor
path: /tmp/kind_logs_*
retention-days: 7
constraints:
permissions:
contents: write
timeout-minutes: 40
name: "Constraints"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs:
- build-info
- wait-for-ci-images
- wait-for-prod-images
- static-checks
- tests-sqlite
- tests-mysql
- tests-mssql
- tests-postgres
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: ${{needs.build-info.outputs.pythonVersionsListAsString}}
if: needs.build-info.outputs.upgradeToNewerDependencies != 'false'
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
submodules: recursive
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: >
Wait for CI images
${{ needs.build-info.outputs.pythonVersions }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}
run: ./scripts/ci/images/ci_wait_for_and_verify_all_ci_images.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
VERIFY_IMAGE: "false"
- name: "Generate constraints with PyPI providers"
run: ./scripts/ci/constraints/ci_generate_all_constraints.sh
env:
GENERATE_CONSTRAINTS_MODE: "pypi-providers"
PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}"
- name: "Generate constraints with source providers"
run: ./scripts/ci/constraints/ci_generate_all_constraints.sh
env:
GENERATE_CONSTRAINTS_MODE: "source-providers"
PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}"
- name: "Generate constraints without providers"
run: ./scripts/ci/constraints/ci_generate_all_constraints.sh
env:
GENERATE_CONSTRAINTS_MODE: "no-providers"
PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}"
- name: "Set constraints branch name"
id: constraints-branch
run: ./scripts/ci/constraints/ci_branch_constraints.sh
if: needs.build-info.outputs.mergeRun == 'true'
- name: Checkout ${{ steps.constraints-branch.outputs.branch }}
uses: actions/checkout@v2
if: needs.build-info.outputs.mergeRun == 'true'
with:
path: "repo"
ref: ${{ steps.constraints-branch.outputs.branch }}
persist-credentials: false
- name: "Commit changed constraint files for ${{needs.build-info.outputs.pythonVersions}}"
run: ./scripts/ci/constraints/ci_commit_constraints.sh
if: needs.build-info.outputs.mergeRun == 'true'
- name: "Push changes"
uses: ./.github/actions/github-push-action
if: needs.build-info.outputs.mergeRun == 'true'
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: ${{ steps.constraints-branch.outputs.branch }}
directory: "repo"
# Push BuildX cache to GitHub Registry in the Apache repository, if all tests are successful and the
# build is executed as a result of a direct push to "main" or one of the "vX-Y-test" branches.
# It rebuilds all images with the just-pushed constraints using buildx and pushes them to the registry.
# It will automatically check whether a new python image was released and pull the latest one if needed.
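# (Concretely, the steps below do this by running the CI/PROD image build scripts with
# PREPARE_BUILDX_CACHE="true" and PLATFORM="linux/amd64,linux/arm64", pushing the "latest"
# cache for both architectures.)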
push-buildx-cache-to-github-registry:
permissions:
packages: write
timeout-minutes: 120
name: "Push images as cache to GitHub Registry"
runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
needs:
- build-info
- constraints
- docs
if: needs.build-info.outputs.mergeRun == 'true'
strategy:
matrix:
python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }}
env:
RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v2
with:
persist-credentials: false
- name: "Setup python"
uses: actions/setup-python@v2
with:
python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
cache: 'pip'
cache-dependency-path: ./dev/breeze/setup*
- run: python -m pip install --editable ./dev/breeze/
- name: "Free space"
run: airflow-freespace
- name: "Start ARM instance"
run: ./scripts/ci/images/ci_start_arm_instance_and_connect_to_docker.sh
- name: "Build CI image cache and push ${{env.PYTHON_MAJOR_MINOR_VERSION}}"
run: ./scripts/ci/images/ci_build_ci_image_on_ci.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest"
GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest"
UPGRADE_TO_NEWER_DEPENDENCIES: "false"
PLATFORM: "linux/amd64,linux/arm64"
PREPARE_BUILDX_CACHE: "true"
- name: "Pull CI image for PROD build: ${{ matrix.python-version }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
run: ./scripts/ci/images/ci_pull_ci_image_on_ci.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ env.IMAGE_TAG_FOR_THE_BUILD }}"
- name: "Build PROD image cache and push ${{env.PYTHON_MAJOR_MINOR_VERSION}}"
run: ./scripts/ci/images/ci_build_prod_image_on_ci.sh
env:
GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest"
GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest"
UPGRADE_TO_NEWER_DEPENDENCIES: "false"
PLATFORM: "linux/amd64,linux/arm64"
PREPARE_BUILDX_CACHE: "true"
VERSION_SUFFIX_FOR_PYPI: ".dev0"
- name: "Stop ARM instance"
run: ./scripts/ci/images/ci_stop_arm_instance.sh
if: always()