diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..0fb21896 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,35 @@ +# https://editorconfig.org +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_style = space +insert_final_newline = true +trim_trailing_whitespace = true + +[*.sh] +binary_next_line = true +# We sadly have to use tabs in shell scripts otherwise we can't indent here documents: +# https://www.gnu.org/software/bash/manual/html_node/Redirections.html#Here-Documents +indent_style = tab +shell_variant = bash +switch_case_indent = true + +# Catches scripts that we can't give a .sh file extension, such as the Buildpack API scripts. +[**/bin/**] +binary_next_line = true +indent_style = tab +shell_variant = bash +switch_case_indent = true + +[.hatchet/repos/**] +ignore = true + +# The setup-ruby GitHub Action creates this directory when caching is enabled, and if +# its not ignored will cause false positives when running shfmt in the CI lint job. +[vendor/bundle/**] +ignore = true + +[Makefile] +indent_style = tab diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d7f97513..56417d09 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,6 +10,10 @@ on: permissions: contents: read +env: + # Used by shfmt and more. + FORCE_COLOR: 1 + jobs: lint: runs-on: ubuntu-24.04 @@ -23,6 +27,10 @@ jobs: ruby-version: "3.3" - name: Run ShellCheck run: make lint-scripts + - name: Run shfmt + uses: docker://mvdan/shfmt:latest + with: + args: "--diff ." - name: Run Rubocop run: bundle exec rubocop diff --git a/Makefile b/Makefile index 1b4741ed..009bffae 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,5 @@ # These targets are not files -.PHONY: lint lint-scripts lint-ruby run publish +.PHONY: lint lint-scripts lint-ruby check-format format run publish STACK ?= heroku-24 FIXTURE ?= spec/fixtures/python_version_unspecified @@ -7,7 +7,7 @@ FIXTURE ?= spec/fixtures/python_version_unspecified # Converts a stack name of `heroku-NN` to its build Docker image tag of `heroku/heroku:NN-build`. STACK_IMAGE_TAG := heroku/$(subst -,:,$(STACK))-build -lint: lint-scripts lint-ruby +lint: lint-scripts check-format lint-ruby lint-scripts: @git ls-files -z --cached --others --exclude-standard 'bin/*' '*/bin/*' '*.sh' | xargs -0 shellcheck --check-sourced --color=always @@ -15,6 +15,12 @@ lint-scripts: lint-ruby: @bundle exec rubocop +check-format: + @shfmt --diff . + +format: + @shfmt --write --list . + run: @echo "Running buildpack using: STACK=$(STACK) FIXTURE=$(FIXTURE)" @docker run --rm -it -v $(PWD):/src:ro --tmpfs /app -e "HOME=/app" -e "STACK=$(STACK)" "$(STACK_IMAGE_TAG)" \ diff --git a/bin/compile b/bin/compile index f9c219c0..c8942020 100755 --- a/bin/compile +++ b/bin/compile @@ -103,16 +103,16 @@ mkdir -p "$CACHE_DIR/.heroku" mkdir -p .heroku # The Python installation. -cp -R "$CACHE_DIR/.heroku/python" .heroku/ &> /dev/null || true +cp -R "$CACHE_DIR/.heroku/python" .heroku/ &>/dev/null || true # A plain text file which contains the current stack being used (used for cache busting). -cp -R "$CACHE_DIR/.heroku/python-stack" .heroku/ &> /dev/null || true +cp -R "$CACHE_DIR/.heroku/python-stack" .heroku/ &>/dev/null || true # A plain text file which contains the current python version being used (used for cache busting). 
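An aside on the tooling changes above: the new Makefile targets wrap plain shfmt invocations, and shfmt reads its settings (tabs, bash dialect, switch-case indentation, ignored paths) from the .editorconfig added at the top of this diff. A minimal local sketch, assuming shfmt is installed:

  # Check formatting only, printing a diff for any file that is not canonical
  # (this is what `make check-format` and the new CI step run):
  shfmt --diff .
  # Rewrite files in place and list the ones that changed (`make format`):
  shfmt --write --list .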
-cp -R "$CACHE_DIR/.heroku/python-version" .heroku/ &> /dev/null || true +cp -R "$CACHE_DIR/.heroku/python-version" .heroku/ &>/dev/null || true # A plain text file which contains the current sqlite3 version being used (used for cache busting). -cp -R "$CACHE_DIR/.heroku/python-sqlite3-version" .heroku/ &> /dev/null || true +cp -R "$CACHE_DIR/.heroku/python-sqlite3-version" .heroku/ &>/dev/null || true # "editable" installations of code repositories, via pip or pipenv. if [[ -d "$CACHE_DIR/.heroku/src" ]]; then - cp -R "$CACHE_DIR/.heroku/src" .heroku/ &> /dev/null || true + cp -R "$CACHE_DIR/.heroku/src" .heroku/ &>/dev/null || true fi # The pre_compile hook. Customers rely on this. Don't remove it. @@ -124,47 +124,47 @@ source "${BUILDPACK_DIR}/bin/steps/hooks/pre_compile" # Sticky runtimes. If there was a previous build, and it used a given version of Python, # continue to use that version of Python in perpetuity. if [[ -f "$CACHE_DIR/.heroku/python-version" ]]; then - CACHED_PYTHON_VERSION=$(cat "$CACHE_DIR/.heroku/python-version") + CACHED_PYTHON_VERSION=$(cat "$CACHE_DIR/.heroku/python-version") fi # We didn't always record the stack version. This code is in place because of that. if [[ -f "$CACHE_DIR/.heroku/python-stack" ]]; then - CACHED_PYTHON_STACK=$(cat "$CACHE_DIR/.heroku/python-stack") + CACHED_PYTHON_STACK=$(cat "$CACHE_DIR/.heroku/python-stack") else - CACHED_PYTHON_STACK=$STACK + CACHED_PYTHON_STACK=$STACK fi # TODO: Move this into a new package manager handling implementation when adding Poetry support. # We intentionally don't mention `setup.py` here since it's being removed soon. if [[ ! -f requirements.txt && ! -f Pipfile && ! -f setup.py ]]; then - puts-warn - puts-warn "Error: Couldn't find any supported Python package manager files." - puts-warn - puts-warn "A Python app on Heroku must have either a 'requirements.txt' or" - puts-warn "'Pipfile' package manager file in the root directory of its" - puts-warn "source code." - puts-warn - puts-warn "Currently the root directory of your app contains:" - puts-warn - # TODO: Overhaul logging helpers so they can handle prefixing multi-line strings, and switch to them. - # shellcheck disable=SC2012 # Using `ls` instead of `find` is absolutely fine for this use case. - ls -1 --indicator-style=slash "${BUILD_DIR}" | sed 's/^/ ! /' - puts-warn - puts-warn "If your app already has a package manager file, check that it:" - puts-warn - puts-warn "1. Is in the top level directory (not a subdirectory)." - puts-warn "2. Has the correct spelling (the filenames are case-sensitive)." - puts-warn "3. Isn't listed in '.gitignore' or '.slugignore'." - puts-warn - puts-warn "Otherwise, add a package manager file to your app. If your app has" - puts-warn "no dependencies, then create an empty 'requirements.txt' file." - puts-warn - puts-warn "For help with using Python on Heroku, see:" - puts-warn "https://devcenter.heroku.com/articles/getting-started-with-python" - puts-warn "https://devcenter.heroku.com/articles/python-support" - puts-warn - meta_set "failure_reason" "package-manager-not-found" - exit 1 + puts-warn + puts-warn "Error: Couldn't find any supported Python package manager files." + puts-warn + puts-warn "A Python app on Heroku must have either a 'requirements.txt' or" + puts-warn "'Pipfile' package manager file in the root directory of its" + puts-warn "source code." 
+ puts-warn + puts-warn "Currently the root directory of your app contains:" + puts-warn + # TODO: Overhaul logging helpers so they can handle prefixing multi-line strings, and switch to them. + # shellcheck disable=SC2012 # Using `ls` instead of `find` is absolutely fine for this use case. + ls -1 --indicator-style=slash "${BUILD_DIR}" | sed 's/^/ ! /' + puts-warn + puts-warn "If your app already has a package manager file, check that it:" + puts-warn + puts-warn "1. Is in the top level directory (not a subdirectory)." + puts-warn "2. Has the correct spelling (the filenames are case-sensitive)." + puts-warn "3. Isn't listed in '.gitignore' or '.slugignore'." + puts-warn + puts-warn "Otherwise, add a package manager file to your app. If your app has" + puts-warn "no dependencies, then create an empty 'requirements.txt' file." + puts-warn + puts-warn "For help with using Python on Heroku, see:" + puts-warn "https://devcenter.heroku.com/articles/getting-started-with-python" + puts-warn "https://devcenter.heroku.com/articles/python-support" + puts-warn + meta_set "failure_reason" "package-manager-not-found" + exit 1 fi # Pipenv Python version support. @@ -173,21 +173,21 @@ fi source "${BUILDPACK_DIR}/bin/steps/pipenv-python-version" if [[ -f runtime.txt ]]; then - # PYTHON_VERSION_SOURCE may have already been set by the pipenv-python-version step. - # TODO: Refactor this and stop pipenv-python-version using runtime.txt as an API. - PYTHON_VERSION_SOURCE=${PYTHON_VERSION_SOURCE:-"runtime.txt"} - puts-step "Using Python version specified in ${PYTHON_VERSION_SOURCE}" - meta_set "python_version_reason" "specified" + # PYTHON_VERSION_SOURCE may have already been set by the pipenv-python-version step. + # TODO: Refactor this and stop pipenv-python-version using runtime.txt as an API. + PYTHON_VERSION_SOURCE=${PYTHON_VERSION_SOURCE:-"runtime.txt"} + puts-step "Using Python version specified in ${PYTHON_VERSION_SOURCE}" + meta_set "python_version_reason" "specified" elif [[ -n "${CACHED_PYTHON_VERSION:-}" ]]; then - puts-step "No Python version was specified. Using the same version as the last build: ${CACHED_PYTHON_VERSION}" - echo " To use a different version, see: https://devcenter.heroku.com/articles/python-runtimes" - meta_set "python_version_reason" "cached" - echo "${CACHED_PYTHON_VERSION}" > runtime.txt + puts-step "No Python version was specified. Using the same version as the last build: ${CACHED_PYTHON_VERSION}" + echo " To use a different version, see: https://devcenter.heroku.com/articles/python-runtimes" + meta_set "python_version_reason" "cached" + echo "${CACHED_PYTHON_VERSION}" >runtime.txt else - puts-step "No Python version was specified. Using the buildpack default: ${DEFAULT_PYTHON_VERSION}" - echo " To use a different version, see: https://devcenter.heroku.com/articles/python-runtimes" - meta_set "python_version_reason" "default" - echo "${DEFAULT_PYTHON_VERSION}" > runtime.txt + puts-step "No Python version was specified. Using the buildpack default: ${DEFAULT_PYTHON_VERSION}" + echo " To use a different version, see: https://devcenter.heroku.com/articles/python-runtimes" + meta_set "python_version_reason" "default" + echo "${DEFAULT_PYTHON_VERSION}" >runtime.txt fi # Create the directory for .profile.d, if it doesn't exist. @@ -201,11 +201,11 @@ mkdir -p /app/.heroku/src # This is (hopefully obviously) because apps end up running from `/app` in production. # Realpath is used to support use-cases where one of the locations is a symlink to the other. 
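The realpath comparison described in the comment above guards against one location being a symlink to the other; a minimal illustration with hypothetical paths:

  build_dir="/tmp/build_abc123"          # hypothetical build directory
  mkdir -p "${build_dir}"
  ln -nsf "${build_dir}" /tmp/app-link   # hypothetical stand-in for /app
  # A plain string comparison treats these as different locations...
  [[ "${build_dir}" == "/tmp/app-link" ]] || echo "string comparison: different"
  # ...while realpath resolves the symlink and recognises the same directory.
  [[ "$(realpath "${build_dir}")" == "$(realpath /tmp/app-link)" ]] && echo "realpath: same"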
if [[ "$(realpath "${BUILD_DIR}")" != "$(realpath /app)" ]]; then - # python expects to reside in /app, so set up symlinks - # we will not remove these later so subsequent buildpacks can still invoke it - ln -nsf "$BUILD_DIR/.heroku/python" /app/.heroku/python - ln -nsf "$BUILD_DIR/.heroku/vendor" /app/.heroku/vendor - # Note: .heroku/src is copied in later. + # python expects to reside in /app, so set up symlinks + # we will not remove these later so subsequent buildpacks can still invoke it + ln -nsf "$BUILD_DIR/.heroku/python" /app/.heroku/python + ln -nsf "$BUILD_DIR/.heroku/vendor" /app/.heroku/vendor + # Note: .heroku/src is copied in later. fi # Download / Install Python, from pre-build binaries available on Amazon S3. @@ -221,10 +221,10 @@ source "${BUILDPACK_DIR}/bin/steps/pipenv" # This allows for people to ship a setup.py application to Heroku if [[ ! -f requirements.txt ]] && [[ ! -f Pipfile ]]; then - meta_set "setup_py_only" "true" - echo "-e ." > requirements.txt + meta_set "setup_py_only" "true" + echo "-e ." >requirements.txt else - meta_set "setup_py_only" "false" + meta_set "setup_py_only" "false" fi # SQLite3 support. @@ -251,11 +251,10 @@ meta_time "nltk_downloader_duration" "${nltk_downloader_start_time}" # In CI, $BUILD_DIR is /app. # Realpath is used to support use-cases where one of the locations is a symlink to the other. if [[ "$(realpath "${BUILD_DIR}")" != "$(realpath /app)" ]]; then - rm -rf "$BUILD_DIR/.heroku/src" - deep-cp /app/.heroku/src "$BUILD_DIR/.heroku/src" + rm -rf "$BUILD_DIR/.heroku/src" + deep-cp /app/.heroku/src "$BUILD_DIR/.heroku/src" fi - # Django collectstatic support. # The buildpack automatically runs collectstatic for Django applications. # This is the cause for the majority of build failures on the Python platform. @@ -265,7 +264,6 @@ collectstatic_start_time=$(nowms) sub_env "${BUILDPACK_DIR}/bin/steps/collectstatic" meta_time "django_collectstatic_duration" "${collectstatic_start_time}" - # Programmatically create .profile.d script for application runtime environment variables. # Set the PATH to include Python / pip / pipenv / etc. @@ -286,7 +284,7 @@ set_default_env PYTHONPATH "\$HOME" # Python expects to be in /app, if at runtime, it is not, set # up symlinks… this can occur when the subdir buildpack is used. -cat <> "$PROFILE_PATH" +cat <>"$PROFILE_PATH" if [[ \$HOME != "/app" ]]; then mkdir -p /app/.heroku ln -nsf "\$HOME/.heroku/python" /app/.heroku/python @@ -298,7 +296,7 @@ EOT # (such as `/tmp/build_`) back to `/app`. This is not done during the build itself, since later # buildpacks still need the build time paths. 
if [[ "${BUILD_DIR}" != "/app" ]]; then - cat <> "$PROFILE_PATH" + cat <>"$PROFILE_PATH" find .heroku/python/lib/python*/site-packages/ -type f -and \( -name '*.egg-link' -or -name '*.pth' -or -name '__editable___*_finder.py' \) -exec sed -i -e 's#${BUILD_DIR}#/app#' {} \+ EOT fi @@ -320,9 +318,9 @@ rm -rf "$CACHE_DIR/.heroku/src" mkdir -p "$CACHE_DIR/.heroku" cp -R .heroku/python "$CACHE_DIR/.heroku/" cp -R .heroku/python-version "$CACHE_DIR/.heroku/" -cp -R .heroku/python-stack "$CACHE_DIR/.heroku/" &> /dev/null || true +cp -R .heroku/python-stack "$CACHE_DIR/.heroku/" &>/dev/null || true if [[ -d .heroku/src ]]; then - cp -R .heroku/src "$CACHE_DIR/.heroku/" &> /dev/null || true + cp -R .heroku/src "$CACHE_DIR/.heroku/" &>/dev/null || true fi meta_time "total_duration" "${compile_start_time}" diff --git a/bin/detect b/bin/detect index d7b27069..498792ba 100755 --- a/bin/detect +++ b/bin/detect @@ -13,27 +13,27 @@ BUILD_DIR="${1}" # so that Python projects that are missing some of the required files still pass detection, # allowing us to show a helpful error message during the build phase. KNOWN_PYTHON_PROJECT_FILES=( - .python-version - app.py - main.py - manage.py - pdm.lock - Pipfile - Pipfile.lock - poetry.lock - pyproject.toml - requirements.txt - runtime.txt - setup.cfg - setup.py - uv.lock + .python-version + app.py + main.py + manage.py + pdm.lock + Pipfile + Pipfile.lock + poetry.lock + pyproject.toml + requirements.txt + runtime.txt + setup.cfg + setup.py + uv.lock ) for filename in "${KNOWN_PYTHON_PROJECT_FILES[@]}"; do - if [[ -f "${BUILD_DIR}/${filename}" ]]; then - echo "Python" - exit 0 - fi + if [[ -f "${BUILD_DIR}/${filename}" ]]; then + echo "Python" + exit 0 + fi done # Cytokine incorrectly indents the first line, so we have to leave it empty. @@ -43,7 +43,7 @@ echo 1>&2 # since during compile the build will still require a package manager file, so it # makes sense to describe the stricter requirements up front. # TODO: Overhaul logging helpers so they can handle prefixing multi-line strings, and switch to them. -sed 's/^/ ! /' 1>&2 << EOF +sed 's/^/ ! /' 1>&2 </dev/null; then - # Determine pysqlite3 usage since it's the only package that requires the sqlite3 headers. - if pip show pysqlite3 &>/dev/null; then - kv_pair pysqlite3_installed true - else - kv_pair pysqlite3_installed false - fi + # Determine pysqlite3 usage since it's the only package that requires the sqlite3 headers. + if pip show pysqlite3 &>/dev/null; then + kv_pair pysqlite3_installed true + else + kv_pair pysqlite3_installed false + fi - if pip show pysqlite3-binary &>/dev/null; then - kv_pair pysqlite3_binary_installed true - else - kv_pair pysqlite3_binary_installed false - fi + if pip show pysqlite3-binary &>/dev/null; then + kv_pair pysqlite3_binary_installed true + else + kv_pair pysqlite3_binary_installed false + fi fi diff --git a/bin/steps/collectstatic b/bin/steps/collectstatic index 5731408a..41b33fe0 100755 --- a/bin/steps/collectstatic +++ b/bin/steps/collectstatic @@ -22,21 +22,21 @@ source "${BUILDPACK_DIR}/lib/metadata.sh" meta_init "${CACHE_DIR}" "python" if [[ -f .heroku/collectstatic_disabled ]]; then - puts-step "Skipping Django collectstatic since the file '.heroku/collectstatic_disabled' exists." - puts-warn "This approach is deprecated, please set the env var DISABLE_COLLECTSTATIC=1 instead." - meta_set "django_collectstatic" "disabled-file" - exit 0 + puts-step "Skipping Django collectstatic since the file '.heroku/collectstatic_disabled' exists." 
+ puts-warn "This approach is deprecated, please set the env var DISABLE_COLLECTSTATIC=1 instead." + meta_set "django_collectstatic" "disabled-file" + exit 0 fi if [[ "${DISABLE_COLLECTSTATIC:-0}" != "0" ]]; then - puts-step "Skipping Django collectstatic since the env var DISABLE_COLLECTSTATIC is set." - meta_set "django_collectstatic" "disabled-env-var" - exit 0 + puts-step "Skipping Django collectstatic since the env var DISABLE_COLLECTSTATIC is set." + meta_set "django_collectstatic" "disabled-env-var" + exit 0 fi # Ensure that Django is actually installed. if ! is_module_available 'django'; then - exit 0 + exit 0 fi # Location of 'manage.py', if it exists. @@ -44,9 +44,9 @@ MANAGE_FILE=$(find . -maxdepth 3 -type f -name 'manage.py' -printf '%d\t%P\n' | MANAGE_FILE=${MANAGE_FILE:-fakepath} if [[ ! -f "${MANAGE_FILE}" ]]; then - puts-step "Skipping Django collectstatic since no manage.py file found." - meta_set "django_collectstatic" "skipped-no-manage-py" - exit 0 + puts-step "Skipping Django collectstatic since no manage.py file found." + meta_set "django_collectstatic" "skipped-no-manage-py" + exit 0 fi meta_set "django_collectstatic" "enabled" @@ -65,20 +65,20 @@ set -e echo if [[ "${COLLECTSTATIC_STATUS}" == 0 ]]; then - exit 0 + exit 0 fi # Display a warning if collectstatic failed. if grep -q 'SyntaxError' "$COLLECTSTATIC_LOG"; then - meta_set "failure_reason" "collectstatic-syntax-error" + meta_set "failure_reason" "collectstatic-syntax-error" elif grep -q 'ImproperlyConfigured' "$COLLECTSTATIC_LOG"; then - meta_set "failure_reason" "collectstatic-improper-configuration" + meta_set "failure_reason" "collectstatic-improper-configuration" elif grep -q 'The CSS file' "$COLLECTSTATIC_LOG"; then - meta_set "failure_reason" "collectstatic-fancy-references" + meta_set "failure_reason" "collectstatic-fancy-references" elif grep -q 'OSError' "$COLLECTSTATIC_LOG"; then - meta_set "failure_reason" "collectstatic-missing-file" + meta_set "failure_reason" "collectstatic-missing-file" else - meta_set "failure_reason" "collectstatic-other" + meta_set "failure_reason" "collectstatic-other" fi echo " ! Error while running '$ python $MANAGE_FILE collectstatic --noinput'." @@ -93,10 +93,10 @@ echo " https://devcenter.heroku.com/articles/django-assets" # Additionally, dump out the environment, if debug mode is on. 
if [[ -n "$DEBUG_COLLECTSTATIC" ]]; then - echo - echo "****** Collectstatic environment variables:" - echo - env | indent + echo + echo "****** Collectstatic environment variables:" + echo + env | indent fi exit 1 diff --git a/bin/steps/hooks/post_compile b/bin/steps/hooks/post_compile index 111491ee..7a20ed0a 100755 --- a/bin/steps/hooks/post_compile +++ b/bin/steps/hooks/post_compile @@ -1,12 +1,12 @@ #!/usr/bin/env bash if [[ -f bin/post_compile ]]; then - post_compile_hook_start_time=$(nowms) - meta_set "post_compile_hook" "true" - echo "-----> Running post-compile hook" - chmod +x bin/post_compile - sub_env bin/post_compile - meta_time "post_compile_hook_duration" "${post_compile_hook_start_time}" + post_compile_hook_start_time=$(nowms) + meta_set "post_compile_hook" "true" + echo "-----> Running post-compile hook" + chmod +x bin/post_compile + sub_env bin/post_compile + meta_time "post_compile_hook_duration" "${post_compile_hook_start_time}" else - meta_set "post_compile_hook" "false" + meta_set "post_compile_hook" "false" fi diff --git a/bin/steps/hooks/pre_compile b/bin/steps/hooks/pre_compile index c22eb110..c5be04ce 100755 --- a/bin/steps/hooks/pre_compile +++ b/bin/steps/hooks/pre_compile @@ -1,12 +1,12 @@ #!/usr/bin/env bash if [[ -f bin/pre_compile ]]; then - pre_compile_hook_start_time=$(nowms) - meta_set "pre_compile_hook" "true" - echo "-----> Running pre-compile hook" - chmod +x bin/pre_compile - sub_env bin/pre_compile - meta_time "pre_compile_hook_duration" "${pre_compile_hook_start_time}" + pre_compile_hook_start_time=$(nowms) + meta_set "pre_compile_hook" "true" + echo "-----> Running pre-compile hook" + chmod +x bin/pre_compile + sub_env bin/pre_compile + meta_time "pre_compile_hook_duration" "${pre_compile_hook_start_time}" else - meta_set "pre_compile_hook" "false" + meta_set "pre_compile_hook" "false" fi diff --git a/bin/steps/nltk b/bin/steps/nltk index f04e60cd..f5f939ef 100755 --- a/bin/steps/nltk +++ b/bin/steps/nltk @@ -17,22 +17,22 @@ EXPORT_PATH="${BUILDPACK_DIR}/export" # Check that nltk was installed by pip, otherwise obviously not needed if is_module_available 'nltk'; then - puts-step "Downloading NLTK corpora..." + puts-step "Downloading NLTK corpora..." 
- nltk_packages_definition="$BUILD_DIR/nltk.txt" + nltk_packages_definition="$BUILD_DIR/nltk.txt" - if [[ -f "$nltk_packages_definition" ]]; then - meta_set "nltk_downloader" "enabled" + if [[ -f "$nltk_packages_definition" ]]; then + meta_set "nltk_downloader" "enabled" - readarray -t nltk_packages < "$nltk_packages_definition" - puts-step "Downloading NLTK packages: ${nltk_packages[*]}" + readarray -t nltk_packages <"$nltk_packages_definition" + puts-step "Downloading NLTK packages: ${nltk_packages[*]}" - python -m nltk.downloader -d "$BUILD_DIR/.heroku/python/nltk_data" "${nltk_packages[@]}" | indent - set_env NLTK_DATA "/app/.heroku/python/nltk_data" + python -m nltk.downloader -d "$BUILD_DIR/.heroku/python/nltk_data" "${nltk_packages[@]}" | indent + set_env NLTK_DATA "/app/.heroku/python/nltk_data" - else - meta_set "nltk_downloader" "skipped-no-nltk-file" - puts-warn "'nltk.txt' not found, not downloading any corpora" - puts-warn "Learn more: https://devcenter.heroku.com/articles/python-nltk" - fi + else + meta_set "nltk_downloader" "skipped-no-nltk-file" + puts-warn "'nltk.txt' not found, not downloading any corpora" + puts-warn "Learn more: https://devcenter.heroku.com/articles/python-nltk" + fi fi diff --git a/bin/steps/pip-install b/bin/steps/pip-install index e180e649..86ecfd0d 100755 --- a/bin/steps/pip-install +++ b/bin/steps/pip-install @@ -1,44 +1,44 @@ #!/usr/bin/env bash if [[ -z "$SKIP_PIP_INSTALL" ]]; then - pip_install_start_time=$(nowms) - meta_set "package_manager" "pip" - - puts-step "Installing requirements with pip" - - # Set Pip env vars - # This reads certain environment variables set on the Heroku app config - # and makes them accessible to the pip install process. - # - # PIP_EXTRA_INDEX_URL allows for an alternate pypi URL to be used. - if [[ -r "$ENV_DIR/PIP_EXTRA_INDEX_URL" ]]; then - PIP_EXTRA_INDEX_URL="$(cat "$ENV_DIR/PIP_EXTRA_INDEX_URL")" - export PIP_EXTRA_INDEX_URL - fi - - set +e - - /app/.heroku/python/bin/pip install -r requirements.txt --exists-action=w --src='/app/.heroku/src' --disable-pip-version-check --no-cache-dir --progress-bar off 2>&1 | tee "$WARNINGS_LOG" | cleanup | indent - PIP_STATUS="${PIPESTATUS[0]}" - set -e - - show-warnings - - if [[ ! $PIP_STATUS -eq 0 ]]; then - meta_set "failure_reason" "pip-install" - exit 1 - fi - - cp requirements.txt .heroku/python/requirements-declared.txt - /app/.heroku/python/bin/pip freeze --disable-pip-version-check > .heroku/python/requirements-installed.txt - - # Install test dependencies, for CI. - if [[ -n "$INSTALL_TEST" ]]; then - if [[ -f requirements-test.txt ]]; then - puts-step "Installing test dependencies..." - /app/.heroku/python/bin/pip install -r requirements-test.txt --exists-action=w --src='/app/.heroku/src' --disable-pip-version-check --no-cache-dir 2>&1 | cleanup | indent - fi - fi - - meta_time "dependencies_install_duration" "${pip_install_start_time}" + pip_install_start_time=$(nowms) + meta_set "package_manager" "pip" + + puts-step "Installing requirements with pip" + + # Set Pip env vars + # This reads certain environment variables set on the Heroku app config + # and makes them accessible to the pip install process. + # + # PIP_EXTRA_INDEX_URL allows for an alternate pypi URL to be used. 
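The ENV_DIR pattern used just below follows the buildpack API: each app config var is exposed to bin/compile as a file named after the var. A hypothetical example for the extra index URL:

  # App side (URL hypothetical):
  heroku config:set PIP_EXTRA_INDEX_URL="https://pypi.example.com/simple/"
  # Build side: the platform writes the value to "$ENV_DIR/PIP_EXTRA_INDEX_URL",
  # which the step below reads and exports so pip picks it up:
  cat "$ENV_DIR/PIP_EXTRA_INDEX_URL"   # -> https://pypi.example.com/simple/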
+ if [[ -r "$ENV_DIR/PIP_EXTRA_INDEX_URL" ]]; then + PIP_EXTRA_INDEX_URL="$(cat "$ENV_DIR/PIP_EXTRA_INDEX_URL")" + export PIP_EXTRA_INDEX_URL + fi + + set +e + + /app/.heroku/python/bin/pip install -r requirements.txt --exists-action=w --src='/app/.heroku/src' --disable-pip-version-check --no-cache-dir --progress-bar off 2>&1 | tee "$WARNINGS_LOG" | cleanup | indent + PIP_STATUS="${PIPESTATUS[0]}" + set -e + + show-warnings + + if [[ ! $PIP_STATUS -eq 0 ]]; then + meta_set "failure_reason" "pip-install" + exit 1 + fi + + cp requirements.txt .heroku/python/requirements-declared.txt + /app/.heroku/python/bin/pip freeze --disable-pip-version-check >.heroku/python/requirements-installed.txt + + # Install test dependencies, for CI. + if [[ -n "$INSTALL_TEST" ]]; then + if [[ -f requirements-test.txt ]]; then + puts-step "Installing test dependencies..." + /app/.heroku/python/bin/pip install -r requirements-test.txt --exists-action=w --src='/app/.heroku/src' --disable-pip-version-check --no-cache-dir 2>&1 | cleanup | indent + fi + fi + + meta_time "dependencies_install_duration" "${pip_install_start_time}" fi diff --git a/bin/steps/pipenv b/bin/steps/pipenv index 1d97941c..6974ae2d 100755 --- a/bin/steps/pipenv +++ b/bin/steps/pipenv @@ -11,46 +11,46 @@ rm -f .heroku/python/Pipfile.lock.sha256 if [[ -f Pipfile ]]; then - pipenv_install_start_time=$(nowms) - meta_set "package_manager" "pipenv" - - # Skip installing dependencies using pip later. - # TODO: Stop leaking this env var into subshells such as post_compile hooks. - export SKIP_PIP_INSTALL=1 - - # Set Pip env vars - # This reads certain environment variables set on the Heroku app config - # and makes them accessible to the pip install process. - # - # PIP_EXTRA_INDEX_URL allows for an alternate pypi URL to be used. - if [[ -r "$ENV_DIR/PIP_EXTRA_INDEX_URL" ]]; then - PIP_EXTRA_INDEX_URL="$(cat "$ENV_DIR/PIP_EXTRA_INDEX_URL")" - export PIP_EXTRA_INDEX_URL - fi - - PIPENV_VERSION=$(get_requirement_version 'pipenv') - meta_set "pipenv_version" "${PIPENV_VERSION}" - - /app/.heroku/python/bin/pip install --quiet --disable-pip-version-check --no-cache-dir "pipenv==${PIPENV_VERSION}" - - # Install the test dependencies, for CI. - # TODO: This is currently inconsistent with the non-test path, since it assumes (but doesn't check for) a lockfile. - if [[ -n "$INSTALL_TEST" ]]; then - meta_set "pipenv_has_lockfile" "true" - puts-step "Installing test dependencies" - /app/.heroku/python/bin/pipenv install --dev --system --deploy --extra-pip-args='--src=/app/.heroku/src' 2>&1 | cleanup | indent - - # Install the dependencies. - elif [[ ! -f Pipfile.lock ]]; then - meta_set "pipenv_has_lockfile" "false" - puts-step "Installing dependencies with Pipenv ${PIPENV_VERSION}" - /app/.heroku/python/bin/pipenv install --system --skip-lock --extra-pip-args='--src=/app/.heroku/src' 2>&1 | indent - - else - meta_set "pipenv_has_lockfile" "true" - puts-step "Installing dependencies with Pipenv ${PIPENV_VERSION}" - /app/.heroku/python/bin/pipenv install --system --deploy --extra-pip-args='--src=/app/.heroku/src' 2>&1 | indent - fi - - meta_time "dependencies_install_duration" "${pipenv_install_start_time}" + pipenv_install_start_time=$(nowms) + meta_set "package_manager" "pipenv" + + # Skip installing dependencies using pip later. + # TODO: Stop leaking this env var into subshells such as post_compile hooks. 
+ export SKIP_PIP_INSTALL=1 + + # Set Pip env vars + # This reads certain environment variables set on the Heroku app config + # and makes them accessible to the pip install process. + # + # PIP_EXTRA_INDEX_URL allows for an alternate pypi URL to be used. + if [[ -r "$ENV_DIR/PIP_EXTRA_INDEX_URL" ]]; then + PIP_EXTRA_INDEX_URL="$(cat "$ENV_DIR/PIP_EXTRA_INDEX_URL")" + export PIP_EXTRA_INDEX_URL + fi + + PIPENV_VERSION=$(get_requirement_version 'pipenv') + meta_set "pipenv_version" "${PIPENV_VERSION}" + + /app/.heroku/python/bin/pip install --quiet --disable-pip-version-check --no-cache-dir "pipenv==${PIPENV_VERSION}" + + # Install the test dependencies, for CI. + # TODO: This is currently inconsistent with the non-test path, since it assumes (but doesn't check for) a lockfile. + if [[ -n "$INSTALL_TEST" ]]; then + meta_set "pipenv_has_lockfile" "true" + puts-step "Installing test dependencies" + /app/.heroku/python/bin/pipenv install --dev --system --deploy --extra-pip-args='--src=/app/.heroku/src' 2>&1 | cleanup | indent + + # Install the dependencies. + elif [[ ! -f Pipfile.lock ]]; then + meta_set "pipenv_has_lockfile" "false" + puts-step "Installing dependencies with Pipenv ${PIPENV_VERSION}" + /app/.heroku/python/bin/pipenv install --system --skip-lock --extra-pip-args='--src=/app/.heroku/src' 2>&1 | indent + + else + meta_set "pipenv_has_lockfile" "true" + puts-step "Installing dependencies with Pipenv ${PIPENV_VERSION}" + /app/.heroku/python/bin/pipenv install --system --deploy --extra-pip-args='--src=/app/.heroku/src' 2>&1 | indent + fi + + meta_time "dependencies_install_duration" "${pipenv_install_start_time}" fi diff --git a/bin/steps/pipenv-python-version b/bin/steps/pipenv-python-version index 9adba900..b6614cf9 100755 --- a/bin/steps/pipenv-python-version +++ b/bin/steps/pipenv-python-version @@ -4,60 +4,59 @@ if [[ -f $BUILD_DIR/Pipfile ]]; then - if [[ ! -f $BUILD_DIR/runtime.txt ]]; then - if [[ ! -f $BUILD_DIR/Pipfile.lock ]]; then - puts-warn "No 'Pipfile.lock' found! We recommend you commit this into your repository." - fi - if [[ -f $BUILD_DIR/Pipfile.lock ]]; then - # Ignore unused env var warning since this is used by bin/compile. - # shellcheck disable=2034 - PYTHON_VERSION_SOURCE='Pipfile.lock' - set +e - PYTHON=$(jq -r '._meta.requires.python_full_version' "$BUILD_DIR/Pipfile.lock") - if [[ "$PYTHON" != "null" ]]; then - echo "python-$PYTHON" > "$BUILD_DIR/runtime.txt" - fi - set -e + if [[ ! -f $BUILD_DIR/runtime.txt ]]; then + if [[ ! -f $BUILD_DIR/Pipfile.lock ]]; then + puts-warn "No 'Pipfile.lock' found! We recommend you commit this into your repository." + fi + if [[ -f $BUILD_DIR/Pipfile.lock ]]; then + # Ignore unused env var warning since this is used by bin/compile. 
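For context on the jq queries just below: Pipfile.lock records the Python requirement under _meta.requires, often as a major version only, in which case python_full_version is absent and jq prints "null", which is what triggers the fallback case statement. A hypothetical lockfile excerpt and the corresponding results:

  # _meta excerpt from a hypothetical Pipfile.lock:
  #   "_meta": { "requires": { "python_version": "3.12" } }
  jq -r '._meta.requires.python_full_version' Pipfile.lock   # -> null
  jq -r '._meta.requires.python_version' Pipfile.lock        # -> 3.12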
+ # shellcheck disable=2034 + PYTHON_VERSION_SOURCE='Pipfile.lock' + set +e + PYTHON=$(jq -r '._meta.requires.python_full_version' "$BUILD_DIR/Pipfile.lock") + if [[ "$PYTHON" != "null" ]]; then + echo "python-$PYTHON" >"$BUILD_DIR/runtime.txt" + fi + set -e - if [[ "$PYTHON" == "null" ]]; then - PYTHON=$(jq -r '._meta.requires.python_version' "$BUILD_DIR/Pipfile.lock") - case "${PYTHON}" in - 2.7) - echo "${LATEST_27}" > "${BUILD_DIR}/runtime.txt" - ;; - 3.4) - echo "${LATEST_34}" > "${BUILD_DIR}/runtime.txt" - ;; - 3.5) - echo "${LATEST_35}" > "${BUILD_DIR}/runtime.txt" - ;; - 3.6) - echo "${LATEST_36}" > "${BUILD_DIR}/runtime.txt" - ;; - 3.7) - echo "${LATEST_37}" > "${BUILD_DIR}/runtime.txt" - ;; - 3.8) - echo "${LATEST_38}" > "${BUILD_DIR}/runtime.txt" - ;; - 3.9) - echo "${LATEST_39}" > "${BUILD_DIR}/runtime.txt" - ;; - 3.10) - echo "${LATEST_310}" > "${BUILD_DIR}/runtime.txt" - ;; - 3.11) - echo "${LATEST_311}" > "${BUILD_DIR}/runtime.txt" - ;; - 3.12) - echo "${LATEST_312}" > "${BUILD_DIR}/runtime.txt" - ;; - # TODO: Make this case an error - *) ;; - esac - fi + if [[ "$PYTHON" == "null" ]]; then + PYTHON=$(jq -r '._meta.requires.python_version' "$BUILD_DIR/Pipfile.lock") + case "${PYTHON}" in + 2.7) + echo "${LATEST_27}" >"${BUILD_DIR}/runtime.txt" + ;; + 3.4) + echo "${LATEST_34}" >"${BUILD_DIR}/runtime.txt" + ;; + 3.5) + echo "${LATEST_35}" >"${BUILD_DIR}/runtime.txt" + ;; + 3.6) + echo "${LATEST_36}" >"${BUILD_DIR}/runtime.txt" + ;; + 3.7) + echo "${LATEST_37}" >"${BUILD_DIR}/runtime.txt" + ;; + 3.8) + echo "${LATEST_38}" >"${BUILD_DIR}/runtime.txt" + ;; + 3.9) + echo "${LATEST_39}" >"${BUILD_DIR}/runtime.txt" + ;; + 3.10) + echo "${LATEST_310}" >"${BUILD_DIR}/runtime.txt" + ;; + 3.11) + echo "${LATEST_311}" >"${BUILD_DIR}/runtime.txt" + ;; + 3.12) + echo "${LATEST_312}" >"${BUILD_DIR}/runtime.txt" + ;; + # TODO: Make this case an error + *) ;; + esac + fi - - fi - fi + fi + fi fi diff --git a/bin/steps/python b/bin/steps/python index 333a9302..a481f9b0 100755 --- a/bin/steps/python +++ b/bin/steps/python @@ -7,51 +7,51 @@ PYTHON_VERSION="${PYTHON_VERSION##+([[:space:]])}" PYTHON_VERSION="${PYTHON_VERSION%%+([[:space:]])}" function eol_python_version_error() { - local major_version="${1}" - local eol_date="${2}" - puts-warn - puts-warn "Python ${major_version} reached upstream end-of-life on ${eol_date}, and is" - puts-warn "therefore no longer receiving security updates:" - puts-warn "https://devguide.python.org/versions/#supported-versions" - puts-warn - puts-warn "As such, it is no longer supported by this buildpack." - puts-warn - puts-warn "Please upgrade to a newer Python version." - puts-warn - puts-warn "For a list of the supported Python versions, see:" - puts-warn "https://devcenter.heroku.com/articles/python-support#supported-runtimes" - puts-warn - meta_set "failure_reason" "python-version-eol" - exit 1 + local major_version="${1}" + local eol_date="${2}" + puts-warn + puts-warn "Python ${major_version} reached upstream end-of-life on ${eol_date}, and is" + puts-warn "therefore no longer receiving security updates:" + puts-warn "https://devguide.python.org/versions/#supported-versions" + puts-warn + puts-warn "As such, it is no longer supported by this buildpack." + puts-warn + puts-warn "Please upgrade to a newer Python version." 
+ puts-warn + puts-warn "For a list of the supported Python versions, see:" + puts-warn "https://devcenter.heroku.com/articles/python-support#supported-runtimes" + puts-warn + meta_set "failure_reason" "python-version-eol" + exit 1 } # We check for EOL prior to checking if the archive exists on S3, to ensure the more specific EOL error # message is still shown for newer stacks where the EOL Python versions might not have been built. case "${PYTHON_VERSION}" in - python-3.7.+([0-9])) - eol_python_version_error "3.7" "June 27th, 2023" - ;; - python-3.6.+([0-9])) - eol_python_version_error "3.6" "December 23rd, 2021" - ;; - *) ;; + python-3.7.+([0-9])) + eol_python_version_error "3.7" "June 27th, 2023" + ;; + python-3.6.+([0-9])) + eol_python_version_error "3.6" "December 23rd, 2021" + ;; + *) ;; esac # The Python runtime archive filename is of form: 'python-X.Y.Z-ubuntu-22.04-amd64.tar.zst' # The Ubuntu version is calculated from `STACK` since it's faster than calling `lsb_release`. -UBUNTU_VERSION="${STACK/heroku-}.04" +UBUNTU_VERSION="${STACK/heroku-/}.04" ARCH=$(dpkg --print-architecture) PYTHON_URL="${S3_BASE_URL}/${PYTHON_VERSION}-ubuntu-${UBUNTU_VERSION}-${ARCH}.tar.zst" if ! curl --output /dev/null --silent --head --fail --retry 3 --retry-connrefused --connect-timeout 10 "${PYTHON_URL}"; then - puts-warn - puts-warn "Requested runtime '${PYTHON_VERSION}' is not available for this stack (${STACK})." - puts-warn - puts-warn "For a list of the supported Python versions, see:" - puts-warn "https://devcenter.heroku.com/articles/python-support#supported-runtimes" - puts-warn - meta_set "failure_reason" "python-version-not-found" - exit 1 + puts-warn + puts-warn "Requested runtime '${PYTHON_VERSION}' is not available for this stack (${STACK})." + puts-warn + puts-warn "For a list of the supported Python versions, see:" + puts-warn "https://devcenter.heroku.com/articles/python-support#supported-runtimes" + puts-warn + meta_set "failure_reason" "python-version-not-found" + exit 1 fi # TODO: Refactor Python version usage to use the non-prefixed form everywhere. @@ -60,109 +60,109 @@ meta_set "python_version" "${python_version_without_prefix}" meta_set "python_version_major" "${python_version_without_prefix%.*}" function warn_if_patch_update_available() { - local requested_version="${1}" - local latest_patch_version="${2}" - # Extract the patch version component of the version strings (ie: the '5' in '3.10.5'). - local requested_patch_number="${requested_version##*.}" - local latest_patch_number="${latest_patch_version##*.}" - if (( requested_patch_number < latest_patch_number )); then - puts-warn - puts-warn "A Python security update is available! Upgrade as soon as possible to: ${latest_patch_version}" - puts-warn "See: https://devcenter.heroku.com/articles/python-runtimes" - puts-warn - fi + local requested_version="${1}" + local latest_patch_version="${2}" + # Extract the patch version component of the version strings (ie: the '5' in '3.10.5'). + local requested_patch_number="${requested_version##*.}" + local latest_patch_number="${latest_patch_version##*.}" + if ((requested_patch_number < latest_patch_number)); then + puts-warn + puts-warn "A Python security update is available! 
Upgrade as soon as possible to: ${latest_patch_version}" + puts-warn "See: https://devcenter.heroku.com/articles/python-runtimes" + puts-warn + fi } # We wait until now to display outdated Python version warnings, since we only want to show them # if there weren't any errors with the version to avoid adding noise to the error messages. case "${PYTHON_VERSION}" in - python-3.12.*) - warn_if_patch_update_available "${PYTHON_VERSION}" "${LATEST_312}" - ;; - python-3.11.*) - warn_if_patch_update_available "${PYTHON_VERSION}" "${LATEST_311}" - ;; - python-3.10.*) - warn_if_patch_update_available "${PYTHON_VERSION}" "${LATEST_310}" - ;; - python-3.9.*) - warn_if_patch_update_available "${PYTHON_VERSION}" "${LATEST_39}" - ;; - python-3.8.*) - puts-warn - puts-warn "Python 3.8 will reach its upstream end-of-life in October 2024, at which" - puts-warn "point it will no longer receive security updates:" - puts-warn "https://devguide.python.org/versions/#supported-versions" - puts-warn - puts-warn "Support for Python 3.8 will be removed from this buildpack on December 4th, 2024." - puts-warn - puts-warn "Upgrade to a newer Python version as soon as possible to keep your app secure." - puts-warn "See: https://devcenter.heroku.com/articles/python-runtimes" - puts-warn - warn_if_patch_update_available "${PYTHON_VERSION}" "${LATEST_38}" - ;; - # TODO: Make this case an error, since it should be unreachable. - *) ;; + python-3.12.*) + warn_if_patch_update_available "${PYTHON_VERSION}" "${LATEST_312}" + ;; + python-3.11.*) + warn_if_patch_update_available "${PYTHON_VERSION}" "${LATEST_311}" + ;; + python-3.10.*) + warn_if_patch_update_available "${PYTHON_VERSION}" "${LATEST_310}" + ;; + python-3.9.*) + warn_if_patch_update_available "${PYTHON_VERSION}" "${LATEST_39}" + ;; + python-3.8.*) + puts-warn + puts-warn "Python 3.8 will reach its upstream end-of-life in October 2024, at which" + puts-warn "point it will no longer receive security updates:" + puts-warn "https://devguide.python.org/versions/#supported-versions" + puts-warn + puts-warn "Support for Python 3.8 will be removed from this buildpack on December 4th, 2024." + puts-warn + puts-warn "Upgrade to a newer Python version as soon as possible to keep your app secure." + puts-warn "See: https://devcenter.heroku.com/articles/python-runtimes" + puts-warn + warn_if_patch_update_available "${PYTHON_VERSION}" "${LATEST_38}" + ;; + # TODO: Make this case an error, since it should be unreachable. + *) ;; esac if [[ "$STACK" != "$CACHED_PYTHON_STACK" ]]; then - puts-step "Stack has changed from $CACHED_PYTHON_STACK to $STACK, clearing cache" - rm -rf .heroku/python-stack .heroku/python-version .heroku/python .heroku/vendor .heroku/python .heroku/python-sqlite3-version + puts-step "Stack has changed from $CACHED_PYTHON_STACK to $STACK, clearing cache" + rm -rf .heroku/python-stack .heroku/python-version .heroku/python .heroku/vendor .heroku/python .heroku/python-sqlite3-version fi if [[ -f .heroku/python-version ]]; then - if [[ ! "$(cat .heroku/python-version)" == "$PYTHON_VERSION" ]]; then - puts-step "Python version has changed from $(cat .heroku/python-version) to ${PYTHON_VERSION}, clearing cache" - rm -rf .heroku/python - else - SKIP_INSTALL=1 - fi + if [[ ! 
"$(cat .heroku/python-version)" == "$PYTHON_VERSION" ]]; then + puts-step "Python version has changed from $(cat .heroku/python-version) to ${PYTHON_VERSION}, clearing cache" + rm -rf .heroku/python + else + SKIP_INSTALL=1 + fi fi # If using Pip, check if we should reinstall python dependencies given that requirements.txt # is non-deterministic (not all packages pinned, doesn't handle uninstalls etc). We don't need # to do this when using Pipenv, since it has a lockfile and syncs the packages for us. if [[ -f "${BUILD_DIR}/requirements.txt" ]]; then - if [[ ! -f "$CACHE_DIR/.heroku/requirements.txt" ]]; then - # This is a the first build of an app (or the build cache was cleared). Since there - # are no cached packages, we only need to store the requirements file for next time. - cp -R "$BUILD_DIR/requirements.txt" "$CACHE_DIR/.heroku/requirements.txt" - else - # IF there IS a cached directory, check for differences with the new one - if ! diff "$BUILD_DIR/requirements.txt" "$CACHE_DIR/.heroku/requirements.txt" &> /dev/null; then - puts-step "Requirements file has been changed, clearing cached dependencies" - # if there are any differences, clear the Python cache - # Installing Python over again does not take noticably more time - cp -R "$BUILD_DIR/requirements.txt" "$CACHE_DIR/.heroku/requirements.txt" - rm -rf .heroku/python - unset SKIP_INSTALL - else - puts-step "No change in requirements detected, installing from cache" - fi - fi + if [[ ! -f "$CACHE_DIR/.heroku/requirements.txt" ]]; then + # This is a the first build of an app (or the build cache was cleared). Since there + # are no cached packages, we only need to store the requirements file for next time. + cp -R "$BUILD_DIR/requirements.txt" "$CACHE_DIR/.heroku/requirements.txt" + else + # IF there IS a cached directory, check for differences with the new one + if ! diff "$BUILD_DIR/requirements.txt" "$CACHE_DIR/.heroku/requirements.txt" &>/dev/null; then + puts-step "Requirements file has been changed, clearing cached dependencies" + # if there are any differences, clear the Python cache + # Installing Python over again does not take noticably more time + cp -R "$BUILD_DIR/requirements.txt" "$CACHE_DIR/.heroku/requirements.txt" + rm -rf .heroku/python + unset SKIP_INSTALL + else + puts-step "No change in requirements detected, installing from cache" + fi + fi fi if [[ -n "${SKIP_INSTALL}" ]]; then - puts-step "Using cached install of ${PYTHON_VERSION}" + puts-step "Using cached install of ${PYTHON_VERSION}" else - puts-step "Installing ${PYTHON_VERSION}" + puts-step "Installing ${PYTHON_VERSION}" - # Prepare destination directory. - mkdir -p .heroku/python + # Prepare destination directory. + mkdir -p .heroku/python - if ! curl --silent --show-error --fail --retry 3 --retry-connrefused --connect-timeout 10 "${PYTHON_URL}" | tar --zstd --extract --directory .heroku/python; then - # The Python version was confirmed to exist previously, so any failure here is due to - # a networking issue or archive/buildpack bug rather than the runtime not existing. - puts-warn "Failed to download/install ${PYTHON_VERSION}" - meta_set "failure_reason" "python-download" - exit 1 - fi + if ! curl --silent --show-error --fail --retry 3 --retry-connrefused --connect-timeout 10 "${PYTHON_URL}" | tar --zstd --extract --directory .heroku/python; then + # The Python version was confirmed to exist previously, so any failure here is due to + # a networking issue or archive/buildpack bug rather than the runtime not existing. 
+ puts-warn "Failed to download/install ${PYTHON_VERSION}" + meta_set "failure_reason" "python-download" + exit 1 + fi - # Record for future reference. - echo "$PYTHON_VERSION" > .heroku/python-version - echo "$STACK" > .heroku/python-stack + # Record for future reference. + echo "$PYTHON_VERSION" >.heroku/python-version + echo "$STACK" >.heroku/python-stack - hash -r + hash -r fi PIP_VERSION=$(get_requirement_version 'pip') @@ -186,12 +186,12 @@ BUNDLED_PIP_WHEEL_LIST=(.heroku/python/lib/python*/ensurepip/_bundled/pip-*.whl) BUNDLED_PIP_WHEEL="${BUNDLED_PIP_WHEEL_LIST[0]}" if [[ -z "${BUNDLED_PIP_WHEEL}" ]]; then - puts-warn "Failed to locate the bundled pip wheel" - meta_set "failure_reason" "bundled-pip-not-found" - exit 1 + puts-warn "Failed to locate the bundled pip wheel" + meta_set "failure_reason" "bundled-pip-not-found" + exit 1 fi /app/.heroku/python/bin/python "${BUNDLED_PIP_WHEEL}/pip" install --quiet --disable-pip-version-check --no-cache-dir \ - "pip==${PIP_VERSION}" "setuptools==${SETUPTOOLS_VERSION}" "wheel==${WHEEL_VERSION}" + "pip==${PIP_VERSION}" "setuptools==${SETUPTOOLS_VERSION}" "wheel==${WHEEL_VERSION}" hash -r diff --git a/bin/steps/sqlite3 b/bin/steps/sqlite3 index f21d606f..c7200ceb 100755 --- a/bin/steps/sqlite3 +++ b/bin/steps/sqlite3 @@ -4,77 +4,77 @@ # and the APT buildpack should be used if an app needs the sqlite CLI/headers. sqlite3_install() { - HEROKU_PYTHON_DIR="$1" - HEADERS_ONLY="$3" - - mkdir -p "$HEROKU_PYTHON_DIR" - - APT_CACHE_DIR="$HEROKU_PYTHON_DIR/apt/cache" - APT_STATE_DIR="$HEROKU_PYTHON_DIR/apt/state" - - mkdir -p "$APT_CACHE_DIR/archives/partial" - mkdir -p "$APT_STATE_DIR/lists/partial" - - APT_OPTIONS=( - "--option=debug::nolocking=true" - "--option=dir::cache=${APT_CACHE_DIR}" - "--option=dir::state=${APT_STATE_DIR}" - "--option=dir::etc::sourcelist=/etc/apt/sources.list" - ) - - apt-get "${APT_OPTIONS[@]}" update > /dev/null 2>&1 - if [[ -z "$HEADERS_ONLY" ]]; then - apt-get "${APT_OPTIONS[@]}" -y -d --reinstall install libsqlite3-dev sqlite3 > /dev/null 2>&1 - else - apt-get "${APT_OPTIONS[@]}" -y -d --reinstall install libsqlite3-dev - fi - - find "$APT_CACHE_DIR/archives/" -name "*.deb" -exec dpkg -x {} "$HEROKU_PYTHON_DIR/sqlite3/" \; - - mkdir -p "$HEROKU_PYTHON_DIR/include" - mkdir -p "$HEROKU_PYTHON_DIR/lib" - - # remove old sqlite3 libraries/binaries - find "$HEROKU_PYTHON_DIR/include/" -name "sqlite3*.h" -exec rm -f {} \; - find "$HEROKU_PYTHON_DIR/lib/" -name "libsqlite3.*" -exec rm -f {} \; - rm -f "$HEROKU_PYTHON_DIR/lib/pkgconfig/sqlite3.pc" - rm -f "$HEROKU_PYTHON_DIR/bin/sqlite3" - - # eg: `x86_64` or `aarch64` - GNU_ARCH=$(arch) - - # copy over sqlite3 headers & bins and setup linking against the stack image library - mv "$HEROKU_PYTHON_DIR/sqlite3/usr/include/"* "$HEROKU_PYTHON_DIR/include/" - mv "$HEROKU_PYTHON_DIR/sqlite3/usr/lib/${GNU_ARCH}-linux-gnu"/libsqlite3.*a "$HEROKU_PYTHON_DIR/lib/" - mkdir -p "$HEROKU_PYTHON_DIR/lib/pkgconfig" - # set the right prefix/lib directories - sed -e 's/prefix=\/usr/prefix=\/app\/.heroku\/python/' -e "s/\/${GNU_ARCH}-linux-gnu//" "$HEROKU_PYTHON_DIR/sqlite3/usr/lib/${GNU_ARCH}-linux-gnu/pkgconfig/sqlite3.pc" > "$HEROKU_PYTHON_DIR/lib/pkgconfig/sqlite3.pc" - # need to point the libsqlite3.so to the stack image library for /usr/bin/ld -lsqlite3 - SQLITE3_LIBFILE="/usr/lib/${GNU_ARCH}-linux-gnu/$(readlink -n "$HEROKU_PYTHON_DIR/sqlite3/usr/lib/${GNU_ARCH}-linux-gnu/libsqlite3.so")" - ln -s "$SQLITE3_LIBFILE" "$HEROKU_PYTHON_DIR/lib/libsqlite3.so" - if [[ -z "$HEADERS_ONLY" ]]; then - mv 
"$HEROKU_PYTHON_DIR/sqlite3/usr/bin"/* "$HEROKU_PYTHON_DIR/bin/" - fi - - # cleanup - rm -rf "$HEROKU_PYTHON_DIR/sqlite3/" - rm -rf "$HEROKU_PYTHON_DIR/apt/" + HEROKU_PYTHON_DIR="$1" + HEADERS_ONLY="$3" + + mkdir -p "$HEROKU_PYTHON_DIR" + + APT_CACHE_DIR="$HEROKU_PYTHON_DIR/apt/cache" + APT_STATE_DIR="$HEROKU_PYTHON_DIR/apt/state" + + mkdir -p "$APT_CACHE_DIR/archives/partial" + mkdir -p "$APT_STATE_DIR/lists/partial" + + APT_OPTIONS=( + "--option=debug::nolocking=true" + "--option=dir::cache=${APT_CACHE_DIR}" + "--option=dir::state=${APT_STATE_DIR}" + "--option=dir::etc::sourcelist=/etc/apt/sources.list" + ) + + apt-get "${APT_OPTIONS[@]}" update >/dev/null 2>&1 + if [[ -z "$HEADERS_ONLY" ]]; then + apt-get "${APT_OPTIONS[@]}" -y -d --reinstall install libsqlite3-dev sqlite3 >/dev/null 2>&1 + else + apt-get "${APT_OPTIONS[@]}" -y -d --reinstall install libsqlite3-dev + fi + + find "$APT_CACHE_DIR/archives/" -name "*.deb" -exec dpkg -x {} "$HEROKU_PYTHON_DIR/sqlite3/" \; + + mkdir -p "$HEROKU_PYTHON_DIR/include" + mkdir -p "$HEROKU_PYTHON_DIR/lib" + + # remove old sqlite3 libraries/binaries + find "$HEROKU_PYTHON_DIR/include/" -name "sqlite3*.h" -exec rm -f {} \; + find "$HEROKU_PYTHON_DIR/lib/" -name "libsqlite3.*" -exec rm -f {} \; + rm -f "$HEROKU_PYTHON_DIR/lib/pkgconfig/sqlite3.pc" + rm -f "$HEROKU_PYTHON_DIR/bin/sqlite3" + + # eg: `x86_64` or `aarch64` + GNU_ARCH=$(arch) + + # copy over sqlite3 headers & bins and setup linking against the stack image library + mv "$HEROKU_PYTHON_DIR/sqlite3/usr/include/"* "$HEROKU_PYTHON_DIR/include/" + mv "$HEROKU_PYTHON_DIR/sqlite3/usr/lib/${GNU_ARCH}-linux-gnu"/libsqlite3.*a "$HEROKU_PYTHON_DIR/lib/" + mkdir -p "$HEROKU_PYTHON_DIR/lib/pkgconfig" + # set the right prefix/lib directories + sed -e 's/prefix=\/usr/prefix=\/app\/.heroku\/python/' -e "s/\/${GNU_ARCH}-linux-gnu//" "$HEROKU_PYTHON_DIR/sqlite3/usr/lib/${GNU_ARCH}-linux-gnu/pkgconfig/sqlite3.pc" >"$HEROKU_PYTHON_DIR/lib/pkgconfig/sqlite3.pc" + # need to point the libsqlite3.so to the stack image library for /usr/bin/ld -lsqlite3 + SQLITE3_LIBFILE="/usr/lib/${GNU_ARCH}-linux-gnu/$(readlink -n "$HEROKU_PYTHON_DIR/sqlite3/usr/lib/${GNU_ARCH}-linux-gnu/libsqlite3.so")" + ln -s "$SQLITE3_LIBFILE" "$HEROKU_PYTHON_DIR/lib/libsqlite3.so" + if [[ -z "$HEADERS_ONLY" ]]; then + mv "$HEROKU_PYTHON_DIR/sqlite3/usr/bin"/* "$HEROKU_PYTHON_DIR/bin/" + fi + + # cleanup + rm -rf "$HEROKU_PYTHON_DIR/sqlite3/" + rm -rf "$HEROKU_PYTHON_DIR/apt/" } buildpack_sqlite3_install() { - puts-step "Installing SQLite3" - - # TODO: This never actual prints failure or even aborts the build, since - # the conditional disables `set -e` inside the called function: - # https://stackoverflow.com/q/19789102 - # ...plus whoever wrote this forgot the `exit 1` in the `else` anyway. - if sqlite3_install "$BUILD_DIR/.heroku/python" ; then - # mcount "success.python.sqlite3" - : - else - echo "Sqlite3 failed to install." - # mcount "failure.python.sqlite3" - fi - - mkdir -p "$CACHE_DIR/.heroku/" + puts-step "Installing SQLite3" + + # TODO: This never actual prints failure or even aborts the build, since + # the conditional disables `set -e` inside the called function: + # https://stackoverflow.com/q/19789102 + # ...plus whoever wrote this forgot the `exit 1` in the `else` anyway. + if sqlite3_install "$BUILD_DIR/.heroku/python"; then + # mcount "success.python.sqlite3" + : + else + echo "Sqlite3 failed to install." 
+ # mcount "failure.python.sqlite3" + fi + + mkdir -p "$CACHE_DIR/.heroku/" } diff --git a/bin/utils b/bin/utils index 5ce22d86..09961257 100755 --- a/bin/utils +++ b/bin/utils @@ -8,57 +8,56 @@ shopt -s nullglob source "${BUILDPACK_DIR}/vendor/buildpack-stdlib_v8.sh" if [[ "$(uname)" == "Darwin" ]]; then - sed() { command sed -l "$@"; } + sed() { command sed -l "$@"; } else - sed() { command sed -u "$@"; } + sed() { command sed -u "$@"; } fi # Syntax sugar. indent() { - sed "s/^/ /" + sed "s/^/ /" } - # Clean up pip output cleanup() { - sed -e 's/\.\.\.\+/.../g' | sed -e '/already satisfied/Id' | sed -e '/No files were found to uninstall/Id' | sed -e '/Overwriting/Id' | sed -e '/python executable/Id' | sed -e '/no previously-included files/Id' + sed -e 's/\.\.\.\+/.../g' | sed -e '/already satisfied/Id' | sed -e '/No files were found to uninstall/Id' | sed -e '/Overwriting/Id' | sed -e '/python executable/Id' | sed -e '/no previously-included files/Id' } # Buildpack Steps. puts-step() { - echo "-----> $*" + echo "-----> $*" } # Buildpack Warnings. puts-warn() { - echo " ! $*" + echo " ! $*" } # Does some serious copying. deep-cp() { - declare source="$1" target="$2" + declare source="$1" target="$2" - mkdir -p "$target" + mkdir -p "$target" - # cp doesn't like being called without source params, - # so make sure they expand to something first. - # subshell to avoid surprising caller with shopts. - ( - shopt -s nullglob dotglob - set -- "$source"/!(tmp|.|..) - [[ $# == 0 ]] || cp -a "$@" "$target" - ) + # cp doesn't like being called without source params, + # so make sure they expand to something first. + # subshell to avoid surprising caller with shopts. + ( + shopt -s nullglob dotglob + set -- "$source"/!(tmp|.|..) + [[ $# == 0 ]] || cp -a "$@" "$target" + ) } # Measure the size of the Python installation. measure-size() { - { du -s .heroku/python 2>/dev/null || echo 0; } | awk '{print $1}' + { du -s .heroku/python 2>/dev/null || echo 0; } | awk '{print $1}' } # Returns 0 if the specified module exists, otherwise returns 1. is_module_available() { - local module_name="${1}" - python -c "import sys, importlib.util; sys.exit(0 if importlib.util.find_spec('${module_name}') else 1)" + local module_name="${1}" + python -c "import sys, importlib.util; sys.exit(0 if importlib.util.find_spec('${module_name}') else 1)" } # The requirement versions are effectively buildpack constants, however, we want @@ -66,9 +65,9 @@ is_module_available() { # files. The requirements files contain contents like `package==1.2.3` (and not just # the package version) so we have to extract the version substring from it. get_requirement_version() { - local package_name="${1}" - local requirement - requirement=$(cat "${BUILDPACK_DIR}/requirements/${package_name}.txt") - local requirement_version="${requirement#"${package_name}=="}" - echo "${requirement_version}" + local package_name="${1}" + local requirement + requirement=$(cat "${BUILDPACK_DIR}/requirements/${package_name}.txt") + local requirement_version="${requirement#"${package_name}=="}" + echo "${requirement_version}" } diff --git a/bin/warnings b/bin/warnings index 0036f5cc..e3f7fba9 100755 --- a/bin/warnings +++ b/bin/warnings @@ -1,14 +1,14 @@ #!/usr/bin/env bash gdal-missing() { - if grep -qi 'Could not find gdal-config' "$WARNINGS_LOG"; then - echo - puts-warn "Hello! Package installation failed since the GDAL library was not found." 
- puts-warn "For GDAL, GEOS and PROJ support, use the Geo buildpack alongside the Python buildpack:" - puts-warn "https://github.com/heroku/heroku-geo-buildpack" - fi + if grep -qi 'Could not find gdal-config' "$WARNINGS_LOG"; then + echo + puts-warn "Hello! Package installation failed since the GDAL library was not found." + puts-warn "For GDAL, GEOS and PROJ support, use the Geo buildpack alongside the Python buildpack:" + puts-warn "https://github.com/heroku/heroku-geo-buildpack" + fi } show-warnings() { - gdal-missing + gdal-missing } diff --git a/builds/build_python_runtime.sh b/builds/build_python_runtime.sh index 88817225..e1c35c64 100755 --- a/builds/build_python_runtime.sh +++ b/builds/build_python_runtime.sh @@ -15,61 +15,61 @@ SRC_DIR="/tmp/src" UPLOAD_DIR="/tmp/upload" function abort() { - echo "Error: ${1}" >&2 - exit 1 + echo "Error: ${1}" >&2 + exit 1 } case "${STACK}" in - heroku-24) - SUPPORTED_PYTHON_VERSIONS=( - "3.10" - "3.11" - "3.12" - ) - ;; - heroku-22) - SUPPORTED_PYTHON_VERSIONS=( - "3.9" - "3.10" - "3.11" - "3.12" - ) - ;; - heroku-20) - SUPPORTED_PYTHON_VERSIONS=( - "3.8" - "3.9" - "3.10" - "3.11" - "3.12" - ) - ;; - *) - abort "Unsupported stack '${STACK}'!" - ;; + heroku-24) + SUPPORTED_PYTHON_VERSIONS=( + "3.10" + "3.11" + "3.12" + ) + ;; + heroku-22) + SUPPORTED_PYTHON_VERSIONS=( + "3.9" + "3.10" + "3.11" + "3.12" + ) + ;; + heroku-20) + SUPPORTED_PYTHON_VERSIONS=( + "3.8" + "3.9" + "3.10" + "3.11" + "3.12" + ) + ;; + *) + abort "Unsupported stack '${STACK}'!" + ;; esac if [[ ! " ${SUPPORTED_PYTHON_VERSIONS[*]} " == *" ${PYTHON_MAJOR_VERSION} "* ]]; then - abort "Python ${PYTHON_MAJOR_VERSION} is not supported on ${STACK}!" + abort "Python ${PYTHON_MAJOR_VERSION} is not supported on ${STACK}!" fi # The release keys can be found on https://www.python.org/downloads/ -> "OpenPGP Public Keys". case "${PYTHON_MAJOR_VERSION}" in - 3.12) - # https://github.com/Yhg1s.gpg - GPG_KEY_FINGERPRINT='7169605F62C751356D054A26A821E680E5FA6305' - ;; - 3.10|3.11) - # https://keybase.io/pablogsal/ - GPG_KEY_FINGERPRINT='A035C8C19219BA821ECEA86B64E628F8D684696D' - ;; - 3.8|3.9) - # https://keybase.io/ambv/ - GPG_KEY_FINGERPRINT='E3FF2839C048B25C084DEBE9B26995E310250568' - ;; - *) - abort "Unsupported Python version '${PYTHON_MAJOR_VERSION}'!" - ;; + 3.12) + # https://github.com/Yhg1s.gpg + GPG_KEY_FINGERPRINT='7169605F62C751356D054A26A821E680E5FA6305' + ;; + 3.10 | 3.11) + # https://keybase.io/pablogsal/ + GPG_KEY_FINGERPRINT='A035C8C19219BA821ECEA86B64E628F8D684696D' + ;; + 3.8 | 3.9) + # https://keybase.io/ambv/ + GPG_KEY_FINGERPRINT='E3FF2839C048B25C084DEBE9B26995E310250568' + ;; + *) + abort "Unsupported Python version '${PYTHON_MAJOR_VERSION}'!" + ;; esac echo "Building Python ${PYTHON_VERSION} for ${STACK} (${ARCH})..." @@ -94,59 +94,59 @@ cd "${SRC_DIR}" # for maximum compatibility / most battle-tested build configuration: # https://github.com/docker-library/python CONFIGURE_OPTS=( - # Explicitly set the target architecture rather than auto-detecting based on the host CPU. - # This only affects targets like i386 (for which we don't build), but we pass it anyway for - # completeness and parity with the Python Docker image builds. - "--build=$(dpkg-architecture --query DEB_BUILD_GNU_TYPE)" - # Support loadable extensions in the `_sqlite` extension module. - "--enable-loadable-sqlite-extensions" - # Enable recommended release build performance optimisations such as PGO. - "--enable-optimizations" - # Make autoconf's configure option validation more strict. 
- "--enable-option-checking=fatal" - # Install Python into `/tmp/python` rather than the default of `/usr/local`. - "--prefix=${INSTALL_DIR}" - # Skip running `ensurepip` as part of install, since the buildpack installs a curated - # version of pip itself (which ensures it's consistent across Python patch releases). - "--with-ensurepip=no" - # Build the `pyexpat` module using the `expat` library in the base image (which will - # automatically receive security updates), rather than CPython's vendored version. - "--with-system-expat" + # Explicitly set the target architecture rather than auto-detecting based on the host CPU. + # This only affects targets like i386 (for which we don't build), but we pass it anyway for + # completeness and parity with the Python Docker image builds. + "--build=$(dpkg-architecture --query DEB_BUILD_GNU_TYPE)" + # Support loadable extensions in the `_sqlite` extension module. + "--enable-loadable-sqlite-extensions" + # Enable recommended release build performance optimisations such as PGO. + "--enable-optimizations" + # Make autoconf's configure option validation more strict. + "--enable-option-checking=fatal" + # Install Python into `/tmp/python` rather than the default of `/usr/local`. + "--prefix=${INSTALL_DIR}" + # Skip running `ensurepip` as part of install, since the buildpack installs a curated + # version of pip itself (which ensures it's consistent across Python patch releases). + "--with-ensurepip=no" + # Build the `pyexpat` module using the `expat` library in the base image (which will + # automatically receive security updates), rather than CPython's vendored version. + "--with-system-expat" ) if [[ "${PYTHON_MAJOR_VERSION}" != 3.[8-9] ]]; then - CONFIGURE_OPTS+=( - # Shared builds are beneficial for a number of reasons: - # - Reduces the size of the build, since it avoids the duplication between - # the Python binary and the static library. - # - Permits use-cases that only work with the shared Python library, - # and not the static library (such as `pycall.rb` or `PyO3`). - # - More consistent with the official Python Docker images and other distributions. - # - # However, shared builds are slower unless `no-semantic-interposition`and LTO is used: - # https://fedoraproject.org/wiki/Changes/PythonNoSemanticInterpositionSpeedup - # https://github.com/python/cpython/issues/83161 - # - # It's only as of Python 3.10 that `no-semantic-interposition` is enabled by default, - # so we only use shared builds on Python 3.10+ to avoid needing to override the default - # compiler flags. - "--enable-shared" - "--with-lto" - # Counter-intuitively, the static library is still generated by default even when - # the shared library is enabled, so we disable it to reduce the build size. - # This option only exists for Python 3.10+. - "--without-static-libpython" - ) + CONFIGURE_OPTS+=( + # Shared builds are beneficial for a number of reasons: + # - Reduces the size of the build, since it avoids the duplication between + # the Python binary and the static library. + # - Permits use-cases that only work with the shared Python library, + # and not the static library (such as `pycall.rb` or `PyO3`). + # - More consistent with the official Python Docker images and other distributions. 
+ # + # However, shared builds are slower unless `no-semantic-interposition`and LTO is used: + # https://fedoraproject.org/wiki/Changes/PythonNoSemanticInterpositionSpeedup + # https://github.com/python/cpython/issues/83161 + # + # It's only as of Python 3.10 that `no-semantic-interposition` is enabled by default, + # so we only use shared builds on Python 3.10+ to avoid needing to override the default + # compiler flags. + "--enable-shared" + "--with-lto" + # Counter-intuitively, the static library is still generated by default even when + # the shared library is enabled, so we disable it to reduce the build size. + # This option only exists for Python 3.10+. + "--without-static-libpython" + ) fi if [[ "${PYTHON_MAJOR_VERSION}" == "3.11" || "${PYTHON_MAJOR_VERSION}" == "3.12" ]]; then - CONFIGURE_OPTS+=( - # Skip building the test modules, since we remove them after the build anyway. - # This feature was added in Python 3.10+, however it wasn't until Python 3.11 - # that compatibility issues between it and PGO were fixed: - # https://github.com/python/cpython/pull/29315 - "--disable-test-modules" - ) + CONFIGURE_OPTS+=( + # Skip building the test modules, since we remove them after the build anyway. + # This feature was added in Python 3.10+, however it wasn't until Python 3.11 + # that compatibility issues between it and PGO were fixed: + # https://github.com/python/cpython/pull/29315 + "--disable-test-modules" + ) fi ./configure "${CONFIGURE_OPTS[@]}" @@ -163,28 +163,28 @@ fi # We only use `dpkg-buildflags` for Python versions where we build in shared mode (Python 3.9+), # since some of the options it enables interferes with the stripping of static libraries. if [[ "${PYTHON_MAJOR_VERSION}" == 3.[8-9] ]]; then - EXTRA_CFLAGS='' - LDFLAGS='-Wl,--strip-all' + EXTRA_CFLAGS='' + LDFLAGS='-Wl,--strip-all' else - EXTRA_CFLAGS="$(dpkg-buildflags --get CFLAGS)" - LDFLAGS="$(dpkg-buildflags --get LDFLAGS) -Wl,--strip-all" + EXTRA_CFLAGS="$(dpkg-buildflags --get CFLAGS)" + LDFLAGS="$(dpkg-buildflags --get LDFLAGS) -Wl,--strip-all" fi make -j "$(nproc)" "EXTRA_CFLAGS=${EXTRA_CFLAGS}" "LDFLAGS=${LDFLAGS}" make install if [[ "${PYTHON_MAJOR_VERSION}" == 3.[8-9] ]]; then - # On older versions of Python we're still building the static library, which has to be - # manually stripped since the linker stripping enabled in LDFLAGS doesn't cover them. - # We're using `--strip-unneeded` since `--strip-all` would remove the `.symtab` section - # that is required for static libraries to be able to be linked. - # `find` is used since there are multiple copies of the static library in version-specific - # locations, eg: - # - `lib/libpython3.9.a` - # - `lib/python3.9/config-3.9-x86_64-linux-gnu/libpython3.9.a` - find "${INSTALL_DIR}" -type f -name '*.a' -print -exec strip --strip-unneeded '{}' + + # On older versions of Python we're still building the static library, which has to be + # manually stripped since the linker stripping enabled in LDFLAGS doesn't cover them. + # We're using `--strip-unneeded` since `--strip-all` would remove the `.symtab` section + # that is required for static libraries to be able to be linked. + # `find` is used since there are multiple copies of the static library in version-specific + # locations, eg: + # - `lib/libpython3.9.a` + # - `lib/python3.9/config-3.9-x86_64-linux-gnu/libpython3.9.a` + find "${INSTALL_DIR}" -type f -name '*.a' -print -exec strip --strip-unneeded '{}' + elif ! 
find "${INSTALL_DIR}" -type f -name '*.a' -print -exec false '{}' +; then - abort "Unexpected static libraries found!" + abort "Unexpected static libraries found!" fi # Remove unneeded test directories, similar to the official Docker Python images: diff --git a/builds/test_python_runtime.sh b/builds/test_python_runtime.sh index 258600a3..8dfce8d9 100755 --- a/builds/test_python_runtime.sh +++ b/builds/test_python_runtime.sh @@ -22,27 +22,30 @@ tar --zstd --extract --verbose --file "${ARCHIVE_FILEPATH}" --directory "${INSTA # Check that all dynamically linked libraries exist in the run image (since it has fewer packages than the build image). LDD_OUTPUT=$(find "${INSTALL_DIR}" -type f,l \( -name 'python3' -o -name '*.so*' \) -exec ldd '{}' +) if grep 'not found' <<<"${LDD_OUTPUT}" | sort --unique; then - echo "The above dynamically linked libraries were not found!" - exit 1 + echo "The above dynamically linked libraries were not found!" + exit 1 fi # Check that optional and/or system library dependent stdlib modules were built. optional_stdlib_modules=( - _uuid - bz2 - ctypes - curses - dbm.gnu - dbm.ndbm - decimal - lzma - readline - sqlite3 - ssl - xml.parsers.expat - zlib + _uuid + bz2 + ctypes + curses + dbm.gnu + dbm.ndbm + decimal + lzma + readline + sqlite3 + ssl + xml.parsers.expat + zlib ) -if ! "${INSTALL_DIR}/bin/python3" -c "import $(IFS=, ; echo "${optional_stdlib_modules[*]}")"; then - echo "The above optional stdlib module failed to import! Check the compile logs to see if it was skipped due to missing libraries/headers." - exit 1 +if ! "${INSTALL_DIR}/bin/python3" -c "import $( + IFS=, + echo "${optional_stdlib_modules[*]}" +)"; then + echo "The above optional stdlib module failed to import! Check the compile logs to see if it was skipped due to missing libraries/headers." + exit 1 fi diff --git a/etc/publish.sh b/etc/publish.sh index c7801efe..ea07f296 100755 --- a/etc/publish.sh +++ b/etc/publish.sh @@ -9,9 +9,9 @@ newVersion="v$((curVersion + 1))" read -r -p "Deploy as version: $newVersion [y/n]? 
" choice case "$choice" in - y|Y ) echo "";; - n|N ) exit 0;; - * ) exit 1;; + y | Y) echo "" ;; + n | N) exit 0 ;; + *) exit 1 ;; esac git fetch origin diff --git a/lib/kvstore.sh b/lib/kvstore.sh index bd5c6fad..17e2dd0d 100644 --- a/lib/kvstore.sh +++ b/lib/kvstore.sh @@ -3,73 +3,73 @@ # Taken from: https://github.com/heroku/heroku-buildpack-nodejs/blob/main/lib/kvstore.sh kv_create() { - local f="${1}" - mkdir -p "$(dirname "${f}")" - touch "${f}" + local f="${1}" + mkdir -p "$(dirname "${f}")" + touch "${f}" } kv_clear() { - local f="${1}" - echo "" >"${f}" + local f="${1}" + echo "" >"${f}" } kv_set() { - if [[ $# -eq 3 ]]; then - local f="${1}" - if [[ -f "${f}" ]]; then - echo "${2}=${3}" >>"${f}" - fi - fi + if [[ $# -eq 3 ]]; then + local f="${1}" + if [[ -f "${f}" ]]; then + echo "${2}=${3}" >>"${f}" + fi + fi } kv_get() { - if [[ $# -eq 2 ]]; then - local f="${1}" - if [[ -f "${f}" ]]; then - grep "^${2}=" "${f}" | sed -e "s/^${2}=//" | tail -n 1 - fi - fi + if [[ $# -eq 2 ]]; then + local f="${1}" + if [[ -f "${f}" ]]; then + grep "^${2}=" "${f}" | sed -e "s/^${2}=//" | tail -n 1 + fi + fi } # get the value, but wrap it in quotes if it contains a space kv_get_escaped() { - local value - value=$(kv_get "${1}" "${2}") - if [[ "${value}" =~ [[:space:]]+ ]]; then - echo "\"${value}\"" - else - echo "${value}" - fi + local value + value=$(kv_get "${1}" "${2}") + if [[ "${value}" =~ [[:space:]]+ ]]; then + echo "\"${value}\"" + else + echo "${value}" + fi } kv_keys() { - local f="${1}" - local keys=() + local f="${1}" + local keys=() - if [[ -f "${f}" ]]; then - # Iterate over each line, splitting on the '=' character - # - # The || [[ -n "${key}" ]] statement addresses an issue with reading the last line - # of a file when there is no newline at the end. This will not happen if the file - # is created with this module, but can happen if it is written by hand. - # See: https://stackoverflow.com/questions/12916352/shell-script-read-missing-last-line - while IFS="=" read -r key value || [[ -n "${key}" ]]; do - # if there are any empty lines in the store, skip them - if [[ -n "${key}" ]]; then - keys+=("${key}") - fi - done <"${f}" + if [[ -f "${f}" ]]; then + # Iterate over each line, splitting on the '=' character + # + # The || [[ -n "${key}" ]] statement addresses an issue with reading the last line + # of a file when there is no newline at the end. This will not happen if the file + # is created with this module, but can happen if it is written by hand. 
+ # See: https://stackoverflow.com/questions/12916352/shell-script-read-missing-last-line + while IFS="=" read -r key value || [[ -n "${key}" ]]; do + # if there are any empty lines in the store, skip them + if [[ -n "${key}" ]]; then + keys+=("${key}") + fi + done <"${f}" - echo "${keys[@]}" | tr ' ' '\n' | sort -u - fi + echo "${keys[@]}" | tr ' ' '\n' | sort -u + fi } kv_list() { - local f="${1}" + local f="${1}" - kv_keys "${f}" | tr ' ' '\n' | while read -r key; do - if [[ -n "${key}" ]]; then - echo "${key}=$(kv_get_escaped "${f}" "${key}")" - fi - done + kv_keys "${f}" | tr ' ' '\n' | while read -r key; do + if [[ -n "${key}" ]]; then + echo "${key}=$(kv_get_escaped "${f}" "${key}")" + fi + done } diff --git a/lib/metadata.sh b/lib/metadata.sh index 20aab3da..7d2a39b7 100644 --- a/lib/metadata.sh +++ b/lib/metadata.sh @@ -10,47 +10,47 @@ PREVIOUS_BUILD_DATA_FILE="" # Must be called before you can use any other methods meta_init() { - local cache_dir="${1}" - local buildpack_name="${2}" - BUILD_DATA_FILE="${cache_dir}/build-data/${buildpack_name}" - PREVIOUS_BUILD_DATA_FILE="${cache_dir}/build-data/${buildpack_name}-prev" + local cache_dir="${1}" + local buildpack_name="${2}" + BUILD_DATA_FILE="${cache_dir}/build-data/${buildpack_name}" + PREVIOUS_BUILD_DATA_FILE="${cache_dir}/build-data/${buildpack_name}-prev" } # Moves the data from the last build into the correct place, and clears the store # This should be called after meta_init in bin/compile meta_setup() { - # if the file already exists because it's from the last build, save it - if [[ -f "${BUILD_DATA_FILE}" ]]; then - cp "${BUILD_DATA_FILE}" "${PREVIOUS_BUILD_DATA_FILE}" - fi + # if the file already exists because it's from the last build, save it + if [[ -f "${BUILD_DATA_FILE}" ]]; then + cp "${BUILD_DATA_FILE}" "${PREVIOUS_BUILD_DATA_FILE}" + fi - kv_create "${BUILD_DATA_FILE}" - kv_clear "${BUILD_DATA_FILE}" + kv_create "${BUILD_DATA_FILE}" + kv_clear "${BUILD_DATA_FILE}" } # Force removal of exiting data file state. This is mostly useful during testing and not # expected to be used during buildpack execution. 
meta_force_clear() { - [[ -f "${BUILD_DATA_FILE}" ]] && rm "${BUILD_DATA_FILE}" - [[ -f "${PREVIOUS_BUILD_DATA_FILE}" ]] && rm "${PREVIOUS_BUILD_DATA_FILE}" + [[ -f "${BUILD_DATA_FILE}" ]] && rm "${BUILD_DATA_FILE}" + [[ -f "${PREVIOUS_BUILD_DATA_FILE}" ]] && rm "${PREVIOUS_BUILD_DATA_FILE}" } meta_get() { - kv_get "${BUILD_DATA_FILE}" "${1}" + kv_get "${BUILD_DATA_FILE}" "${1}" } meta_set() { - kv_set "${BUILD_DATA_FILE}" "${1}" "${2}" + kv_set "${BUILD_DATA_FILE}" "${1}" "${2}" } # Similar to mtime from buildpack-stdlib meta_time() { - local key="${1}" - local start="${2}" - local end="${3:-$(nowms)}" - local time - time="$(echo "${start}" "${end}" | awk '{ printf "%.3f", ($2 - $1)/1000 }')" - kv_set "${BUILD_DATA_FILE}" "${key}" "${time}" + local key="${1}" + local start="${2}" + local end="${3:-$(nowms)}" + local time + time="$(echo "${start}" "${end}" | awk '{ printf "%.3f", ($2 - $1)/1000 }')" + kv_set "${BUILD_DATA_FILE}" "${key}" "${time}" } # Retrieve a value from a previous build if it exists @@ -60,13 +60,13 @@ meta_time() { # - deployed with a new major version of Node # - etc meta_prev_get() { - kv_get "${PREVIOUS_BUILD_DATA_FILE}" "${1}" + kv_get "${PREVIOUS_BUILD_DATA_FILE}" "${1}" } log_meta_data() { - # print all values on one line in logfmt format - # https://brandur.org/logfmt - # the echo call ensures that all values are printed on a single line - # shellcheck disable=SC2005 disable=SC2046 - echo $(kv_list "${BUILD_DATA_FILE}") + # print all values on one line in logfmt format + # https://brandur.org/logfmt + # the echo call ensures that all values are printed on a single line + # shellcheck disable=SC2005 disable=SC2046 + echo $(kv_list "${BUILD_DATA_FILE}") } diff --git a/spec/fixtures/hooks/bin/print-env-vars.sh b/spec/fixtures/hooks/bin/print-env-vars.sh index dbd598a5..af6afded 100755 --- a/spec/fixtures/hooks/bin/print-env-vars.sh +++ b/spec/fixtures/hooks/bin/print-env-vars.sh @@ -3,8 +3,8 @@ set -euo pipefail printenv | sort \ - | grep -vE '^(_|BUILDPACK_LOG_FILE|DYNO|OLDPWD|REQUEST_ID|SHLVL)=' \ - | sed --regexp-extended \ - --expression 's#(=/tmp/build_)[^:/]+#\1#' \ - --expression 's#^(ENV_DIR=/tmp/).*#\1...#' \ - --expression 's#^(SOURCE_VERSION=).*#\1...#' + | grep -vE '^(_|BUILDPACK_LOG_FILE|DYNO|OLDPWD|REQUEST_ID|SHLVL)=' \ + | sed --regexp-extended \ + --expression 's#(=/tmp/build_)[^:/]+#\1#' \ + --expression 's#^(ENV_DIR=/tmp/).*#\1...#' \ + --expression 's#^(SOURCE_VERSION=).*#\1...#' diff --git a/vendor/WEB_CONCURRENCY.sh b/vendor/WEB_CONCURRENCY.sh index d6cc1a23..c4ea7209 100755 --- a/vendor/WEB_CONCURRENCY.sh +++ b/vendor/WEB_CONCURRENCY.sh @@ -25,43 +25,43 @@ # exit on error, have to use return not exit, and returning non-zero doesn't have an effect. function detect_memory_limit_in_mb() { - local memory_limit_file='/sys/fs/cgroup/memory/memory.limit_in_bytes' + local memory_limit_file='/sys/fs/cgroup/memory/memory.limit_in_bytes' - # This memory limits file only exists on Heroku, or when using cgroups v1 (Docker < 20.10). - if [[ -f "${memory_limit_file}" ]]; then - local memory_limit_in_mb=$(($(cat "${memory_limit_file}") / 1048576)) + # This memory limits file only exists on Heroku, or when using cgroups v1 (Docker < 20.10). + if [[ -f "${memory_limit_file}" ]]; then + local memory_limit_in_mb=$(($(cat "${memory_limit_file}") / 1048576)) - # Ignore values above 1TB RAM, since when using cgroups v1 the limits file reports a - # bogus value of thousands of TB RAM when there is no container memory limit set. 
- if ((memory_limit_in_mb <= 1048576)); then - echo "${memory_limit_in_mb}" - return 0 - fi - fi + # Ignore values above 1TB RAM, since when using cgroups v1 the limits file reports a + # bogus value of thousands of TB RAM when there is no container memory limit set. + if ((memory_limit_in_mb <= 1048576)); then + echo "${memory_limit_in_mb}" + return 0 + fi + fi - return 1 + return 1 } function output() { - # Only display log output for web dynos, to prevent breaking one-off dyno scripting use-cases, - # and to prevent confusion from messages about WEB_CONCURRENCY in the logs of non-web workers. - # (We still actually set the env vars for all dyno types for consistency and easier debugging.) - if [[ "${DYNO:-}" == web.* ]]; then - echo "Python buildpack: $*" >&2 - fi + # Only display log output for web dynos, to prevent breaking one-off dyno scripting use-cases, + # and to prevent confusion from messages about WEB_CONCURRENCY in the logs of non-web workers. + # (We still actually set the env vars for all dyno types for consistency and easier debugging.) + if [[ "${DYNO:-}" == web.* ]]; then + echo "Python buildpack: $*" >&2 + fi } if ! available_memory_in_mb=$(detect_memory_limit_in_mb); then - # This should never occur on Heroku, but will be common for non-Heroku environments such as Dokku. - output "Couldn't determine available memory. Skipping automatic configuration of WEB_CONCURRENCY." - return 0 + # This should never occur on Heroku, but will be common for non-Heroku environments such as Dokku. + output "Couldn't determine available memory. Skipping automatic configuration of WEB_CONCURRENCY." + return 0 fi if ! cpu_cores=$(nproc); then - # This should never occur in practice, since this buildpack only supports being run on our base - # images, and nproc is installed in all of them. - output "Couldn't determine number of CPU cores. Skipping automatic configuration of WEB_CONCURRENCY." - return 0 + # This should never occur in practice, since this buildpack only supports being run on our base + # images, and nproc is installed in all of them. + output "Couldn't determine number of CPU cores. Skipping automatic configuration of WEB_CONCURRENCY." + return 0 fi output "Detected ${available_memory_in_mb} MB available memory and ${cpu_cores} CPU cores." @@ -71,25 +71,25 @@ output "Detected ${available_memory_in_mb} MB available memory and ${cpu_cores} export DYNO_RAM="${available_memory_in_mb}" if [[ -v WEB_CONCURRENCY ]]; then - output "Skipping automatic configuration of WEB_CONCURRENCY since it's already set." - return 0 + output "Skipping automatic configuration of WEB_CONCURRENCY since it's already set." + return 0 fi minimum_memory_per_process_in_mb=256 # Prevents WEB_CONCURRENCY being set to zero if the environment is extremely memory constrained. if ((available_memory_in_mb < minimum_memory_per_process_in_mb)); then - max_concurrency_for_available_memory=1 + max_concurrency_for_available_memory=1 else - max_concurrency_for_available_memory=$((available_memory_in_mb / minimum_memory_per_process_in_mb)) + max_concurrency_for_available_memory=$((available_memory_in_mb / minimum_memory_per_process_in_mb)) fi max_concurrency_for_cpu_cores=$((cpu_cores * 2 + 1)) if ((max_concurrency_for_available_memory < max_concurrency_for_cpu_cores)); then - export WEB_CONCURRENCY="${max_concurrency_for_available_memory}" - output "Defaulting WEB_CONCURRENCY to ${WEB_CONCURRENCY} based on the available memory." 
+ export WEB_CONCURRENCY="${max_concurrency_for_available_memory}" + output "Defaulting WEB_CONCURRENCY to ${WEB_CONCURRENCY} based on the available memory." else - export WEB_CONCURRENCY="${max_concurrency_for_cpu_cores}" - output "Defaulting WEB_CONCURRENCY to ${WEB_CONCURRENCY} based on the number of CPU cores." + export WEB_CONCURRENCY="${max_concurrency_for_cpu_cores}" + output "Defaulting WEB_CONCURRENCY to ${WEB_CONCURRENCY} based on the number of CPU cores." fi diff --git a/vendor/buildpack-stdlib_v8.sh b/vendor/buildpack-stdlib_v8.sh index 69e0e703..b06e729c 100755 --- a/vendor/buildpack-stdlib_v8.sh +++ b/vendor/buildpack-stdlib_v8.sh @@ -8,55 +8,55 @@ # Buildpack Steps. puts_step() { - if [[ "$*" == "-" ]]; then - read -r output - else - output=$* - fi - echo -e "\\e[1m\\e[36m=== $output\\e[0m" - unset output + if [[ "$*" == "-" ]]; then + read -r output + else + output=$* + fi + echo -e "\\e[1m\\e[36m=== $output\\e[0m" + unset output } # Buildpack Error. puts_error() { - if [[ "$*" == "-" ]]; then - read -r output - else - output=$* - fi - echo -e "\\e[1m\\e[31m=!= $output\\e[0m" + if [[ "$*" == "-" ]]; then + read -r output + else + output=$* + fi + echo -e "\\e[1m\\e[31m=!= $output\\e[0m" } # Buildpack Warning. puts_warn() { - if [[ "$*" == "-" ]]; then - read -r output - else - output=$* - fi - echo -e "\\e[1m\\e[33m=!= $output\\e[0m" + if [[ "$*" == "-" ]]; then + read -r output + else + output=$* + fi + echo -e "\\e[1m\\e[33m=!= $output\\e[0m" } # Is verbose set? is_verbose() { - if [[ -n $BUILDPACK_VERBOSE ]]; then - return 0 - else - return 1 - fi + if [[ -n $BUILDPACK_VERBOSE ]]; then + return 0 + else + return 1 + fi } # Buildpack Verbose. puts_verbose() { - if is_verbose; then - if [[ "$*" == "-" ]]; then - read -r output - else - output=$* - fi - echo "$output" - unset output - fi + if is_verbose; then + if [[ "$*" == "-" ]]; then + read -r output + else + output=$* + fi + echo "$output" + unset output + fi } # Buildpack Utilities @@ -65,50 +65,50 @@ puts_verbose() { # Usage: $ set-env key value # NOTICE: Expects PROFILE_PATH & EXPORT_PATH to be set! set_env() { - # TODO: automatically create profile path directory if it doesn't exist. - echo "export $1=$2" >> "$PROFILE_PATH" - echo "export $1=$2" >> "$EXPORT_PATH" + # TODO: automatically create profile path directory if it doesn't exist. + echo "export $1=$2" >>"$PROFILE_PATH" + echo "export $1=$2" >>"$EXPORT_PATH" } # Usage: $ set-default-env key value # NOTICE: Expects PROFILE_PATH & EXPORT_PATH to be set! set_default_env() { - echo "export $1=\${$1:-$2}" >> "$PROFILE_PATH" - echo "export $1=\${$1:-$2}" >> "$EXPORT_PATH" + echo "export $1=\${$1:-$2}" >>"$PROFILE_PATH" + echo "export $1=\${$1:-$2}" >>"$EXPORT_PATH" } # Usage: $ un-set-env key # NOTICE: Expects PROFILE_PATH to be set! un_set_env() { - echo "unset $1" >> "$PROFILE_PATH" + echo "unset $1" >>"$PROFILE_PATH" } # Usage: $ _env-blacklist pattern # Outputs a regex of default blacklist env vars. _env_blacklist() { - local regex=${1:-''} - if [[ -n "$regex" ]]; then - regex="|$regex" - fi - echo "^(PATH|CPATH|CPPATH|LD_PRELOAD|LIBRARY_PATH|LD_LIBRARY_PATH|PYTHONHOME$regex)$" + local regex=${1:-''} + if [[ -n "$regex" ]]; then + regex="|$regex" + fi + echo "^(PATH|CPATH|CPPATH|LD_PRELOAD|LIBRARY_PATH|LD_LIBRARY_PATH|PYTHONHOME$regex)$" } # Usage: $ export-env ENV_DIR WHITELIST BLACKLIST # Exports the environment variables defined in the given directory. 
export_env() { - local env_dir=${1:-$ENV_DIR} - local whitelist=${2:-''} - local blacklist - blacklist="$(_env_blacklist "$3")" - if [[ -d "$env_dir" ]]; then - # Environment variable names won't contain characters affected by: - # shellcheck disable=SC2045 - for e in $(ls "$env_dir"); do - echo "$e" | grep -E "$whitelist" | grep -qvE "$blacklist" && - export "$e=$(cat "$env_dir/$e")" - : - done - fi + local env_dir=${1:-$ENV_DIR} + local whitelist=${2:-''} + local blacklist + blacklist="$(_env_blacklist "$3")" + if [[ -d "$env_dir" ]]; then + # Environment variable names won't contain characters affected by: + # shellcheck disable=SC2045 + for e in $(ls "$env_dir"); do + echo "$e" | grep -E "$whitelist" | grep -qvE "$blacklist" \ + && export "$e=$(cat "$env_dir/$e")" + : + done + fi } # Usage: $ sub-env command @@ -118,16 +118,16 @@ export_env() { # WHITELIST=${2:-''} # BLACKLIST=${3:-'^(GIT_DIR|PYTHONHOME|LD_LIBRARY_PATH|LIBRARY_PATH|PATH)$'} sub_env() { - ( - # TODO: Fix https://github.com/heroku/buildpack-stdlib/issues/37 - # shellcheck disable=SC2153 - export_env "$ENV_DIR" "$WHITELIST" "$BLACKLIST" + ( + # TODO: Fix https://github.com/heroku/buildpack-stdlib/issues/37 + # shellcheck disable=SC2153 + export_env "$ENV_DIR" "$WHITELIST" "$BLACKLIST" - "$@" - ) + "$@" + ) } # Returns the current time, in milliseconds. nowms() { - date +%s%3N + date +%s%3N }
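
For readers skimming the reformat-only hunks above, here is a minimal standalone sketch of the WEB_CONCURRENCY defaulting heuristic from vendor/WEB_CONCURRENCY.sh: the default is capped both by available memory (roughly 256 MB per process) and by CPU cores (2 * cores + 1), and the smaller cap wins. The input values below are hypothetical examples for illustration, not values read from cgroups or nproc.

#!/usr/bin/env bash
set -euo pipefail

# Hypothetical inputs for illustration only.
available_memory_in_mb=512
cpu_cores=8

minimum_memory_per_process_in_mb=256

# Memory cap: at least 1 process, otherwise memory divided by 256 MB.
if ((available_memory_in_mb < minimum_memory_per_process_in_mb)); then
	max_for_memory=1
else
	max_for_memory=$((available_memory_in_mb / minimum_memory_per_process_in_mb))
fi

# CPU cap: the common "2 * cores + 1" worker formula.
max_for_cpu=$((cpu_cores * 2 + 1))

# The smaller of the two caps becomes the default.
if ((max_for_memory < max_for_cpu)); then
	echo "WEB_CONCURRENCY=${max_for_memory} (memory-bound)"
else
	echo "WEB_CONCURRENCY=${max_for_cpu} (CPU-bound)"
fi

With these example inputs the sketch prints WEB_CONCURRENCY=2, since 512 / 256 = 2 is smaller than 2 * 8 + 1 = 17.

Similarly, a short usage sketch of the lib/kvstore.sh helpers, assuming the script is sourced from the buildpack repo root; the store file and the python_version key are made-up example values. The store is append-only, so kv_get returns the most recently written value for a key via tail -n 1.

#!/usr/bin/env bash
set -euo pipefail

# Assumes the working directory is the buildpack repo root.
source lib/kvstore.sh

store="$(mktemp)"
kv_create "${store}"
kv_set "${store}" python_version 3.12.3
kv_set "${store}" python_version 3.12.4 # a later write shadows the earlier one
kv_get "${store}" python_version        # prints: 3.12.4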