diff --git a/.authors.yml b/.authors.yml index 89d9a7ae00..db03794b80 100644 --- a/.authors.yml +++ b/.authors.yml @@ -612,7 +612,7 @@ first_commit: 2015-08-30 06:44:37 - name: Marcel Bargull email: marcel.bargull@udo.edu - num_commits: 85 + num_commits: 88 first_commit: 2016-09-26 11:45:54 github: mbargull alternate_emails: @@ -1202,7 +1202,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 178 + num_commits: 204 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1240,7 +1240,7 @@ github: pre-commit-ci[bot] aliases: - pre-commit-ci[bot] - num_commits: 64 + num_commits: 71 first_commit: 2021-11-20 01:47:17 - name: Jacob Walls email: jacobtylerwalls@gmail.com @@ -1251,7 +1251,7 @@ github: beeankha alternate_emails: - beeankha@gmail.com - num_commits: 23 + num_commits: 27 first_commit: 2022-01-19 16:40:06 - name: Conda Bot email: 18747875+conda-bot@users.noreply.github.com @@ -1262,7 +1262,7 @@ alternate_emails: - ad-team+condabot@anaconda.com - 18747875+conda-bot@users.noreply.github.com - num_commits: 96 + num_commits: 53 first_commit: 2022-01-17 18:09:22 - name: Uwe L. 
Korn email: xhochy@users.noreply.github.com @@ -1310,7 +1310,7 @@ - name: dependabot[bot] email: 49699333+dependabot[bot]@users.noreply.github.com github: dependabot[bot] - num_commits: 4 + num_commits: 19 first_commit: 2022-05-31 04:34:40 - name: Serhii Kupriienko email: 79282962+skupr-anaconda@users.noreply.github.com @@ -1327,7 +1327,7 @@ - name: Jaime Rodríguez-Guerra email: jaimergp@users.noreply.github.com github: jaimergp - num_commits: 10 + num_commits: 13 first_commit: 2022-11-02 19:34:51 - name: Dave Clements email: tnabtaf@gmail.com @@ -1439,3 +1439,18 @@ num_commits: 1 first_commit: 2024-02-06 11:43:45 github: finnagin +- name: Justin Wood (Callek) + email: callek@gmail.com + num_commits: 1 + first_commit: 2024-04-29 16:21:41 + github: Callek +- name: Tobias Fischer + email: info@tobiasfischer.info + num_commits: 1 + first_commit: 2024-03-25 13:13:16 + github: Tobias-Fischer +- name: Yannik Tausch + email: dev@ytausch.de + num_commits: 5 + first_commit: 2024-03-19 18:09:38 + github: ytausch diff --git a/.github/workflows/builds-review.yaml b/.github/workflows/builds-review.yaml index d789e536c6..4b39f2acba 100644 --- a/.github/workflows/builds-review.yaml +++ b/.github/workflows/builds-review.yaml @@ -17,7 +17,9 @@ jobs: include: - runner: ubuntu-latest subdir: linux-64 - - runner: macos-latest + - runner: macos-14 + subdir: osx-arm64 + - runner: macos-13 subdir: osx-64 - runner: windows-latest subdir: win-64 @@ -46,14 +48,14 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: ref: ${{ github.ref }} clean: true fetch-depth: 0 - name: Create and upload review build - uses: conda/actions/canary-release@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 + uses: conda/actions/canary-release@976289d0cfd85139701b26ddd133abdd025a7b5f 
# v24.5.0 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index 07c7f75b12..66df3b0f38 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -18,7 +18,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check CLA - uses: conda/actions/check-cla@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 + uses: conda/actions/check-cla@976289d0cfd85139701b26ddd133abdd025a7b5f # v24.5.0 with: # [required] # A token with ability to comment, label, and modify the commit status diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 03b32fc111..11bd69c67b 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -22,7 +22,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: fetch-depth: 0 - name: Setup diff --git a/.github/workflows/issues.yml b/.github/workflows/issues.yml index 7a114d6d41..634bf13e4f 100644 --- a/.github/workflows/issues.yml +++ b/.github/workflows/issues.yml @@ -23,12 +23,12 @@ jobs: runs-on: ubuntu-latest steps: # remove [pending::feedback] - - uses: actions-ecosystem/action-remove-labels@2ce5d41b4b6aa8503e285553f75ed56e0a40bae0 + - uses: actions-ecosystem/action-remove-labels@2ce5d41b4b6aa8503e285553f75ed56e0a40bae0 # v1.3.0 with: labels: ${{ env.FEEDBACK_LBL }} github_token: ${{ secrets.PROJECT_TOKEN }} # add [pending::support], if still open - - uses: actions-ecosystem/action-add-labels@18f1af5e3544586314bbe15c0273249c770b2daf + - uses: actions-ecosystem/action-add-labels@18f1af5e3544586314bbe15c0273249c770b2daf # v1.1.3 if: github.event.issue.state == 'open' with: labels: ${{ env.SUPPORT_LBL }} diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index f13985fb0a..4741235b7a 100644 --- a/.github/workflows/labels.yml 
+++ b/.github/workflows/labels.yml @@ -19,20 +19,20 @@ jobs: GLOBAL: https://raw.githubusercontent.com/conda/infra/main/.github/global.yml LOCAL: .github/labels.yml steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 - id: has_local - uses: andstor/file-existence-action@076e0072799f4942c8bc574a82233e1e4d13e9d6 + uses: andstor/file-existence-action@076e0072799f4942c8bc574a82233e1e4d13e9d6 # v3.0.0 with: files: ${{ env.LOCAL }} - name: Global Only - uses: EndBug/label-sync@52074158190acb45f3077f9099fea818aa43f97a + uses: EndBug/label-sync@52074158190acb45f3077f9099fea818aa43f97a # v2.3.3 if: steps.has_local.outputs.files_exists == 'false' with: config-file: ${{ env.GLOBAL }} delete-other-labels: true dry-run: ${{ github.event.inputs.dryrun }} - name: Global & Local - uses: EndBug/label-sync@52074158190acb45f3077f9099fea818aa43f97a + uses: EndBug/label-sync@52074158190acb45f3077f9099fea818aa43f97a # v2.3.3 if: steps.has_local.outputs.files_exists == 'true' with: config-file: | diff --git a/.github/workflows/lock.yml b/.github/workflows/lock.yml index 2204b62dda..0b63dec318 100644 --- a/.github/workflows/lock.yml +++ b/.github/workflows/lock.yml @@ -17,7 +17,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 + - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 # v5.0.1 with: # Number of days of inactivity before a closed issue is locked issue-inactive-days: 365 diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index 7d06584c86..297ac2263a 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -13,7 +13,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/add-to-project@9bfe908f2eaa7ba10340b31e314148fcfe6a2458 + - uses: 
actions/add-to-project@9bfe908f2eaa7ba10340b31e314148fcfe6a2458 # v1.0.1 with: # issues are added to the Planning project # PRs are added to the Review project diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index bcda1fea30..bde3340fbc 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -33,12 +33,12 @@ jobs: days-before-issue-stale: 90 days-before-issue-close: 21 steps: - - uses: conda/actions/read-yaml@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 + - uses: conda/actions/read-yaml@976289d0cfd85139701b26ddd133abdd025a7b5f # v24.5.0 id: read_yaml with: path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml - - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e + - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9.0.0 id: stale with: # Only issues with these labels are checked whether they are stale diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 29f98a129d..74ec94e40a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -45,7 +45,7 @@ jobs: code: ${{ steps.filter.outputs.code }} steps: - name: Checkout Source - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 # dorny/paths-filter needs git clone for non-PR events # https://github.com/dorny/paths-filter#supported-workflows if: github.event_name != 'pull_request' @@ -102,7 +102,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: fetch-depth: 0 @@ -116,7 +116,7 @@ jobs: key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca with: condarc-file: .github/condarc run-post: false # skip post cleanup 
@@ -150,13 +150,13 @@ jobs: -m "${{ env.PYTEST_MARKER }}" - name: Upload Coverage - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed + uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be # v4.3.1 with: flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 with: name: test-results-${{ env.HASH }} path: | @@ -182,7 +182,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: fetch-depth: 0 @@ -196,7 +196,7 @@ jobs: key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca with: condarc-file: .github/condarc run-post: false # skip post cleanup @@ -229,7 +229,7 @@ jobs: run: conda list --show-channel-urls - name: Run Benchmarks - uses: CodSpeedHQ/action@1dbf41f0ae41cebfe61e084e535aebe533409b4d + uses: CodSpeedHQ/action@0b631f8998f2389eb5144632b6f9f8fabd33a86e with: token: ${{ secrets.CODSPEED_TOKEN }} run: $CONDA/envs/test/bin/pytest --codspeed @@ -262,7 +262,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: fetch-depth: 0 @@ -277,7 +277,7 @@ jobs: key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca with: condarc-file: .github\condarc run-post: false # skip post cleanup @@ -317,13 +317,13 @@ jobs: -m "${{ env.PYTEST_MARKER 
}}" - name: Upload Coverage - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed + uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be # v4.3.1 with: flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 with: name: test-results-${{ env.HASH }} path: | @@ -337,6 +337,10 @@ jobs: needs: changes if: github.event_name == 'schedule' || needs.changes.outputs.code == 'true' + # Old macOS needed for old SDK (see xcode step below) + # This is needed for some MACOSX_DEPLOYMENT_TARGET tests + # We could also install SDKs from a external provider in the future + # if we want to update this runner to a non-deprecated version runs-on: macos-11 defaults: run: @@ -362,7 +366,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: fetch-depth: 0 @@ -376,7 +380,7 @@ jobs: key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca with: condarc-file: .github/condarc run-post: false # skip post cleanup @@ -413,13 +417,13 @@ jobs: -m "${{ env.PYTEST_MARKER }}" - name: Upload Coverage - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed + uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be # v4.3.1 with: flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 with: name: 
test-results-${{ env.HASH }} path: | @@ -441,11 +445,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Download Artifacts - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 + uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e - name: Upload Combined Test Results # provides one downloadable archive of all matrix run test results for further analysis - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 with: name: test-results-${{ github.sha }}-all path: test-results-* @@ -489,9 +493,9 @@ jobs: include: - runner: ubuntu-latest subdir: linux-64 - - runner: macos-latest + - runner: macos-13 subdir: osx-64 - - runner: macos-14 + - runner: macos-14 # FUTURE: Use -latest subdir: osx-arm64 - runner: windows-latest subdir: win-64 @@ -500,7 +504,7 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - name: Checkout Source - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: ref: ${{ github.ref }} clean: true @@ -534,7 +538,7 @@ jobs: Path(environ["GITHUB_ENV"]).write_text(f"ANACONDA_ORG_LABEL={label}") - name: Create & Upload - uses: conda/actions/canary-release@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 + uses: conda/actions/canary-release@976289d0cfd85139701b26ddd133abdd025a7b5f # v24.5.0 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} diff --git a/.github/workflows/upload.yml b/.github/workflows/upload.yml new file mode 100644 index 0000000000..984ba0fdd9 --- /dev/null +++ b/.github/workflows/upload.yml @@ -0,0 +1,55 @@ +name: Upload release + +on: + # https://docs.github.com/en/webhooks/webhook-events-and-payloads#release + release: + types: [published] + +concurrency: + # Concurrency group that uses the workflow name 
and PR number if available + # or commit SHA as a fallback. If a new build is triggered under that + # concurrency group while a previous build is running it will be canceled. + # Repeated pushes to a PR will cancel all previous builds, while multiple + # merges to main will not cancel. + group: ${{ github.workflow }}-${{ github.ref_name || github.sha }} + cancel-in-progress: true + +permissions: + contents: write + +jobs: + # create source archive and upload it to the published release + # URL to the archive: https://github.com/conda//releases/download//-.tar.gz + upload: + if: '!github.event.repository.fork' + runs-on: ubuntu-latest + env: + ARCHIVE_NAME: ${{ github.event.repository.name }}-${{ github.ref_name }} + steps: + - name: Checkout Source + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 + + - name: Create Release Directory + run: mkdir -p release + + - name: Archive Source + run: > + git archive + --prefix="${{ env.ARCHIVE_NAME }}/" + --output="release/${{ env.ARCHIVE_NAME }}.tar.gz" + HEAD + + - name: Compute Checksum + run: > + sha256sum "release/${{ env.ARCHIVE_NAME }}.tar.gz" + | awk '{print $1}' + > "release/${{ env.ARCHIVE_NAME }}.tar.gz.sha256sum" + + - name: Upload Archive + env: + GH_TOKEN: ${{ github.token }} + run: > + gh release upload + --clobber "${{ github.ref_name }}" + --repo "${{ github.repository }}" + release/* diff --git a/.mailmap b/.mailmap index 02df1bf754..4644f183c2 100644 --- a/.mailmap +++ b/.mailmap @@ -141,6 +141,7 @@ Joseph Hunkeler Juan Lasheras jlas Julian Rüth Julien Schueller +Justin Wood (Callek) Jürgen Gmach Jürgen Gmach Jędrzej Nowak Jedrzej Nowak Kai Tietz Kai Tietz <47363620+katietz@users.noreply.github.com> @@ -259,6 +260,7 @@ Thomas A Caswell Thomas A Caswell Thomas Holder Thomas Kluyver Tim Snyder +Tobias Fischer Todd Tomashek tomashek Todd Tomashek todd.m.tomashek Tom Davidson @@ -278,6 +280,7 @@ Wim Glenn wim glenn Wolf Vollprecht Wolfgang Ulmer Yann +Yannik Tausch Yoav Ram Yu Feng 
Zane Dufour zdog234 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b29ba4260c..7fbe8f31bc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -58,7 +58,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.1 + rev: v0.4.3 hooks: # lint & attempt to correct failures (e.g. pyupgrade) - id: ruff diff --git a/AUTHORS.md b/AUTHORS.md index 969994f016..73bac74a05 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -120,6 +120,7 @@ Authors are sorted alphabetically. * Juan Lasheras * Julian Rüth * Julien Schueller +* Justin Wood (Callek) * Jürgen Gmach * Jędrzej Nowak * Kai Tietz @@ -217,6 +218,7 @@ Authors are sorted alphabetically. * Thomas Holder * Thomas Kluyver * Tim Snyder +* Tobias Fischer * Todd Tomashek * Tom Davidson * Tom Pollard @@ -235,6 +237,7 @@ Authors are sorted alphabetically. * Wolf Vollprecht * Wolfgang Ulmer * Yann +* Yannik Tausch * Yoav Ram * Yu Feng * Zane Dufour diff --git a/CHANGELOG.md b/CHANGELOG.md index 42d745f874..8c2a863ce4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,156 @@ [//]: # (current developments) +## 24.5.1 (2024-05-23) + +### Bug fixes + +* Fix issue with modifying a `frozendict` when specifying `outputs/files` in `meta.yaml`. (#5342 via #5345) +* Fix excessive memory use in `inspect_linkages_lief`. (#5267 via #5348) + +### Deprecations + +* Mark `conda_build.metadata.toposort` as deprecated. Use `conda_build.metadata.toposort_outputs` instead. (#5342 via #5345) +* Mark `conda_build.metadata.check_circular_dependencies` as deprecated. Use `conda_build.metadata._check_circular_dependencies` instead. (#5342 via #5345) + +### Contributors + +* @beeankha +* @kenodegard +* @mbargull + + + +## 24.5.0 (2024-05-06) + +### Enhancements + +* Only fetch `lfs` files for specific `git_ref`. (#5202) +* Add `conda_build.metadata._split_line_selector` to cache line-selector parsed text. 
(#5237) +* Add `conda_build.render.open_recipe` context manager to detect the recipe type (file/`meta.yaml`, directory/recipe, or tarball/package) and properly handling any exit/close behavior. (#5238) +* For Windows users, the stub executables used for Python entrypoints in packages are now codesigned. (#5252) +* Require `conda >=23.7.0`. (#5271) + +### Bug fixes + +* Fix all CLI arguments to properly initialize `conda.base.context.context` with parsed arguments. Fixes issue with arguments not being processed (e.g., `--override-channels` was previously ignored). (#3693 via #5271) + +### Deprecations + +* Deprecate `conda_build.conda_interface.CONDA_VERSION` constant. Use `conda.__version__` instead. (#5222) +* Deprecate `conda_build.conda_interface.binstar_upload` constant. Use `conda.base.context.context.binstar_upload` instead. (#5222) +* Deprecate `conda_build.conda_interface.default_python` constant. Use `conda.base.context.context.default_python` instead. (#5222) +* Deprecate `conda_build.conda_interface.envs_dirs` constant. Use `conda.base.context.context.envs_dirs` instead. (#5222) +* Deprecate `conda_build.conda_interface.pkgs_dirs` constant. Use `conda.base.context.context.pkgs_dirs` instead. (#5222) +* Deprecate `conda_build.conda_interface.cc_platform` constant. Use `conda.base.context.context.platform` instead. (#5222) +* Deprecate `conda_build.conda_interface.root_dir` constant. Use `conda.base.context.context.root_prefix` instead. (#5222) +* Deprecate `conda_build.conda_interface.root_writable` constant. Use `conda.base.context.context.root_writable` instead. (#5222) +* Deprecate `conda_build.conda_interface.subdir` constant. Use `conda.base.context.context.subdir` instead. (#5222) +* Deprecate `conda_build.conda_interface.create_default_packages` constant. Use `conda.base.context.context.create_default_packages` instead. (#5222) +* Deprecate `conda_build.conda_interface.get_rc_urls` function. Use `conda.base.context.context.channels` instead. 
(#5222) +* Deprecate `conda_build.conda_interface.get_prefix` function. Use `conda.base.context.context.target_prefix` instead. (#5222) +* Deprecate `conda_build.conda_interface.get_conda_channel` function. Use `conda.models.channel.Channel.from_value` instead. (#5222) +* Deprecate `conda_build.conda_interface.reset_context` function. Use `conda.base.context.reset_context` instead. (#5222) +* Deprecate `conda_build.conda_interface.context` singleton. Use `conda.base.context.context` instead. (#5251) +* Deprecate `conda_build.conda_interface.configparser` module. Use `configparser` instead. (#5251) +* Deprecate `conda_build.conda_interface.os` module. Use `os` instead. (#5251) +* Deprecate `conda_build.conda_interface.partial` function. Use `functools.partial` instead. (#5251) +* Deprecate `conda_build.conda_interface.import_module` function. Use `importlib.import_module` instead. (#5251) +* Deprecate `conda_build.conda_interface.determine_target_prefix` function. Use `conda.base.context.determine_target_prefix` instead. (#5251) +* Deprecate `conda_build.conda_interface.non_x86_linux_machines` constant. Use `conda.base.context.non_x86_machines` instead. (#5251) +* Deprecate `conda_build.conda_interface.ProgressiveFetchExtract` class. Use `conda.core.package_cache.ProgressiveFetchExtract` instead. (#5251) +* Deprecate `conda_build.conda_interface.CondaError` class. Use `conda.exceptions.CondaError` instead. (#5251) +* Deprecate `conda_build.conda_interface.CondaHTTPError` class. Use `conda.exceptions.CondaHTTPError` instead. (#5251) +* Deprecate `conda_build.conda_interface.LinkError` class. Use `conda.exceptions.LinkError` instead. (#5251) +* Deprecate `conda_build.conda_interface.LockError` class. Use `conda.exceptions.LockError` instead. (#5251) +* Deprecate `conda_build.conda_interface.NoPackagesFoundError` class. Use `conda.exceptions.NoPackagesFoundError` instead. (#5251) +* Deprecate `conda_build.conda_interface.PaddingError` class. 
Use `conda.exceptions.PaddingError` instead. (#5251) +* Deprecate `conda_build.conda_interface.UnsatisfiableError` class. Use `conda.exceptions.UnsatisfiableError` instead. (#5251) +* Deprecate `conda_build.conda_interface.get_conda_build_local_url` class. Use `conda.models.channel.get_conda_build_local_url` instead. (#5251) +* Deprecate `conda_build.config.Config.override_channels`. Defer to `conda.base.context.context.channels` instead. (#5271, #5324) +* Deprecate `conda_build.conda_interface._toposort`. Use `conda.common.toposort._toposort` instead. (#5276) +* Deprecate `conda_build.conda_interface.add_parser_channels`. Use `conda.cli.helpers.add_parser_channels` instead. (#5276) +* Deprecate `conda_build.conda_interface.add_parser_prefix`. Use `conda.cli.helpers.add_parser_prefix` instead. (#5276) +* Deprecate `conda_build.conda_interface.ArgumentParser`. Use `conda.cli.conda_argparse.ArgumentParser` instead. (#5276) +* Deprecate `conda_build.conda_interface.cc_conda_build`. Use `conda.base.context.context.conda_build` instead. (#5276) +* Deprecate `conda_build.conda_interface.Channel`. Use `conda.models.channel.Channel` instead. (#5276) +* Deprecate `conda_build.conda_interface.Completer`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.CondaSession`. Use `conda.gateways.connection.session.CondaSession` instead. (#5276) +* Deprecate `conda_build.conda_interface.download`. Use `conda.gateways.connection.download.download` instead. (#5276) +* Deprecate `conda_build.conda_interface.EntityEncoder`. Use `conda.auxlib.entity.EntityEncoder` instead. (#5276) +* Deprecate `conda_build.conda_interface.env_path_backup_var_exists`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.FileMode`. Use `conda.models.enums.FileMode` instead. (#5276) +* Deprecate `conda_build.conda_interface.human_bytes`. Use `conda.utils.human_bytes` instead. (#5276) +* Deprecate `conda_build.conda_interface.input`. Use `input` instead. 
(#5276) +* Deprecate `conda_build.conda_interface.InstalledPackages`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.lchmod`. Use `conda.gateways.disk.link.lchmod` instead. (#5276) +* Deprecate `conda_build.conda_interface.MatchSpec`. Use `conda.models.match_spec.MatchSpec` instead. (#5276) +* Deprecate `conda_build.conda_interface.NoPackagesFound`. Use `conda.exceptions.ResolvePackageNotFound` instead. (#5276) +* Deprecate `conda_build.conda_interface.normalized_version`. Use `conda.models.version.normalized_version` instead. (#5276) +* Deprecate `conda_build.conda_interface.PackageRecord`. Use `conda.models.records.PackageRecord` instead. (#5276) +* Deprecate `conda_build.conda_interface.PathType`. Use `conda.models.enums.PathType` instead. (#5276) +* Deprecate `conda_build.conda_interface.prefix_placeholder`. Use `conda.base.constants.PREFIX_PLACEHOLDER` instead. (#5276) +* Deprecate `conda_build.conda_interface.Resolve`. Use `conda.resolve.Resolve` instead. (#5276) +* Deprecate `conda_build.conda_interface.rm_rf`. Use `conda_build.utils.rm_rf` instead. (#5276) +* Deprecate `conda_build.conda_interface.spec_from_line`. Use `conda.cli.common.spec_from_line` instead. (#5276) +* Deprecate `conda_build.conda_interface.specs_from_args`. Use `conda.cli.common.specs_from_args` instead. (#5276) +* Deprecate `conda_build.conda_interface.specs_from_url`. Use `conda.cli.common.specs_from_url` instead. (#5276) +* Deprecate `conda_build.conda_interface.StringIO`. Use `io.StringIO` instead. (#5276) +* Deprecate `conda_build.conda_interface.symlink_conda`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.TempDirectory`. Use `conda.gateways.disk.create.TemporaryDirectory` instead. (#5276) +* Deprecate `conda_build.conda_interface.TmpDownload`. Use `conda.gateways.connection.download.TmpDownload` instead. (#5276) +* Deprecate `conda_build.conda_interface.unix_path_to_win`. Use `conda.utils.unix_path_to_win` instead. 
(#5276) +* Deprecate `conda_build.conda_interface.Unsatisfiable`. Use `conda.exceptions.UnsatisfiableError` instead. (#5276) +* Deprecate `conda_build.conda_interface.untracked`. Use `conda.misc.untracked` instead. (#5276) +* Deprecate `conda_build.conda_interface.url_path`. Use `conda.utils.url_path` instead. (#5276) +* Deprecate `conda_build.conda_interface.VersionOrder`. Use `conda.models.version.VersionOrder` instead. (#5276) +* Deprecate `conda_build.conda_interface.walk_prefix`. Use `conda.misc.walk_prefix` instead. (#5276) +* Deprecate `conda_build.conda_interface.win_path_to_unix`. Use `conda.common.path.win_path_to_unix` instead. (#5276) +* Deprecate `conda_build.variants.get_vars(loop_only)`. (#5280) +* Deprecate `conda_build.utils.HashableDict`. Use `frozendict.deepfreeze` instead. (#5284) +* Deprecate `conda_build.utils._convert_lists_to_sets`. Use `frozendict.deepfreeze` instead. (#5284) +* Deprecate `conda_build.utils.represent_hashabledict`. Use `frozendict.deepfreeze` instead. (#5284) +* Deprecate `conda_build.config.noarch_python_build_age_default`. (#5298) +* Postpone `conda_build.index.channel_data` deprecation. (#5299) +* Remove `conda_build.api.get_output_file_path`. Use `conda_build.api.get_output_file_paths` instead. (#5299) +* Remove `conda_build.bdist_conda`. (#5299) +* Remove `conda_build.build.have_prefix_files`. (#5299) +* Remove `conda_build.conda_interface.get_index`. Use `conda.core.index.get_index` instead. (#5299) +* Remove `conda_build.conda_interface.get_version_from_git_tag`. Use `conda_build.environ.get_version_from_git_tag` instead. (#5299) +* Remove `conda_build.conda_interface.handle_proxy_407`. Handled by `conda.gateways.connection.session.CondaSession`. (#5299) +* Remove `conda_build.conda_interface.hashsum_file`. Use `conda.gateways.disk.read.compute_sum` instead. (#5299) +* Remove `conda_build.conda_interface.md5_file`. Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead. 
(#5299) +* Remove `conda_build.environ._load_all_json`. (#5299) +* Remove `conda_build.environ._load_json`. (#5299) +* Remove `conda_build.environ.cached_actions`. (#5299) +* Remove `conda_build.environ.Environment`. Use `conda.core.prefix_data.PrefixData` instead. (#5299) +* Remove `conda_build.environ.InvalidEnvironment`. (#5299) +* Remove `conda_build.environ.LINK_ACTION`. (#5299) +* Remove `conda_build.environ.PREFIX_ACTION`. (#5299) +* Remove `conda_build.index._apply_instructions`. Use `conda_index._apply_instructions` instead. (#5299) +* Remove `conda_build.index.DummyExecutor`. (#5299) +* Remove `conda_build.index.LOCK_TIMEOUT_SECS`. (#5299) +* Remove `conda_build.index.LOCKFILE_NAME`. (#5299) +* Remove `conda_build.index.MAX_THREADS_DEFAULT`. (#5299) + +### Other + +* Enable CodSpeed benchmarks for select tests. (#5233) + +### Contributors + +* @beeankha +* @conda-bot +* @jaimergp +* @Callek made their first contribution in https://github.com/conda/conda-build/pull/5252 +* @kenodegard +* @mbargull +* @Tobias-Fischer made their first contribution in https://github.com/conda/conda-build/pull/5202 +* @ytausch made their first contribution in https://github.com/conda/conda-build/pull/5214 +* @dependabot[bot] +* @pre-commit-ci[bot] + + + ## 24.3.0 (2024-03-15) ### Enhancements diff --git a/RELEASE.md b/RELEASE.md index d45614facc..fed9bd3a81 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -14,18 +14,18 @@ # Release Process -> **Note:** +> [!NOTE] > Throughout this document are references to the version number as `YY.M.[$patch_number]`, this should be replaced with the correct version number. Do **not** prefix the version with a lowercase `v`. ## 1. Open the release issue and cut a release branch. (do this ~1 week prior to release) -> **Note:** +> [!NOTE] > The new release branch should adhere to the naming convention of `YY.M.x` (make sure to put the `.x` at the end!). 
In the case of patch/hotfix releases, however, do NOT cut a new release branch; instead, use the previously-cut release branch with the appropriate `YY.M.x` version numbers. Use the issue template below to create the release issue. After creating the release issue, pin it for easy access.
-GitHub Issue Template +

Release Template

```markdown ### Summary @@ -45,7 +45,8 @@ Placeholder for `{{ repo.name }} YY.M.x` release. [conda-forge]: https://github.com/conda-forge/{{ repo.name }}-feedstock [ReadTheDocs]: https://readthedocs.com/projects/continuumio-{{ repo.name }}/ -#### The week before release week +
+

The week before release week

- [ ] Create release branch (named `YY.M.x`) - [ ] Ensure release candidates are being successfully built (see `conda-canary/label/rc-{{ repo.name }}-YY.M.x`) @@ -53,10 +54,14 @@ Placeholder for `{{ repo.name }} YY.M.x` release. - [ ] Test release candidates -#### Release week +
+ +
+

Release week

- [ ] Create release PR (see [release process][process]) - [ ] [Publish release][releases] +- [ ] Merge `YY.M.x` back into `main` - [ ] Activate the `YY.M.x` branch on [ReadTheDocs][ReadTheDocs] - [ ] Feedstocks - [ ] Bump version & update dependencies/tests in [Anaconda, Inc.'s feedstock][main] @@ -72,22 +77,56 @@ Placeholder for `{{ repo.name }} YY.M.x` release. - [ ] [Matrix (conda/conda)](https://matrix.to/#/#conda_conda:gitter.im) (this auto posts from Discourse) - Summary - [ ] [Twitter](https://twitter.com/condaproject) + +
```
-> **Note:** +If a patch release is necessary, reopen the original release issue and append the following template to the release issue summary. + +
+

Patch Release Template

+ +```markdown +
+

Patch YY.M.N

+ +- [ ] +- [ ] Create release PR (see [release process][process]) +- [ ] [Publish release][releases] +- [ ] Merge `YY.M.x` back into `main` +- [ ] Feedstocks + - [ ] Bump version & update dependencies/tests in [Anaconda, Inc.'s feedstock][main] + - [ ] Bump version & update dependencies/tests in [conda-forge feedstock][conda-forge] +- [ ] Hand off to the Anaconda packaging team + +
+``` + +
+ +> [!NOTE] > The [epic template][epic template] is perfect for this; remember to remove the **`epic`** label. ## 2. Alert various parties of the upcoming release. (do this ~1 week prior to release) Let various interested parties know about the upcoming release; at minimum, conda-forge maintainers should be informed. For major features, a blog post describing the new features should be prepared and posted once the release is completed (see the announcements section of the release issue). -## 3. Ensure `rever.xsh` and `news/TEMPLATE` are up to date. +## 3. Manually test canary build(s). + +### Canary Builds for Manual Testing + +Once the release PRs are filed, successful canary builds will be available on `https://anaconda.org/conda-canary/conda/files?channel=rc-{{ repo.name }}-YY.M.x` for manual testing. + +> [!NOTE] +> You do not need to apply the `build::review` label for release PRs; every commit to the release branch builds and uploads canary builds to the respective `rc-` label. + +## 4. Ensure `rever.xsh` and `news/TEMPLATE` are up to date. These are synced from [`conda/infrastructure`][infrastructure].
-

4. Run rever. (ideally done on the Monday of release week)

+

5. Run rever. (ideally done on the Monday of release week)

Currently, there are only 2 activities we use rever for, (1) aggregating the authors and (2) updating the changelog. Aggregating the authors can be an error-prone process and also suffers from builtin race conditions (_i.e._, to generate an updated `.authors.yml` we need an updated `.mailmap` but to have an updated `.mailmap` we need an updated `.authors.yml`). This is why the following steps are very heavy-handed (and potentially repetitive) in running rever commands, undoing commits, squashing/reordering commits, etc. @@ -119,9 +158,9 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut (rever) $ git checkout -b changelog-YY.M.[$patch_number] ``` -2. Run `rever --activities authors`: +2. Run `rever --activities authors `: - > **Note:** + > **Note:** > Include `--force` when re-running any rever commands for the same ``, otherwise, rever will skip the activity and no changes will be made (i.e., rever remembers if an activity has been run for a given version). ```bash @@ -166,7 +205,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut (rever) $ git commit -m "Update .authors.yml" ``` - - Rerun `rever --activities authors` and finally check that your `.mailmap` is correct by running: + - Rerun `rever --activities authors --force ` and finally check that your `.mailmap` is correct by running: ```bash git shortlog -se @@ -194,7 +233,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut - Continue repeating the above processes until the `.authors.yml` and `.mailmap` are corrected to your liking. After completing this, you will have at most two commits on your release branch: ```bash - (rever) $ git cherry -v main + (rever) $ git cherry -v + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap ``` @@ -202,7 +241,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 4. 
Review news snippets (ensure they are all using the correct Markdown format, **not** reStructuredText) and add additional snippets for undocumented PRs/changes as necessary. - > **Note:** + > **Note:** > We've found it useful to name news snippets with the following format: `-`. > > We've also found that we like to include the PR #s inline with the text itself, e.g.: @@ -213,7 +252,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut > * Add `win-arm64` as a known platform (subdir). (#11778) > ``` - - You can utilize [GitHub's compare view][compare] to review what changes are to be included in this release. + - You can utilize [GitHub's compare view][compare] to review what changes are to be included in this release. Make sure you compare the current release branch against the previous one (e.g., `24.5.x` would be compared against `24.3.x`) - Add a new news snippet for any PRs of importance that are missing. @@ -227,7 +266,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut - After completing this, you will have at most three commits on your release branch: ```bash - (rever) $ git cherry -v main + (rever) $ git cherry -v + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news @@ -235,7 +274,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 5. Run `rever --activities changelog`: - > **Note:** + > **Note:** > This has previously been a notoriously fickle step (likely due to incorrect regex patterns in the `rever.xsh` config file and missing `github` keys in `.authors.yml`) so beware of potential hiccups. If this fails, it's highly likely to be an innocent issue. 
```bash @@ -254,7 +293,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut - After completing this, you will have at most three commits on your release branch: ```bash - (rever) $ git cherry -v main + (rever) $ git cherry -v + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news @@ -269,7 +308,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut - After completing this, you will have at most five commits on your release branch: ```bash - (rever) $ git cherry -v main + (rever) $ git cherry -v + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news @@ -291,7 +330,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut - After completing this, you will have at most six commits on your release branch: ```bash - (rever) $ git cherry -v main + (rever) $ git cherry -v + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news @@ -325,7 +364,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 11. [Create][new release] the release and **SAVE AS A DRAFT** with the following values: - > **Note:** + > **Note:** > Only publish the release after the release PR is merged, until then always **save as draft**. | Field | Value | @@ -336,22 +375,13 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
-## 5. Wait for review and approval of release PR. - -## 6. Manually test canary build(s). - -### Canary Builds for Manual Testing - -Once the release PRs are filed, successful canary builds will be available on `https://anaconda.org/conda-canary/conda/files?channel=rc-{{ repo.name }}-YY.M.x` for manual testing. - -> **Note:** -> You do not need to apply the `build::review` label for release PRs; every commit to the release branch builds and uploads canary builds to the respective `rc-` label. +## 6. Wait for review and approval of release PR. ## 7. Merge release PR and publish release. To publish the release, go to the project's release page (e.g., https://github.com/conda/conda/releases) and add the release notes from `CHANGELOG.md` to the draft release you created earlier. Then publish the release. -> **Note:** +> [!NOTE] > Release notes can be drafted and saved ahead of time. ## 8. Merge/cherry pick the release branch over to the `main` branch. @@ -367,19 +397,19 @@ To publish the release, go to the project's release page (e.g., https://github.c 4. Ensure that all of the commits being pulled in look accurate, then select "Create pull request". -> **Note:** +> [!NOTE] > Make sure NOT to push the "Update Branch" button. If there are [merge conflicts][merge conflicts], create a temporary "connector branch" dedicated to fixing merge conflicts separately from the `YY.M.x` and `main` branches. 5. Review and merge the pull request the same as any code change pull request. -> **Note:** +> [!NOTE] > The commits from the release branch need to be retained in order to be able to compare individual commits; in other words, a "merge commit" is required when merging the resulting pull request vs. a "squash merge". Protected branches will require permissions to be temporarily relaxed in order to enable this action. ## 9. Open PRs to bump [Anaconda Recipes][Anaconda Recipes] and [conda-forge][conda-forge] feedstocks to use `YY.M.[$patch_number]`. 
-> **Note:** +> [!NOTE] > Conda-forge's PRs will be auto-created via the `regro-cf-autotick-bot`. Follow the instructions below if any changes need to be made to the recipe that were not automatically added (these instructions are only necessary for anyone who is _not_ a conda-forge feedstock maintainer, since maintainers can push changes directly to the autotick branch): > - Create a new branch based off of autotick's branch (autotick's branches usually use the `regro-cf-autotick-bot:XX.YY.[$patch_number]_[short hash]` syntax) > - Add any changes via commits to that new branch @@ -392,7 +422,7 @@ To publish the release, go to the project's release page (e.g., https://github.c ## 10. Hand off to Anaconda's packaging team. -> **Note:** +> [!NOTE] > This step should NOT be done past Thursday morning EST; please start the process on a Monday, Tuesday, or Wednesday instead in order to avoid any potential debugging sessions over evenings or weekends.
diff --git a/conda_build/_link.py b/conda_build/_link.py index af841c0275..e8984fcd37 100644 --- a/conda_build/_link.py +++ b/conda_build/_link.py @@ -26,7 +26,7 @@ SITE_PACKAGES = "Lib/site-packages" else: BIN_DIR = join(PREFIX, "bin") - SITE_PACKAGES = "lib/python%s/site-packages" % sys.version[:3] + SITE_PACKAGES = f"lib/python{sys.version[:3]}/site-packages" # the list of these files is going to be store in info/_files FILES = [] @@ -110,20 +110,20 @@ def create_script(fn): dst = join(BIN_DIR, fn) if sys.platform == "win32": shutil.copy2(src, dst + "-script.py") - FILES.append("Scripts/%s-script.py" % fn) + FILES.append(f"Scripts/{fn}-script.py") shutil.copy2( join(THIS_DIR, "cli-%d.exe" % (8 * tuple.__itemsize__)), dst + ".exe" ) - FILES.append("Scripts/%s.exe" % fn) + FILES.append(f"Scripts/{fn}.exe") else: with open(src) as fi: data = fi.read() with open(dst, "w") as fo: - shebang = replace_long_shebang("#!%s\n" % normpath(sys.executable)) + shebang = replace_long_shebang(f"#!{normpath(sys.executable)}\n") fo.write(shebang) fo.write(data) os.chmod(dst, 0o775) - FILES.append("bin/%s" % fn) + FILES.append(f"bin/{fn}") def create_scripts(files): @@ -140,9 +140,9 @@ def main(): link_files("site-packages", SITE_PACKAGES, DATA["site-packages"]) link_files("Examples", "Examples", DATA["Examples"]) - with open(join(PREFIX, "conda-meta", "%s.files" % DATA["dist"]), "w") as fo: + with open(join(PREFIX, "conda-meta", "{}.files".format(DATA["dist"])), "w") as fo: for f in FILES: - fo.write("%s\n" % f) + fo.write(f"{f}\n") if __name__ == "__main__": diff --git a/conda_build/api.py b/conda_build/api.py index 571f08f534..cc866a865d 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -418,7 +418,7 @@ def convert( "Conversion from wheel packages is not implemented yet, stay tuned." 
) else: - raise RuntimeError("cannot convert: %s" % package_file) + raise RuntimeError(f"cannot convert: {package_file}") def test_installable(channel: str = "defaults") -> bool: diff --git a/conda_build/build.py b/conda_build/build.py index b377d73bc1..7d6bb8c836 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -42,7 +42,12 @@ from . import environ, noarch_python, source, tarcheck, utils from .config import Config from .create_test import create_all_test_files -from .exceptions import CondaBuildException, DependencyNeedsBuildingError +from .deprecations import deprecated +from .exceptions import ( + BuildScriptException, + CondaBuildException, + DependencyNeedsBuildingError, +) from .index import _delegated_update_index, get_build_index from .metadata import FIELDS, MetaData from .os_utils import external @@ -774,7 +779,7 @@ def copy_readme(m): if readme: src = join(m.config.work_dir, readme) if not isfile(src): - sys.exit("Error: no readme file: %s" % readme) + sys.exit(f"Error: no readme file: {readme}") dst = join(m.config.info_dir, readme) utils.copy_into(src, dst, m.config.timeout, locking=m.config.locking) if os.path.split(readme)[1] not in {"README.md", "README.rst", "README"}: @@ -1187,7 +1192,7 @@ def record_prefix_files(m, files_with_prefix): if fn in text_has_prefix_files: text_has_prefix_files.remove(fn) else: - ignored_because = " (not in build/%s_has_prefix_files)" % (mode) + ignored_because = f" (not in build/{mode}_has_prefix_files)" print( "{fn} ({mode}): {action}{reason}".format( @@ -1204,10 +1209,10 @@ def record_prefix_files(m, files_with_prefix): # make sure we found all of the files expected errstr = "" for f in text_has_prefix_files: - errstr += "Did not detect hard-coded path in %s from has_prefix_files\n" % f + errstr += f"Did not detect hard-coded path in {f} from has_prefix_files\n" for f in binary_has_prefix_files: errstr += ( - "Did not detect hard-coded path in %s from binary_has_prefix_files\n" % f + f"Did not detect 
hard-coded path in {f} from binary_has_prefix_files\n" ) if errstr: raise RuntimeError(errstr) @@ -1276,7 +1281,7 @@ def write_about_json(m): with open(join(m.config.info_dir, "about.json"), "w") as fo: d = {} for key, default in FIELDS["about"].items(): - value = m.get_value("about/%s" % key) + value = m.get_value(f"about/{key}") if value: d[key] = value if default is list: @@ -1332,7 +1337,7 @@ def write_info_json(m: MetaData): "# $ conda create --name --file " ) for dist in sorted(runtime_deps + [" ".join(m.dist().rsplit("-", 2))]): - fo.write("%s\n" % "=".join(dist.split())) + fo.write("{}\n".format("=".join(dist.split()))) mode_dict = {"mode": "w", "encoding": "utf-8"} with open(join(m.config.info_dir, "index.json"), **mode_dict) as fo: @@ -1355,10 +1360,10 @@ def get_entry_point_script_names(entry_point_scripts): for entry_point in entry_point_scripts: cmd = entry_point[: entry_point.find("=")].strip() if utils.on_win: - scripts.append("Scripts\\%s-script.py" % cmd) - scripts.append("Scripts\\%s.exe" % cmd) + scripts.append(f"Scripts\\{cmd}-script.py") + scripts.append(f"Scripts\\{cmd}.exe") else: - scripts.append("bin/%s" % cmd) + scripts.append(f"bin/{cmd}") return scripts @@ -1520,7 +1525,7 @@ def _recurse_symlink_to_size(path, seen=None): return _recurse_symlink_to_size(dest, seen=seen) elif not isfile(dest): # this is a symlink that points to nowhere, so is zero bytes - warnings.warn("file %s is a symlink with no target" % path, UserWarning) + warnings.warn(f"file {path} is a symlink with no target", UserWarning) return 0 return 0 @@ -1771,8 +1776,7 @@ def bundle_conda( var = var.split("=", 1)[0] elif var not in os.environ: warnings.warn( - "The environment variable '%s' specified in script_env is undefined." 
- % var, + f"The environment variable '{var}' specified in script_env is undefined.", UserWarning, ) val = "" @@ -1789,12 +1793,15 @@ def bundle_conda( _write_activation_text(dest_file, metadata) bundle_stats = {} - utils.check_call_env( - [*args, dest_file], - cwd=metadata.config.work_dir, - env=env_output, - stats=bundle_stats, - ) + try: + utils.check_call_env( + [*args, dest_file], + cwd=metadata.config.work_dir, + env=env_output, + stats=bundle_stats, + ) + except subprocess.CalledProcessError as exc: + raise BuildScriptException(str(exc), caused_by=exc) from exc log_stats(bundle_stats, f"bundling {metadata.name()}") if stats is not None: stats[stats_key(metadata, f"bundle_{metadata.name()}")] = bundle_stats @@ -2489,9 +2496,12 @@ def build( with codecs.getwriter("utf-8")(open(build_file, "wb")) as bf: bf.write(script) - windows.build( - m, build_file, stats=build_stats, provision_only=provision_only - ) + try: + windows.build( + m, build_file, stats=build_stats, provision_only=provision_only + ) + except subprocess.CalledProcessError as exc: + raise BuildScriptException(str(exc), caused_by=exc) from exc else: build_file = join(m.path, "build.sh") if isfile(build_file) and script: @@ -2533,13 +2543,16 @@ def build( del env["CONDA_BUILD"] # this should raise if any problems occur while building - utils.check_call_env( - cmd, - env=env, - rewrite_stdout_env=rewrite_env, - cwd=src_dir, - stats=build_stats, - ) + try: + utils.check_call_env( + cmd, + env=env, + rewrite_stdout_env=rewrite_env, + cwd=src_dir, + stats=build_stats, + ) + except subprocess.CalledProcessError as exc: + raise BuildScriptException(str(exc), caused_by=exc) from exc utils.remove_pycache_from_scripts(m.config.host_prefix) if build_stats and not provision_only: log_stats(build_stats, f"building {m.name()}") @@ -3330,9 +3343,9 @@ def test( os.path.dirname(prefix), "_".join( ( - "%s_prefix_moved" % name, + f"{name}_prefix_moved", metadata.dist(), - getattr(metadata.config, "%s_subdir" % name), 
+ getattr(metadata.config, f"{name}_subdir"), ) ), ) @@ -3550,6 +3563,11 @@ def tests_failed(package_or_metadata, move_broken, broken_dir, config): sys.exit("TESTS FAILED: " + os.path.basename(pkg)) +@deprecated( + "24.7", + "24.9", + addendum="`patchelf` is an explicit conda-build dependency on Linux so it will always be installed.", +) def check_external(): if on_linux: patchelf = external.find_executable("patchelf") diff --git a/conda_build/cli-32.exe b/conda_build/cli-32.exe index b17d9c7b23..eaf5188c31 100755 Binary files a/conda_build/cli-32.exe and b/conda_build/cli-32.exe differ diff --git a/conda_build/cli-64.exe b/conda_build/cli-64.exe index 7b7f9c67d2..0251e7a4bc 100755 Binary files a/conda_build/cli-64.exe and b/conda_build/cli-64.exe differ diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index a966677471..13e129910d 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -535,7 +535,6 @@ def execute(args: Sequence[str] | None = None) -> int: context.__init__(argparse_args=parsed) config = get_or_merge_config(None, **parsed.__dict__) - build.check_external() # change globals in build module, see comment there as well config.channel_urls = get_channel_urls(parsed.__dict__) diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index a5cbb8b443..6e6f2bfa41 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -66,7 +66,7 @@ def get_render_parser() -> ArgumentParser: "--version", action="version", help="Show the conda-build version number and exit.", - version="conda-build %s" % __version__, + version=f"conda-build {__version__}", ) p.add_argument( "-n", diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py deleted file mode 100644 index 18056cc368..0000000000 --- a/conda_build/conda_interface.py +++ /dev/null @@ -1,550 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -from __future__ import 
annotations - -import configparser as _configparser -import os as _os -from builtins import input as _input -from functools import partial as _partial -from importlib import import_module as _import_module -from io import StringIO as _StringIO - -from conda import __version__ -from conda.auxlib.entity import EntityEncoder as _EntityEncoder -from conda.base.constants import PREFIX_PLACEHOLDER as _PREFIX_PLACEHOLDER -from conda.base.context import context as _context -from conda.base.context import determine_target_prefix as _determine_target_prefix -from conda.base.context import non_x86_machines as _non_x86_linux_machines -from conda.base.context import reset_context as _reset_context -from conda.cli.common import spec_from_line as _spec_from_line -from conda.cli.common import specs_from_args as _specs_from_args -from conda.cli.common import specs_from_url as _specs_from_url -from conda.cli.conda_argparse import ArgumentParser as _ArgumentParser -from conda.common.path import win_path_to_unix as _win_path_to_unix -from conda.common.toposort import _toposort as __toposort -from conda.core.package_cache_data import ( - ProgressiveFetchExtract as _ProgressiveFetchExtract, -) -from conda.exceptions import CondaError as _CondaError -from conda.exceptions import CondaHTTPError as _CondaHTTPError -from conda.exceptions import LinkError as _LinkError -from conda.exceptions import LockError as _LockError -from conda.exceptions import NoPackagesFoundError as _NoPackagesFoundError -from conda.exceptions import PaddingError as _PaddingError -from conda.exceptions import ResolvePackageNotFound as _ResolvePackageNotFound -from conda.exceptions import UnsatisfiableError as _UnsatisfiableError -from conda.exports import Completer as _Completer -from conda.exports import InstalledPackages as _InstalledPackages -from conda.exports import symlink_conda as _symlink_conda -from conda.gateways.connection.download import TmpDownload as _TmpDownload -from 
conda.gateways.connection.download import download as _download -from conda.gateways.connection.session import CondaSession as _CondaSession -from conda.gateways.disk.create import TemporaryDirectory as _TemporaryDirectory -from conda.gateways.disk.link import lchmod as _lchmod -from conda.misc import untracked as _untracked -from conda.misc import walk_prefix as _walk_prefix -from conda.models.channel import Channel as _Channel -from conda.models.channel import get_conda_build_local_url as _get_conda_build_local_url -from conda.models.enums import FileMode as _FileMode -from conda.models.enums import PathType as _PathType -from conda.models.match_spec import MatchSpec as _MatchSpec -from conda.models.records import PackageRecord as _PackageRecord -from conda.models.version import VersionOrder as _VersionOrder -from conda.models.version import normalized_version as _normalized_version -from conda.resolve import Resolve as _Resolve -from conda.utils import human_bytes as _human_bytes -from conda.utils import unix_path_to_win as _unix_path_to_win -from conda.utils import url_path as _url_path - -from .deprecations import deprecated -from .utils import rm_rf as _rm_rf - -try: - from conda.cli.helpers import add_parser_channels as _add_parser_channels - from conda.cli.helpers import add_parser_prefix as _add_parser_prefix -except ImportError: - # conda<23.11 - from conda.cli.conda_argparse import add_parser_channels as _add_parser_channels - from conda.cli.conda_argparse import add_parser_prefix as _add_parser_prefix - -deprecated.constant( - "24.5", - "24.7", - "Completer", - _Completer, - addendum="Unused.", -) -deprecated.constant( - "24.5", - "24.7", - "CondaSession", - _CondaSession, - addendum="Use `conda.gateways.connection.session.CondaSession` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "InstalledPackages", - _InstalledPackages, - addendum="Unused.", -) -deprecated.constant( - "24.5", - "24.7", - "NoPackagesFound", - _ResolvePackageNotFound, - 
addendum="Use `conda.exceptions.ResolvePackageNotFound` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "Unsatisfiable", - _UnsatisfiableError, - addendum="Use `conda.exceptions.UnsatisfiableError` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "symlink_conda", - _symlink_conda, - addendum="Unused.", -) - - -deprecated.constant( - "24.5", - "24.7", - "ArgumentParser", - _ArgumentParser, - addendum="Use `conda.cli.conda_argparse.ArgumentParser` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "add_parser_channels", - _add_parser_channels, - addendum="Use `conda.cli.helpers.add_parser_channels` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "add_parser_prefix", - _add_parser_prefix, - addendum="Use `conda.cli.helpers.add_parser_prefix` instead.", -) - -deprecated.constant( - "24.5", - "24.7", - "Channel", - _Channel, - addendum="Use `conda.models.channel.Channel` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "FileMode", - _FileMode, - addendum="Use `conda.models.enums.FileMode` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "PathType", - _PathType, - addendum="Use `conda.models.enums.PathType` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "MatchSpec", - _MatchSpec, - addendum="Use `conda.models.match_spec.MatchSpec` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "PackageRecord", - _PackageRecord, - addendum="Use `conda.models.records.PackageRecord` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "VersionOrder", - _VersionOrder, - addendum="Use `conda.models.version.VersionOrder` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "normalized_version", - _normalized_version, - addendum="Use `conda.models.version.normalized_version` instead.", -) - -deprecated.constant( - "24.5", - "24.7", - "EntityEncoder", - _EntityEncoder, - addendum="Use `conda.auxlib.entity.EntityEncoder` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "Resolve", - _Resolve, - 
addendum="Use `conda.resolve.Resolve` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "TemporaryDirectory", - _TemporaryDirectory, - addendum="Use `conda.gateways.disk.create.TemporaryDirectory` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "TmpDownload", - _TmpDownload, - addendum="Use `conda.gateways.connection.download.TmpDownload` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "download", - _download, - addendum="Use `conda.gateways.connection.download.download` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "_toposort", - __toposort, - addendum="Use `conda.common.toposort._toposort` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "human_bytes", - _human_bytes, - addendum="Use `conda.utils.human_bytes` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "lchmod", - _lchmod, - addendum="Use `conda.gateways.disk.link.lchmod` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "prefix_placeholder", - _PREFIX_PLACEHOLDER, - addendum="Use `conda.base.constants.PREFIX_PLACEHOLDER` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "rm_rf", - _rm_rf, - addendum="Use `conda_build.utils.rm_rf` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "spec_from_line", - _spec_from_line, - addendum="Use `conda.cli.common.spec_from_line` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "specs_from_args", - _specs_from_args, - addendum="Use `conda.cli.common.specs_from_args` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "specs_from_url", - _specs_from_url, - addendum="Use `conda.cli.common.specs_from_url` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "unix_path_to_win", - _unix_path_to_win, - addendum="Use `conda.utils.unix_path_to_win` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "untracked", - _untracked, - addendum="Use `conda.misc.untracked` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "url_path", - _url_path, - addendum="Use 
`conda.utils.url_path` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "walk_prefix", - _walk_prefix, - addendum="Use `conda.misc.walk_prefix` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "win_path_to_unix", - _win_path_to_unix, - addendum="Use `conda.common.path.win_path_to_unix` instead.", -) - -deprecated.constant( - "24.5", - "24.7", - "configparser", - _configparser, - addendum="Use `configparser` instead.", -) -deprecated.constant("24.5", "24.7", "os", _os, addendum="Use `os` instead.") -deprecated.constant( - "24.5", - "24.7", - "partial", - _partial, - addendum="Use `functools.partial` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "import_module", - _import_module, - addendum="Use `importlib.import_module` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "StringIO", - _StringIO, - addendum="Use `io.StringIO` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "input", - _input, - addendum="Use `input` instead.", -) - -deprecated.constant( - "24.5", - "24.7", - "context", - _context, - addendum="Use `conda.base.context.context` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "determine_target_prefix", - _determine_target_prefix, - addendum="Use `conda.base.context.determine_target_prefix` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "non_x86_linux_machines", - _non_x86_linux_machines, - addendum="Use `conda.base.context.non_x86_machines` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "ProgressiveFetchExtract", - _ProgressiveFetchExtract, - addendum="Use `conda.core.package_cache_data.ProgressiveFetchExtract` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "CondaError", - _CondaError, - addendum="Use `conda.exceptions.CondaError` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "CondaHTTPError", - _CondaHTTPError, - addendum="Use `conda.exceptions.CondaHTTPError` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "LinkError", - _LinkError, - 
addendum="Use `conda.exceptions.LinkError` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "LockError", - _LockError, - addendum="Use `conda.exceptions.LockError` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "NoPackagesFoundError", - _NoPackagesFoundError, - addendum="Use `conda.exceptions.NoPackagesFoundError` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "PaddingError", - _PaddingError, - addendum="Use `conda.exceptions.PaddingError` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "UnsatisfiableError", - _UnsatisfiableError, - addendum="Use `conda.exceptions.UnsatisfiableError` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "get_conda_build_local_url", - _get_conda_build_local_url, - addendum="Use `conda.models.channel.get_conda_build_local_url` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "reset_context", - _reset_context, - addendum="Use `conda.base.context.reset_context` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "binstar_upload", - _context.binstar_upload, - addendum="Use `conda.base.context.context.binstar_upload` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "default_python", - _context.default_python, - addendum="Use `conda.base.context.context.default_python` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "envs_dirs", - _context.envs_dirs, - addendum="Use `conda.base.context.context.envs_dirs` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "pkgs_dirs", - list(_context.pkgs_dirs), - addendum="Use `conda.base.context.context.pkgs_dirs` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "cc_platform", - _context.platform, - addendum="Use `conda.base.context.context.platform` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "root_dir", - _context.root_prefix, - addendum="Use `conda.base.context.context.root_prefix` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "root_writable", - _context.root_writable, - 
addendum="Use `conda.base.context.context.root_writable` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "subdir", - _context.subdir, - addendum="Use `conda.base.context.context.subdir` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "create_default_packages", - _context.create_default_packages, - addendum="Use `conda.base.context.context.create_default_packages` instead.", -) - -deprecated.constant( - "24.5", - "24.7", - "get_rc_urls", - lambda: list(_context.channels), - addendum="Use `conda.base.context.context.channels` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "get_prefix", - _partial(_determine_target_prefix, _context), - addendum="Use `conda.base.context.context.target_prefix` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "cc_conda_build", - _context.conda_build, - addendum="Use `conda.base.context.context.conda_build` instead.", -) - -deprecated.constant( - "24.5", - "24.7", - "get_conda_channel", - _Channel.from_value, - addendum="Use `conda.models.channel.Channel.from_value` instead.", -) - -deprecated.constant( - "24.5", - "24.7", - "env_path_backup_var_exists", - _os.getenv("CONDA_PATH_BACKUP"), - addendum="Unused.", -) - - -deprecated.constant( - "24.5", - "24.7", - "CONDA_VERSION", - __version__, - addendum="Use `conda.__version__` instead.", -) diff --git a/conda_build/config.py b/conda_build/config.py index 09ce6b0718..8e444c1a42 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -19,7 +19,6 @@ from conda.base.context import context from conda.utils import url_path -from .deprecations import deprecated from .utils import ( get_build_folders, get_conda_operation_locks, @@ -54,7 +53,6 @@ def set_invocation_time(): _src_cache_root_default = None error_overlinking_default = "false" error_overdepending_default = "false" -deprecated.constant("24.5", "24.7", "noarch_python_build_age_default", 0) enable_static_default = "false" no_rewrite_stdout_env_default = "false" ignore_verify_codes_default 
= [] @@ -777,15 +775,6 @@ def test_dir(self): def subdirs_same(self): return self.host_subdir == self.build_subdir - @property - @deprecated( - "24.5", - "24.7", - addendum="Use `conda.base.context.context.override_channels` instead.", - ) - def override_channels(self): - return context.override_channels - def clean(self, remove_folders=True): # build folder is the whole burrito containing envs and source folders # It will only exist if we download source, or create a build or test environment diff --git a/conda_build/create_test.py b/conda_build/create_test.py index 1a8a0f1c34..441fe4a17c 100644 --- a/conda_build/create_test.py +++ b/conda_build/create_test.py @@ -122,7 +122,7 @@ def _create_test_files( fo.write( f"{comment_char} tests for {m.dist()} (this is a generated file);\n" ) - fo.write("print('===== testing package: %s =====');\n" % m.dist()) + fo.write(f"print('===== testing package: {m.dist()} =====');\n") try: with open(test_file) as fi: @@ -134,7 +134,7 @@ def _create_test_files( fo.write( "# tests were not packaged with this module, and cannot be run\n" ) - fo.write("\nprint('===== %s OK =====');\n" % m.dist()) + fo.write(f"\nprint('===== {m.dist()} OK =====');\n") return ( out_file, bool(name) and isfile(out_file) and basename(test_file) != "no-file", @@ -175,8 +175,8 @@ def create_py_files(m: MetaData, test_dir: os.PathLike) -> bool: if imports: with open(tf, "a") as fo: for name in imports: - fo.write('print("import: %r")\n' % name) - fo.write("import %s\n" % name) + fo.write(f'print("import: {name!r}")\n') + fo.write(f"import {name}\n") fo.write("\n") return tf if (tf_exists or imports) else False @@ -202,8 +202,8 @@ def create_r_files(m: MetaData, test_dir: os.PathLike) -> bool: if imports: with open(tf, "a") as fo: for name in imports: - fo.write('print("library(%r)")\n' % name) - fo.write("library(%s)\n" % name) + fo.write(f'print("library({name!r})")\n') + fo.write(f"library({name})\n") fo.write("\n") return tf if (tf_exists or imports) else 
False @@ -225,11 +225,13 @@ def create_pl_files(m: MetaData, test_dir: os.PathLike) -> bool: break if tf_exists or imports: with open(tf, "a") as fo: - print(r'my $expected_version = "%s";' % m.version().rstrip("0"), file=fo) + print( + r'my $expected_version = "{}";'.format(m.version().rstrip("0")), file=fo + ) if imports: for name in imports: - print(r'print("import: %s\n");' % name, file=fo) - print("use %s;\n" % name, file=fo) + print(rf'print("import: {name}\n");', file=fo) + print(f"use {name};\n", file=fo) # Don't try to print version for complex imports if " " not in name: print( @@ -264,8 +266,8 @@ def create_lua_files(m: MetaData, test_dir: os.PathLike) -> bool: if imports: with open(tf, "a+") as fo: for name in imports: - print(r'print("require \"%s\"\n");' % name, file=fo) - print('require "%s"\n' % name, file=fo) + print(rf'print("require \"{name}\"\n");', file=fo) + print(f'require "{name}"\n', file=fo) return tf if (tf_exists or imports) else False diff --git a/conda_build/develop.py b/conda_build/develop.py index 59b31a3231..d0e3d59fd6 100644 --- a/conda_build/develop.py +++ b/conda_build/develop.py @@ -137,12 +137,11 @@ def execute( ) -> None: if not isdir(prefix): sys.exit( - """\ -Error: environment does not exist: %s + f"""\ +Error: environment does not exist: {prefix} # # Use 'conda create' to create the environment first. #""" - % prefix ) assert find_executable("python", prefix=prefix) diff --git a/conda_build/environ.py b/conda_build/environ.py index 5aae94e682..7a3a7ca8cb 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -536,8 +536,7 @@ def meta_vars(meta: MetaData, skip_build_id=False): value = os.getenv(var_name) if value is None: warnings.warn( - "The environment variable '%s' specified in script_env is undefined." 
- % var_name, + f"The environment variable '{var_name}' specified in script_env is undefined.", UserWarning, ) else: @@ -855,7 +854,7 @@ def get_install_actions( capture = utils.capture for feature, value in feature_list: if value: - specs.append("%s@" % feature) + specs.append(f"{feature}@") bldpkgs_dirs = ensure_list(bldpkgs_dirs) @@ -961,7 +960,7 @@ def get_install_actions( # specs are the raw specifications, not the conda-derived actual specs # We're testing that pip etc. are manually specified if not any( - re.match(r"^%s(?:$|[\s=].*)" % pkg, str(dep)) for dep in specs + re.match(rf"^{pkg}(?:$|[\s=].*)", str(dep)) for dep in specs ): precs = [prec for prec in precs if prec.name != pkg] cached_precs[(specs, env, subdir, channel_urls, disable_pip)] = precs.copy() @@ -1341,7 +1340,7 @@ def _display_actions(prefix, precs): builder = ["", "## Package Plan ##\n"] if prefix: - builder.append(" environment location: %s" % prefix) + builder.append(f" environment location: {prefix}") builder.append("") print("\n".join(builder)) @@ -1385,9 +1384,9 @@ def channel_filt(s): # string with new-style string formatting. 
fmt[pkg] = f"{{pkg:<{maxpkg}}} {{vers:<{maxver}}}" if maxchannels: - fmt[pkg] += " {channel:<%s}" % maxchannels + fmt[pkg] += f" {{channel:<{maxchannels}}}" if features[pkg]: - fmt[pkg] += " [{features:<%s}]" % maxfeatures + fmt[pkg] += f" [{{features:<{maxfeatures}}}]" lead = " " * 4 diff --git a/conda_build/exceptions.py b/conda_build/exceptions.py index f38706786a..8aa10149d9 100644 --- a/conda_build/exceptions.py +++ b/conda_build/exceptions.py @@ -2,12 +2,14 @@ # SPDX-License-Identifier: BSD-3-Clause import textwrap +from conda import CondaError + SEPARATOR = "-" * 70 indent = lambda s: textwrap.fill(textwrap.dedent(s)) -class CondaBuildException(Exception): +class CondaBuildException(CondaError): pass @@ -107,22 +109,26 @@ class BuildLockError(CondaBuildException): """Raised when we failed to acquire a lock.""" -class OverLinkingError(RuntimeError): +class OverLinkingError(RuntimeError, CondaBuildException): def __init__(self, error, *args): self.error = error - self.msg = "overlinking check failed \n%s" % (error) + self.msg = f"overlinking check failed \n{error}" super().__init__(self.msg) -class OverDependingError(RuntimeError): +class OverDependingError(RuntimeError, CondaBuildException): def __init__(self, error, *args): self.error = error - self.msg = "overdepending check failed \n%s" % (error) + self.msg = f"overdepending check failed \n{error}" super().__init__(self.msg) -class RunPathError(RuntimeError): +class RunPathError(RuntimeError, CondaBuildException): def __init__(self, error, *args): self.error = error - self.msg = "runpaths check failed \n%s" % (error) + self.msg = f"runpaths check failed \n{error}" super().__init__(self.msg) + + +class BuildScriptException(CondaBuildException): + pass diff --git a/conda_build/gui-32.exe b/conda_build/gui-32.exe index bee7e543c4..289c77ca9c 100755 Binary files a/conda_build/gui-32.exe and b/conda_build/gui-32.exe differ diff --git a/conda_build/gui-64.exe b/conda_build/gui-64.exe index 366a721736..c6cdccd2c5 
100755 Binary files a/conda_build/gui-64.exe and b/conda_build/gui-64.exe differ diff --git a/conda_build/index.py b/conda_build/index.py index 3a2f9ab10b..fc72a3fd0d 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -1,9 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import json import logging import os -import time from functools import partial from os.path import dirname @@ -14,9 +12,7 @@ from conda_index.index import update_index as _update_index from . import utils -from .deprecations import deprecated from .utils import ( - JSONDecodeError, get_logger, ) @@ -28,8 +24,6 @@ local_subdir = "" local_output_folder = "" cached_channels = [] -_channel_data = {} -deprecated.constant("24.1", "24.7", "channel_data", _channel_data) # TODO: this is to make sure that the index doesn't leak tokens. It breaks use of private channels, though. # os.environ['CONDA_ADD_ANACONDA_TOKEN'] = "false" @@ -56,7 +50,6 @@ def get_build_index( global local_output_folder global cached_index global cached_channels - global _channel_data mtime = 0 channel_urls = list(utils.ensure_list(channel_urls)) @@ -131,55 +124,11 @@ def get_build_index( platform=subdir, ) - expanded_channels = {rec.channel for rec in cached_index} - - superchannel = {} - # we need channeldata.json too, as it is a more reliable source of run_exports data - for channel in expanded_channels: - if channel.scheme == "file": - location = channel.location - if utils.on_win: - location = location.lstrip("/") - elif not os.path.isabs(channel.location) and os.path.exists( - os.path.join(os.path.sep, channel.location) - ): - location = os.path.join(os.path.sep, channel.location) - channeldata_file = os.path.join( - location, channel.name, "channeldata.json" - ) - retry = 0 - max_retries = 1 - if os.path.isfile(channeldata_file): - while retry < max_retries: - try: - with open(channeldata_file, "r+") as f: - _channel_data[channel.name] = json.load(f) - break - except (OSError, 
JSONDecodeError): - time.sleep(0.2) - retry += 1 - else: - # download channeldata.json for url - if not context.offline: - try: - _channel_data[channel.name] = utils.download_channeldata( - channel.base_url + "/channeldata.json" - ) - except CondaHTTPError: - continue - # collapse defaults metachannel back into one superchannel, merging channeldata - if channel.base_url in context.default_channels and _channel_data.get( - channel.name - ): - packages = superchannel.get("packages", {}) - packages.update(_channel_data[channel.name]) - superchannel["packages"] = packages - _channel_data["defaults"] = superchannel local_index_timestamp = os.path.getmtime(index_file) local_subdir = subdir local_output_folder = output_folder cached_channels = channel_urls - return cached_index, local_index_timestamp, _channel_data + return cached_index, local_index_timestamp, None def _ensure_valid_channel(local_folder, subdir): diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 19c0db7ca3..43fc401551 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -132,7 +132,7 @@ def print_linkages( else sort_order.get(key[0], (4, key[0])) ), ): - output_string += "%s:\n" % prec + output_string += f"{prec}:\n" if show_files: for lib, path, binary in sorted(links): output_string += f" {lib} ({path}) from {binary}\n" @@ -296,7 +296,7 @@ def inspect_linkages( output_string += print_linkages(inverted_map[dep], show_files=show_files) else: - raise ValueError("Unrecognized groupby: %s" % groupby) + raise ValueError(f"Unrecognized groupby: {groupby}") if hasattr(output_string, "decode"): output_string = output_string.decode("utf-8") return output_string diff --git a/conda_build/license_family.py b/conda_build/license_family.py index 976cc1b33a..ab101274ae 100644 --- a/conda_build/license_family.py +++ b/conda_build/license_family.py @@ -29,7 +29,7 @@ gpl3_regex = re.compile("GPL[^2]*3") # match GPL3 gpl23_regex = re.compile("GPL[^2]*>= *2") # match GPL >= 2 
cc_regex = re.compile(r"CC\w+") # match CC -punk_regex = re.compile("[%s]" % re.escape(string.punctuation)) # removes punks +punk_regex = re.compile(f"[{re.escape(string.punctuation)}]") # removes punks def match_gpl3(family): diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 6fd065e0b2..1c51246264 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -23,6 +23,7 @@ from . import exceptions, utils from .config import Config, get_or_merge_config +from .deprecations import deprecated from .features import feature_list from .license_family import ensure_valid_license_family from .utils import ( @@ -45,7 +46,10 @@ ) if TYPE_CHECKING: - from typing import Any, Literal + from typing import Any, Literal, Self + + OutputDict = dict[str, Any] + OutputTuple = tuple[OutputDict, "MetaData"] try: import yaml @@ -397,7 +401,7 @@ def ensure_valid_noarch_value(meta): build_noarch = meta.get("build", {}).get("noarch") if build_noarch and build_noarch not in NOARCH_TYPES: raise exceptions.CondaBuildException( - "Invalid value for noarch: %s" % build_noarch + f"Invalid value for noarch: {build_noarch}" ) @@ -408,7 +412,17 @@ def _get_all_dependencies(metadata, envs=("host", "build", "run")): return reqs -def check_circular_dependencies(render_order, config=None): +@deprecated( + "24.5.1", + "24.7.0", + addendum="Use `conda_build.metadata._check_circular_dependencies` instead.", +) +def check_circular_dependencies( + render_order: dict[dict[str, Any], MetaData], + config: Config | None = None, +): + # deprecated since the input type (render_order) changed + envs: tuple[str, ...] 
if config and config.host_subdir != config.build_subdir: # When cross compiling build dependencies are already built # and cannot come from the recipe as subpackages @@ -433,6 +447,39 @@ def check_circular_dependencies(render_order, config=None): raise exceptions.RecipeError(error) +def _check_circular_dependencies( + render_order: list[OutputTuple], + config: Config | None = None, +) -> None: + envs: tuple[str, ...] + if config and config.host_subdir != config.build_subdir: + # When cross compiling build dependencies are already built + # and cannot come from the recipe as subpackages + envs = ("host", "run") + else: + envs = ("build", "host", "run") + + pairs: list[tuple[str, str]] = [] + for idx, (_, metadata) in enumerate(render_order): + name = metadata.name() + for _, other_metadata in render_order[idx + 1 :]: + other_name = other_metadata.name() + if any( + name == dep.split(" ")[0] + for dep in _get_all_dependencies(other_metadata, envs=envs) + ) and any( + other_name == dep.split(" ")[0] + for dep in _get_all_dependencies(metadata, envs=envs) + ): + pairs.append((name, other_name)) + + if pairs: + error = "Circular dependencies in recipe: \n" + for pair in pairs: + error += " {} <-> {}\n".format(*pair) + raise exceptions.RecipeError(error) + + def _variants_equal(metadata, output_metadata): match = True for key, val in metadata.config.variant.items(): @@ -828,7 +875,7 @@ def _get_env_path(env_name_or_path): break bootstrap_metadir = os.path.join(env_name_or_path, "conda-meta") if not os.path.isdir(bootstrap_metadir): - print("Bootstrap environment '%s' not found" % env_name_or_path) + print(f"Bootstrap environment '{env_name_or_path}' not found") sys.exit(1) return env_name_or_path @@ -846,14 +893,13 @@ def _get_dependencies_from_environment(env_name_or_path): return {"requirements": {"build": bootstrap_requirements}} -def toposort(output_metadata_map): - """This function is used to work out the order to run the install scripts - for split packages based 
on any interdependencies. The result is just - a re-ordering of outputs such that we can run them in that order and - reset the initial set of files in the install prefix after each. This - will naturally lead to non-overlapping files in each package and also - the correct files being present during the install and test procedures, - provided they are run in this order.""" +@deprecated( + "24.5.1", + "24.7.0", + addendum="Use `conda_build.metadata.toposort_outputs` instead.", +) +def toposort(output_metadata_map: dict[OutputDict, MetaData]): + # deprecated since input type (output_metadata_map) and output changed from conda.common.toposort import _toposort # We only care about the conda packages built by this recipe. Non-conda @@ -863,9 +909,9 @@ def toposort(output_metadata_map): for output_d in output_metadata_map if output_d.get("type", "conda").startswith("conda") ] - topodict = dict() - order = dict() - endorder = set() + topodict: dict[str, set[str]] = dict() + order: dict[str, int] = dict() + endorder: set[int] = set() for idx, (output_d, output_m) in enumerate(output_metadata_map.items()): if output_d.get("type", "conda").startswith("conda"): @@ -907,6 +953,63 @@ def toposort(output_metadata_map): return result +def _toposort_outputs(output_tuples: list[OutputTuple]) -> list[OutputTuple]: + """This function is used to work out the order to run the install scripts + for split packages based on any interdependencies. The result is just + a re-ordering of outputs such that we can run them in that order and + reset the initial set of files in the install prefix after each. This + will naturally lead to non-overlapping files in each package and also + the correct files being present during the install and test procedures, + provided they are run in this order.""" + from conda.common.toposort import _toposort + + # We only care about the conda packages built by this recipe. Non-conda + # packages get sorted to the end. 
+ conda_outputs: dict[str, list[OutputTuple]] = {} + non_conda_outputs: list[OutputTuple] = [] + for output_tuple in output_tuples: + output_d, _ = output_tuple + if output_d.get("type", "conda").startswith("conda"): + # conda packages must have a name + # the same package name may be seen multiple times (variants) + conda_outputs.setdefault(output_d["name"], []).append(output_tuple) + elif "name" in output_d: + non_conda_outputs.append(output_tuple) + else: + # TODO: is it even possible to get here? and if so should we silently ignore or error? + utils.get_logger(__name__).warn("Found an output without a name, skipping") + + # Iterate over conda packages, creating a mapping of package names to their + # dependencies to be used in toposort + name_to_dependencies: dict[str, set[str]] = {} + for name, same_name_outputs in conda_outputs.items(): + for output_d, output_metadata in same_name_outputs: + # dependencies for all of the variants + dependencies = ( + *output_metadata.get_value("requirements/run", []), + *output_metadata.get_value("requirements/host", []), + *( + output_metadata.get_value("requirements/build", []) + if not output_metadata.is_cross + else [] + ), + ) + name_to_dependencies.setdefault(name, set()).update( + dependency_name + for dependency in dependencies + if (dependency_name := dependency.split(" ")[0]) in conda_outputs + ) + + return [ + *( + output + for name in _toposort(name_to_dependencies) + for output in conda_outputs[name] + ), + *non_conda_outputs, + ] + + def get_output_dicts_from_metadata( metadata: MetaData, outputs: list[dict[str, Any]] | None = None, @@ -1478,7 +1581,7 @@ def check_field(key, section): if section == "extra": continue if section not in FIELDS: - raise ValueError("unknown section: %s" % section) + raise ValueError(f"unknown section: {section}") for key_or_dict in submeta: if section in OPTIONALLY_ITERABLE_FIELDS and isinstance( key_or_dict, dict @@ -1492,17 +1595,17 @@ def check_field(key, section): def name(self) 
-> str: name = self.get_value("package/name", "") if not name and self.final: - sys.exit("Error: package/name missing in: %r" % self.meta_path) + sys.exit(f"Error: package/name missing in: {self.meta_path!r}") name = str(name) if name != name.lower(): - sys.exit("Error: package/name must be lowercase, got: %r" % name) + sys.exit(f"Error: package/name must be lowercase, got: {name!r}") check_bad_chrs(name, "package/name") return name def version(self) -> str: version = self.get_value("package/version", "") if not version and not self.get_section("outputs") and self.final: - sys.exit("Error: package/version missing in: %r" % self.meta_path) + sys.exit(f"Error: package/version missing in: {self.meta_path!r}") version = str(version) check_bad_chrs(version, "package/version") if self.final and version.startswith("."): @@ -1571,7 +1674,7 @@ def ms_depends(self, typ="run"): try: ms = MatchSpec(spec) except AssertionError: - raise RuntimeError("Invalid package specification: %r" % spec) + raise RuntimeError(f"Invalid package specification: {spec!r}") except (AttributeError, ValueError) as e: raise RuntimeError( "Received dictionary as spec. 
Note that pip requirements are " @@ -1580,7 +1683,7 @@ def ms_depends(self, typ="run"): if ms.name == self.name() and not ( typ == "build" and self.config.host_subdir != self.config.build_subdir ): - raise RuntimeError("%s cannot depend on itself" % self.name()) + raise RuntimeError(f"{self.name()} cannot depend on itself") for name, ver in name_ver_list: if ms.name == name: if self.noarch: @@ -1708,7 +1811,7 @@ def build_id(self): out = build_string_from_metadata(self) if self.config.filename_hashing and self.final: hash_ = self.hash_dependencies() - if not re.findall("h[0-9a-f]{%s}" % self.config.hash_length, out): + if not re.findall(f"h[0-9a-f]{{{self.config.hash_length}}}", out): ret = out.rsplit("_", 1) try: int(ret[0]) @@ -1718,14 +1821,14 @@ def build_id(self): if len(ret) > 1: out = "_".join([out] + ret[1:]) else: - out = re.sub("h[0-9a-f]{%s}" % self.config.hash_length, hash_, out) + out = re.sub(f"h[0-9a-f]{{{self.config.hash_length}}}", hash_, out) return out def dist(self): return f"{self.name()}-{self.version()}-{self.build_id()}" def pkg_fn(self): - return "%s.tar.bz2" % self.dist() + return f"{self.dist()}.tar.bz2" def is_app(self): return bool(self.get_value("app/entry")) @@ -1733,8 +1836,8 @@ def is_app(self): def app_meta(self): d = {"type": "app"} if self.get_value("app/icon"): - d["icon"] = "%s.png" % compute_sum( - join(self.path, self.get_value("app/icon")), "md5" + d["icon"] = "{}.png".format( + compute_sum(join(self.path, self.get_value("app/icon")), "md5") ) for field, key in [ @@ -2268,7 +2371,7 @@ def validate_features(self): "character in your recipe." 
) - def copy(self): + def copy(self: Self) -> MetaData: new = copy.copy(self) new.config = self.config.copy() new.config.variant = copy.deepcopy(self.config.variant) @@ -2319,7 +2422,7 @@ def variant_in_source(self): # constrain the stored variants to only this version in the output # variant mapping if re.search( - r"\s*\{\{\s*%s\s*(?:.*?)?\}\}" % key, self.extract_source_text() + rf"\s*\{{\{{\s*{key}\s*(?:.*?)?\}}\}}", self.extract_source_text() ): return True return False @@ -2520,10 +2623,10 @@ def get_output_metadata_set( permit_undefined_jinja: bool = False, permit_unsatisfiable_variants: bool = False, bypass_env_check: bool = False, - ) -> list[tuple[dict[str, Any], MetaData]]: + ) -> list[OutputTuple]: from .source import provide - out_metadata_map = {} + output_tuples: list[OutputTuple] = [] if self.final: outputs = get_output_dicts_from_metadata(self) output_tuples = [(outputs[0], self)] @@ -2579,27 +2682,26 @@ def get_output_metadata_set( } ), ] = (out, out_metadata) - out_metadata_map[deepfreeze(out)] = out_metadata + output_tuples.append((out, out_metadata)) ref_metadata.other_outputs = out_metadata.other_outputs = ( all_output_metadata ) except SystemExit: if not permit_undefined_jinja: raise - out_metadata_map = {} + output_tuples = [] - assert out_metadata_map, ( + assert output_tuples, ( "Error: output metadata set is empty. 
Please file an issue" " on the conda-build tracker at https://github.com/conda/conda-build/issues" ) - # format here is {output_dict: metadata_object} - render_order = toposort(out_metadata_map) - check_circular_dependencies(render_order, config=self.config) + render_order: list[OutputTuple] = _toposort_outputs(output_tuples) + _check_circular_dependencies(render_order, config=self.config) conda_packages = OrderedDict() non_conda_packages = [] - for output_d, m in render_order.items(): + for output_d, m in render_order: if not output_d.get("type") or output_d["type"] in ( "conda", "conda_v2", diff --git a/conda_build/noarch_python.py b/conda_build/noarch_python.py index fb81565b3d..1e80fcd2e4 100644 --- a/conda_build/noarch_python.py +++ b/conda_build/noarch_python.py @@ -26,7 +26,7 @@ def rewrite_script(fn, prefix): try: data = fi.read() except UnicodeDecodeError: # file is binary - sys.exit("[noarch_python] Noarch package contains binary script: %s" % fn) + sys.exit(f"[noarch_python] Noarch package contains binary script: {fn}") src_mode = os.stat(src).st_mode os.unlink(src) @@ -83,7 +83,7 @@ def handle_file(f, d, prefix): else: # this should be the built-in logging module, not conda-build's stuff, because this file is standalone. log = logging.getLogger(__name__) - log.debug("Don't know how to handle file: %s. Including it as-is." % f) + log.debug(f"Don't know how to handle file: {f}. 
Including it as-is.") def populate_files(m, files, prefix, entry_point_scripts=None): @@ -119,7 +119,7 @@ def transform(m, files, prefix): # Create *nix prelink script # Note: it's important to use LF newlines or it wont work if we build on Win - with open(join(bin_dir, ".%s-pre-link.sh" % name), "wb") as fo: + with open(join(bin_dir, f".{name}-pre-link.sh"), "wb") as fo: fo.write( b"""\ #!/bin/bash @@ -128,7 +128,7 @@ def transform(m, files, prefix): ) # Create windows prelink script (be nice and use Windows newlines) - with open(join(scripts_dir, ".%s-pre-link.bat" % name), "wb") as fo: + with open(join(scripts_dir, f".{name}-pre-link.bat"), "wb") as fo: fo.write( """\ @echo off diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index b2de763074..84e80b8e90 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -44,7 +44,7 @@ def ldd(path): continue if "ld-linux" in line: continue - raise RuntimeError("Unexpected output from ldd: %s" % line) + raise RuntimeError(f"Unexpected output from ldd: {line}") return res diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index d02cd2bd30..3823ce4a9d 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -353,12 +353,12 @@ def _get_path_dirs(prefix): yield "/".join((prefix, "bin")) -def get_uniqueness_key(file): +def get_uniqueness_key(filename, file): binary = ensure_binary(file) if not binary: return EXE_FORMATS.UNKNOWN elif binary.format == EXE_FORMATS.MACHO: - return str(file) + return filename elif binary.format == EXE_FORMATS.ELF and ( # noqa binary.type == lief.ELF.ELF_CLASS.CLASS32 or binary.type == lief.ELF.ELF_CLASS.CLASS64 @@ -369,8 +369,8 @@ def get_uniqueness_key(file): ] if result: return result[0] - return str(file) - return str(file) + return filename + return filename def _get_resolved_location( @@ -505,13 +505,13 @@ def inspect_linkages_lief( for element in todo: todo.pop(0) filename2 = element[0] - binary 
= element[1] - if not binary: + binary2 = element[1] + if not binary2: continue - uniqueness_key = get_uniqueness_key(binary) + uniqueness_key = get_uniqueness_key(filename2, binary2) if uniqueness_key not in already_seen: parent_exe_dirname = None - if binary.format == EXE_FORMATS.PE: + if binary2.format == EXE_FORMATS.PE: tmp_filename = filename2 while tmp_filename: if ( @@ -527,17 +527,17 @@ def inspect_linkages_lief( if ".pyd" in filename2 or (os.sep + "DLLs" + os.sep) in filename2: parent_exe_dirname = envroot.replace(os.sep, "/") + "/DLLs" rpaths_by_binary[filename2] = get_rpaths( - binary, parent_exe_dirname, envroot.replace(os.sep, "/"), sysroot + binary2, parent_exe_dirname, envroot.replace(os.sep, "/"), sysroot ) tmp_filename = filename2 rpaths_transitive = [] - if binary.format == EXE_FORMATS.PE: + if binary2.format == EXE_FORMATS.PE: rpaths_transitive = rpaths_by_binary[tmp_filename] else: while tmp_filename: rpaths_transitive[:0] = rpaths_by_binary[tmp_filename] tmp_filename = parents_by_filename[tmp_filename] - libraries = get_libraries(binary) + libraries = get_libraries(binary2) if filename2 in libraries: # Happens on macOS, leading to cycles. libraries.remove(filename2) # RPATH is implicit everywhere except macOS, make it explicit to simplify things. @@ -546,14 +546,14 @@ def inspect_linkages_lief( "$RPATH/" + lib if not lib.startswith("/") and not lib.startswith("$") - and binary.format != EXE_FORMATS.MACHO # noqa + and binary2.format != EXE_FORMATS.MACHO # noqa else lib ) for lib in libraries ] for lib, orig in zip(libraries, these_orig): resolved = _get_resolved_location( - binary, + binary2, orig, exedir, exedir, @@ -568,7 +568,7 @@ def inspect_linkages_lief( # can be run case-sensitively if the user wishes. 
# """ - if binary.format == EXE_FORMATS.PE: + if binary2.format == EXE_FORMATS.PE: import random path_fixed = ( os.path.dirname(path_fixed) @@ -596,7 +596,7 @@ def inspect_linkages_lief( if recurse: if os.path.exists(resolved[0]): todo.append([resolved[0], lief.parse(resolved[0])]) - already_seen.add(get_uniqueness_key(binary)) + already_seen.add(uniqueness_key) return results @@ -1125,9 +1125,9 @@ def get_symbols(file, defined=True, undefined=True, notexported=False, arch="nat ) if binary.__class__ != lief.MachO.Binary: if isinstance(s, str): - s_name = "%s" % s + s_name = f"{s}" else: - s_name = "%s" % s.name + s_name = f"{s.name}" if s.exported and s.imported: print(f"Weird, symbol {s.name} is both imported and exported") if s.exported: @@ -1136,16 +1136,16 @@ def get_symbols(file, defined=True, undefined=True, notexported=False, arch="nat elif s.imported: is_undefined = False else: - s_name = "%s" % s.name + s_name = f"{s.name}" is_notexported = False if s.type & 1 else True # print("{:32s} : s.type 0b{:020b}, s.value 0b{:020b}".format(s.name, s.type, s.value)) # print("s.value 0b{:020b} :: s.type 0b{:020b}, {:32s}".format(s.value, s.type, s.name)) if notexported is True or is_notexported is False: if is_undefined and undefined: - res.append("%s" % s_name) + res.append(f"{s_name}") elif not is_undefined and defined: - res.append("%s" % s_name) + res.append(f"{s_name}") return res diff --git a/conda_build/os_utils/macho.py b/conda_build/os_utils/macho.py index 516df7a0a6..17fc5d5a13 100644 --- a/conda_build/os_utils/macho.py +++ b/conda_build/os_utils/macho.py @@ -286,7 +286,7 @@ def add_rpath(path, rpath, build_prefix=None, verbose=False): args = ["-add_rpath", rpath, path] code, _, stderr = install_name_tool(args, build_prefix) if "Mach-O dynamic shared library stub file" in stderr: - print("Skipping Mach-O dynamic shared library stub file %s\n" % path) + print(f"Skipping Mach-O dynamic shared library stub file {path}\n") return elif "would duplicate path, 
file already has LC_RPATH for:" in stderr: print("Skipping -add_rpath, file already has LC_RPATH set") @@ -304,7 +304,7 @@ def delete_rpath(path, rpath, build_prefix=None, verbose=False): args = ["-delete_rpath", rpath, path] code, _, stderr = install_name_tool(args, build_prefix) if "Mach-O dynamic shared library stub file" in stderr: - print("Skipping Mach-O dynamic shared library stub file %s\n" % path) + print(f"Skipping Mach-O dynamic shared library stub file {path}\n") return elif "no LC_RPATH load command with path:" in stderr: print("Skipping -delete_rpath, file doesn't contain that LC_RPATH") @@ -341,7 +341,7 @@ def install_name_change(path, build_prefix, cb_func, dylibs, verbose=False): args.extend(("-change", dylibs[index]["name"], new_name, path)) code, _, stderr = install_name_tool(args, build_prefix) if "Mach-O dynamic shared library stub file" in stderr: - print("Skipping Mach-O dynamic shared library stub file %s" % path) + print(f"Skipping Mach-O dynamic shared library stub file {path}") ret = False continue else: diff --git a/conda_build/post.py b/conda_build/post.py index 30a4057a30..67c6a355a7 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -150,11 +150,11 @@ def write_pth(egg_path, config): with open( join( utils.get_site_packages(config.host_prefix, py_ver), - "%s.pth" % (fn.split("-")[0]), + "{}.pth".format(fn.split("-")[0]), ), "w", ) as fo: - fo.write("./%s\n" % fn) + fo.write(f"./{fn}\n") def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False): @@ -368,7 +368,7 @@ def find_lib(link, prefix, files, path=None): if link.startswith(prefix): link = normpath(link[len(prefix) + 1 :]) if not any(link == normpath(w) for w in files): - sys.exit("Error: Could not find %s" % link) + sys.exit(f"Error: Could not find {link}") return link if link.startswith("/"): # but doesn't start with the build prefix return @@ -382,7 +382,7 @@ def find_lib(link, prefix, files, path=None): for f in files: 
file_names[basename(f)].append(f) if link not in file_names: - sys.exit("Error: Could not find %s" % link) + sys.exit(f"Error: Could not find {link}") if len(file_names[link]) > 1: if path and basename(path) == link: # The link is for the file itself, just use it @@ -403,7 +403,7 @@ def find_lib(link, prefix, files, path=None): "Choosing the first one." ) return file_names[link][0] - print("Don't know how to find %s, skipping" % link) + print(f"Don't know how to find {link}, skipping") def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): @@ -417,8 +417,7 @@ def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): ) if not codefile_class(link, skip_symlinks=True): sys.exit( - "Error: Compiler runtime library in build prefix not found in host prefix %s" - % link + f"Error: Compiler runtime library in build prefix not found in host prefix {link}" ) else: print(f".. fixing linking of {link} in {path} instead") @@ -429,7 +428,7 @@ def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): return print(f"Fixing linking of {link} in {path}") - print("New link location is %s" % (link_loc)) + print(f"New link location is {link_loc}") lib_to_link = relpath(dirname(link_loc), "lib") # path_to_lib = utils.relative(path[len(prefix) + 1:]) @@ -647,7 +646,7 @@ def assert_relative_osx(path, host_prefix, build_prefix): for prefix in (host_prefix, build_prefix): if prefix and name.startswith(prefix): raise RuntimeError( - "library at %s appears to have an absolute path embedded" % path + f"library at {path} appears to have an absolute path embedded" ) @@ -1770,7 +1769,7 @@ def check_symlinks(files, prefix, croot): if msgs: for msg in msgs: - print("Error: %s" % msg, file=sys.stderr) + print(f"Error: {msg}", file=sys.stderr) sys.exit(1) diff --git a/conda_build/render.py b/conda_build/render.py index b021f8a5b6..cc3bcd87c0 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -115,7 +115,7 @@ def _categorize_deps(m, specs, 
exclude_pattern, variant): # for sake of comparison, ignore dashes and underscores if dash_or_under.sub("", key) == dash_or_under.sub( "", spec_name - ) and not re.search(r"%s\s+[0-9a-zA-Z\_\.\<\>\=\*]" % spec_name, spec): + ) and not re.search(rf"{spec_name}\s+[0-9a-zA-Z\_\.\<\>\=\*]", spec): dependencies.append(" ".join((spec_name, value))) elif exclude_pattern.match(spec): pass_through_deps.append(spec) diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index c9bd5c398c..31213054d1 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -511,9 +511,7 @@ def skeletonize( # packages, unless we're newer than what's in core if metacpan_api_is_core_version(meta_cpan_url, package): if not write_core: - print( - "We found core module %s. Skipping recipe creation." % packagename - ) + print(f"We found core module {packagename}. Skipping recipe creation.") continue d["useurl"] = "#" @@ -577,12 +575,11 @@ def skeletonize( version = None if exists(dir_path) and not force: print( - "Directory %s already exists and you have not specified --force " - % dir_path + f"Directory {dir_path} already exists and you have not specified --force " ) continue elif exists(dir_path) and force: - print("Directory %s already exists, but forcing recipe creation" % dir_path) + print(f"Directory {dir_path} already exists, but forcing recipe creation") try: d["homeurl"] = release_data["resources"]["homepage"] @@ -756,7 +753,7 @@ def deps_for_package( } packages_to_append = set() - print("Processing dependencies for %s..." % package, end="") + print(f"Processing dependencies for {package}...", end="") sys.stdout.flush() if not release_data.get("dependency"): @@ -1052,11 +1049,8 @@ def metacpan_api_is_core_version(cpan_url, module): return True else: sys.exit( - ( - "Error: Could not find module or distribution named" - " %s on MetaCPAN." - ) - % (module) + "Error: Could not find module or distribution named" + f" {module} on MetaCPAN." 
) diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py index 38628a52f4..93958333fb 100755 --- a/conda_build/skeletons/cran.py +++ b/conda_build/skeletons/cran.py @@ -489,7 +489,7 @@ def dict_from_cran_lines(lines): # - Suggests in corpcor (k, v) = line.split(":", 1) except ValueError: - sys.exit("Error: Could not parse metadata (%s)" % line) + sys.exit(f"Error: Could not parse metadata ({line})") d[k] = v # if k not in CRAN_KEYS: # print("Warning: Unknown key %s" % k) @@ -597,7 +597,7 @@ def read_description_contents(fp): def get_archive_metadata(path, verbose=True): if verbose: - print("Reading package metadata from %s" % path) + print(f"Reading package metadata from {path}") if basename(path) == "DESCRIPTION": with open(path, "rb") as fp: return read_description_contents(fp) @@ -614,8 +614,8 @@ def get_archive_metadata(path, verbose=True): fp = zf.open(member, "r") return read_description_contents(fp) else: - sys.exit("Cannot extract a DESCRIPTION from file %s" % path) - sys.exit("%s does not seem to be a CRAN package (no DESCRIPTION) file" % path) + sys.exit(f"Cannot extract a DESCRIPTION from file {path}") + sys.exit(f"{path} does not seem to be a CRAN package (no DESCRIPTION) file") def get_latest_git_tag(config): @@ -638,12 +638,12 @@ def get_latest_git_tag(config): stdout = stdout.decode("utf-8") stderr = stderr.decode("utf-8") if stderr or p.returncode: - sys.exit("Error: git tag failed (%s)" % stderr) + sys.exit(f"Error: git tag failed ({stderr})") tags = stdout.strip().splitlines() if not tags: sys.exit("Error: no tags found") - print("Using tag %s" % tags[-1]) + print(f"Using tag {tags[-1]}") return tags[-1] @@ -683,7 +683,7 @@ def get_cran_archive_versions(cran_url, session, package, verbose=True): r.raise_for_status() except requests.exceptions.HTTPError as e: if e.response.status_code == 404: - print("No archive directory for package %s" % package) + print(f"No archive directory for package {package}") return [] raise versions = 
[] @@ -698,7 +698,7 @@ def get_cran_archive_versions(cran_url, session, package, verbose=True): def get_cran_index(cran_url, session, verbose=True): if verbose: - print("Fetching main index from %s" % cran_url) + print(f"Fetching main index from {cran_url}") r = session.get(cran_url + "/src/contrib/") r.raise_for_status() records = {} @@ -775,7 +775,7 @@ def package_to_inputs_dict( """ if isfile(package): return None - print("Parsing input package %s:" % package) + print(f"Parsing input package {package}:") package = strip_end(package, "/") package = strip_end(package, sep) if "github.com" in package: @@ -1037,7 +1037,7 @@ def skeletonize( session = get_session(output_dir) cran_index = get_cran_index(cran_url, session) if pkg_name.lower() not in cran_index: - sys.exit("Package %s not found" % pkg_name) + sys.exit(f"Package {pkg_name} not found") package, cran_version = cran_index[pkg_name.lower()] if cran_version and (not version or version == cran_version): version = cran_version @@ -1048,8 +1048,7 @@ def skeletonize( sys.exit(1) elif not version and not cran_version and not allow_archived: print( - "ERROR: Package %s is archived; to build, use --allow-archived or a --version value" - % pkg_name + f"ERROR: Package {pkg_name} is archived; to build, use --allow-archived or a --version value" ) sys.exit(1) else: @@ -1325,7 +1324,7 @@ def skeletonize( if cran_package is None: cran_package = get_archive_metadata(description_path) d["cran_metadata"] = "\n".join( - ["# %s" % line for line in cran_package["orig_lines"] if line] + [f"# {line}" for line in cran_package["orig_lines"] if line] ) # Render the source and binaryN keys @@ -1377,7 +1376,7 @@ def skeletonize( d["summary"] = " " + yaml_quote_string(cran_package["Description"]) if "Suggests" in cran_package and not no_comments: - d["suggests"] = "# Suggests: %s" % cran_package["Suggests"] + d["suggests"] = "# Suggests: {}".format(cran_package["Suggests"]) else: d["suggests"] = "" @@ -1589,7 +1588,7 @@ def 
skeletonize( ) package_list.append(lower_name) - d["%s_depends" % dep_type] = "".join(deps) + d[f"{dep_type}_depends"] = "".join(deps) if no_comments: global CRAN_BUILD_SH_SOURCE, CRAN_META @@ -1603,7 +1602,7 @@ def skeletonize( if update_policy == "error": raise RuntimeError( "directory already exists " - "(and --update-policy is 'error'): %s" % dir_path + f"(and --update-policy is 'error'): {dir_path}" ) elif update_policy == "overwrite": rm_rf(dir_path) @@ -1626,7 +1625,7 @@ def skeletonize( makedirs(join(dir_path)) except: pass - print("Writing recipe for %s" % package.lower()) + print(f"Writing recipe for {package.lower()}") with open(join(dir_path, "meta.yaml"), "w") as f: f.write(clear_whitespace(CRAN_META.format(**d))) if not exists(join(dir_path, "build.sh")) or update_policy == "overwrite": @@ -1683,14 +1682,14 @@ def get_outdated(output_dir, cran_index, packages=()): continue if recipe_name not in cran_index: - print("Skipping %s, not found on CRAN" % recipe) + print(f"Skipping {recipe}, not found on CRAN") continue version_compare( join(output_dir, recipe), cran_index[recipe_name][1].replace("-", "_") ) - print("Updating %s" % recipe) + print(f"Updating {recipe}") to_update.append(recipe_name) return to_update diff --git a/conda_build/skeletons/luarocks.py b/conda_build/skeletons/luarocks.py index da8e641928..41ec499bad 100644 --- a/conda_build/skeletons/luarocks.py +++ b/conda_build/skeletons/luarocks.py @@ -174,7 +174,7 @@ def package_exists(package_name): def getval(spec, k): if k not in spec: - raise Exception("Required key %s not in spec" % k) + raise Exception(f"Required key {k} not in spec") else: return spec[k] @@ -184,7 +184,7 @@ def warn_against_branches(branch): print("=========================================") print("") print("WARNING:") - print("Building a rock referenced to branch %s." % branch) + print(f"Building a rock referenced to branch {branch}.") print("This is not a tag. 
This is dangerous, because rebuilding") print("at a later date may produce a different package.") print("Please replace with a tag, git commit, or tarball.") @@ -253,7 +253,7 @@ def skeletonize( package = packages.pop() packagename = ( - "lua-%s" % package.lower() if package[:4] != "lua-" else package.lower() + f"lua-{package.lower()}" if package[:4] != "lua-" else package.lower() ) d = package_dicts.setdefault( package, @@ -372,13 +372,13 @@ def skeletonize( modules = spec["build"]["platforms"][our_plat]["modules"] if modules: d["test_commands"] = INDENT.join( - [""] + ["""lua -e "require '%s'\"""" % r for r in modules.keys()] + [""] + [f"""lua -e "require '{r}'\"""" for r in modules.keys()] ) # If we didn't find any modules to import, import the base name if d["test_commands"] == "": d["test_commands"] = INDENT.join( - [""] + ["""lua -e "require '%s'" """ % d["rockname"]] + [""] + ["""lua -e "require '{}'" """.format(d["rockname"])] ) # Build the luarocks skeleton diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index c45c843a6d..d3b716bc8b 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -300,7 +300,7 @@ def skeletonize( if not is_url: dir_path = join(output_dir, package.lower()) if exists(dir_path) and not version_compare: - raise RuntimeError("directory already exists: %s" % dir_path) + raise RuntimeError(f"directory already exists: {dir_path}") d = package_dicts.setdefault( package, { @@ -343,14 +343,12 @@ def skeletonize( else: # select the most visible version from PyPI. 
if not versions: - sys.exit( - "Error: Could not find any versions of package %s" % package - ) + sys.exit(f"Error: Could not find any versions of package {package}") if len(versions) > 1: - print("Warning, the following versions were found for %s" % package) + print(f"Warning, the following versions were found for {package}") for ver in versions: print(ver) - print("Using %s" % versions[-1]) + print(f"Using {versions[-1]}") print("Use --version to specify a different version.") d["version"] = versions[-1] @@ -404,7 +402,7 @@ def skeletonize( d = package_dicts[package] name = d["packagename"].lower() makedirs(join(output_dir, name)) - print("Writing recipe for %s" % package.lower()) + print(f"Writing recipe for {package.lower()}") with open(join(output_dir, name, "meta.yaml"), "w") as f: rendered_recipe = PYPI_META_HEADER.format(**d) @@ -642,8 +640,8 @@ def get_download_data( if not urls[0]["url"]: # The package doesn't have a url, or maybe it only has a wheel. sys.exit( - "Error: Could not build recipe for %s. " - "Could not find any valid urls." % package + f"Error: Could not build recipe for {package}. " + "Could not find any valid urls." ) U = parse_url(urls[0]["url"]) if not U.path: @@ -652,9 +650,9 @@ def get_download_data( fragment = U.fragment or "" digest = fragment.split("=") else: - sys.exit("Error: No source urls found for %s" % package) + sys.exit(f"Error: No source urls found for {package}") if len(urls) > 1 and not noprompt: - print("More than one source version is available for %s:" % package) + print(f"More than one source version is available for {package}:") if manual_url: for i, url in enumerate(urls): print( @@ -689,7 +687,7 @@ def get_download_data( filename = url["filename"] or "package" else: # User provided a URL, try to use it. 
- print("Using url %s" % package) + print(f"Using url {package}") pypiurl = package U = parse_url(package) digest = U.fragment.split("=") @@ -711,7 +709,7 @@ def version_compare(package, versions): recipe_dir = abspath(package.lower()) if not isdir(recipe_dir): - sys.exit("Error: no such directory: %s" % recipe_dir) + sys.exit(f"Error: no such directory: {recipe_dir}") m = MetaData(recipe_dir) local_version = nv(m.version()) print(f"Local recipe for {package} has version {local_version}") @@ -721,11 +719,11 @@ def version_compare(package, versions): # Comparing normalized versions, displaying non normalized ones new_versions = versions[: norm_versions.index(local_version)] if len(new_versions) > 0: - print("Following new versions of %s are avaliable" % (package)) + print(f"Following new versions of {package} are avaliable") for ver in new_versions: print(ver) else: - print("No new version for %s is available" % (package)) + print(f"No new version for {package} is available") sys.exit() @@ -828,7 +826,7 @@ def get_package_metadata( config, setup_options, ): - print("Downloading %s" % package) + print(f"Downloading {package}") print("PyPI URL: ", metadata["pypiurl"]) pkginfo = get_pkginfo( package, @@ -982,7 +980,7 @@ def _spec_from_line(line): ) spec = _spec_from_line(dep_orig) if spec is None: - sys.exit("Error: Could not parse: %s" % dep) + sys.exit(f"Error: Could not parse: {dep}") if marker: spec = " ".join((spec, marker)) @@ -1058,10 +1056,10 @@ def get_license_name(package, pkginfo, no_prompt=False, data=None): if no_prompt: return license_name elif "\n" not in license_name: - print('Using "%s" for the license' % license_name) + print(f'Using "{license_name}" for the license') else: # Some projects put the whole license text in this field - print("This is the license for %s" % package) + print(f"This is the license for {package}") print() print(license_name) print() @@ -1070,8 +1068,8 @@ def get_license_name(package, pkginfo, no_prompt=False, data=None): 
license_name = "UNKNOWN" else: license_name = input( - "No license could be found for %s on PyPI or in the source. " - "What license should I use? " % package + f"No license could be found for {package} on PyPI or in the source. " + "What license should I use? " ) return license_name @@ -1175,7 +1173,7 @@ def unpack(src_path, tempdir): if src_path.lower().endswith(decompressible_exts): tar_xf(src_path, tempdir) else: - raise Exception("not a valid source: %s" % src_path) + raise Exception(f"not a valid source: {src_path}") def get_dir(tempdir): @@ -1209,7 +1207,7 @@ def get_requirements(package, pkginfo, all_extras=True): try: extras_require = [pkginfo["extras_require"][x] for x in extras] except KeyError: - sys.exit("Error: Invalid extra features: [%s]" % ",".join(extras)) + sys.exit("Error: Invalid extra features: [{}]".format(",".join(extras))) # match PEP 508 environment markers; currently only matches the # subset of environment markers that compare to python_version # using a single basic Python comparison operator @@ -1297,10 +1295,10 @@ def get_pkginfo( else: new_hash_value = "" - print("Unpacking %s..." 
% package) + print(f"Unpacking {package}...") unpack(join(config.src_cache, filename), tempdir) print("done") - print("working in %s" % tempdir) + print(f"working in {tempdir}") src_dir = get_dir(tempdir) # TODO: find args parameters needed by run_setuppy run_setuppy( @@ -1366,7 +1364,7 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op ) stdlib_dir = join( config.host_prefix, - "Lib" if on_win else "lib/python%s" % python_version, + "Lib" if on_win else f"lib/python{python_version}", ) patch = join(temp_dir, "pypi-distutils.patch") @@ -1421,8 +1419,8 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op try: check_call_env(cmdargs, env=env) except subprocess.CalledProcessError: - print("$PYTHONPATH = %s" % env["PYTHONPATH"]) - sys.exit("Error: command failed: %s" % " ".join(cmdargs)) + print("$PYTHONPATH = {}".format(env["PYTHONPATH"])) + sys.exit("Error: command failed: {}".format(" ".join(cmdargs))) finally: chdir(cwd) diff --git a/conda_build/source.py b/conda_build/source.py index c7b3d1921b..903f5d7ca0 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -55,7 +55,7 @@ def append_hash_to_fn(fn, hash_value): def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): """Download a source to the local cache.""" if verbose: - log.info("Source cache directory is: %s" % cache_folder) + log.info(f"Source cache directory is: {cache_folder}") if not isdir(cache_folder) and not os.path.islink(cache_folder): os.makedirs(cache_folder) @@ -81,10 +81,10 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): path = join(cache_folder, fn) if isfile(path): if verbose: - log.info("Found source in cache: %s" % fn) + log.info(f"Found source in cache: {fn}") else: if verbose: - log.info("Downloading source to cache: %s" % fn) + log.info(f"Downloading source to cache: {fn}") for url in source_urls: if "://" not in url: @@ -98,14 +98,14 @@ def 
download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): url = "file:///" + expanduser(url[8:]).replace("\\", "/") try: if verbose: - log.info("Downloading %s" % url) + log.info(f"Downloading {url}") with LoggingContext(): download(url, path) except CondaHTTPError as e: - log.warn("Error: %s" % str(e).strip()) + log.warn(f"Error: {str(e).strip()}") rm_rf(path) except RuntimeError as e: - log.warn("Error: %s" % str(e).strip()) + log.warn(f"Error: {str(e).strip()}") rm_rf(path) else: if verbose: @@ -113,7 +113,7 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): break else: # no break rm_rf(path) - raise RuntimeError("Could not download %s" % url) + raise RuntimeError(f"Could not download {url}") hashed = None for tp in ("md5", "sha1", "sha256"): @@ -344,7 +344,7 @@ def git_mirror_checkout_recursive( ) checkout = output.decode("utf-8") if verbose: - print("checkout: %r" % checkout) + print(f"checkout: {checkout!r}") if checkout: check_call_env( [git, "checkout", checkout], @@ -492,7 +492,7 @@ def git_info(src_dir, build_prefix, git=None, verbose=True, fo=None): stdout = check_output_env(cmd, stderr=stderr, cwd=src_dir, env=env) except CalledProcessError as e: if check_error: - raise Exception("git error: %s" % str(e)) + raise Exception(f"git error: {str(e)}") encoding = locale.getpreferredencoding() if not fo: encoding = sys.stdout.encoding @@ -535,7 +535,7 @@ def hg_source(source_dict, src_dir, hg_cache, verbose): # now clone in to work directory update = source_dict.get("hg_tag") or "tip" if verbose: - print("checkout: %r" % update) + print(f"checkout: {update!r}") check_call_env(["hg", "clone", cache_repo, src_dir], stdout=stdout, stderr=stderr) check_call_env( @@ -953,7 +953,7 @@ def try_apply_patch(patch, patch_args, cwd, stdout, stderr): exception = None if not isfile(path): - raise RuntimeError("Error: no such patch: %s" % path) + raise RuntimeError(f"Error: no such patch: {path}") if config.verbose: stdout = None 
diff --git a/conda_build/tarcheck.py b/conda_build/tarcheck.py index 3a98559187..374422f1e1 100644 --- a/conda_build/tarcheck.py +++ b/conda_build/tarcheck.py @@ -13,7 +13,7 @@ def dist_fn(fn): elif fn.endswith(".tar.bz2"): return fn[:-8] else: - raise Exception("did not expect filename: %r" % fn) + raise Exception(f"did not expect filename: {fn!r}") class TarCheck: @@ -51,9 +51,9 @@ def info_files(self): return for p in sorted(seta | setb): if p not in seta: - print("%r not in info/files" % p) + print(f"{p!r} not in info/files") if p not in setb: - print("%r not in tarball" % p) + print(f"{p!r} not in tarball") raise Exception("info/files") def index_json(self): diff --git a/conda_build/utils.py b/conda_build/utils.py index 4a3e1f782c..a2c456c66e 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -66,7 +66,6 @@ from conda.models.version import VersionOrder from conda.utils import unix_path_to_win -from .deprecations import deprecated from .exceptions import BuildLockError if TYPE_CHECKING: @@ -425,7 +424,7 @@ def bytes2human(n): if n >= prefix[s]: value = float(n) / prefix[s] return f"{value:.1f}{s}" - return "%sB" % n + return f"{n}B" def seconds2human(s): @@ -458,7 +457,7 @@ def get_recipe_abspath(recipe): tar_xf(recipe_tarfile, os.path.join(recipe_dir, "info")) need_cleanup = True else: - print("Ignoring non-recipe: %s" % recipe) + print(f"Ignoring non-recipe: {recipe}") return (None, None) else: recipe_dir = abspath(os.path.join(os.getcwd(), recipe)) @@ -1054,7 +1053,7 @@ def iter_entry_points(items): for item in items: m = entry_pat.match(item) if m is None: - sys.exit("Error cound not match entry point: %r" % item) + sys.exit(f"Error cound not match entry point: {item!r}") yield m.groups() @@ -1076,7 +1075,7 @@ def create_entry_point(path, module, func, config): os.remove(path) with open(path, "w") as fo: if not config.noarch: - fo.write("#!%s\n" % config.host_python) + fo.write(f"#!{config.host_python}\n") fo.write(pyscript) os.chmod(path, 
0o775) @@ -1409,47 +1408,6 @@ def get_installed_packages(path): return installed -@deprecated("24.5", "24.7", addendum="Use `frozendict.deepfreeze` instead.") -def _convert_lists_to_sets(_dict): - for k, v in _dict.items(): - if hasattr(v, "keys"): - _dict[k] = HashableDict(_convert_lists_to_sets(v)) - elif hasattr(v, "__iter__") and not isinstance(v, str): - try: - _dict[k] = sorted(list(set(v))) - except TypeError: - _dict[k] = sorted(list({tuple(_) for _ in v})) - return _dict - - -@deprecated("24.5", "24.7", addendum="Use `frozendict.deepfreeze` instead.") -class HashableDict(dict): - """use hashable frozen dictionaries for resources and resource types so that they can be in sets""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self = _convert_lists_to_sets(self) - - def __hash__(self): - return hash(json.dumps(self, sort_keys=True)) - - -@deprecated("24.5", "24.7", addendum="Use `frozendict.deepfreeze` instead.") -def represent_hashabledict(dumper, data): - value = [] - - for item_key, item_value in data.items(): - node_key = dumper.represent_data(item_key) - node_value = dumper.represent_data(item_value) - - value.append((node_key, node_value)) - - return yaml.nodes.MappingNode("tag:yaml.org,2002:map", value) - - -yaml.add_representer(HashableDict, represent_hashabledict) - - # http://stackoverflow.com/a/10743550/1170370 @contextlib.contextmanager def capture(): @@ -1622,7 +1580,6 @@ def filter_info_files(files_list, prefix): ) -@deprecated.argument("24.5", "24.7", "config") def rm_rf(path): from conda.core.prefix_data import delete_prefix_from_linked_data from conda.gateways.disk.delete import rm_rf as rm_rf @@ -1951,7 +1908,7 @@ def insert_variant_versions(requirements_dict, variant, env): ) reqs = ensure_list(requirements_dict.get(env)) for key, val in variant.items(): - regex = re.compile(r"^(%s)(?:\s*$)" % key.replace("_", "[-_]")) + regex = re.compile(r"^({})(?:\s*$)".format(key.replace("_", "[-_]"))) matches = 
[regex.match(pkg) for pkg in reqs] if any(matches): for i, x in enumerate(matches): diff --git a/conda_build/variants.py b/conda_build/variants.py index 1e2b1adc0c..b185a7eb34 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -18,7 +18,6 @@ import yaml from conda.base.context import context -from .deprecations import deprecated from .utils import ensure_list, get_logger, islist, on_win, trim_empty_keys from .version import _parse as parse_version @@ -701,7 +700,6 @@ def get_package_variants(recipedir_or_metadata, config=None, variants=None): return filter_combined_spec_to_used_keys(combined_spec, specs=specs) -@deprecated.argument("24.5", "24.7", "loop_only") def get_vars(variants: Iterable[dict[str, Any]]) -> set[str]: """For purposes of naming/identifying, provide a way of identifying which variables contribute to the matrix dimensionality""" @@ -745,13 +743,13 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False): continue v_regex = re.escape(v) v_req_regex = "[-_]".join(map(re.escape, v.split("_"))) - variant_regex = r"\{\s*(?:pin_[a-z]+\(\s*?['\"])?%s[^'\"]*?\}\}" % v_regex - selector_regex = r"^[^#\[]*?\#?\s\[[^\]]*?(?!\]]" % v_regex + variant_regex = rf"\{{\s*(?:pin_[a-z]+\(\s*?['\"])?{v_regex}[^'\"]*?\}}\}}" + selector_regex = rf"^[^#\[]*?\#?\s\[[^\]]*?(?!\]]" conditional_regex = ( r"(?:^|[^\{])\{%\s*(?:el)?if\s*.*" + v_regex + r"\s*(?:[^%]*?)?%\}" ) # plain req name, no version spec. 
Look for end of line after name, or comment or selector - requirement_regex = r"^\s+\-\s+%s\s*(?:\s[\[#]|$)" % v_req_regex + requirement_regex = rf"^\s+\-\s+{v_req_regex}\s*(?:\s[\[#]|$)" if selectors_only: all_res.insert(0, selector_regex) else: diff --git a/conda_build/windows.py b/conda_build/windows.py index 706b499265..00287c50bf 100644 --- a/conda_build/windows.py +++ b/conda_build/windows.py @@ -56,16 +56,13 @@ def fix_staged_scripts(scripts_dir, config): # If it's a #!python script if not (line.startswith(b"#!") and b"python" in line.lower()): continue - print( - "Adjusting unix-style #! script %s, " - "and adding a .bat file for it" % fn - ) + print(f"Adjusting unix-style #! script {fn}, and adding a .bat file for it") # copy it with a .py extension (skipping that first #! line) with open(join(scripts_dir, fn + "-script.py"), "wb") as fo: fo.write(f.read()) # now create the .exe file copy_into( - join(dirname(__file__), "cli-%s.exe" % config.host_arch), + join(dirname(__file__), f"cli-{config.host_arch}.exe"), join(scripts_dir, fn + ".exe"), ) @@ -338,7 +335,7 @@ def build(m, bld_bat, stats, provision_only=False): rewrite_env = { k: env[k] for k in ["PREFIX", "BUILD_PREFIX", "SRC_DIR"] if k in env } - print("Rewriting env in output: %s" % pprint.pformat(rewrite_env)) + print(f"Rewriting env in output: {pprint.pformat(rewrite_env)}") check_call_env( cmd, cwd=m.config.work_dir, stats=stats, rewrite_stdout_env=rewrite_env ) diff --git a/docs/scrape_help.py b/docs/scrape_help.py index 2f99fbb403..66d5af1e57 100755 --- a/docs/scrape_help.py +++ b/docs/scrape_help.py @@ -112,7 +112,7 @@ def external_commands(): def get_help(command): command_help[command] = conda_command_help(command) - print("Checked for subcommand help for %s" % command) + print(f"Checked for subcommand help for {command}") with ThreadPoolExecutor(len(commands)) as executor: # list() is needed for force exceptions to be raised @@ -164,7 +164,7 @@ def generate_man(command): [ "help2man", 
"--name", - "conda %s" % command, + f"conda {command}", "--section", "1", "--source", @@ -172,36 +172,34 @@ def generate_man(command): "--version-string", conda_version, "--no-info", - "conda %s" % command, + f"conda {command}", ] ) retries -= 1 if not manpage: - sys.exit("Error: Could not get help for conda %s" % command) + sys.exit(f"Error: Could not get help for conda {command}") replacements = man_replacements() for text in replacements: manpage = manpage.replace(text, replacements[text]) - with open(join(manpath, "conda-%s.1" % command.replace(" ", "-")), "w") as f: + with open(join(manpath, "conda-{}.1".format(command.replace(" ", "-"))), "w") as f: f.write(manpage) - print("Generated manpage for conda %s" % command) + print(f"Generated manpage for conda {command}") def generate_html(command): command_file = command.replace(" ", "-") # Use abspath so that it always has a path separator - man = Popen( - ["man", abspath(join(manpath, "conda-%s.1" % command_file))], stdout=PIPE - ) + man = Popen(["man", abspath(join(manpath, f"conda-{command_file}.1"))], stdout=PIPE) htmlpage = check_output( [ "man2html", "-bare", # Don't use HTML, HEAD, or BODY tags "title", - "conda-%s" % command_file, + f"conda-{command_file}", "-topm", "0", # No top margin "-botm", @@ -210,14 +208,14 @@ def generate_html(command): stdin=man.stdout, ) - with open(join(manpath, "conda-%s.html" % command_file), "wb") as f: + with open(join(manpath, f"conda-{command_file}.html"), "wb") as f: f.write(htmlpage) - print("Generated html for conda %s" % command) + print(f"Generated html for conda {command}") def write_rst(command, sep=None): command_file = command.replace(" ", "-") - with open(join(manpath, "conda-%s.html" % command_file)) as f: + with open(join(manpath, f"conda-{command_file}.html")) as f: html = f.read() rp = rstpath @@ -225,13 +223,13 @@ def write_rst(command, sep=None): rp = join(rp, sep) if not isdir(rp): makedirs(rp) - with open(join(rp, "conda-%s.rst" % command_file), "w") as 
f: + with open(join(rp, f"conda-{command_file}.rst"), "w") as f: f.write(RST_HEADER.format(command=command)) for line in html.splitlines(): f.write(" ") f.write(line) f.write("\n") - print("Generated rst for conda %s" % command) + print(f"Generated rst for conda {command}") def main(): diff --git a/news/5222-deprecating-conda_interface b/news/5222-deprecating-conda_interface deleted file mode 100644 index d7737f9368..0000000000 --- a/news/5222-deprecating-conda_interface +++ /dev/null @@ -1,32 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Deprecate `conda_build.conda_interface.CONDA_VERSION` constant. Use `conda.__version__` instead. (#5222) -* Deprecate `conda_build.conda_interface.binstar_upload` constant. Use `conda.base.context.context.binstar_upload` instead. (#5222) -* Deprecate `conda_build.conda_interface.default_python` constant. Use `conda.base.context.context.default_python` instead. (#5222) -* Deprecate `conda_build.conda_interface.envs_dirs` constant. Use `conda.base.context.context.envs_dirs` instead. (#5222) -* Deprecate `conda_build.conda_interface.pkgs_dirs` constant. Use `conda.base.context.context.pkgs_dirs` instead. (#5222) -* Deprecate `conda_build.conda_interface.cc_platform` constant. Use `conda.base.context.context.platform` instead. (#5222) -* Deprecate `conda_build.conda_interface.root_dir` constant. Use `conda.base.context.context.root_prefix` instead. (#5222) -* Deprecate `conda_build.conda_interface.root_writable` constant. Use `conda.base.context.context.root_writable` instead. (#5222) -* Deprecate `conda_build.conda_interface.subdir` constant. Use `conda.base.context.context.subdir` instead. (#5222) -* Deprecate `conda_build.conda_interface.create_default_packages` constant. Use `conda.base.context.context.create_default_packages` instead. (#5222) -* Deprecate `conda_build.conda_interface.get_rc_urls` function. Use `conda.base.context.context.channels` instead. 
(#5222) -* Deprecate `conda_build.conda_interface.get_prefix` function. Use `conda.base.context.context.target_prefix` instead. (#5222) -* Deprecate `conda_build.conda_interface.get_conda_channel` function. Use `conda.models.channel.Channel.from_value` instead. (#5222) -* Deprecate `conda_build.conda_interface.reset_context` function. Use `conda.base.context.reset_context` instead. (#5222) - -### Docs - -* - -### Other - -* diff --git a/news/5233-enable-codspeed b/news/5233-enable-codspeed deleted file mode 100644 index efb32df4d1..0000000000 --- a/news/5233-enable-codspeed +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Enable CodSpeed benchmarks for select tests. (#5233) diff --git a/news/5237-select_lines-caching b/news/5237-select_lines-caching deleted file mode 100644 index 434a832350..0000000000 --- a/news/5237-select_lines-caching +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* Add `conda_build.metadata._split_line_selector` to cache line-selector parsed text. (#5237) - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/5238-open_recipe b/news/5238-open_recipe deleted file mode 100644 index 9d5d42c4c5..0000000000 --- a/news/5238-open_recipe +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* Add `conda_build.render.open_recipe` context manager to detect the recipe type (file/`meta.yaml`, directory/recipe, or tarball/package) and properly handling any exit/close behavior. (#5238) - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/5251-deprecating-conda_interface b/news/5251-deprecating-conda_interface deleted file mode 100644 index 9f5e48d6cd..0000000000 --- a/news/5251-deprecating-conda_interface +++ /dev/null @@ -1,34 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Deprecate `conda_build.conda_interface.context` singleton. 
Use `conda.base.context.context` instead. (#5251) -* Deprecate `conda_build.conda_interface.configparser` module. Use `configparser` instead. (#5251) -* Deprecate `conda_build.conda_interface.os` module. Use `os` instead. (#5251) -* Deprecate `conda_build.conda_interface.partial` function. Use `functools.partial` instead. (#5251) -* Deprecate `conda_build.conda_interface.import_module` function. Use `importlib.import_module` instead. (#5251) -* Deprecate `conda_build.conda_interface.determine_target_prefix` function. Use `conda.base.context.determine_target_prefix` instead. (#5251) -* Deprecate `conda_build.conda_interface.non_x86_linux_machines` constant. Use `conda.base.context.non_x86_machines` instead. (#5251) -* Deprecate `conda_build.conda_interface.ProgressiveFetchExtract` class. Use `conda.core.package_cache.ProgressiveFetchExtract` instead. (#5251) -* Deprecate `conda_build.conda_interface.CondaError` class. Use `conda.exceptions.CondaError` instead. (#5251) -* Deprecate `conda_build.conda_interface.CondaHTTPError` class. Use `conda.exceptions.CondaHTTPError` instead. (#5251) -* Deprecate `conda_build.conda_interface.LinkError` class. Use `conda.exceptions.LinkError` instead. (#5251) -* Deprecate `conda_build.conda_interface.LockError` class. Use `conda.exceptions.LockError` instead. (#5251) -* Deprecate `conda_build.conda_interface.NoPackagesFoundError` class. Use `conda.exceptions.NoPackagesFoundError` instead. (#5251) -* Deprecate `conda_build.conda_interface.PaddingError` class. Use `conda.exceptions.PaddingError` instead. (#5251) -* Deprecate `conda_build.conda_interface.UnsatisfiableError` class. Use `conda.exceptions.UnsatisfiableError` instead. (#5251) -* Deprecate `conda_build.conda_interface.get_conda_build_local_url` class. Use `conda.models.channel.get_conda_build_local_url` instead. 
(#5251) - -### Docs - -* - -### Other - -* diff --git a/news/5271-context b/news/5271-context deleted file mode 100644 index b4143e00f4..0000000000 --- a/news/5271-context +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* Require `conda >=23.7.0`. (#5271) - -### Bug fixes - -* Fix all CLI arguments to properly initialize `conda.base.context.context` with parsed arguments. Fixes issue with arguments not being processed (e.g., `--override-channels` was previously ignored). (#3693 via #5271) - -### Deprecations - -* Deprecate `conda_build.config.Config.override_channels`. Use `conda.base.context.context.override_channels` instead. (#5271) - -### Docs - -* - -### Other - -* diff --git a/news/5276-deprecating-conda_interface b/news/5276-deprecating-conda_interface deleted file mode 100644 index 701b9a53f1..0000000000 --- a/news/5276-deprecating-conda_interface +++ /dev/null @@ -1,56 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Deprecate `conda_build.conda_interface._toposort`. Use `conda.common.toposort._toposort` instead. (#5276) -* Deprecate `conda_build.conda_interface.add_parser_channels`. Use `conda.cli.helpers.add_parser_channels` instead. (#5276) -* Deprecate `conda_build.conda_interface.add_parser_prefix`. Use `conda.cli.helpers.add_parser_prefix` instead. (#5276) -* Deprecate `conda_build.conda_interface.ArgumentParser`. Use `conda.cli.conda_argparse.ArgumentParser` instead. (#5276) -* Deprecate `conda_build.conda_interface.cc_conda_build`. Use `conda.base.context.context.conda_build` instead. (#5276) -* Deprecate `conda_build.conda_interface.Channel`. Use `conda.models.channel.Channel` instead. (#5276) -* Deprecate `conda_build.conda_interface.Completer`. Unused. (#5276) -* Deprecate `conda_build.conda_interface.CondaSession`. Use `conda.gateways.connection.session.CondaSession` instead. (#5276) -* Deprecate `conda_build.conda_interface.download`. Use `conda.gateways.connection.download.download` instead. 
(#5276) -* Deprecate `conda_build.conda_interface.EntityEncoder`. Use `conda.auxlib.entity.EntityEncoder` instead. (#5276) -* Deprecate `conda_build.conda_interface.env_path_backup_var_exists`. Unused. (#5276) -* Deprecate `conda_build.conda_interface.FileMode`. Use `conda.models.enums.FileMode` instead. (#5276) -* Deprecate `conda_build.conda_interface.human_bytes`. Use `conda.utils.human_bytes` instead. (#5276) -* Deprecate `conda_build.conda_interface.input`. Use `input` instead. (#5276) -* Deprecate `conda_build.conda_interface.InstalledPackages`. Unused. (#5276) -* Deprecate `conda_build.conda_interface.lchmod`. Use `conda.gateways.disk.link.lchmod` instead. (#5276) -* Deprecate `conda_build.conda_interface.MatchSpec`. Use `conda.models.match_spec.MatchSpec` instead. (#5276) -* Deprecate `conda_build.conda_interface.NoPackagesFound`. Use `conda.exceptions.ResolvePackageNotFound` instead. (#5276) -* Deprecate `conda_build.conda_interface.normalized_version`. Use `conda.models.version.normalized_version` instead. (#5276) -* Deprecate `conda_build.conda_interface.PackageRecord`. Use `conda.models.records.PackageRecord` instead. (#5276) -* Deprecate `conda_build.conda_interface.PathType`. Use `conda.models.enums.PathType` instead. (#5276) -* Deprecate `conda_build.conda_interface.prefix_placeholder`. Use `conda.base.constants.PREFIX_PLACEHOLDER` instead. (#5276) -* Deprecate `conda_build.conda_interface.Resolve`. Use `conda.resolve.Resolve` instead. (#5276) -* Deprecate `conda_build.conda_interface.rm_rf`. Use `conda_build.utils.rm_rf` instead. (#5276) -* Deprecate `conda_build.conda_interface.spec_from_line`. Use `conda.cli.common.spec_from_line` instead. (#5276) -* Deprecate `conda_build.conda_interface.specs_from_args`. Use `conda.cli.common.specs_from_args` instead. (#5276) -* Deprecate `conda_build.conda_interface.specs_from_url`. Use `conda.cli.common.specs_from_url` instead. (#5276) -* Deprecate `conda_build.conda_interface.StringIO`. 
Use `io.StringIO` instead. (#5276) -* Deprecate `conda_build.conda_interface.symlink_conda`. Unused. (#5276) -* Deprecate `conda_build.conda_interface.TempDirectory`. Use `conda.gateways.disk.create.TemporaryDirectory` instead. (#5276) -* Deprecate `conda_build.conda_interface.TmpDownload`. Use `conda.gateways.connection.download.TmpDownload` instead. (#5276) -* Deprecate `conda_build.conda_interface.unix_path_to_win`. Use `conda.utils.unix_path_to_win` instead. (#5276) -* Deprecate `conda_build.conda_interface.Unsatisfiable`. Use `conda.exceptions.UnsatisfiableError` instead. (#5276) -* Deprecate `conda_build.conda_interface.untracked`. Use `conda.misc.untracked` instead. (#5276) -* Deprecate `conda_build.conda_interface.url_path`. Use `conda.utils.url_path` instead. (#5276) -* Deprecate `conda_build.conda_interface.VersionOrder`. Use `conda.models.version.VersionOrder` instead. (#5276) -* Deprecate `conda_build.conda_interface.walk_prefix`. Use `conda.misc.walk_prefix` instead. (#5276) -* Deprecate `conda_build.conda_interface.win_path_to_unix`. Use `conda.common.path.win_path_to_unix` instead. (#5276) - -### Docs - -* - -### Other - -* diff --git a/news/5280-deprecate-get_vars-loop_only b/news/5280-deprecate-get_vars-loop_only deleted file mode 100644 index e18d5cfe8c..0000000000 --- a/news/5280-deprecate-get_vars-loop_only +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Deprecate `conda_build.variants.get_vars(loop_only)`. Unused. (#5280) - -### Docs - -* - -### Other - -* diff --git a/news/5284-deprecate-HashableDict b/news/5284-deprecate-HashableDict deleted file mode 100644 index c411443395..0000000000 --- a/news/5284-deprecate-HashableDict +++ /dev/null @@ -1,21 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Deprecate `conda_build.utils.HashableDict`. Use `frozendict.deepfreeze` instead. (#5284) -* Deprecate `conda_build.utils._convert_lists_to_sets`. 
Use `frozendict.deepfreeze` instead. (#5284) -* Deprecate `conda_build.utils.represent_hashabledict`. Use `frozendict.deepfreeze` instead. (#5284) - -### Docs - -* - -### Other - -* diff --git a/news/5299-remove-deprecations b/news/5299-remove-deprecations deleted file mode 100644 index c78531ea4d..0000000000 --- a/news/5299-remove-deprecations +++ /dev/null @@ -1,39 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Postpone `conda_build.index.channel_data` deprecation. (#5299) -* Remove `conda_build.api.get_output_file_path`. Use `conda_build.api.get_output_file_paths` instead. (#5299) -* Remove `conda_build.bdist_conda`. (#5299) -* Remove `conda_build.build.have_prefix_files`. (#5299) -* Remove `conda_build.conda_interface.get_index`. Use `conda.core.index.get_index` instead. (#5299) -* Remove `conda_build.conda_interface.get_version_from_git_tag`. Use `conda_build.environ.get_version_from_git_tag` instead. (#5299) -* Remove `conda_build.conda_interface.handle_proxy_407`. Handled by `conda.gateways.connection.session.CondaSession`. (#5299) -* Remove `conda_build.conda_interface.hashsum_file`. Use `conda.gateways.disk.read.compute_sum` instead. (#5299) -* Remove `conda_build.conda_interface.md5_file`. Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead. (#5299) -* Remove `conda_build.environ._load_all_json`. (#5299) -* Remove `conda_build.environ._load_json`. (#5299) -* Remove `conda_build.environ.cached_actions`. (#5299) -* Remove `conda_build.environ.Environment`. Use `conda.core.prefix_data.PrefixData` instead. (#5299) -* Remove `conda_build.environ.InvalidEnvironment`. (#5299) -* Remove `conda_build.environ.LINK_ACTION`. (#5299) -* Remove `conda_build.environ.PREFIX_ACTION`. (#5299) -* Remove `conda_build.index._apply_instructions`. Use `conda_index._apply_instructions` instead. (#5299) -* Remove `conda_build.index.DummyExecutor`. (#5299) -* Remove `conda_build.index.LOCK_TIMEOUT_SECS`. 
(#5299) -* Remove `conda_build.index.LOCKFILE_NAME`. (#5299) -* Remove `conda_build.index.MAX_THREADS_DEFAULT`. (#5299) - -### Docs - -* - -### Other - -* diff --git a/news/5333-remove-24.7.x-deprecations b/news/5333-remove-24.7.x-deprecations new file mode 100644 index 0000000000..332176a70c --- /dev/null +++ b/news/5333-remove-24.7.x-deprecations @@ -0,0 +1,95 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Remove the following deprecations (#5333): + * `conda_build.config.Config.override_channels` (use `conda.base.context.context.channels` instead) + * `conda_build.config.noarch_python_build_age_default` + * `conda_build.conda_interface.add_parser_channels` (use `conda.cli.helpers.add_parser_channels` instead) + * `conda_build.conda_interface.add_parser_prefix` (use `conda.cli.helpers.add_parser_prefix` instead) + * `conda_build.conda_interface.ArgumentParser` (use `conda.cli.conda_argparse.ArgumentParser` instead) + * `conda_build.conda_interface.binstar_upload` (use `conda.base.context.context.binstar_upload` instead) + * `conda_build.conda_interface.cc_conda_build` (use `conda.base.context.context.conda_build` instead) + * `conda_build.conda_interface.cc_platform` (use `conda.base.context.context.platform` instead) + * `conda_build.conda_interface.Channel` (use `conda.models.channel.Channel` instead) + * `conda_build.conda_interface.Completer` + * `conda_build.conda_interface.configparser` (use `configparser` instead) + * `conda_build.conda_interface.CondaError` (use `conda.exceptions.CondaError` instead) + * `conda_build.conda_interface.CondaHTTPError` (use `conda.exceptions.CondaHTTPError` instead) + * `conda_build.conda_interface.CondaSession` (use `conda.gateways.connection.session.CondaSession` instead) + * `conda_build.conda_interface.CONDA_VERSION` (use `conda.__version__` instead) + * `conda_build.conda_interface.context` (use `conda.base.context.context` instead) + * `conda_build.conda_interface.create_default_packages` (use 
`conda.base.context.context.create_default_packages` instead) + * `conda_build.conda_interface.default_python` (use `conda.base.context.context.default_python` instead) + * `conda_build.conda_interface.determine_target_prefix` (use `conda.base.context.determine_target_prefix` instead) + * `conda_build.conda_interface.download` (use `conda.gateways.connection.download.download` instead) + * `conda_build.conda_interface.env_path_backup_var_exists` + * `conda_build.conda_interface.envs_dirs` (use `conda.base.context.context.envs_dirs` instead) + * `conda_build.conda_interface.EntityEncoder` (use `conda.auxlib.entity.EntityEncoder` instead) + * `conda_build.conda_interface.FileMode` (use `conda.models.enums.FileMode` instead) + * `conda_build.conda_interface.get_conda_build_local_url` (use `conda.models.channel.get_conda_build_local_url` instead) + * `conda_build.conda_interface.get_conda_channel` (use `conda.models.channel.Channel.from_value` instead) + * `conda_build.conda_interface.get_prefix` (use `conda.base.context.context.target_prefix` instead) + * `conda_build.conda_interface.get_rc_urls` (use `conda.base.context.context.channels` instead) + * `conda_build.conda_interface.human_bytes` (use `conda.utils.human_bytes` instead) + * `conda_build.conda_interface.import_module` (use `importlib.import_module` instead) + * `conda_build.conda_interface.input` (use `input` instead) + * `conda_build.conda_interface.InstalledPackages` + * `conda_build.conda_interface.lchmod` (use `conda.gateways.disk.link.lchmod` instead) + * `conda_build.conda_interface.LinkError` (use `conda.exceptions.LinkError` instead) + * `conda_build.conda_interface.LockError` (use `conda.exceptions.LockError` instead) + * `conda_build.conda_interface.MatchSpec` (use `conda.models.match_spec.MatchSpec` instead) + * `conda_build.conda_interface.non_x86_linux_machines` (use `conda.base.context.non_x86_machines` instead) + * `conda_build.conda_interface.NoPackagesFound` (use 
`conda.exceptions.ResolvePackageNotFound` instead) + * `conda_build.conda_interface.NoPackagesFoundError` (use `conda.exceptions.NoPackagesFoundError` instead) + * `conda_build.conda_interface.normalized_version` (use `conda.models.version.normalized_version` instead) + * `conda_build.conda_interface.os` (use `os` instead) + * `conda_build.conda_interface.PackageRecord` (use `conda.models.records.PackageRecord` instead) + * `conda_build.conda_interface.PaddingError` (use `conda.exceptions.PaddingError` instead) + * `conda_build.conda_interface.partial` (use `functools.partial` instead) + * `conda_build.conda_interface.PathType` (use `conda.models.enums.PathType` instead) + * `conda_build.conda_interface.pkgs_dirs` (use `conda.base.context.context.pkgs_dirs` instead) + * `conda_build.conda_interface.prefix_placeholder` (use `conda.base.constants.PREFIX_PLACEHOLDER` instead) + * `conda_build.conda_interface.ProgressiveFetchExtract` (use `conda.core.package_cache_data.ProgressiveFetchExtract` instead) + * `conda_build.conda_interface.reset_context` (use `conda.base.context.reset_context` instead) + * `conda_build.conda_interface.Resolve` (use `conda.resolve.Resolve` instead) + * `conda_build.conda_interface.rm_rf` (use `conda_build.utils.rm_rf` instead) + * `conda_build.conda_interface.root_dir` (use `conda.base.context.context.root_prefix` instead) + * `conda_build.conda_interface.root_writable` (use `conda.base.context.context.root_writable` instead) + * `conda_build.conda_interface.spec_from_line` (use `conda.cli.common.spec_from_line` instead) + * `conda_build.conda_interface.specs_from_args` (use `conda.cli.common.specs_from_args` instead) + * `conda_build.conda_interface.specs_from_url` (use `conda.cli.common.specs_from_url` instead) + * `conda_build.conda_interface.StringIO` (use `io.StringIO` instead) + * `conda_build.conda_interface.subdir` (use `conda.base.context.context.subdir` instead) + * `conda_build.conda_interface.symlink_conda` + * 
`conda_build.conda_interface.TemporaryDirectory` (use `conda.gateways.disk.create.TemporaryDirectory` instead) + * `conda_build.conda_interface.TmpDownload` (use `conda.gateways.connection.download.TmpDownload` instead) + * `conda_build.conda_interface._toposort` (use `conda.common.toposort._toposort` instead) + * `conda_build.conda_interface.unix_path_to_win` (use `conda.utils.unix_path_to_win` instead) + * `conda_build.conda_interface.untracked` (use `conda.misc.untracked` instead) + * `conda_build.conda_interface.Unsatisfiable` (use `conda.exceptions.UnsatisfiableError` instead) + * `conda_build.conda_interface.UnsatisfiableError` (use `conda.exceptions.UnsatisfiableError` instead) + * `conda_build.conda_interface.url_path` (use `conda.utils.url_path` instead) + * `conda_build.conda_interface.VersionOrder` (use `conda.models.version.VersionOrder` instead) + * `conda_build.conda_interface.walk_prefix` (use `conda.misc.walk_prefix` instead) + * `conda_build.conda_interface.win_path_to_unix` (use `conda.common.path.win_path_to_unix` instead) + * `conda_build.index.channel_data`; `conda_build.index.get_build_index` return value for `channel_data` is now always `None` + * `conda_build.utils._convert_lists_to_sets` (use `frozendict.deepfreeze` instead) + * `conda_build.utils.HashableDict` (use `frozendict.deepfreeze` instead) + * `conda_build.utils.represent_hashabledict` (use `frozendict.deepfreeze` instead) + * `conda_build.utils.rm_rf(config)` + * `conda_build.variants.get_vars(loop_only)` + +### Docs + +* + +### Other + +* diff --git a/news/5355-deprecate-check_external b/news/5355-deprecate-check_external new file mode 100644 index 0000000000..2a6939ce8e --- /dev/null +++ b/news/5355-deprecate-check_external @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Mark `conda_build.build.check_external` for deprecation. `patchelf` is an explicit conda-build dependency on Linux so it will always be installed. 
(#5355) + +### Docs + +* + +### Other + +* diff --git a/tests/test-recipes/metadata/_build_script_errors/output_build_script/meta.yaml b/tests/test-recipes/metadata/_build_script_errors/output_build_script/meta.yaml new file mode 100644 index 0000000000..406ba464c0 --- /dev/null +++ b/tests/test-recipes/metadata/_build_script_errors/output_build_script/meta.yaml @@ -0,0 +1,10 @@ +package: + name: pkg + version: '1.0' +source: + path: . +outputs: + - name: pkg-output + build: + script: + - exit 1 diff --git a/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.bat b/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.bat new file mode 100644 index 0000000000..6dedc57766 --- /dev/null +++ b/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.bat @@ -0,0 +1 @@ +exit 1 \ No newline at end of file diff --git a/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.sh b/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.sh new file mode 100644 index 0000000000..6dedc57766 --- /dev/null +++ b/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.sh @@ -0,0 +1 @@ +exit 1 \ No newline at end of file diff --git a/tests/test-recipes/metadata/_build_script_errors/output_script/meta.yaml b/tests/test-recipes/metadata/_build_script_errors/output_script/meta.yaml new file mode 100644 index 0000000000..43c2f9d054 --- /dev/null +++ b/tests/test-recipes/metadata/_build_script_errors/output_script/meta.yaml @@ -0,0 +1,9 @@ +package: + name: pkg + version: '1.0' +source: + path: . 
+outputs: + - name: pkg-output + script: exit_1.sh # [unix] + script: exit_1.bat # [win] diff --git a/tests/test-recipes/metadata/_build_script_errors/toplevel/meta.yaml b/tests/test-recipes/metadata/_build_script_errors/toplevel/meta.yaml new file mode 100644 index 0000000000..df710d103b --- /dev/null +++ b/tests/test-recipes/metadata/_build_script_errors/toplevel/meta.yaml @@ -0,0 +1,7 @@ +package: + name: pkg + version: '1.0' +source: + path: . +build: + script: exit 1 diff --git a/tests/test-recipes/metadata/gh-5342/meta.yaml b/tests/test-recipes/metadata/gh-5342/meta.yaml new file mode 100644 index 0000000000..f083f1c95e --- /dev/null +++ b/tests/test-recipes/metadata/gh-5342/meta.yaml @@ -0,0 +1,15 @@ +{% set name = "gh-5342" %} + +package: + name: {{ name }} + version: 1.0 + +outputs: + - name: {{ name }} + build: + skip: true + + - name: {{ name }}-dev + build: + files: + - file diff --git a/tests/test_api_build.py b/tests/test_api_build.py index a663f18e73..8f431bfae2 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -36,6 +36,7 @@ from conda_build import __version__, api, exceptions from conda_build.config import Config from conda_build.exceptions import ( + BuildScriptException, CondaBuildException, DependencyNeedsBuildingError, OverDependingError, @@ -383,7 +384,7 @@ def test_dirty_variable_available_in_build_scripts(testing_config): testing_config.dirty = True api.build(recipe, config=testing_config) - with pytest.raises(subprocess.CalledProcessError): + with pytest.raises(BuildScriptException): testing_config.dirty = False api.build(recipe, config=testing_config) @@ -816,13 +817,13 @@ def test_disable_pip(testing_metadata): testing_metadata.meta["build"]["script"] = ( 'python -c "import pip; print(pip.__version__)"' ) - with pytest.raises(subprocess.CalledProcessError): + with pytest.raises(BuildScriptException): api.build(testing_metadata) testing_metadata.meta["build"]["script"] = ( 'python -c "import setuptools; 
 print(setuptools.__version__)"' ) - with pytest.raises(subprocess.CalledProcessError): + with pytest.raises(BuildScriptException): api.build(testing_metadata) @@ -1539,7 +1540,7 @@ def test_setup_py_data_in_env(testing_config): # should pass with any modern python (just not 3.5) api.build(recipe, config=testing_config) # make sure it fails with our special python logic - with pytest.raises(subprocess.CalledProcessError): + with pytest.raises(BuildScriptException): api.build(recipe, config=testing_config, python="3.5") @@ -1945,7 +1946,7 @@ def test_add_pip_as_python_dependency_from_condarc_file( testing_metadata, testing_workdir, add_pip_as_python_dependency, monkeypatch ): """ - Test whether settings from .condarc files are heeded. + Test whether settings from .condarc files are heeded. ref: https://github.com/conda/conda-libmamba-solver/issues/393 """ # TODO: SubdirData._cache_ clearing might not be needed for future conda versions. @@ -1961,10 +1962,44 @@ if add_pip_as_python_dependency: check_build_fails = nullcontext() else: - check_build_fails = pytest.raises(subprocess.CalledProcessError) + check_build_fails = pytest.raises(BuildScriptException) conda_rc = Path(testing_workdir, ".condarc") conda_rc.write_text(f"add_pip_as_python_dependency: {add_pip_as_python_dependency}") with env_var("CONDARC", conda_rc, reset_context): with check_build_fails: api.build(testing_metadata) + + +@pytest.mark.parametrize( + "recipe", sorted(Path(metadata_dir, "_build_script_errors").glob("*")) +) +@pytest.mark.parametrize("debug", (False, True)) +def test_conda_build_script_errors_without_conda_info_handlers(tmp_path, recipe, debug): + env = os.environ.copy() + if debug: + env["CONDA_VERBOSITY"] = "3" + process = subprocess.run( + ["conda", "build", recipe], + env=env, + capture_output=True, + text=True, + check=False, + cwd=tmp_path, + ) + assert process.returncode > 0 + all_output = process.stdout + "\n" + 
process.stderr + + # These should NOT appear in the output + assert ">>> ERROR REPORT <<<" not in all_output + assert "An unexpected error has occurred." not in all_output + assert "Conda has prepared the above report." not in all_output + + # These should appear + assert "returned non-zero exit status 1" in all_output + + # With verbose mode, we should actually see the traceback + if debug: + assert "Traceback" in all_output + assert "CalledProcessError" in all_output + assert "returned non-zero exit status 1" in all_output diff --git a/tests/test_api_build_conda_v2.py b/tests/test_api_build_conda_v2.py index 4c0c09b9ac..dc4078e61f 100644 --- a/tests/test_api_build_conda_v2.py +++ b/tests/test_api_build_conda_v2.py @@ -40,4 +40,4 @@ def test_conda_pkg_format( # Verify that test pass ran through api assert "Manual entry point" in out - assert "TEST END: %s" % output_file in out + assert f"TEST END: {output_file}" in out diff --git a/tests/test_build.py b/tests/test_build.py index 839cce4b9e..49fc1a4924 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -324,3 +324,8 @@ def test_guess_interpreter( ): with pytest.raises(error) if error else nullcontext(): assert build.guess_interpreter(script) == interpreter + + +def test_check_external(): + with pytest.deprecated_call(): + build.check_external() diff --git a/tests/test_codesigned.py b/tests/test_codesigned.py new file mode 100644 index 0000000000..3ed13086da --- /dev/null +++ b/tests/test_codesigned.py @@ -0,0 +1,97 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import os +from functools import lru_cache +from pathlib import Path +from shutil import which +from subprocess import CalledProcessError, check_output, run + +import pytest + +from conda_build.utils import on_win + +HERE = os.path.abspath(os.path.dirname(__file__)) +REPO_ROOT = (Path(HERE) / "..").resolve().absolute() +STUB_FOLDER = REPO_ROOT / "conda_build" + + 
+@lru_cache(maxsize=None) +def find_signtool() -> str | None: + """Tries to find signtool + + Prefers signtool on PATH otherwise searches system. + Ref: + - https://learn.microsoft.com/en-us/dotnet/framework/tools/signtool-exe + - https://learn.microsoft.com/en-us/windows/win32/seccrypto/signtool + - https://learn.microsoft.com/en-us/windows/win32/seccrypto/using-signtool-to-verify-a-file-signature + """ + signtool_path = which("signtool") + if signtool_path: + return signtool_path + + # Common installation directories where signtool might be located + common_paths = [ + "C:\\Program Files (x86)\\Windows Kits\\10\\bin", + "C:\\Program Files\\Windows Kits\\10\\bin", + "C:\\Windows\\System32", + ] + + signtool_path = None + # Search for signtool in common paths + for path in common_paths: + if signtool_path: + # We found one already + return signtool_path + if not os.path.exists(path): + continue + signtool_path = os.path.join(path, "signtool.exe") + if os.path.exists(signtool_path): + return signtool_path + elif "Windows Kits" in path: + signtool_path = None + max_version = 0 + for dirname in os.listdir(path): + # Use most recent signtool version + if not dirname.endswith(".0"): + continue # next dirname + if int(dirname.replace(".", "")) < max_version: + continue # next dirname + + maybe_signtool_path = os.path.join(path, dirname, "x64", "signtool.exe") + if os.path.exists(maybe_signtool_path): + signtool_path = maybe_signtool_path + return signtool_path + + +@lru_cache(maxsize=None) +def signtool_unsupported_because() -> str: + reason = "" + if not on_win: + reason = "Only verifying signatures of stub exe's on windows" + return reason + signtool = find_signtool() + if not signtool: + reason = "signtool: unable to locate signtool.exe" + try: + check_output([signtool, "verify", "/?"]) + except CalledProcessError as exc: + reason = f"signtool: something went wrong while running 'signtool verify /?', output:\n{exc.output}\n" + return reason + + +def 
signtool_unsupported() -> bool: + return bool(signtool_unsupported_because()) + + +@pytest.mark.skipif(signtool_unsupported(), reason=signtool_unsupported_because()) +@pytest.mark.parametrize( + "stub_file_name", ["cli-32.exe", "cli-64.exe", "gui-32.exe", "gui-64.exe"] +) +def test_stub_exe_signatures(stub_file_name: str) -> None: + """Verify that signtool verifies the signature of the stub exes""" + stub_file = STUB_FOLDER / stub_file_name + signtool_exe = find_signtool() + completed_process = run([signtool_exe, "verify", "/pa", "/v", stub_file]) + assert completed_process.returncode == 0 diff --git a/tests/utils.py b/tests/utils.py index b4ed64912b..4d6803f09d 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -91,8 +91,7 @@ def assert_package_consistency(package_path): has_prefix_present = False except tarfile.ReadError: raise RuntimeError( - "Could not extract metadata from %s. " - "File probably corrupt." % package_path + f"Could not extract metadata from {package_path}. File probably corrupt." 
) errors = [] member_set = set(member_list) # The tar format allows duplicates in member_list @@ -101,7 +100,7 @@ def assert_package_consistency(package_path): file_set = set(file_list) # Check that there are no duplicates in info/files if len(file_list) != len(file_set): - errors.append("Duplicate files in info/files in %s" % package_path) + errors.append(f"Duplicate files in info/files in {package_path}") # Compare the contents of files and members unlisted_members = member_set.difference(file_set) missing_members = file_set.difference(member_set) @@ -109,14 +108,16 @@ def assert_package_consistency(package_path): missing_files = [m for m in unlisted_members if not m.startswith("info/")] if len(missing_files) > 0: errors.append( - "The following package files are not listed in " - "info/files: %s" % ", ".join(missing_files) + "The following package files are not listed in info/files: {}".format( + ", ".join(missing_files) + ) ) # Find any files missing in the archive if len(missing_members) > 0: errors.append( - "The following files listed in info/files are missing: " - "%s" % ", ".join(missing_members) + "The following files listed in info/files are missing: {}".format( + ", ".join(missing_members) + ) ) # Find any files in has_prefix that are not present in files if has_prefix_present: @@ -129,15 +130,15 @@ def assert_package_consistency(package_path): elif len(parts) == 3: prefix_path_list.append(parts[2]) else: - errors.append("Invalid has_prefix file in package: %s" % package_path) + errors.append(f"Invalid has_prefix file in package: {package_path}") prefix_path_set = set(prefix_path_list) if len(prefix_path_list) != len(prefix_path_set): - errors.append("Duplicate files in info/has_prefix in %s" % package_path) + errors.append(f"Duplicate files in info/has_prefix in {package_path}") prefix_not_in_files = prefix_path_set.difference(file_set) if len(prefix_not_in_files) > 0: errors.append( "The following files listed in info/has_prefix are missing " - "from 
info/files: %s" % ", ".join(prefix_not_in_files) + "from info/files: {}".format(", ".join(prefix_not_in_files)) ) # Assert that no errors are detected