diff --git a/.github/workflows/cronjobs.yaml b/.github/workflows/cronjobs.yaml index 10a88b9..3f0451d 100644 --- a/.github/workflows/cronjobs.yaml +++ b/.github/workflows/cronjobs.yaml @@ -13,6 +13,6 @@ jobs: branch: [main] with: branch: ${{ matrix.branch }} - images: '["registry.access.redhat.com/ubi9/python-311"]' + images: '["registry.access.redhat.com/ubi9/python-312"]' secrets: token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.tekton/segment-backup-job-pull-request.yaml b/.tekton/segment-backup-job-pull-request.yaml index 733339e..83b4c5c 100644 --- a/.tekton/segment-backup-job-pull-request.yaml +++ b/.tekton/segment-backup-job-pull-request.yaml @@ -7,8 +7,7 @@ metadata: build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' build.appstudio.redhat.com/target_branch: '{{target_branch}}' pipelinesascode.tekton.dev/max-keep-runs: "3" - pipelinesascode.tekton.dev/on-cel-expression: event == "pull_request" && target_branch - == "main" + pipelinesascode.tekton.dev/on-cel-expression: event == "pull_request" && target_branch == "main" creationTimestamp: null labels: appstudio.openshift.io/application: segment-backup-job @@ -25,7 +24,7 @@ spec: - name: image-expires-after value: 5d - name: output-image - value: quay.io/redhat-user-workloads/rhtas-tenant/segment-backup-job/segment-backup-job:on-pr-{{revision}} + value: quay.io/securesign/segment-backup-job:on-pr-{{revision}} - name: path-context value: . 
- name: revision @@ -47,7 +46,7 @@ spec: - name: name value: show-sbom - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-show-sbom:0.1@sha256:1f90faefa39c2e4965793c1d8321e7d5d99a6c941276a9094a4e0d483a598fca + value: quay.io/redhat-appstudio-tekton-catalog/task-show-sbom:0.1@sha256:1580a8766406207d3a7500cc0c62f8ec4cd935d772008a74dd71ec7e94af2f45 - name: kind value: task resolver: bundles @@ -66,7 +65,7 @@ spec: - name: name value: summary - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-summary:0.2@sha256:bdf58a8a6bf10482fff841ce6c78c54e87d306bc6aae9515821c436d26daff35 + value: quay.io/redhat-appstudio-tekton-catalog/task-summary:0.2@sha256:abdf426424f1331c27be80ed98a0fbcefb8422767d1724308b9d57b37f977155 - name: kind value: task resolver: bundles @@ -82,13 +81,11 @@ spec: name: output-image type: string - default: . - description: Path to the source code of an application's component from where - to build image. + description: Path to the source code of an application's component from where to build image. name: path-context type: string - default: Dockerfile - description: Path to the Dockerfile inside the context specified by parameter - path-context + description: Path to the Dockerfile inside the context specified by parameter path-context name: dockerfile type: string - default: "false" @@ -112,8 +109,7 @@ spec: name: java type: string - default: "" - description: Image tag expiration time, time values could be something like - 1h, 2d, 3w for hours, days, and weeks, respectively. + description: Image tag expiration time, time values could be something like 1h, 2d, 3w for hours, days, and weeks, respectively. name: image-expires-after - default: "false" description: Build a source image. 
@@ -149,7 +145,7 @@ spec: - name: name value: init - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-init:0.2@sha256:686109bd8088258f73211618824aee5d3cf9e370f65fa3e85d361790a54260ef + value: quay.io/redhat-appstudio-tekton-catalog/task-init:0.2@sha256:596b7c11572bb94eb67d9ffb4375068426e2a8249ff2792ce04ad2a4bc593a63 - name: kind value: task resolver: bundles @@ -166,7 +162,7 @@ spec: - name: name value: git-clone - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-git-clone:0.1@sha256:30709df067659a407968154fd39e99763823d8ecfc6b5cd00a55b68818ec94ba + value: quay.io/redhat-appstudio-tekton-catalog/task-git-clone:0.1@sha256:68a87cafeb43367160497d91a1a66bceef7acc179e809e8eb3996c1deb096042 - name: kind value: task resolver: bundles @@ -191,7 +187,7 @@ spec: - name: name value: prefetch-dependencies - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-prefetch-dependencies:0.1@sha256:c6fdbf404dc61bf8cf8bec5fc4d7fb15f37ba62f1684de0c68bfbad5723c0052 + value: quay.io/redhat-appstudio-tekton-catalog/task-prefetch-dependencies:0.1@sha256:69af2302a0a579f428ea196a2787013d58a6bec503d231d3ef860af7e82b96e9 - name: kind value: task resolver: bundles @@ -226,7 +222,7 @@ spec: - name: name value: buildah - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-buildah:0.1@sha256:7e5f19d3aa233b9becf90d1ca01697486dc1acb1f1d6d2a0b8d1a1cc07c66249 + value: quay.io/redhat-appstudio-tekton-catalog/task-buildah:0.2@sha256:79bfd61dc67a505f8a7025181a7651f73479742eb10d9bc35f2d73254629d4f3 - name: kind value: task resolver: bundles @@ -242,8 +238,6 @@ spec: params: - name: BINARY_IMAGE value: $(params.output-image) - - name: BASE_IMAGES - value: $(tasks.build-container.results.BASE_IMAGES_DIGESTS) runAfter: - build-container taskRef: @@ -251,7 +245,7 @@ spec: - name: name value: source-build - name: bundle - value: 
quay.io/redhat-appstudio-tekton-catalog/task-source-build:0.1@sha256:1f62eaf64a188fcf61f808ad78a15ebf9a8f7f51c644266ad195718b6a2dd372 + value: quay.io/redhat-appstudio-tekton-catalog/task-source-build:0.1@sha256:14b91ad9124b722b44222685013faaf9af8ac5b66030d9abeb1c61da3c118cdd - name: kind value: task resolver: bundles @@ -269,8 +263,6 @@ spec: workspace: workspace - name: deprecated-base-image-check params: - - name: BASE_IMAGES_DIGESTS - value: $(tasks.build-container.results.BASE_IMAGES_DIGESTS) - name: IMAGE_URL value: $(tasks.build-container.results.IMAGE_URL) - name: IMAGE_DIGEST @@ -282,7 +274,7 @@ spec: - name: name value: deprecated-image-check - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-deprecated-image-check:0.4@sha256:6b1b325de0af29b6e9a0696f4d2b669a1e6a046941726cc97c5e42785aad870c + value: quay.io/redhat-appstudio-tekton-catalog/task-deprecated-image-check:0.4@sha256:b91642a29e3fd204f724ce9e6ab97f3799b1d0102f6458a10e45f840281409ca - name: kind value: task resolver: bundles @@ -304,7 +296,7 @@ spec: - name: name value: clair-scan - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-clair-scan:0.1@sha256:a6107f78e5fa9e087992f11d788701e4241d9875b153def796fb3bf257c3b7fd + value: quay.io/redhat-appstudio-tekton-catalog/task-clair-scan:0.2@sha256:89ca5c9ddcaf609509aaed9c937c2a72cf400810e3a7892adfb9ac247a13693d - name: kind value: task resolver: bundles @@ -313,6 +305,23 @@ spec: operator: in values: - "false" + - name: rpms-signature-scan + params: + - name: image-digest + value: $(tasks.build-container.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-container.results.IMAGE_URL) + runAfter: + - build-container + taskRef: + params: + - name: name + value: rpms-signature-scan + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-rpms-signature-scan:0.2@sha256:7aa4d3c95e2b963e82fdda392f7cb3d61e3dab035416cf4a3a34e43cf3c9c9b8 + - name: kind + value: task + resolver: bundles - name: 
ecosystem-cert-preflight-checks params: - name: image-url @@ -324,7 +333,7 @@ spec: - name: name value: ecosystem-cert-preflight-checks - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-ecosystem-cert-preflight-checks:0.1@sha256:b6c1276b983d7ec6f8cf250056e904887f519bb6e54d538525f6314b681bd728 + value: quay.io/redhat-appstudio-tekton-catalog/task-ecosystem-cert-preflight-checks:0.1@sha256:fc2cda064580364bb80c3ad6f438002de0033963fc33985d01ad249346b93433 - name: kind value: task resolver: bundles @@ -335,13 +344,13 @@ spec: - "false" - name: sast-snyk-check runAfter: - - clone-repository + - build-container taskRef: params: - name: name value: sast-snyk-check - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-sast-snyk-check:0.1@sha256:b3d2d07394ff983d5f2578c294cd8c4e9428fecc801495feeb929d932c10f740 + value: quay.io/redhat-appstudio-tekton-catalog/task-sast-snyk-check:0.3@sha256:a35e1b1e108fe50737e47d5d71368ec882809fdd854096691d4e6fa7bc67e77b - name: kind value: task resolver: bundles @@ -353,42 +362,25 @@ spec: workspaces: - name: workspace workspace: workspace - - name: clamav-scan params: - name: image-digest value: $(tasks.build-container.results.IMAGE_DIGEST) - name: image-url value: $(tasks.build-container.results.IMAGE_URL) - runAfter: - - build-container - taskRef: - params: - - name: name - value: clamav-scan - - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-clamav-scan:0.1@sha256:6ba32717bd837ca0d5714b518cc4530e1f1d5bef137df54c02b0c2151b9d217e - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sbom-json-check + - name: clamav-scan params: - - name: IMAGE_URL - value: $(tasks.build-container.results.IMAGE_URL) - - name: IMAGE_DIGEST + - name: image-digest value: $(tasks.build-container.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-container.results.IMAGE_URL) runAfter: - build-container taskRef: 
params: - name: name - value: sbom-json-check + value: clamav-scan - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-sbom-json-check:0.1@sha256:dbd467a0507cff1981d3c98f683339feaab1b387c5b5fbf1ff957e9be2e27027 + value: quay.io/konflux-ci/tekton-catalog/task-clamav-scan:0.1@sha256:a94b6523ba0b691dc276e37594321c2eff3594d2753014e5c920803b47627df1 - name: kind value: task resolver: bundles diff --git a/.tekton/segment-backup-job-push.yaml b/.tekton/segment-backup-job-push.yaml index 305aaed..5cf4019 100644 --- a/.tekton/segment-backup-job-push.yaml +++ b/.tekton/segment-backup-job-push.yaml @@ -6,8 +6,8 @@ metadata: build.appstudio.redhat.com/commit_sha: '{{revision}}' build.appstudio.redhat.com/target_branch: '{{target_branch}}' pipelinesascode.tekton.dev/max-keep-runs: "3" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch - == "main" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" + build.appstudio.openshift.io/build-nudge-files: "controllers/constants/*" creationTimestamp: null labels: appstudio.openshift.io/application: segment-backup-job @@ -22,7 +22,7 @@ spec: - name: git-url value: '{{source_url}}' - name: output-image - value: quay.io/redhat-user-workloads/rhtas-tenant/segment-backup-job/segment-backup-job:{{revision}} + value: quay.io/securesign/segment-backup-job:{{revision}} - name: path-context value: . 
- name: revision @@ -44,7 +44,7 @@ spec: - name: name value: show-sbom - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-show-sbom:0.1@sha256:1f90faefa39c2e4965793c1d8321e7d5d99a6c941276a9094a4e0d483a598fca + value: quay.io/redhat-appstudio-tekton-catalog/task-show-sbom:0.1@sha256:1580a8766406207d3a7500cc0c62f8ec4cd935d772008a74dd71ec7e94af2f45 - name: kind value: task resolver: bundles @@ -63,7 +63,7 @@ spec: - name: name value: summary - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-summary:0.2@sha256:bdf58a8a6bf10482fff841ce6c78c54e87d306bc6aae9515821c436d26daff35 + value: quay.io/redhat-appstudio-tekton-catalog/task-summary:0.2@sha256:abdf426424f1331c27be80ed98a0fbcefb8422767d1724308b9d57b37f977155 - name: kind value: task resolver: bundles @@ -79,13 +79,11 @@ spec: name: output-image type: string - default: . - description: Path to the source code of an application's component from where - to build image. + description: Path to the source code of an application's component from where to build image. name: path-context type: string - default: Dockerfile - description: Path to the Dockerfile inside the context specified by parameter - path-context + description: Path to the Dockerfile inside the context specified by parameter path-context name: dockerfile type: string - default: "false" @@ -109,8 +107,7 @@ spec: name: java type: string - default: "" - description: Image tag expiration time, time values could be something like - 1h, 2d, 3w for hours, days, and weeks, respectively. + description: Image tag expiration time, time values could be something like 1h, 2d, 3w for hours, days, and weeks, respectively. name: image-expires-after - default: "false" description: Build a source image. 
@@ -146,7 +143,7 @@ spec: - name: name value: init - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-init:0.2@sha256:686109bd8088258f73211618824aee5d3cf9e370f65fa3e85d361790a54260ef + value: quay.io/redhat-appstudio-tekton-catalog/task-init:0.2@sha256:596b7c11572bb94eb67d9ffb4375068426e2a8249ff2792ce04ad2a4bc593a63 - name: kind value: task resolver: bundles @@ -163,7 +160,7 @@ spec: - name: name value: git-clone - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-git-clone:0.1@sha256:30709df067659a407968154fd39e99763823d8ecfc6b5cd00a55b68818ec94ba + value: quay.io/redhat-appstudio-tekton-catalog/task-git-clone:0.1@sha256:68a87cafeb43367160497d91a1a66bceef7acc179e809e8eb3996c1deb096042 - name: kind value: task resolver: bundles @@ -188,7 +185,7 @@ spec: - name: name value: prefetch-dependencies - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-prefetch-dependencies:0.1@sha256:c6fdbf404dc61bf8cf8bec5fc4d7fb15f37ba62f1684de0c68bfbad5723c0052 + value: quay.io/redhat-appstudio-tekton-catalog/task-prefetch-dependencies:0.1@sha256:69af2302a0a579f428ea196a2787013d58a6bec503d231d3ef860af7e82b96e9 - name: kind value: task resolver: bundles @@ -223,7 +220,7 @@ spec: - name: name value: buildah - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-buildah:0.1@sha256:7e5f19d3aa233b9becf90d1ca01697486dc1acb1f1d6d2a0b8d1a1cc07c66249 + value: quay.io/redhat-appstudio-tekton-catalog/task-buildah:0.2@sha256:79bfd61dc67a505f8a7025181a7651f73479742eb10d9bc35f2d73254629d4f3 - name: kind value: task resolver: bundles @@ -239,8 +236,6 @@ spec: params: - name: BINARY_IMAGE value: $(params.output-image) - - name: BASE_IMAGES - value: $(tasks.build-container.results.BASE_IMAGES_DIGESTS) runAfter: - build-container taskRef: @@ -248,7 +243,7 @@ spec: - name: name value: source-build - name: bundle - value: 
quay.io/redhat-appstudio-tekton-catalog/task-source-build:0.1@sha256:1f62eaf64a188fcf61f808ad78a15ebf9a8f7f51c644266ad195718b6a2dd372 + value: quay.io/redhat-appstudio-tekton-catalog/task-source-build:0.1@sha256:14b91ad9124b722b44222685013faaf9af8ac5b66030d9abeb1c61da3c118cdd - name: kind value: task resolver: bundles @@ -266,8 +261,6 @@ spec: workspace: workspace - name: deprecated-base-image-check params: - - name: BASE_IMAGES_DIGESTS - value: $(tasks.build-container.results.BASE_IMAGES_DIGESTS) - name: IMAGE_URL value: $(tasks.build-container.results.IMAGE_URL) - name: IMAGE_DIGEST @@ -279,7 +272,7 @@ spec: - name: name value: deprecated-image-check - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-deprecated-image-check:0.4@sha256:6b1b325de0af29b6e9a0696f4d2b669a1e6a046941726cc97c5e42785aad870c + value: quay.io/redhat-appstudio-tekton-catalog/task-deprecated-image-check:0.4@sha256:b91642a29e3fd204f724ce9e6ab97f3799b1d0102f6458a10e45f840281409ca - name: kind value: task resolver: bundles @@ -301,7 +294,7 @@ spec: - name: name value: clair-scan - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-clair-scan:0.1@sha256:a6107f78e5fa9e087992f11d788701e4241d9875b153def796fb3bf257c3b7fd + value: quay.io/redhat-appstudio-tekton-catalog/task-clair-scan:0.2@sha256:89ca5c9ddcaf609509aaed9c937c2a72cf400810e3a7892adfb9ac247a13693d - name: kind value: task resolver: bundles @@ -310,6 +303,23 @@ spec: operator: in values: - "false" + - name: rpms-signature-scan + params: + - name: image-digest + value: $(tasks.build-container.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-container.results.IMAGE_URL) + runAfter: + - build-container + taskRef: + params: + - name: name + value: rpms-signature-scan + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-rpms-signature-scan:0.2@sha256:7aa4d3c95e2b963e82fdda392f7cb3d61e3dab035416cf4a3a34e43cf3c9c9b8 + - name: kind + value: task + resolver: bundles - name: 
ecosystem-cert-preflight-checks params: - name: image-url @@ -321,7 +331,7 @@ spec: - name: name value: ecosystem-cert-preflight-checks - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-ecosystem-cert-preflight-checks:0.1@sha256:b6c1276b983d7ec6f8cf250056e904887f519bb6e54d538525f6314b681bd728 + value: quay.io/redhat-appstudio-tekton-catalog/task-ecosystem-cert-preflight-checks:0.1@sha256:fc2cda064580364bb80c3ad6f438002de0033963fc33985d01ad249346b93433 - name: kind value: task resolver: bundles @@ -332,13 +342,13 @@ spec: - "false" - name: sast-snyk-check runAfter: - - clone-repository + - build-container taskRef: params: - name: name value: sast-snyk-check - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-sast-snyk-check:0.1@sha256:b3d2d07394ff983d5f2578c294cd8c4e9428fecc801495feeb929d932c10f740 + value: quay.io/redhat-appstudio-tekton-catalog/task-sast-snyk-check:0.3@sha256:a35e1b1e108fe50737e47d5d71368ec882809fdd854096691d4e6fa7bc67e77b - name: kind value: task resolver: bundles @@ -350,42 +360,25 @@ spec: workspaces: - name: workspace workspace: workspace - - name: clamav-scan params: - name: image-digest value: $(tasks.build-container.results.IMAGE_DIGEST) - name: image-url value: $(tasks.build-container.results.IMAGE_URL) - runAfter: - - build-container - taskRef: - params: - - name: name - value: clamav-scan - - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-clamav-scan:0.1@sha256:6ba32717bd837ca0d5714b518cc4530e1f1d5bef137df54c02b0c2151b9d217e - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sbom-json-check + - name: clamav-scan params: - - name: IMAGE_URL - value: $(tasks.build-container.results.IMAGE_URL) - - name: IMAGE_DIGEST + - name: image-digest value: $(tasks.build-container.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-container.results.IMAGE_URL) runAfter: - build-container taskRef: 
params: - name: name - value: sbom-json-check + value: clamav-scan - name: bundle - value: quay.io/redhat-appstudio-tekton-catalog/task-sbom-json-check:0.1@sha256:dbd467a0507cff1981d3c98f683339feaab1b387c5b5fbf1ff957e9be2e27027 + value: quay.io/konflux-ci/tekton-catalog/task-clamav-scan:0.1@sha256:a94b6523ba0b691dc276e37594321c2eff3594d2753014e5c920803b47627df1 - name: kind value: task resolver: bundles diff --git a/Dockerfile.segment-backup-job.rh b/Dockerfile.segment-backup-job.rh index 25e6957..7fab1cb 100644 --- a/Dockerfile.segment-backup-job.rh +++ b/Dockerfile.segment-backup-job.rh @@ -1,4 +1,4 @@ -FROM registry.access.redhat.com/ubi9/python-311@sha256:ade21cd479a66b7d9b92ce5fd4f6633d12f5455b35b590c5c81995f5d01761fc +FROM registry.access.redhat.com/ubi9/python-312@sha256:83096e4e6273efcf52a697e8c235544ebf379125c08173bbe0dbc26b73fe06cd LABEL description="This image provides a data collection service for segment" LABEL io.k8s.description="This image provides a data collection service for segment" diff --git a/licenses/license.txt b/licenses/license.txt index d645695..3160db4 100644 --- a/licenses/license.txt +++ b/licenses/license.txt @@ -187,7 +187,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright [yyyy] [name of copyright owner] + Copyright [2024] [RedHat] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/pip_find_builddeps.py b/pip_find_builddeps.py new file mode 100755 index 0000000..e70b7bb --- /dev/null +++ b/pip_find_builddeps.py @@ -0,0 +1,236 @@ +#!/usr/bin/env python3 +import argparse +import datetime +import logging +import re +import shutil +import subprocess +import sys +import tempfile +from pathlib import Path + +SCRIPT_NAME = Path(sys.argv[0]).name + +DESCRIPTION = """\ +Find build dependencies for all your runtime dependencies. 
The input to this +script must be a requirements.txt file containing all the *recursive* runtime +dependencies. You can use pip-compile to generate such a file. The output is an +intermediate file that must first go through pip-compile before being used in +a Cachito request. +""" + +logging.basicConfig(format="%(levelname)s: %(message)s") + +log = logging.getLogger(__name__) +log.setLevel(logging.INFO) + + +class FindBuilddepsError(Exception): + """Failed to find build dependencies.""" + + +def _pip_download(requirements_files, output_file, tmpdir, no_cache, allow_binary): + """Run pip download, write output to file.""" + cmd = [ + "pip", + "download", + "-d", + tmpdir, + "--no-binary", + ":all:", + "--use-pep517", + "--verbose", + ] + if allow_binary: + cmd.remove("--no-binary") + cmd.remove(":all:") + if no_cache: + cmd.append("--no-cache-dir") + for file in requirements_files: + cmd.append("-r") + cmd.append(file) + + with open(output_file, "w") as outfile: + subprocess.run(cmd, stdout=outfile, stderr=outfile, check=True) + + +def _filter_builddeps(pip_download_output_file): + """Find builddeps in output of pip download.""" + # Requirement is a sequence of non-whitespace, non-';' characters + # Example: package, package==1.0, package[extra]==1.0 + requirement_re = r"[^\s;]+" + # Leading whitespace => requirement is a build dependency + # (because all recursive runtime dependencies were present in input files) + builddep_re = re.compile(rf"^\s+Collecting ({requirement_re})") + + with open(pip_download_output_file) as f: + matches = (builddep_re.match(line) for line in f) + builddeps = set(match.group(1) for match in matches if match) + + return sorted(builddeps) + + +def find_builddeps( + requirements_files, no_cache=False, ignore_errors=False, allow_binary=False +): + """ + Find build dependencies for packages in requirements files. 
+ + :param requirements_files: list of requirements file paths + :param no_cache: do not use pip cache when downloading packages + :param ignore_errors: generate partial output even if pip download fails + :return: list of build dependencies and bool whether output is partial + """ + tmpdir = tempfile.mkdtemp(prefix=f"{SCRIPT_NAME}-") + pip_output_file = Path(tmpdir) / "pip-download-output.txt" + is_partial = False + + try: + log.info("Running pip download, this may take a while") + _pip_download( + requirements_files, pip_output_file, tmpdir, no_cache, allow_binary + ) + except subprocess.CalledProcessError: + msg = f"Pip download failed, see {pip_output_file} for more info" + if ignore_errors: + log.error(msg) + log.warning("Ignoring error...") + is_partial = True + else: + raise FindBuilddepsError(msg) + + log.info("Looking for build dependencies in the output of pip download") + builddeps = _filter_builddeps(pip_output_file) + + # Remove tmpdir only if pip download was successful + if not is_partial: + shutil.rmtree(tmpdir) + + return builddeps, is_partial + + +def generate_file_content(builddeps, is_partial): + """ + Generate content to write to output file. 
+ + :param builddeps: list of build dependencies to include in file + :param is_partial: indicates that list of build dependencies may be partial + :return: file content + """ + # Month Day Year HH:MM:SS + date = datetime.datetime.now().strftime("%b %d %Y %H:%M:%S") + + lines = [f"# Generated by {SCRIPT_NAME} on {date}"] + if builddeps: + lines.extend(builddeps) + else: + lines.append("# ") + + if is_partial: + lines.append("# ") + + file_content = "\n".join(lines) + return file_content + + +def _parse_requirements_file(builddeps_file): + """Find deps requirements-build.in file.""" + try: + with open(builddeps_file) as f: + # ignore line comments or comments added after dependency is declared + requirement_re = re.compile(r"^([^\s#;]+)") + matches = (requirement_re.match(line) for line in f) + return set(match.group(1) for match in matches if match) + except FileNotFoundError: + # it's ok if the file doesn't exist. + return set() + + +def _sanity_check_args(ap, args): + if args.only_write_on_update and not args.output_file: + ap.error("--only-write-on-update requires an output-file (-o/--output-file).") + + +def main(): + """Run script.""" + ap = argparse.ArgumentParser(description=DESCRIPTION) + ap.add_argument("requirements_files", metavar="REQUIREMENTS_FILE", nargs="+") + ap.add_argument( + "-o", "--output-file", metavar="FILE", help="write output to this file" + ) + ap.add_argument( + "-a", + "--append", + action="store_true", + help="append to output file instead of overwriting", + ) + ap.add_argument( + "--no-cache", + action="store_true", + help="do not use pip cache when downloading packages", + ) + ap.add_argument( + "--ignore-errors", + action="store_true", + help="generate partial output even if pip download fails", + ) + ap.add_argument( + "--only-write-on-update", + action="store_true", + help=( + "only write output file if dependencies will be modified - or new " + "dependencies will be added if used in conjunction with -a/--append." 
+ ), + ) + ap.add_argument( + "--allow-binary", + action="store_true", + help=( + "do not find build dependencies for packages with wheels " + "available for the current platform" + ), + ) + + args = ap.parse_args() + _sanity_check_args(ap, args) + + log.info( + "Please make sure the input files meet the requirements of this script (see --help)" + ) + + builddeps, is_partial = find_builddeps( + args.requirements_files, + no_cache=args.no_cache, + ignore_errors=args.ignore_errors, + allow_binary=args.allow_binary, + ) + + if args.only_write_on_update: + original_builddeps = _parse_requirements_file(args.output_file) + if args.append: + # append only new dependencies + builddeps = sorted(set(builddeps) - original_builddeps) + if not builddeps or set(builddeps) == original_builddeps: + log.info("No new build dependencies found.") + return + + file_content = generate_file_content(builddeps, is_partial) + + log.info("Make sure to pip-compile the output before submitting a Cachito request") + if is_partial: + log.warning("Pip download failed, output may be incomplete!") + + if args.output_file: + mode = "a" if args.append else "w" + with open(args.output_file, mode) as f: + print(file_content, file=f) + else: + print(file_content) + + +if __name__ == "__main__": + try: + main() + except FindBuilddepsError as e: + log.error("%s", e) + exit(1) \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 03c1e14..c64f280 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,7 @@ +[metadata] +name = "SegmentBackupJob" +version = "1.0" + [project] requires-python = ">=3.11" name = "SegmentBackupJob" @@ -8,5 +12,7 @@ dependencies = [ 'kubernetes==29.0.0', 'openshift==0.13.2', 'setuptools-scm==7.1.0', - 'setuptools==69.2.0' + 'setuptools==73.0.1', + 'idna==3.7', + 'calver==2022.6.26' ] diff --git a/requirements-build.in b/requirements-build.in index 270cff9..d09da91 100644 --- a/requirements-build.in +++ b/requirements-build.in @@ -1,19 +1,16 @@ 
-#Generated by pip_find_builddeps.py on Mar 26 2024 11:43:14 +# Generated by pip_find_builddeps.py on Aug 22 2024 13:52:54 Cython<3.0 flit_core<4,>=3.2 flit_core<4,>=3.4 flit_core>=3.3 hatchling<2,>=1.6.0 packaging>=20.0 -packaging>=21.3 +packaging>=23.2 pathspec>=0.10.1 pluggy>=1.0.0 poetry-core>=1.0.0 -setuptools -setuptools>=40.8.0 -setuptools>=45 -setuptools>=46.4.0 -setuptools_scm<8.0 +setuptools==73.0.1 +setuptools-scm==7.1.0 trove-classifiers typing-extensions typing_extensions diff --git a/requirements-build.txt b/requirements-build.txt index ae1c349..113c3dd 100644 --- a/requirements-build.txt +++ b/requirements-build.txt @@ -2,12 +2,8 @@ # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pybuild-deps compile --generate-hashes --output-file=requirements-build.txt ./requirements.txt +# pip-compile --allow-unsafe --generate-hashes requirements-build.in # -calver==2022.6.26 \ - --hash=sha256:a1d7fcdd67797afc52ee36ffb8c8adf6643173864306547bfd1380cbce6310a0 \ - --hash=sha256:e05493a3b17517ef1748fbe610da11f10485faa7c416b9d33fd4a52d74894f8b - # via trove-classifiers cython==0.29.37 \ --hash=sha256:0301d4739c6894e012f1d410052082fdda9e63888c815d9e23e0f7f82fff7d79 \ --hash=sha256:0544f7a3e4437b89b356baa15387494c18214e03f2ffaddada5a2c71c3dfd24b \ @@ -51,74 +47,63 @@ cython==0.29.37 \ --hash=sha256:f813d4a6dd94adee5d4ff266191d1d95bf6d4164a4facc535422c021b2504cfb \ --hash=sha256:fa5b6a0f69bf1823c9fd038fa77a2568b78fda2de045a95b48a71dee4d0d578f \ --hash=sha256:fe0eaf6b1e9ee97c5ee7bfc943f00e36cf59d929db16886cb018352bff8208da - # via pyyaml + # via -r requirements-build.in flit-core==3.9.0 \ --hash=sha256:72ad266176c4a3fcfab5f2930d76896059851240570ce9a98733b658cb786eba \ --hash=sha256:7aada352fb0c7f5538c4fafeddf314d3a6a92ee8e2b1de70482329e42de70301 - # via - # idna - # packaging - # pathspec - # typing-extensions - # wheel + # via -r requirements-build.in hatchling==1.22.4 \ 
--hash=sha256:8a2dcec96d7fb848382ef5848e5ac43fdae641f35a08a3fab5116bd495f3416e \ --hash=sha256:f56da5bfc396af7b29daa3164851dd04991c994083f56cb054b5003675caecdc - # via urllib3 + # via -r requirements-build.in packaging==24.0 \ --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 # via + # -r requirements-build.in # hatchling # setuptools-scm pathspec==0.12.1 \ --hash=sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 \ --hash=sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712 - # via hatchling + # via + # -r requirements-build.in + # hatchling pluggy==1.4.0 \ --hash=sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981 \ --hash=sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be - # via hatchling + # via + # -r requirements-build.in + # hatchling poetry-core==1.9.0 \ --hash=sha256:4e0c9c6ad8cf89956f03b308736d84ea6ddb44089d16f2adc94050108ec1f5a1 \ --hash=sha256:fa7a4001eae8aa572ee84f35feb510b321bd652e5cf9293249d62853e1f935a2 - # via - # backoff - # rsa -setuptools-scm @ https://files.pythonhosted.org/packages/98/12/2c1e579bb968759fc512391473340d0661b1a8c96a59fb7c65b02eec1321/setuptools_scm-7.1.0.tar.gz#sha256=6c508345a771aad7d56ebff0e70628bf2b0ec7573762be9960214730de278f27 \ - --hash=sha256:6c508345a771aad7d56ebff0e70628bf2b0ec7573762be9960214730de278f27 - # via - # pluggy - # python-dateutil + # via -r requirements-build.in +setuptools-scm==7.1.0 \ + --hash=sha256:6c508345a771aad7d56ebff0e70628bf2b0ec7573762be9960214730de278f27 \ + --hash=sha256:73988b6d848709e2af142aa48c986ea29592bbcfca5375678064708205253d8e + # via -r requirements-build.in trove-classifiers==2024.3.25 \ --hash=sha256:6de68d06edd6fec5032162b6af22e818a4bb6f4ae2258e74699f8a41064b7cad \ --hash=sha256:c400e0bdceb018913339d53b07682d09a42aada687d070e90ee3c08477bec024 - # via hatchling + # via + # -r 
requirements-build.in + # hatchling typing-extensions==4.10.0 \ --hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475 \ --hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb - # via setuptools-scm + # via + # -r requirements-build.in + # setuptools-scm wheel==0.43.0 \ --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 - # via - # cachetools - # python-dateutil - # python-string-utils - # pyyaml + # via -r requirements-build.in # The following packages are considered to be unsafe in a requirements file: -setuptools==69.2.0 \ - --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ - --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c +setuptools==73.0.1 \ + --hash=sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e \ + --hash=sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193 # via - # cachetools - # calver - # pathspec - # pluggy - # pyasn1 - # pyasn1-modules - # python-dateutil - # pyyaml + # -r requirements-build.in # setuptools-scm - # trove-classifiers diff --git a/requirements.txt b/requirements.txt index d333925..b8e650b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,6 +12,10 @@ cachetools==5.3.3 \ --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth +calver==2022.6.26 \ + --hash=sha256:a1d7fcdd67797afc52ee36ffb8c8adf6643173864306547bfd1380cbce6310a0 \ + --hash=sha256:e05493a3b17517ef1748fbe610da11f10485faa7c416b9d33fd4a52d74894f8b + # via SegmentBackupJob (pyproject.toml) certifi==2024.2.2 \ --hash=sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f \ --hash=sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1 @@ -114,10 +118,12 @@ 
google-auth==2.29.0 \ --hash=sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360 \ --hash=sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415 # via kubernetes -idna==3.6 \ - --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \ - --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f - # via requests +idna==3.7 \ + --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ + --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 + # via + # SegmentBackupJob (pyproject.toml) + # requests kubernetes==29.0.0 \ --hash=sha256:ab8cb0e0576ccdfb71886366efb102c6a20f268d817be065ce7f9909c631e43e \ --hash=sha256:c4812e227ae74d07d53c88293e564e54b850452715a59a927e7e1bc6b9a60459 @@ -237,7 +243,6 @@ segment-analytics-python==2.2.3 \ --hash=sha256:0df5908e3df74b4482f33392fdd450df4c8351bf54974376fbe6bf33b0700865 # via SegmentBackupJob (pyproject.toml) setuptools-scm==7.1.0 \ - --hash=sha256:6c508345a771aad7d56ebff0e70628bf2b0ec7573762be9960214730de278f27 \ --hash=sha256:6c508345a771aad7d56ebff0e70628bf2b0ec7573762be9960214730de278f27 # via SegmentBackupJob (pyproject.toml) six==1.16.0 \ @@ -263,9 +268,9 @@ websocket-client==1.7.0 \ # via kubernetes # The following packages are considered to be unsafe in a requirements file: -setuptools==69.2.0 \ - --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ - --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c +setuptools==73.0.1 \ + --hash=sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e \ + --hash=sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193 # via # SegmentBackupJob (pyproject.toml) # setuptools-scm diff --git a/src/script.py b/src/script.py index f535e68..7231ff4 100644 --- a/src/script.py +++ b/src/script.py @@ -43,7 +43,7 @@ def check_cluster_monitoring_config(openshift_client): return 1 return 0 
except: - print('Could not get configmap cluster-monitoring-config in openshift-monitoring namespace, and thus it cannot have \`.telemeterClient.disabled: true\`. Continuing ...') + print('Could not get configmap cluster-monitoring-config in openshift-monitoring namespace, and thus it cannot have `.telemeterClient.disabled: true`. Continuing ...') return 0 def check_console_operator(openshift_client): @@ -58,7 +58,7 @@ def check_console_operator(openshift_client): return 1 return 0 except: - print('could not get Console named cluster in namespace \`openshift-console\`, and thus it cannot have the disabled annotation. Continuing ...') + print('could not get Console named cluster in namespace `openshift-console`, and thus it cannot have the disabled annotation. Continuing ...') return 0 def check_thanos_querier_status(openshift_client): @@ -83,7 +83,7 @@ def check_thanos_querier_status(openshift_client): if route_up == True: return thanos_quierier_host elif route_up == False: - print('Timed out. Thanos Querier route did not spin up in the \`openshift-monitoring\` namespace.') + print('Timed out. 
Thanos Querier route did not spin up in the `openshift-monitoring` namespace.') return 1 def check_user_workload_monitoring(openshift_client): @@ -100,7 +100,7 @@ def check_user_workload_monitoring(openshift_client): return 1 return 0 except: - print('Could not get ConfigMap \`cluster-monitoring-config\` in namespace \`openshift-monitoring\`, meaning userWorkloadMonitoring is not enabled or there are permissions errors.') + print('Could not get ConfigMap `cluster-monitoring-config` in namespace `openshift-monitoring`, meaning userWorkloadMonitoring is not enabled or there are permissions errors.') return 1 def get_bearer_token(): @@ -129,11 +129,17 @@ def write_dict_as_json(dictionairy): outfile.write(json_object) outfile.close() -def query_nightly_metrics(openshift_client, thanos_quierier_host, bearer_token, base_domain): +def fetch_response_data(query_url, headers, REQUESTS_CA_BUNDLE, REQUESTS_CA_BUNDLE_INTERNAL): + try: + response = requests.get(query_url, headers=headers, verify=REQUESTS_CA_BUNDLE) + except: + response = requests.get(query_url, headers=headers, verify=REQUESTS_CA_BUNDLE_INTERNAL) + return response + +def query_nightly_metrics(openshift_client, thanos_quierier_host, bearer_token, base_domain, REQUESTS_CA_BUNDLE, REQUESTS_CA_BUNDLE_INTERNAL): fulcio_new_certs=None rekor_new_entries=None rekor_qps_by_api=None - fulcio_new_certs_query_data='query=fulcio_new_certs' fulcio_new_certs_query_URL = 'https://{thanos_quierier_host}/api/v1/query?&{fulcio_new_certs_query_data}'.format(thanos_quierier_host=thanos_quierier_host, fulcio_new_certs_query_data=fulcio_new_certs_query_data) @@ -143,13 +149,13 @@ def query_nightly_metrics(openshift_client, thanos_quierier_host, bearer_token, rekor_qps_by_api_query_URL='https://{thanos_quierier_host}/api/v1/query?&{rekor_qps_by_api_query_data}'.format(thanos_quierier_host=thanos_quierier_host, rekor_qps_by_api_query_data=rekor_qps_by_api_query_data) headers = {'Authorization': 'Bearer 
{bearer_token}'.format(bearer_token=bearer_token)} - fulcio_new_certs_response_data = requests.get(fulcio_new_certs_query_URL, headers=headers, verify=True,) + fulcio_new_certs_response_data = fetch_response_data(fulcio_new_certs_query_URL, headers, REQUESTS_CA_BUNDLE, REQUESTS_CA_BUNDLE_INTERNAL) if fulcio_new_certs_response_data.status_code == 200 or fulcio_new_certs_response_data.status_code == 201: fulcio_new_certs_json = fulcio_new_certs_response_data.json() if fulcio_new_certs_json['status'] == 'success' and fulcio_new_certs_json['data']['result']: fulcio_new_certs = fulcio_new_certs_json['data']['result'][0]['value'][1] - rekor_new_entries_response_data = requests.get(rekor_new_entries_query_URL,headers=headers, verify=True,) + rekor_new_entries_response_data = fetch_response_data(rekor_new_entries_query_URL, headers, REQUESTS_CA_BUNDLE, REQUESTS_CA_BUNDLE_INTERNAL) if rekor_new_entries_response_data.status_code == 200 or rekor_new_entries_response_data.status_code == 201: rekor_new_entries_json = rekor_new_entries_response_data.json() if rekor_new_entries_json['status'] == 'success' and rekor_new_entries_json['data']['result']: @@ -158,8 +164,7 @@ def query_nightly_metrics(openshift_client, thanos_quierier_host, bearer_token, else: rekor_new_entries = rekor_new_entries_json['data']['result'][0]['value'][1] - - rekor_qps_by_api_response_data = requests.get(rekor_qps_by_api_query_URL,headers=headers, verify=True,) + rekor_qps_by_api_response_data = fetch_response_data(rekor_qps_by_api_query_URL, headers, REQUESTS_CA_BUNDLE, REQUESTS_CA_BUNDLE_INTERNAL) if rekor_qps_by_api_response_data.status_code == 200 or rekor_qps_by_api_response_data.status_code == 201: rekor_qps_by_api_json = rekor_qps_by_api_response_data.json() if rekor_qps_by_api_json['status'] == 'success' and rekor_qps_by_api_json['data']['result']: @@ -226,7 +231,9 @@ def main(): print('failed to get base_domain which is required for both installation and nightly metrics. 
Failing job.') exit(1) if RUN_TYPE == 'nightly': - query_nightly_metrics(openshift_client, thanos_quierier_host, bearer_token, base_domain) + REQUESTS_CA_BUNDLE_INTERNAL = os.environ.get('REQUESTS_CA_BUNDLE_INTERNAL') + REQUESTS_CA_BUNDLE = os.environ.get('REQUESTS_CA_BUNDLE') + query_nightly_metrics(openshift_client, thanos_quierier_host, bearer_token, base_domain, REQUESTS_CA_BUNDLE, REQUESTS_CA_BUNDLE_INTERNAL) main_nightly() elif RUN_TYPE == 'installation': metrics_dict = { 'base_domain': base_domain}