diff --git a/build_latest.sh b/build_latest.sh
index c865960c6..0c4e8ce9e 100755
--- a/build_latest.sh
+++ b/build_latest.sh
@@ -14,7 +14,7 @@
 #
 set -o pipefail
 
-root_dir="$PWD"
+export root_dir="$PWD"
 push_cmdfile=${root_dir}/push_commands.sh
 target_repo="adoptopenjdk/openjdk"
 version="9"
diff --git a/common_functions.sh b/common_functions.sh
index 209558f41..f7a0e5903 100755
--- a/common_functions.sh
+++ b/common_functions.sh
@@ -35,7 +35,7 @@ all_jvms="hotspot openj9"
 
 # All supported arches
 all_arches="aarch64 armv7l ppc64le s390x x86_64 windows-amd windows-nano"
-# All supported packges
+# All supported packages
 # shellcheck disable=SC2034 # used externally
 all_packages="jdk jre"
 
@@ -145,8 +145,8 @@ function get_arches() {
 # Check if the given VM is supported on the current architecture.
 # This is based on the hotspot_shasums_latest.sh/openj9_shasums_latest.sh
 function vm_supported_onarch() {
-	vm=$1
-	sums=$2
+	local vm=$1
+	local sums=$2
 
 	if [ -n "$3" ]; then
 		test_arch=$3;
@@ -183,7 +183,7 @@ function cleanup_manifest() {
 # Check if a given docker image exists on the server.
 # This script errors out if the image does not exist.
 function check_image() {
-	img=$1
+	local img=$1
 
 	docker pull "${img}" >/dev/null
 	ret=$?
@@ -244,14 +244,14 @@ function check_manifest_tool() {
 
 # Build valid image tags using the tags.config file as the base
 function build_tags() {
-	vm=$1; shift
-	ver=$1; shift;
-	pkg=$1; shift;
-	rel=$1; shift;
-	os=$1; shift;
-	build=$1; shift;
-	rawtags=$*
-	tmpfile=raw_arch_tags.$$.tmp
+	local vm=$1; shift
+	local ver=$1; shift;
+	local pkg=$1; shift;
+	local rel=$1; shift;
+	local os=$1; shift;
+	local build=$1; shift;
+	local rawtags=$*
+	local tmpfile=raw_arch_tags.$$.tmp
 
 	# For jre builds, replace the version tag to distinguish it from the jdk
 	if [ "${pkg}" == "jre" ]; then
@@ -276,7 +276,7 @@ function build_tags() {
 		printf("tag_aliases: "); for (key in natags) { printf"%s ", natags[key] }; printf"\n";
 	}' > ${tmpfile}
 
-    # shellcheck disable=SC2034 # used externally
+	# shellcheck disable=SC2034 # used externally
 	tag_aliases=$( < "${tmpfile}" grep "^tag_aliases:" | sed "s/tag_aliases: //")
 	raw_arch_tags=$( < "${tmpfile}" grep "^arch_tags:" | sed "s/arch_tags: //")
 	arch_tags=""
@@ -311,14 +311,14 @@ function build_tags() {
 # url_pkg = jdk / jre
 # url_rel = latest / ${version}
 function get_v2_url() {
-	request_type=$1
-	release_type=$2
-	url_impl=$3
-	url_pkg=$4
-	url_rel=$5
-	url_arch=$6
-	url_heapsize=normal
-	url_version=openjdk${version}
+	local request_type=$1
+	local release_type=$2
+	local url_impl=$3
+	local url_pkg=$4
+	local url_rel=$5
+	local url_arch=$6
+	local url_heapsize=normal
+	local url_version=openjdk${version}
 
 	baseurl="https://api.adoptopenjdk.net/v2/${request_type}/${release_type}/${url_version}"
 	specifiers="openjdk_impl=${url_impl}&type=${url_pkg}&release=${url_rel}&heap_size=${url_heapsize}"
@@ -336,29 +336,55 @@ function get_v2_url() {
 	echo "${baseurl}?${specifiers}"
 }
 
+# Get the binary github link for a release given a V2 API URL
+function get_v2_binary_url() {
+	local v2_url=$1
+	local info_file=/tmp/info_$$.json
+
+	if ! curl -Lso ${info_file} "${v2_url}" || [ ! -s ${info_file} ]; then
+		rm -f ${info_file}
+		return;
+	fi
+	< ${info_file} grep "binary_link" | awk -F '"' '{ print $4 }'
+	rm -f ${info_file}
+}
+
+# Get the installer github link for a release given a V2 API URL
+function get_v2_installer_url() {
+	local v2_url=$1
+	local info_file=/tmp/info_$$.json
+
+	if ! curl -Lso ${info_file} "${v2_url}" || [ ! -s ${info_file} ]; then
+		rm -f ${info_file}
+		return;
+	fi
+	< ${info_file} grep "installer_link" | awk -F '"' '{ print $4 }'
+	rm -f ${info_file}
+}
+
 # Build the URL using adoptopenjdk.net v3 api based on the given parameters
 # request_type = feature_releases
 # release_type = ga / ea
 # url_impl = hotspot / openj9
 # url_arch = aarch64 / ppc64le / s390x / x64
 # url_pkg = jdk / jre
-# url_rel = latest / ${version}
 # https://api.adoptopenjdk.net/v3/assets/feature_releases/11/ga?page=0&page_size=1&release_type=ga&sort_order=DESC&vendor=adoptopenjdk&jvm_impl=openj9&heap_size=normal&architecture=x64&os=linux&image_type=jdk
 function get_v3_url() {
-	request_type=$1
-	release_type=$2
-	url_impl=$3
-	url_pkg=$4
-	url_rel=$5
-	url_arch=$6
-	url_heapsize="normal"
-	url_version="openjdk${version}"
-	# shellcheck disable=2034
-	url_vendor="adoptopenjdk"
-
-	baseurl="https://api.adoptopenjdk.net/v3/assets/${request_type}/${version}/${release_type}"
+	local request_type=$1
+	local release_type=$2
+	local url_impl=$3
+	local url_pkg=$4
+	local url_arch=$5
+	local url_heapsize="normal"
+
+	if [ "${release_type}" == "releases" ]; then
+		rel_type="ga"
+	else
+		rel_type="ea"
+	fi
+	baseurl="https://api.adoptopenjdk.net/v3/assets/${request_type}/${version}/${rel_type}"
 	specifiers="page=0&page_size=1&sort_order=DESC&vendor=adoptopenjdk"
-	specifiers="${specifiers}&jvm_impl=${url_impl}&image_type=${url_pkg}&release=${url_rel}&heap_size=${url_heapsize}"
+	specifiers="${specifiers}&jvm_impl=${url_impl}&image_type=${url_pkg}&heap_size=${url_heapsize}"
 	windows_pat="windows.*"
 	if [ -n "${url_arch}" ]; then
 		if [[ "${url_arch}" =~ ${windows_pat} ]]; then
@@ -373,43 +399,42 @@ function get_v3_url() {
 	echo "${baseurl}?${specifiers}"
 }
 
-# Get the binary github link for a release given a V2 API URL
-function get_binary_url() {
-	v2_url=$1
-	info_file=/tmp/info_$$.json
+# Get the binary github link for a release given a V3 API URL
+function get_v3_binary_url() {
+	local v3_url=$1
+	local info_file=/tmp/info_$$.json
 
-	if ! curl -Lso ${info_file} "${v2_url}" || [ ! -s ${info_file} ]; then
+	if ! curl -Lso ${info_file} "${v3_url}" || [ ! -s ${info_file} ]; then
 		rm -f ${info_file}
 		return;
 	fi
-	# shellcheck disable=SC2005
-	echo "$( < "${info_file}" grep "binary_link" | awk -F '"' '{ print $4 }')"
-	rm -f "${info_file}"
+	python3 -c "import sys, json; print(json.load(sys.stdin)[0]['binaries'][0]['package']['link'])" < "${info_file}"
+	rm -f ${info_file}
 }
 
-# Get the installer github link for a release given a V2 API URL
-function get_instaler_url() {
-	v2_url=$1
-	info_file=/tmp/info_$$.json
+# Get the installer github link for a release given a V3 API URL
+function get_v3_installer_url() {
+	local v3_url=$1
+	local info_file=/tmp/info_$$.json
 
-	if ! curl -Lso "${info_file}" "${v2_url}" || [ ! -s ${info_file} ]; then
+	if ! curl -Lso "${info_file}" "${v3_url}" || [ ! -s ${info_file} ]; then
 		rm -f ${info_file}
 		return;
 	fi
-	< ${info_file} grep "installer_link" | awk -F '"' '{ print $4 }'
+	python3 -c "import sys, json; print(json.load(sys.stdin)[0]['binaries'][0]['installer']['link'])" < "${info_file}"
 	rm -f ${info_file}
 }
 
 # Get the short build version from the full version for this specific arch
 # $1 = full version
 function get_nightly_short_version() {
-	arch_build=$1
-	arch_full_version=$2
+	local arch_build=$1
+	local arch_full_version=$2
 	if [ "${arch_build}" = "nightly" ]; then
 		# Remove date and time at the end of full_version for nightly builds.
 		# Handle both the old and new date-time formats used by the Adopt build system.
 		# Older date-time format - 201809270034
-        # shellcheck disable=SC2001
+		# shellcheck disable=SC2001
 		arch_version=$(echo "${arch_full_version}" | sed 's/-[0-9]\{4\}[0-9]\{2\}[0-9]\{2\}[0-9]\{4\}$//')
 		# New date-time format - 2018-09-27-00-34
 		# shellcheck disable=SC2001
@@ -422,39 +447,38 @@ function get_nightly_short_version() {
 
 # Get the shasums for the given specific build and arch combination.
 function get_sums_for_build_arch() {
-	# shellcheck disable=SC2034 # TODO check where it is used
-	gsba_ver=$1
-	gsba_vm=$2
-	gsba_pkg=$3
-	gsba_build=$4
-	gsba_arch=$5
-
-	case ${gsba_arch} in
+	local ver=$1
+	local vm=$2
+	local pkg=$3
+	local build=$4
+	local arch=$5
+
+	case ${arch} in
 	armv7l)
-		LATEST_URL=$(get_v2_url info "${gsba_build}" "${gsba_vm}" "${gsba_pkg}" latest arm);
+		LATEST_URL=$(get_v3_url feature_releases "${build}" "${vm}" "${pkg}" arm);
 		;;
 	aarch64)
-		LATEST_URL=$(get_v2_url info "${gsba_build}" "${gsba_vm}" "${gsba_pkg}" latest aarch64);
+		LATEST_URL=$(get_v3_url feature_releases "${build}" "${vm}" "${pkg}" aarch64);
 		;;
 	ppc64le)
-		LATEST_URL=$(get_v2_url info "${gsba_build}" "${gsba_vm}" "${gsba_pkg}" latest ppc64le);
+		LATEST_URL=$(get_v3_url feature_releases "${build}" "${vm}" "${pkg}" ppc64le);
 		;;
 	s390x)
-		LATEST_URL=$(get_v2_url info "${gsba_build}" "${gsba_vm}" "${gsba_pkg}" latest s390x);
+		LATEST_URL=$(get_v3_url feature_releases "${build}" "${vm}" "${pkg}" s390x);
 		;;
 	x86_64)
-		LATEST_URL=$(get_v2_url info "${gsba_build}" "${gsba_vm}" "${gsba_pkg}" latest x64);
+		LATEST_URL=$(get_v3_url feature_releases "${build}" "${vm}" "${pkg}" x64);
 		;;
 	windows-amd|windows-nano)
-		LATEST_URL=$(get_v2_url info "${gsba_build}" "${gsba_vm}" "${gsba_pkg}" latest windows-amd);
+		LATEST_URL=$(get_v3_url feature_releases "${build}" "${vm}" "${pkg}" windows-amd);
 		;;
 	*)
-		echo "Unsupported arch: ${gsba_arch}"
+		echo "Unsupported arch: ${arch}"
 	esac
 
 	while :
 	do
-		shasum_file="${gsba_arch}_${gsba_build}_latest"
+		shasum_file="${arch}_${build}_latest"
 		# Bad builds cause the latest url to return an empty file or sometimes curl fails
 		if ! curl -Lso "${shasum_file}" "${LATEST_URL}" || [ ! -s "${shasum_file}" ]; then
 			echo "Latest url not available at url: ${LATEST_URL}"
@@ -465,13 +489,13 @@ function get_sums_for_build_arch() {
 		# Print the arch and the corresponding shasums to the vm output file
 		if [ -z "${availability}" ]; then
 			# If there are multiple builds for a single version, then pick the latest one.
-			if [ "$gsba_arch" == "windows-amd" ]; then
-				shasums_url=$( < "${shasum_file}" grep "installer_checksum_link" | head -1 | awk -F'"' '{ print $4 }');
+			if [ "${arch}" == "windows-amd" ]; then
+				shasums_url=$(python3 -c "import sys, json; print(json.load(sys.stdin)[0]['binaries'][0]['installer']['checksum_link'])" < "${shasum_file}")
 				if [ -z "$shasums_url" ]; then
-					shasums_url=$( < "${shasum_file}" grep "checksum_link" | head -1 | awk -F'"' '{ print $4 }');
+					shasums_url=$(python3 -c "import sys, json; print(json.load(sys.stdin)[0]['binaries'][0]['package']['checksum_link'])" < "${shasum_file}")
 				fi
 			else
-				shasums_url=$( < "${shasum_file}" grep "checksum_link" | head -1 | awk -F'"' '{ print $4 }');
+				shasums_url=$(python3 -c "import sys, json; print(json.load(sys.stdin)[0]['binaries'][0]['package']['checksum_link'])" < "${shasum_file}")
 			fi
 			shasum=$(curl -Ls "${shasums_url}" | sed -e 's/<[^>]*>//g' | awk '{ print $1 }');
 			# Sometimes shasum files are missing, check for error and do not print on error.
@@ -481,20 +505,20 @@ function get_sums_for_build_arch() {
 				break;
 			fi
 			# Get the release version for this arch from the info file
-			arch_build_version=$( < "${shasum_file}" grep "release_name" | awk -F'"' '{ print $4 }');
+			arch_build_version=$(python3 -c "import sys, json; print(json.load(sys.stdin)[0]['release_name'])" < "${shasum_file}")
 			# For nightly builds get the short version without the date/time stamp
-			arch_build_version=$(get_nightly_short_version "${gsba_build}" "${arch_build_version}")
+			arch_build_version=$(get_nightly_short_version "${build}" "${arch_build_version}")
 			# If the latest for the current arch does not match with the latest for the parent arch,
 			# then skip this arch.
 			# Parent version in this case would be the "full_version" from function get_sums_for_build
 			# The parent version will automatically be the latest for all arches as returned by the v2 API
 			if [ "${arch_build_version}" != "${full_version}" ]; then
-				echo "Parent version not matching for arch ${gsba_arch}: ${arch_build_version}, ${full_version}"
+				echo "Parent version not matching for arch ${arch}: ${arch_build_version}, ${full_version}"
 				break;
 			fi
 			# Only print the entry if the shasum is not empty
 			if [ -n "${shasum}" ]; then
-				printf "\t[%s]=\"%s\"\n" "${gsba_arch}" "${shasum}" >> "${ofile}"
+				printf "\t[%s]=\"%s\"\n" "${arch}" "${shasum}" >> "${ofile}"
 			fi
 		fi
 		break;
@@ -505,56 +529,54 @@ function get_sums_for_build_arch() {
 
 # Get shasums for the build and arch combination given
 # If no arch given, generate for all valid arches
 function get_sums_for_build() {
-	gsb_ver=$1
-	gsb_vm=$2
-	gsb_pkg=$3
-	gsb_build=$4
-	gsb_arch=$5
+	local ver=$1
+	local vm=$2
+	local pkg=$3
+	local build=$4
+	local arch=$5
 
-	info_url=$(get_v2_url info "${gsb_build}" "${gsb_vm}" "${gsb_pkg}" latest);
+	info_url=$(get_v3_url feature_releases "${build}" "${vm}" "${pkg}");
 	# Repeated requests from a script triggers a error threshold on adoptopenjdk.net
 	sleep 1;
 	info=$(curl -Ls "${info_url}")
 	err=$(echo "${info}" | grep -e "Error" -e "No matches" -e "Not found")
 	if [ -n "${err}" ]; then
-		# shellcheck disable=SC2104 # TODO need to check flow here
-		continue;
+		return;
 	fi
-	full_version=$(echo "${info}" | grep "release_name" | awk -F'"' '{ print $4 }');
-	full_version=$(get_nightly_short_version "${gsb_build}" "${full_version}")
+	full_version=$(echo "${info}" | python3 -c "import sys, json; print(json.load(sys.stdin)[0]['release_name'])")
+	full_version=$(get_nightly_short_version "${build}" "${full_version}")
 	# Declare the array with the proper name and write to the vm output file.
- printf "declare -A %s_%s_%s_%s_sums=(\n" "${gsb_pkg}" "${gsb_vm}" "${gsb_ver}" "${gsb_build}" >> "${ofile}" + printf "declare -A %s_%s_%s_%s_sums=(\n" "${pkg}" "${vm}" "${ver}" "${build}" >> "${ofile}" # Capture the full version according to adoptopenjdk printf "\t[version]=\"%s\"\n" "${full_version}" >> "${ofile}" - if [ -n "${gsb_arch}" ]; then - get_sums_for_build_arch "${gsb_ver}" "${gsb_vm}" "${gsb_pkg}" "${gsb_build}" "${gsb_arch}" + if [ -n "${arch}" ]; then + get_sums_for_build_arch "${ver}" "${vm}" "${pkg}" "${build}" "${arch}" else - for gsb_arch in ${all_arches} + for arch in ${all_arches} do - get_sums_for_build_arch "${gsb_ver}" "${gsb_vm}" "${gsb_pkg}" "${gsb_build}" "${gsb_arch}" + get_sums_for_build_arch "${ver}" "${vm}" "${pkg}" "${build}" "${arch}" done fi printf ")\n" >> "${ofile}" echo - echo "sha256sums for the version ${full_version} for build type \"${gsb_build}\" is now available in ${ofile}" + echo "sha256sums for the version ${full_version} for build type \"${build}\" is now available in ${ofile}" echo } # get sha256sums for the specific builds and arches given. # If no build or arch specified, do it for all valid ones. function get_shasums() { - ver=$1 - vm=$2 - pkg=$3 - build=$4 - arch=$5 - # shellcheck disable=SC2154 # declared externally - ofile="${root_dir}/${vm}_shasums_latest.sh" + local ver=$1 + local vm=$2 + local pkg=$3 + local build=$4 + local arch=$5 + local ofile="${vm}_shasums_latest.sh" # Dont build the shasums array it already exists for the Ver/VM/Pkg/Build combination if [ -f "${ofile}" ]; then - # shellcheck disable=SC1090 + # shellcheck disable=SC1090 source ./"${vm}"_shasums_latest.sh sums="${pkg}_${vm}_${ver}_${build}_sums" # File exists, which means shasums for the VM exists. diff --git a/dockerfile_functions.sh b/dockerfile_functions.sh index 83317c940..84c8e79dd 100755 --- a/dockerfile_functions.sh +++ b/dockerfile_functions.sh @@ -189,7 +189,7 @@ EOI print_alpine_pkg() { cat >> "$1" <<'EOI' RUN apk add --no-cache --virtual .build-deps curl binutils \ - && GLIBC_VER="2.30-r0" \ + && GLIBC_VER="2.31-r0" \ && ALPINE_GLIBC_REPO="https://github.com/sgerrand/alpine-pkg-glibc/releases/download" \ && GCC_LIBS_URL="https://archive.archlinux.org/packages/g/gcc-libs/gcc-libs-9.1.0-2-x86_64.pkg.tar.xz" \ && GCC_LIBS_SHA256="91dba90f3c20d32fcf7f1dbe91523653018aa0b8d2230b00f822f6722804cf08" \ @@ -226,7 +226,7 @@ EOI print_ubi_pkg() { cat >> "$1" <<'EOI' RUN dnf install -y openssl curl ca-certificates fontconfig glibc-langpack-en gzip tar \ - && dnf update; dnf clean all + && dnf update -y; dnf clean all EOI } @@ -234,8 +234,8 @@ EOI # Select the ubi OS packages. 
 print_ubi-minimal_pkg() {
 	cat >> "$1" <<'EOI'
-RUN microdnf install openssl curl ca-certificates fontconfig glibc-langpack-en gzip tar \
-    && microdnf update; microdnf clean all
+RUN microdnf install -y openssl curl ca-certificates fontconfig glibc-langpack-en gzip tar \
+    && microdnf update -y; microdnf clean all
 EOI
 }
 
@@ -243,7 +243,7 @@ EOI
 print_centos_pkg() {
 	cat >> "$1" <<'EOI'
 RUN yum install -y openssl curl ca-certificates fontconfig gzip tar \
-    && yum update; yum clean all
+    && yum update -y; yum clean all
 EOI
 }
 
@@ -295,43 +295,43 @@ print_java_install_pre() {
 	for sarch in ${supported_arches}
 	do
 		if [ "${sarch}" == "aarch64" ]; then
-			JAVA_URL=$(get_v2_url info "${bld}" "${vm}" "${pkg}" latest aarch64);
+			JAVA_URL=$(get_v3_url feature_releases "${bld}" "${vm}" "${pkg}" aarch64);
 			cat >> "$1" <<-EOI
        aarch64|arm64) \\
          ESUM='$(sarray="${shasums}[aarch64]"; eval esum=\${$sarray}; echo "${esum}")'; \\
-         BINARY_URL='$(get_binary_url "${JAVA_URL}")'; \\
+         BINARY_URL='$(get_v3_binary_url "${JAVA_URL}")'; \\
          ;; \\
 EOI
 		elif [ "${sarch}" == "armv7l" ]; then
-			JAVA_URL=$(get_v2_url info "${bld}" "${vm}" "${pkg}" latest arm);
+			JAVA_URL=$(get_v3_url feature_releases "${bld}" "${vm}" "${pkg}" arm);
 			cat >> "$1" <<-EOI
        armhf|armv7l) \\
          ESUM='$(sarray="${shasums}[armv7l]"; eval esum=\${$sarray}; echo "${esum}")'; \\
-         BINARY_URL='$(get_binary_url "${JAVA_URL}")'; \\
+         BINARY_URL='$(get_v3_binary_url "${JAVA_URL}")'; \\
          ;; \\
 EOI
 		elif [ "${sarch}" == "ppc64le" ]; then
-			JAVA_URL=$(get_v2_url info "${bld}" "${vm}" "${pkg}" latest ppc64le);
+			JAVA_URL=$(get_v3_url feature_releases "${bld}" "${vm}" "${pkg}" ppc64le);
 			cat >> "$1" <<-EOI
        ppc64el|ppc64le) \\
          ESUM='$(sarray="${shasums}[ppc64le]"; eval esum=\${$sarray}; echo "${esum}")'; \\
-         BINARY_URL='$(get_binary_url "${JAVA_URL}")'; \\
+         BINARY_URL='$(get_v3_binary_url "${JAVA_URL}")'; \\
          ;; \\
 EOI
 		elif [ "${sarch}" == "s390x" ]; then
-			JAVA_URL=$(get_v2_url info "${bld}" "${vm}" "${pkg}" latest s390x);
+			JAVA_URL=$(get_v3_url feature_releases "${bld}" "${vm}" "${pkg}" s390x);
 			cat >> "$1" <<-EOI
        s390x) \\
          ESUM='$(sarray="${shasums}[s390x]"; eval esum=\${$sarray}; echo "${esum}")'; \\
-         BINARY_URL='$(get_binary_url "${JAVA_URL}")'; \\
+         BINARY_URL='$(get_v3_binary_url "${JAVA_URL}")'; \\
          ;; \\
 EOI
 		elif [ "${sarch}" == "x86_64" ]; then
-			JAVA_URL=$(get_v2_url info "${bld}" "${vm}" "${pkg}" latest x64);
+			JAVA_URL=$(get_v3_url feature_releases "${bld}" "${vm}" "${pkg}" x64);
 			cat >> "$1" <<-EOI
        amd64|x86_64) \\
          ESUM='$(sarray="${shasums}[x86_64]"; eval esum=\${$sarray}; echo "${esum}")'; \\
-         BINARY_URL='$(get_binary_url "${JAVA_URL}")'; \\
+         BINARY_URL='$(get_v3_binary_url "${JAVA_URL}")'; \\
          ;; \\
 EOI
 		fi
@@ -422,9 +422,9 @@ print_windows_java_install() {
 	servertype=$(echo "$file" | cut -f4 -d"/" | cut -f1 -d"-")
 	version=$(echo "$file" | cut -f1 -d "/")
 	if [ "$servertype" == "windowsservercore" ]; then
-		JAVA_URL=$(get_v2_url info "${bld}" "${vm}" "${pkg}" latest windows-amd);
+		JAVA_URL=$(get_v3_url feature_releases "${bld}" "${vm}" "${pkg}" windows-amd);
 		ESUM=$(sarray="${shasums}[windows-amd]"; eval esum=\${$sarray}; echo "${esum}");
-		BINARY_URL=$(get_instaler_url "${JAVA_URL}");
+		BINARY_URL=$(get_v3_installer_url "${JAVA_URL}");
 
 		cat >> "$1" <<-EOI
 RUN Write-Host ('Downloading ${BINARY_URL} ...'); \\
@@ -446,10 +446,10 @@ RUN Write-Host ('Downloading ${BINARY_URL} ...'); \\
         Remove-Item -Path C:\temp -Recurse | Out-Null;
 EOI
 	else
-		JAVA_URL=$(get_v2_url info "${bld}" "${vm}" "${pkg}" latest windows-nano);
+		JAVA_URL=$(get_v3_url feature_releases "${bld}" "${vm}" "${pkg}" windows-nano);
 		# shellcheck disable=SC1083
 		ESUM=$(sarray="${shasums}[windows-nano]"; eval esum=\${"$sarray"}; echo "${esum}");
-		BINARY_URL=$(get_binary_url "${JAVA_URL}");
+		BINARY_URL=$(get_v3_binary_url "${JAVA_URL}");
 
 		cat >> "$1" <<-EOI
 USER ContainerAdministrator
diff --git a/test_multiarch.sh b/test_multiarch.sh
index 98f1d5b2e..b1a2e4aad 100755
--- a/test_multiarch.sh
+++ b/test_multiarch.sh
@@ -14,7 +14,7 @@
 #
 set -o pipefail
 
-root_dir="$PWD"
+export root_dir="$PWD"
 source_prefix="adoptopenjdk"
 source_repo="openjdk"
 version="9"
@@ -39,7 +39,8 @@ package=$3
 
 # Run a java -version test for a given docker image.
 function test_java_version() {
-	img="$1"
+	local img=$1
+	local rel=$2
 
 	echo
 	echo "TEST: Running java -version test on image: ${img}..."
@@ -54,17 +55,20 @@ function test_java_version() {
 
 # Run all test buckets for the given image.
 function run_tests() {
-	img=$1
-	grep -v '^#' < "${test_buckets_file}" | while IFS= read -r test_case
+	local img=$1
+	local rel=$2
+
+	grep -v '^#' < "${test_buckets_file}" | while IFS= read -r test_case
 	do
-		${test_case} "${img}"
+		${test_case} "${img}" "${rel}"
 	done
 }
 
 # Run tests on all the alias docker tags.
 function test_aliases() {
-	repo=$1
-	target_repo=${source_prefix}/${repo}
+	local repo=$1
+	local rel=$2
+	local target_repo=${source_prefix}/${repo}
 
 	# Check if all the individual docker images exist for each expected arch
 	for arch_tag in ${arch_tags}
@@ -88,15 +92,16 @@ function test_aliases() {
 			printf "\nError: Docker Image %s not found on hub.docker\n" "${img}"
 			printf "\n##############################################\n"
 		fi
-		run_tests "${target_repo}":"${tag_alias}"
+		run_tests "${target_repo}":"${tag_alias}" "${rel}"
 	done
 }
 
 # Check each of the images in the global variable arch_tags exist
 # and run tests on them
 function test_tags() {
-	repo=$1
-	target_repo=${source_prefix}/${repo}
+	local repo=$1
+	local rel=$2
+	local target_repo=${source_prefix}/${repo}
 
 	# Check if all the individual docker images exist for each expected arch
 	for arch_tag in ${arch_tags}
@@ -112,18 +117,19 @@ function test_tags() {
 			printf "\nError: Docker Image %s not found on hub.docker\n" "${img}"
 			printf "\n##############################################\n"
 		fi
-		run_tests "${target_repo}":"${arch_tag}"
+		run_tests "${target_repo}":"${arch_tag}" "${rel}"
 	done
 }
 
 # Run tests for each of the test image types
 # Currently image types = test_tags and test_aliases.
 function test_image_types() {
-	srepo=$1
+	local srepo=$1
+	local rel=$2
 
-	grep -v '^#' < "${test_image_types_file}" | while IFS= read -r test_image
+	grep -v '^#' "${test_image_types_file}" | while IFS= read -r test_image
 	do
-		${test_image} "${srepo}"
+		${test_image} "${srepo}" "${rel}"
 	done
 }
 
@@ -136,12 +142,12 @@ set_arch_os
 # Source the hotspot and openj9 shasums scripts
 available_jvms=""
 if [ "${vm}" == "hotspot" ] && [ -f hotspot_shasums_latest.sh ]; then
-    # shellcheck disable=SC1091
+	# shellcheck disable=SC1091
 	source ./hotspot_shasums_latest.sh
 	available_jvms="hotspot"
 fi
 if [ "${vm}" == "openj9" ] && [ -f openj9_shasums_latest.sh ]; then
-    # shellcheck disable=SC1091
+	# shellcheck disable=SC1091
 	source ./openj9_shasums_latest.sh
 	available_jvms="${available_jvms} openj9"
 fi
@@ -159,14 +165,14 @@ do
 		do
 			shasums="${package}"_"${vm}"_"${version}"_"${build}"_sums
 			jverinfo="${shasums}[version]"
-			eval jrel="\${$jverinfo}"
-            # shellcheck disable=SC2154
+			# shellcheck disable=SC1083,SC2086
+			eval jrel=\${$jverinfo}
+			# shellcheck disable=SC2154
 			if [[ -z "${jrel}" ]]; then
 				continue;
 			fi
 			# Docker image tags cannot have "+" in them, replace it with "_" instead.
-			# shellcheck disable=SC2001
-			rel=$(echo "${jrel}" | sed 's/+/_/g')
+			rel=${jrel//+/_}
 			srepo=${source_repo}${version}
 
 			if [ "${vm}" != "hotspot" ]; then
@@ -181,7 +187,7 @@ do
 			build_tags "${vm}" "${version}" "${package}" "${rel}" "${os}" "${build}" "${raw_tags}"
 			echo "done"
 			# Test both the arch specific tags and the tag aliases.
-			test_image_types "${srepo}"
+			test_image_types "${srepo}" "${rel}"
 		done
 	done
 done
diff --git a/update_multiarch.sh b/update_multiarch.sh
index 0980443eb..e22860ca9 100755
--- a/update_multiarch.sh
+++ b/update_multiarch.sh
@@ -16,7 +16,7 @@ set -o pipefail
 
 # Dockerfiles to be generated
 version="9"
-root_dir="$PWD"
+export root_dir="$PWD"
 source ./common_functions.sh
 source ./dockerfile_functions.sh