Skip to content

Commit

Permalink
Measure coverage for integration tests in CI (#1893)
Browse files Browse the repository at this point in the history
This change adds additional steps to the CI system that:

1. Builds a version of SAW with HPC support enabled
2. Runs the integration tests with the binaries produced in (1)
3. Generates an HTML coverage report from the HPC data from (2) and uploads this as an artifact tagged with the appropriate PR number
4. Downloads all coverage artifacts from all PRs and generates a website containing all of them (much like the cryptol docs).
5. Uploads this website to github pages

A few things to note:
* The coverage reports are hosted at https://galoisinc.github.io/saw-script
  * Step (4) generates the `index.html` file there that links to all available reports
* Coverage reports are available so long as the underlying artifact is available. This is determined by a combination of the expiration date of the artifact (90 days after it is generated), as well as our artifact storage limits. This should be sufficient for coverage reports of active PRs, as well as history going back a bit.
  • Loading branch information
bboston7 authored Sep 15, 2023
1 parent 37d60f4 commit 1f31781
Show file tree
Hide file tree
Showing 6 changed files with 221 additions and 36 deletions.
20 changes: 19 additions & 1 deletion .github/ci.sh
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,10 @@ build() {
else
pkgs=(saw crux-mir-comp saw-remote-api)
fi
tee -a cabal.project.local > /dev/null < cabal.project.ci
cat cabal.project.ci >> cabal.project.local
if [[ "$ENABLE_HPC" == "true" ]]; then
cat cabal.project.ci-hpc >> cabal.project.local
fi
if ! retry cabal v2-build "$@" "${pkgs[@]}"; then
if [[ "$RUNNER_OS" == "macOS" ]]; then
echo "Working around a dylib issue on macos by removing the cache and trying again"
Expand All @@ -74,6 +77,21 @@ build() {
fi
}

# Gather and tar up all HPC coverage files and binaries
#
# Collects every .mix file and generated .hs file under dist-newstyle,
# plus the binaries in dist/bin, into hpc.tar.gz so the archive can be
# uploaded as a CI artifact (consumed later by the coverage job).
collect_hpc_files() {
  local -a mix_files generated_hs_files
  # mapfile keeps one path per array element, so filenames containing
  # whitespace survive intact; it also avoids `local var=$(...)`, which
  # would mask a failing `find` under `set -e`.
  mapfile -t mix_files < <(find dist-newstyle -name "*.mix")
  mapfile -t generated_hs_files < <(find dist-newstyle/build -name "*.hs")
  local bins="dist/bin"
  tar cvf hpc.tar.gz "${mix_files[@]}" "${generated_hs_files[@]}" "$bins"
}

# Download HTML coverage reports and generate an index file linking to them
#
# Fetches every "coverage-html-*" artifact for this repo via the gh CLI
# into all-html/, then runs .github/generate_index.py there to produce
# an index.html linking to each downloaded report.
collect_all_html() {
  local html_dir="all-html"
  mkdir -p "${html_dir}"
  # Subshell keeps the caller's working directory untouched.
  (
    cd "${html_dir}" \
      && gh run download -p "coverage-html-*" \
      && python3 ../.github/generate_index.py
  )
}

install_system_deps() {
(cd $BIN && curl -o bins.zip -sL "https://github.com/GaloisInc/what4-solvers/releases/download/$SOLVER_PKG_VERSION/$BUILD_TARGET_OS-bin.zip" && unzip -o bins.zip && rm bins.zip)
chmod +x $BIN/*
Expand Down
30 changes: 30 additions & 0 deletions .github/generate_index.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
#!/usr/bin/env python3

# This script generates an HTML index file for all coverage reports

import glob

# Coverage report directories downloaded from CI are named
# "coverage-html-<PR number>"; strip this prefix to recover the PR number.
PREFIX = "coverage-html-"

HEADER = """
<!DOCTYPE html>
<html>
<head>
    <title>SAWScript Test Coverage Results</title>
</head>
<body>
    <h1>SAWScript Test Coverage Results</h1>
    <p>SAWScript coverage results by pull request number:</p>
    <ul>
"""

FOOTER = """
    </ul>
</body>
</html>
"""


def generate_index(out_path="index.html"):
    """Write an HTML index linking every coverage-html-* report directory.

    Report directories are listed in sorted order. Each entry links to the
    hpc_index.html page inside the directory and is labeled with the PR
    number taken from the directory name.
    """
    with open(out_path, "w") as f:
        f.write(HEADER)
        for report_dir in sorted(glob.glob(PREFIX + "*")):
            pr_num = report_dir[len(PREFIX):]  # "coverage-html-123" -> "123"
            link_dest = f"{report_dir}/hpc_index.html"
            # Quote the href so the generated HTML stays valid for any name.
            f.write(f'        <li><a href="{link_dest}">{pr_num}</a></li>\n')
        f.write(FOOTER)


if __name__ == "__main__":
    generate_index()
146 changes: 143 additions & 3 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,7 @@ jobs:
cabal: ["3.10.1.0"]
ghc: ["8.10.7", "9.2.7", "9.4.4"]
run-tests: [true]
hpc: [false]
include:
# We include one job from an older Ubuntu LTS release to increase our
# coverage of possible Linux configurations. Since we already run the
Expand All @@ -88,6 +89,13 @@ jobs:
ghc: "8.10.7"
cabal: "3.10.1.0"
run-tests: false
hpc: false
# Include one job with HPC enabled
- os: ubuntu-22.04
ghc: "9.4.4"
cabal: "3.10.1.0"
run-tests: true
hpc: true
outputs:
cabal-test-suites-json: ${{ steps.cabal-test-suites.outputs.targets-json }}
steps:
Expand Down Expand Up @@ -143,6 +151,9 @@ jobs:
- shell: bash
run: .github/ci.sh build
env:
ENABLE_HPC: ${{ matrix.hpc }}


- shell: bash
env:
Expand Down Expand Up @@ -191,28 +202,56 @@ jobs:
SIGNING_KEY: ${{ secrets.SIGNING_KEY }}
run: .github/ci.sh sign $NAME-with-solvers.tar.gz

- if: matrix.ghc == '8.10.7'
##########################################################################
# We upload an archive containing SAW, and also an archive containing SAW
# and the set of possible SMT solvers, but only for our "primary"
# distribution (currently: GHC 8.10.7). These archives are utilized in
# subsequent CI jobs, but are also published for external users, and are
# therefore signed.
#
# In addition, if we built with HPC, we upload a tarball containing the
# HPC-enabled binaries and HPC-specific files generated during the build
# process. These are mostly used for subsequent CI jobs that will
# generate a coverage report, and the binaries are essentially the same as
# those collected for the previous operation, but they are captured in
# their original cabal-generated locations where they are expected to live
# for subsequent coverage collection.

# In the next 3 steps we check that `matrix.hpc == false` so that if the
# distribution version matches the HPC version, the HPC build artifacts do
# not clobber the non-HPC distribution artifacts.
- if: matrix.ghc == '8.10.7' && matrix.hpc == false
uses: actions/upload-artifact@v2
with:
name: ${{ steps.config.outputs.name }} (GHC ${{ matrix.ghc }})
path: "${{ steps.config.outputs.name }}.tar.gz*"
if-no-files-found: error
retention-days: ${{ needs.config.outputs.retention-days }}

- if: matrix.ghc == '8.10.7'
- if: matrix.ghc == '8.10.7' && matrix.hpc == false
uses: actions/upload-artifact@v2
with:
name: ${{ steps.config.outputs.name }}-with-solvers (GHC ${{ matrix.ghc }})
path: "${{ steps.config.outputs.name }}-with-solvers.tar.gz*"
if-no-files-found: error
retention-days: ${{ needs.config.outputs.retention-days }}

- if: matrix.ghc == '8.10.7' && matrix.run-tests
- if: matrix.ghc == '8.10.7' && matrix.run-tests && matrix.hpc == false
uses: actions/upload-artifact@v2
with:
path: dist/bin
name: ${{ runner.os }}-bins

- if: matrix.hpc == true
shell: bash
run: .github/ci.sh collect_hpc_files

- if: matrix.hpc == true
uses: actions/upload-artifact@v2
with:
path: hpc.tar.gz
name: ${{ runner.os }}-hpc.tar.gz

- uses: actions/cache/save@v3
name: Save cabal store cache
if: always()
Expand Down Expand Up @@ -459,6 +498,107 @@ jobs:
path: ${{ matrix.suite }}.cache
key: ${{ env.SOLVER_CACHE_VERSION }}-solver-${{ matrix.suite }}-${{ matrix.os }}-${{ github.sha }}

# The coverage job is similar to the cabal-test job, but it only runs the HPC
# enabled SAW build against the integration test suite. It then collects the
# test coverage results, generates an HTML summary, and publishes the results
# to github pages.
coverage:
name: "Run integration tests with coverage reporting"
needs: build
runs-on: ${{ matrix.os }}
if: github.event_name == 'pull_request'
permissions:
pages: write
id-token: write
strategy:
matrix:
suite: ['integration_tests']
os: [ubuntu-22.04]
steps:
# Need a copy of the source to generate coverage HTML
- uses: actions/checkout@v2
with:
submodules: true

- name: Install system dependencies
shell: bash
run: .github/ci.sh install_system_deps
env:
BUILD_TARGET_OS: ${{ matrix.os }}

- uses: actions/download-artifact@v2
with:
name: dist-tests-${{ matrix.os }}
path: dist-tests

- uses: actions/download-artifact@v2
with:
name: "${{ runner.os }}-hpc.tar.gz"

- name: Setup test environment
shell: bash
run: |
tar xvf hpc.tar.gz
chmod +x dist/bin/*
chmod +x bin/*
chmod +x dist-tests/*
- uses: actions/setup-java@v1
with:
java-version: "8"
java-package: jdk
architecture: x64

# NOTE: This job uses the SMT solver cache to improve performance but it
# does not save the updated SMT solver cache. This is because the
# `cabal-test` also runs the integration tests and uploads an updated
# cache. Because the test suite is the same, the resulting cache files
# would also be identical.
- uses: actions/cache/restore@v3
name: Restore SMT solver result cache
with:
path: ${{ matrix.suite }}.cache
key: ${{ env.SOLVER_CACHE_VERSION }}-solver-${{ matrix.suite }}-${{ matrix.os }}-${{ github.sha }}
restore-keys: |
${{ env.SOLVER_CACHE_VERSION }}-solver-${{ matrix.suite }}-${{ matrix.os }}-
- shell: bash
name: Enable solver caching
run: |
echo "SAW_SOLVER_CACHE_PATH=$(pwd)/${{ matrix.suite }}.cache" >> "$GITHUB_ENV"
dist/bin/saw --clean-mismatched-versions-solver-cache=$(pwd)/${{ matrix.suite }}.cache
- name: Run integration tests
shell: bash
run: |
export PATH="$PWD/bin:$PWD/dist/bin:$PATH"
dist-tests/integration_tests
- name: Compute coverage
shell: bash
run: |
./compute-coverage.sh
- uses: actions/upload-artifact@v2
with:
path: hpc-html
name: coverage-html-${{ github.event.number }}

- name: Gather HPC coverage files
shell: bash
run: .github/ci.sh collect_all_html
env:
GH_TOKEN: ${{ github.token }}

- name: Upload pages artifact
uses: actions/upload-pages-artifact@v1
with:
path: all-html

- name: Deploy to github pages
id: deployment
uses: actions/deploy-pages@v2

build-push-image:
runs-on: ubuntu-22.04
needs: [config]
Expand Down
3 changes: 3 additions & 0 deletions cabal.project.ci-hpc
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
-- concatenated with cabal.project when run in the CI HPC build
package saw-script
coverage: true
26 changes: 26 additions & 0 deletions compute-coverage.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
#!/usr/bin/env bash
set -Eeuxo pipefail

# This script generates an HTML coverage report for any tests run within the
# saw-script repo. It uses HPC, which is a tool in the standard GHC
# distribution. Follow these steps to use this script:
# 1. Build with coverage enabled. One way to do this is to add "coverage: true"
#    to the saw-script package in cabal.project.
# 2. Run whatever tests you want. It is important that you use the saw binary
#    built in step (1), and that your current working directory be somewhere at
#    or underneath this top level-directory.
# 3. Run this script in the top-level directory (where this script is found).
# 4. You'll find the HPC HTML report in the "hpc-html" directory beneath the
#    directory containing this script.

# Combine .tix files
SUM_TIX="all.tix"
# Collect the .tix paths into an array so filenames containing whitespace
# are passed to hpc as single arguments.
mapfile -t TIX_FILES < <(find . -name "*.tix")
hpc sum --output="$SUM_TIX" --union --exclude=Main --exclude=GitRev "${TIX_FILES[@]}"

# Generate report
HPC_ARGS=()
# `find` may report more than one "hpc" directory (e.g. one per built
# package/component); iterate over every match rather than assuming a
# single result, and build the argument list as an array so each
# --hpcdir flag stays a single word.
while IFS= read -r hpc_root; do
  for dir in "${hpc_root}"/vanilla/mix/*; do
    HPC_ARGS+=("--hpcdir=${dir}")
  done
done < <(find dist-newstyle -name "hpc" -type d)
hpc markup --destdir=hpc-html "${HPC_ARGS[@]}" "$SUM_TIX"
32 changes: 0 additions & 32 deletions coverage.sh

This file was deleted.

0 comments on commit 1f31781

Please sign in to comment.