2 changes: 1 addition & 1 deletion .github/workflows/test_integration.yml
@@ -66,7 +66,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Bazel build targets
run: |
-./integration_test.sh --known-good known_good.updated.json
+scripts/integration_test.sh --known-good known_good.updated.json
- name: Show disk space after build
if: always()
run: |
93 changes: 93 additions & 0 deletions .github/workflows/unit_tests.yml
@@ -0,0 +1,93 @@
# *******************************************************************************
# Copyright (c) 2025 Contributors to the Eclipse Foundation
#
# See the NOTICE file(s) distributed with this work for additional
# information regarding copyright ownership.
#
# This program and the accompanying materials are made available under the
# terms of the Apache License Version 2.0 which is available at
# https://www.apache.org/licenses/LICENSE-2.0
#
# SPDX-License-Identifier: Apache-2.0
# *******************************************************************************

name: Execute Unit Tests
on:
workflow_dispatch:
pull_request:
release:
types: [created]
schedule:
- cron: '30 3 * * *' # Every night at 03:30 UTC on main branch
jobs:
unit_tests:
runs-on: ubuntu-latest
steps:
- name: Show disk space before build
run: |
echo 'Disk space before build:'
df -h

- name: Removing unneeded software
run: |
echo "Removing unneeded software... "
if [ -d /usr/share/dotnet ]; then echo "Removing dotnet..."; start=$(date +%s); sudo rm -rf /usr/share/dotnet; end=$(date +%s); echo "Duration: $((end-start))s"; fi
#if [ -d /usr/local/lib/android ]; then echo "Removing android..."; start=$(date +%s); sudo rm -rf /usr/local/lib/android; end=$(date +%s); echo "Duration: $((end-start))s"; fi
if [ -d /opt/ghc ]; then echo "Removing haskell (ghc)..."; start=$(date +%s); sudo rm -rf /opt/ghc; end=$(date +%s); echo "Duration: $((end-start))s"; fi
if [ -d /usr/local/.ghcup ]; then echo "Removing haskell (ghcup)..."; start=$(date +%s); sudo rm -rf /usr/local/.ghcup; end=$(date +%s); echo "Duration: $((end-start))s"; fi
if [ -d /usr/share/swift ]; then echo "Removing swift..."; start=$(date +%s); sudo rm -rf /usr/share/swift; end=$(date +%s); echo "Duration: $((end-start))s"; fi
if [ -d /usr/local/share/chromium ]; then echo "Removing chromium..."; start=$(date +%s); sudo rm -rf /usr/local/share/chromium; end=$(date +%s); echo "Duration: $((end-start))s"; fi

- name: Show disk space after cleanup
run: |
echo 'Disk space after cleanup:'
df -h

- name: Checkout repository
uses: actions/checkout@v4.2.2

- name: Setup Bazel
uses: bazel-contrib/setup-bazel@0.15.0
with:
bazelisk-cache: true
disk-cache: ${{ github.workflow }}
repository-cache: true

- name: Run unit tests
run: |
scripts/run_unit_tests.sh
cat _logs/ut_summary.md

- name: Publish build summary
if: always()
run: |
if [ -f _logs/ut_summary.md ]; then
cat _logs/ut_summary.md >> "$GITHUB_STEP_SUMMARY"
else
echo "No build summary file found (_logs/ut_summary.md)" >> "$GITHUB_STEP_SUMMARY"
fi

- name: Upload logs artifact
if: always()
uses: actions/upload-artifact@v4
with:
name: bazel-build-logs
path: _logs/
if-no-files-found: warn
retention-days: 14

- name: Create archive of test reports
if: github.ref_type == 'tag'
run: |
mkdir -p artifacts
find bazel-testlogs/external -name 'test.xml' -print0 | xargs -0 -I{} cp --parents {} artifacts/
zip -r ${{ github.event.repository.name }}_test_reports.zip artifacts/
shell: bash

- name: Upload release asset (attach ZIP to GitHub Release)
uses: softprops/action-gh-release@v2.5.0
if: github.ref_type == 'tag'
with:
files: ${{ github.event.repository.name }}_test_reports.zip
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
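The tag-gated archive step can be exercised locally after a `bazel test` run to check which reports it would bundle. A minimal sketch, reusing the commands from the step above (the local ZIP name is illustrative, not the one the workflow uploads):

```bash
# Collect the JUnit XML reports exactly as the workflow step does,
# then list the archive contents instead of uploading them.
mkdir -p artifacts
find bazel-testlogs/external -name 'test.xml' -print0 | xargs -0 -I{} cp --parents {} artifacts/
zip -r local_test_reports.zip artifacts/
unzip -l local_test_reports.zip
```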
2 changes: 1 addition & 1 deletion README.md
@@ -148,7 +148,7 @@ local_path_override(module_name = "score_tooling", path = "../tooling")

### Rust

-Use `./generate_rust_analyzer_support.sh` to generate rust_analyzer settings that will let VS Code work.
+Use `scripts/generate_rust_analyzer_support.sh` to generate rust_analyzer settings that will let VS Code work.
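A typical local flow for the Rust note above is sketched below; the files the script actually writes are not shown in this diff, so the `rust-project.json` location is an assumption based on the usual rust-analyzer convention:

```bash
# Generate the rust-analyzer project description for the Bazel workspace,
# then reload the VS Code window so the rust-analyzer extension picks it up.
scripts/generate_rust_analyzer_support.sh
ls rust-project.json   # assumed output; adjust to whatever the script reports
```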

## 🗂 Notes
Keep this file updated as integration issues are resolved. Prefer converting ad-hoc shell steps into Bazel rules or documented scripts under `tools/` for repeatability.
File renamed without changes.
56 changes: 56 additions & 0 deletions scripts/run_unit_tests.sh
@@ -0,0 +1,56 @@
#!/usr/bin/env bash
set -uo pipefail # no -e: a failing test group must not abort the loop; its status is recorded in the summary

# Unit test runner: executes the per-module unit-test target groups below and writes a summary table.

CONFIG=${CONFIG:-bl-x86_64-linux}
LOG_DIR=${LOG_DIR:-_logs/logs_ut}
SUMMARY_FILE=${SUMMARY_FILE:-_logs/ut_summary.md}
mkdir -p "${LOG_DIR}" "$(dirname "${SUMMARY_FILE}")"

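# Unit-test target groups, keyed by module. In a group's pattern list, entries
# after "--" that start with "-" are exclusions: they remove individual targets
# from the preceding wildcard pattern (see the baselibs group).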
declare -A UT_TARGET_GROUPS=(
[baselibs]="@score_baselibs//score/... -- \
-@score_baselibs//score/language/safecpp/aborts_upon_exception:abortsuponexception_toolchain_test \
-@score_baselibs//score/containers:dynamic_array_test" # nok, error in @score_baselibs//score/json/examples:json_buffer
[communication]="@score_communication//score/..." # nok, error from trlc in score_communication//score/mw/com/requirements/feature_requirements
[persistency]="@score_persistency//:unit_tests" # ok
[orchestrator]="@score_orchestrator//src/..." # ok
[kyron]="@score_kyron//:unit_tests" # ok
[feo]="@score_feo//... --build_tests_only" # ok (--build_tests_only is required; without it non-test docs targets fail to build)
)

# Summary table header (tab-separated; columns are aligned with `column` at the end).
# Use ">" so a re-run starts a fresh summary instead of appending a second header.
echo -e "Status\tPassed\tFailed\tSkipped\tTotal\tGroup\tDuration(s)" > "${SUMMARY_FILE}"

for group in "${!UT_TARGET_GROUPS[@]}"; do
targets="${UT_TARGET_GROUPS[$group]}"
command="bazel test --test_summary=testcase --test_output=errors --nocache_test_results --config=${CONFIG} ${targets}"
echo "==========================================="
echo "Running unit tests for group: $group"
echo "${command}"
echo "==========================================="
start_ts=$(date +%s)
out=$(bazel test --test_summary=testcase --test_output=errors --nocache_test_results --config="${CONFIG}" ${targets} 2>&1 | tee "${LOG_DIR}/ut_${group}_output.log")
build_status=$? # with pipefail, this is bazel's exit status (tee succeeding does not mask a test failure)
end_ts=$(date +%s)
duration=$(( end_ts - start_ts ))

# Parse test-case counts from the bazel output; fall back to "n/a" when a count
# is missing so the tab-separated summary stays aligned.
tests_passed=$(echo "$out" | grep -Eo '[0-9]+ passing' | grep -Eo '[0-9]+' | head -n1)
tests_failed=$(echo "$out" | grep -Eo '[0-9]+ failing' | grep -Eo '[0-9]+' | head -n1)
tests_skipped=$(echo "$out" | grep -Eo '[0-9]+ skipped' | grep -Eo '[0-9]+' | head -n1)
tests_executed=$(echo "$out" | grep -Eo '[0-9]+ test cases' | grep -Eo '[0-9]+' | head -n1)
: "${tests_passed:=n/a}" "${tests_failed:=n/a}" "${tests_skipped:=n/a}" "${tests_executed:=n/a}"
if [[ ${build_status} -eq 0 ]]; then
status_symbol="✅"
else
status_symbol="❌"
fi

# Append as a markdown table row
echo -e "${status_symbol}\t${tests_passed}\t${tests_failed}\t${tests_skipped}\t${tests_executed}\t${group}\t${duration}s" >> "${SUMMARY_FILE}"
echo "==========================================="
echo -e "\n\n"
done

# Align the summary table columns
column -t -s $'\t' "${SUMMARY_FILE}" > "${SUMMARY_FILE}.tmp" && mv "${SUMMARY_FILE}.tmp" "${SUMMARY_FILE}"
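For local use outside CI, the script can be tuned through the environment variables it already reads (`CONFIG`, `LOG_DIR`, `SUMMARY_FILE`); a minimal sketch with illustrative paths:

```bash
# Run the unit-test groups with a scratch log directory and summary file,
# then print the aligned summary table the script produces.
CONFIG=bl-x86_64-linux \
LOG_DIR=/tmp/ut_logs \
SUMMARY_FILE=/tmp/ut_summary.md \
  scripts/run_unit_tests.sh
cat /tmp/ut_summary.md
```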