name: Kafka - TestFlows Tests

on:
  workflow_call:
    inputs:
      SINK_CONNECTOR_IMAGE:
        description: "Kafka connector docker image"
        required: true
        type: string
    secrets:
      DOCKERHUB_USERNAME:
        required: false
      DOCKERHUB_TOKEN:
        required: false
      AWS_ACCESS_KEY_ID:
        required: false
      AWS_SECRET_ACCESS_KEY:
        required: false
  workflow_dispatch:
    inputs:
      SINK_CONNECTOR_IMAGE:
        description: "Kafka connector docker image"
        required: true
        type: string

env:
  SINK_CONNECTOR_IMAGE: ${{ inputs.SINK_CONNECTOR_IMAGE }}
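
# A single job runs the TestFlows integration suite for the Kafka-based sink
# connector: it loads the connector image from a workflow artifact (assumed to
# be produced by an earlier build job, based on the artifact name below), runs
# the "mysql to clickhouse replication" regression against ClickHouse 22.8, and
# publishes HTML reports both as workflow artifacts and to the Altinity
# test-reports S3 bucket.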
jobs:
  testflows-kafka:
    runs-on: [self-hosted, on-demand, type-cpx51, image-x86-app-docker-ce]
    steps:
      - uses: actions/checkout@v2
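      # When the run was not manually dispatched and the pull request head
      # repository is not Altinity/clickhouse-sink-connector, the connector image
      # tarball is fetched from a workflow artifact (assumed to be uploaded by an
      # earlier build job under the name below) and loaded into the local Docker
      # daemon.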
      - uses: actions/download-artifact@v3
        if: ${{ github.event.pull_request.head.repo.full_name != 'Altinity/clickhouse-sink-connector' && github.event_name != 'workflow_dispatch' }}
        with:
          name: clickhouse-sink-connector_${{ github.event.number }}-${{ github.sha }}-kafka.tar.gz
      - name: Load Docker image
        if: ${{ github.event.pull_request.head.repo.full_name != 'Altinity/clickhouse-sink-connector' && github.event_name != 'workflow_dispatch' }}
        run: |
          docker load < clickhouse-sink-connector_${{ github.event.number }}-${{ github.sha }}-kafka.tar.gz
          docker image ls
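      # Prints an ssh command for connecting to the ephemeral runner, which is
      # useful for interactive debugging while the job is still running.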
      - name: Runner ssh command
        working-directory: sink-connector/tests/integration
        run: echo "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null root@$(hostname -I | cut -d ' ' -f 1)"
      - name: Create a virtual environment
        run: |
          echo "Install Python modules..."
          sudo apt-get clean
          sudo apt-get update
          sudo apt-get install -y python3.12-venv
          echo "Create and activate Python virtual environment..."
          python3 -m venv venv
          source venv/bin/activate
          echo PATH=$PATH >> $GITHUB_ENV
      - name: Install all dependencies
        working-directory: sink-connector/tests/integration
        run: pip3 install -r requirements.txt
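      # The timestamp produced here is reused in the S3 report path so that each
      # run gets its own prefix in the test-reports bucket.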
      - name: Get current date
        id: date
        run: echo "date=$(date +'%Y-%m-%d_%H%M%S')" >> $GITHUB_OUTPUT
      - name: Add ~/.local/bin to the PATH
        if: always()
        working-directory: sink-connector/tests/integration
        run: echo ~/.local/bin >> $GITHUB_PATH
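      # Runs the "mysql to clickhouse replication" TestFlows regression end to end
      # against a ClickHouse 22.8 server image, collects service logs, and attaches
      # run metadata (repository, run number, actor, run URL, connector version,
      # S3 report URL) as test attributes; raw output goes to logs/raw.log.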
      - name: Run testflows tests
        working-directory: sink-connector/tests/integration
        run: python3 -u regression.py --only "/mysql to clickhouse replication/*" --clickhouse-binary-path=docker://clickhouse/clickhouse-server:22.8 --test-to-end -o classic --collect-service-logs --attr project="${GITHUB_REPOSITORY}" project.id="$GITHUB_RUN_NUMBER" user.name="$GITHUB_ACTOR" github_actions_run="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" sink_version="altinity/clickhouse-sink-connector:${SINK_CONNECTOR_VERSION}" s3_url="https://altinity-test-reports.s3.amazonaws.com/index.html#altinity-sink-connector/testflows/${{ steps.date.outputs.date }}_${{ github.run_number }}/" --log logs/raw.log
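      # Convert the raw TestFlows log into human-readable HTML results and
      # requirements-coverage reports; these steps run even if the test step failed.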
      - name: Create tfs results report
        if: always()
        working-directory: sink-connector/tests/integration/logs
        run: cat raw.log | tfs report results | tfs document convert > report.html
      - name: Create tfs coverage report
        if: always()
        working-directory: sink-connector/tests/integration/logs
        run: cat raw.log | tfs report coverage ../requirements/requirements.py | tfs document convert > coverage.html
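      # Push the logs and HTML reports to the Altinity test-reports S3 bucket,
      # using the AWS credentials passed in as workflow secrets.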
      - name: Upload artifacts to Altinity Test Reports S3 bucket
        if: ${{ github.event.pull_request.head.repo.full_name != 'Altinity/clickhouse-sink-connector' && github.event_name != 'workflow_dispatch' }}
        working-directory: sink-connector/tests/integration/logs
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: 'eu-west-2'
        run: aws s3 cp . s3://altinity-test-reports/altinity-sink-connector/testflows/${{ steps.date.outputs.date }}_sink/ --recursive --exclude "*" --include "*.log" --include "*.html"
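      # Also keep the raw logs as a regular GitHub Actions artifact for 60 days.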
      - uses: actions/upload-artifact@v3
        if: always()
        with:
          name: testflows-sink-connector-kafka-artefacts
          path: |
            sink-connector/tests/integration/logs/*.log
          if-no-files-found: error
          retention-days: 60