name: Create Environment
run-name: Creating ${{ github.event.inputs.deployment_name }} by @${{ github.actor }}
on:
  # Ability to execute on demand
  workflow_dispatch:
    inputs:
      deployment_name:
        type: string
        description: |
          Name with lowercase letters, numbers, hyphens; must start with a lowercase letter. Max 20 chars. e.g., 'my-env-123'
        required: true
      serverless_mode:
        description: "Deploy a serverless project instead of an ESS deployment"
        type: boolean
        required: true
        default: false
      elk-stack-version:
        required: true
        description: "Stack version: For released version use 8.x.y, for BC use version with hash 8.x.y-hash, for SNAPSHOT use 8.x.y-SNAPSHOT"
        default: "8.13.3"
        type: string
      ess-region:
        required: true
        description: "Elastic Cloud deployment region"
        default: "gcp-us-west2"
        type: string
      docker-image-override:
        required: false
        description: "Provide the full Docker image path to override the default image (e.g. for testing BC/SNAPSHOT)"
        type: string
      run-sanity-tests:
        description: "Run sanity tests after provision"
        default: false
        type: boolean
      run-ui-sanity-tests:
        description: "Run UI sanity tests after provision"
        default: false
        type: boolean
      kibana_ref:
        description: "Kibana branch, tag, or commit SHA to check out the UI sanity tests from"
        required: false
        default: "main"
        type: string
      expiration_days:
        description: "Number of days until environment expiration"
        required: false
        default: 14
        type: string
      ec-api-key:
        type: string
        description: "**Optional** By default, the environment will be created in our Cloud Security Organization. If you want to use your own cloud account, enter your Elastic Cloud API key."
        required: false
  workflow_call:
    inputs:
      deployment_name:
        description: Name of the deployment to create
        type: string
        required: true
      serverless_mode:
        description: "Deploy a serverless project instead of an ESS deployment"
        type: boolean
        required: true
        default: false
      elk-stack-version:
        required: true
        description: "Stack version: For released version use 8.x.y, for BC use version with hash 8.x.y-hash, for SNAPSHOT use 8.x.y-SNAPSHOT"
        default: "8.13.3"
        type: string
      ess-region:
        required: true
        description: "Elastic Cloud deployment region"
        default: "gcp-us-west2"
        type: string
      docker-image-override:
        required: false
        description: "Provide the full Docker image path to override the default image (e.g. for testing BC/SNAPSHOT)"
        type: string
      run-sanity-tests:
        description: "Run sanity tests after provision"
        default: false
        type: boolean
      run-ui-sanity-tests:
        description: "Run UI sanity tests after provision"
        default: false
        type: boolean
      kibana_ref:
        description: "Kibana branch, tag, or commit SHA to check out the UI sanity tests from"
        required: false
        default: "main"
        type: string
      expiration_days:
        description: "Number of days until environment expiration"
        required: false
        default: 14
        type: string
      ec-api-key:
        type: string
        description: "**Optional** By default, the environment will be created in our Cloud Security Organization. If you want to use your own cloud account, enter your Elastic Cloud API key."
        required: false
      cdr-infra:
        description: "Flag to indicate that the CDR infrastructure is being created"
        type: boolean
        required: false
        default: false
    outputs:
      s3-bucket:
        description: "Terraform state s3 bucket folder"
        value: ${{ jobs.Deploy.outputs.deploy-s3-bucket }}
      cnvm-stack-name:
        description: "AWS CNVM integration stack name"
        value: ${{ jobs.Deploy.outputs.aws-cnvm-stack-name }}
env:
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
  AWS_REGION: "eu-west-1"
  WORKING_DIR: deploy/test-environments
  INTEGRATIONS_SETUP_DIR: tests/integrations_setup
  AWS_DEFAULT_TAGS: "Key=division,Value=engineering Key=org,Value=security Key=team,Value=cloud-security-posture Key=project,Value=test-environments"
  GCP_ZONE: "us-central1-a"
  AZURE_DEFAULT_TAGS: "division=engineering org=security team=cloud-security-posture project=test-environments owner=${{ github.actor }}"
  TF_VAR_ec_api_key: ${{ secrets.EC_API_KEY }}
  TF_VAR_gcp_service_account_json: ${{ secrets.GCP_AGENT_CREDENTIALS }}
jobs:
  Deploy:
    runs-on: ubuntu-20.04
    timeout-minutes: 120
    defaults:
      run:
        working-directory: ${{ env.WORKING_DIR }}
    env:
      TF_VAR_ess_region: ${{ inputs.ess-region }}
      DEPLOYMENT_NAME: ${{ inputs.deployment_name }}
      TF_VAR_serverless_mode: ${{ inputs.serverless_mode }}
      TEST_AGENTLESS: ${{ inputs.serverless_mode }}
      S3_BASE_BUCKET: "s3://tf-state-bucket-test-infra"
      S3_BUCKET_URL: "https://s3.console.aws.amazon.com/s3/buckets/tf-state-bucket-test-infra"
      DOCKER_IMAGE_OVERRIDE: ${{ inputs.docker-image-override }}
      CNVM_STACK_NAME: "${{ inputs.deployment_name }}-cnvm-sanity-test-stack"
    # "id-token: write" is required for OIDC authentication (used by the Google Cloud auth step below).
    permissions:
      contents: 'read'
      id-token: 'write'
    outputs:
      deploy-s3-bucket: ${{ steps.upload-state.outputs.s3-bucket-folder }}
      aws-cnvm-stack-name: ${{ steps.upload-state.outputs.aws-cnvm-stack }}
    steps:
      - name: Check out the repo
        uses: actions/checkout@v4
      - name: Init Hermit
        run: ./bin/hermit env -r >> $GITHUB_ENV
        working-directory: ./
      - name: Check Deployment Name
        run: |
          deployment_name="${{ inputs.deployment_name }}"
          # Check length
          if [ ${#deployment_name} -gt 20 ]; then
            echo "error: Deployment name is too long (max 20 characters)"
            exit 1
          fi
          # Check pattern required for cloud deployment
          if ! [[ $deployment_name =~ ^[a-z][-a-z0-9]*$ ]]; then
            echo "error: Deployment name doesn't match the required pattern [a-z][-a-z0-9]*"
            exit 1
          fi
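      # e.g., "my-env-123" passes the checks above, while "My-Env" (uppercase)
      # and "1-env" (leading digit) are rejected by the pattern.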
      - name: Mask Sensitive Data
        if: inputs.ec-api-key != ''
        run: |
          ec_api_key=$(jq -r '.inputs["ec-api-key"]' $GITHUB_EVENT_PATH)
          echo "::add-mask::$ec_api_key"
          echo "TF_VAR_ec_api_key=$ec_api_key" >> $GITHUB_ENV
      - name: Process Stack Version
        id: remove-commit-hash
        run: |
          # Extract the stack version
          stack_version="${{ inputs.elk-stack-version }}"
          echo "TF_VAR_stack_version=$stack_version" >> $GITHUB_ENV
          echo "STACK_VERSION=$stack_version" >> $GITHUB_ENV
          # Handle BC versions with a commit hash (e.g. 8.11.0-1234567890)
          if [[ $stack_version =~ -[a-f0-9]+ ]]; then
            cleaned_version=$(echo $stack_version | awk -F"-" '{print $1}')
            # Versions with a commit hash are not allowed for regular EC deployments:
            # the EC module's ec_deployment.deployment.version is a required attribute
            # and must be in the format 8.x.y | 8.x.y-SNAPSHOT, so strip the hash.
            echo "TF_VAR_stack_version=$cleaned_version" >> $GITHUB_ENV
            # STACK_VERSION is used by the sanity tests for findings validation;
            # findings are saved with the version without the commit hash.
            echo "STACK_VERSION=$cleaned_version" >> $GITHUB_ENV
            # TF_VAR_pin_version overrides the stack docker images for BC versions
            # with a commit hash:
            #   elasticsearch.config.docker_image
            #   kibana.config.docker_image
            #   integrations_server.config.docker_image
            echo "TF_VAR_pin_version=$stack_version" >> $GITHUB_ENV
          fi
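      # Example (illustrative): elk-stack-version "8.11.0-a1b2c3d4" yields
      #   TF_VAR_stack_version=8.11.0, STACK_VERSION=8.11.0,
      #   TF_VAR_pin_version=8.11.0-a1b2c3d4
      # while "8.13.3" and "8.13.3-SNAPSHOT" pass through unchanged.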
      - name: Init Enrollment Token
        run: |
          enrollment_token="init"
          echo "::add-mask::$enrollment_token"
          echo "ENROLLMENT_TOKEN=$enrollment_token" >> $GITHUB_ENV
      - name: Init CDR Infra
        id: init-cdr-infra
        env:
          CDR_INFRA: ${{ inputs.cdr-infra }}
        run: |
          if [[ "${CDR_INFRA:-}" == "true" ]]; then
            echo "TF_VAR_cdr_infra=true" >> $GITHUB_ENV
          else
            echo "TF_VAR_cdr_infra=false" >> $GITHUB_ENV
          fi
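      # TF_VAR_cdr_infra gates the AWS Cloudtrail, Azure Activity Logs, and
      # GCP Audit Logs steps further down.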
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.9'
      - name: Install Poetry
        run: |
          curl -sSL https://install.python-poetry.org | python3 -
          poetry --version
      - name: Install Fleet & Tests Dependencies
        id: fleet-and-tests-deps
        working-directory: ./tests
        run: |
          poetry install
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4.0.1
        with:
          aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ env.AWS_REGION }}
      - id: azure-auth
        name: Azure login
        uses: azure/login@v2
        with:
          creds: ${{ secrets.AZURE_CREDENTIALS }}
      - id: google-auth
        name: Authenticate to Google Cloud
        uses: google-github-actions/auth@v2
        with:
          workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
          service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}
      - name: Update Vars
        run: |
          echo "TF_VAR_gcp_project_id=$GCP_PROJECT" >> $GITHUB_ENV
          echo "TF_STATE_FOLDER=$(date +'%Y-%m-%d_%H-%M-%S')" >> $GITHUB_ENV
      - name: Terraform Init
        run: terraform init
      - name: Terraform Validate
        run: terraform validate
      - name: Provision Test Environment (EC + EC2 K8s + EC2 CSPM)
        id: apply
        if: success()
        run: |
          terraform apply --auto-approve \
            -var="deployment_name=${{ env.DEPLOYMENT_NAME }}" \
            -var="region=${{ env.AWS_REGION }}" \
            -var="project=${{ github.actor }}" \
            -var="owner=${{ github.actor }}"
      - name: Set Environment Output
        id: env-output
        run: ../../.ci/scripts/set_cloud_env_params.sh
      - name: Upload tf state
        id: upload-state
        if: always()
        env:
          S3_BUCKET: "${{ env.S3_BASE_BUCKET }}/${{ env.DEPLOYMENT_NAME }}_${{ env.TF_STATE_FOLDER }}"
          EXPIRATION_DAYS: ${{ inputs.expiration_days }}
        run: |
          aws s3 cp "./terraform.tfstate" "${S3_BUCKET}/terraform.tfstate"
          aws s3 cp "${EC2_CSPM_KEY}" "${S3_BUCKET}/cspm.pem"
          aws s3 cp "${EC2_KSPM_KEY}" "${S3_BUCKET}/kspm.pem"
          aws s3 cp "${EC2_ASSET_INV_KEY}" "${S3_BUCKET}/asset_inv.pem"
          echo "s3-bucket-folder=${S3_BUCKET}" >> $GITHUB_OUTPUT
          echo "aws-cnvm-stack=${CNVM_STACK_NAME}" >> $GITHUB_OUTPUT
          python3 ../../.ci/scripts/create_env_config.py
          aws s3 cp "./env_config.json" "${S3_BUCKET}/env_config.json"
          if [[ ${TF_VAR_cdr_infra:-} == "true" ]]; then
            aws s3 cp "${CLOUDTRAIL_KEY}" "${S3_BUCKET}/cloudtrail.pem"
            aws s3 cp "${ACTIVITY_LOGS_KEY}" "${S3_BUCKET}/az_activity_logs.pem"
            aws s3 cp "${AUDIT_LOGS_KEY}" "${S3_BUCKET}/gcp_audit_logs.pem"
          fi
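      # The EC2_*_KEY (and, for CDR, *_KEY / *_PUBLIC_IP) variables used here and
      # below are expected to be exported into the job env by set_cloud_env_params.sh.
      # Resulting layout (illustrative):
      #   s3://tf-state-bucket-test-infra/<deployment>_<timestamp>/
      #     terraform.tfstate, cspm.pem, kspm.pem, asset_inv.pem, env_config.json, ...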
      - name: Summary
        if: success()
        run: |
          kibana_url=$(terraform output -raw kibana_url)
          bucket_name="${{ env.S3_BASE_BUCKET }}"
          bucket_name="${bucket_name#s3://}"
          s3_bucket_link="[creds and keys](https://s3.console.aws.amazon.com/s3/buckets/$bucket_name)"
          summary=$(cat <<-EOF
          Kibana URL: [kibana]($kibana_url)
          Environment Details: $s3_bucket_link
          EOF
          )
          echo "$summary" >> $GITHUB_STEP_SUMMARY
          echo "$summary" # Print the summary to the workflow log
      - name: Install AWS Cloudtrail integration
        id: cloudtrail-integration
        if: env.TF_VAR_cdr_infra == 'true'
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        env:
          CLOUDTRAIL_S3: ${{ secrets.CLOUDTRAIL_S3 }}
        run: |
          poetry run python ./install_cloudtrail_integration.py
      - name: Deploy AWS Cloudtrail agent
        if: env.TF_VAR_cdr_infra == 'true'
        run: |
          scriptname="cloudtrail-linux.sh"
          src="../../$INTEGRATIONS_SETUP_DIR/$scriptname"
          cmd="chmod +x $scriptname && ./$scriptname"
          ../../.ci/scripts/remote_setup.sh -k "$CLOUDTRAIL_KEY" -s "$src" -h "$CLOUDTRAIL_PUBLIC_IP" -d "~/$scriptname" -c "$cmd"
      - name: Install Azure Activity Logs integration
        id: az-activity-logs-integration
        if: env.TF_VAR_cdr_infra == 'true'
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        env:
          EVENTHUB: "activity-logs"
          CONNECTION_STRING: ${{ secrets.AZURE_EVENTHUB_CONNECTION_STRING }}
          STORAGE_ACCOUNT: "testenvsactivitylogs"
          STORAGE_ACCOUNT_KEY: ${{ secrets.AZURE_STORAGE_ACCOUNT_KEY }}
        run: |
          poetry run python ./install_az_activity_logs_integration.py
      - name: Deploy Azure Activity Logs agent
        if: env.TF_VAR_cdr_infra == 'true'
        run: |
          scriptname="az_activity_logs.sh"
          src="../../$INTEGRATIONS_SETUP_DIR/$scriptname"
          cmd="chmod +x $scriptname && ./$scriptname"
          ../../.ci/scripts/remote_setup.sh -k "$ACTIVITY_LOGS_KEY" -s "$src" -h "$ACTIVITY_LOGS_PUBLIC_IP" -d "~/$scriptname" -c "$cmd"
      - name: Install GCP Audit Logs integration
        id: gcp-audit-logs-integration
        if: env.TF_VAR_cdr_infra == 'true'
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        env:
          GCP_TOPIC_NAME: "test-envs-topic"
          GCP_SUBSCRIPTION_NAME: "test-envs-topic-sub-id"
        run: |
          poetry run python ./install_gcp_audit_logs_integration.py
      - name: Deploy GCP Audit Logs agent
        if: env.TF_VAR_cdr_infra == 'true'
        run: |
          scriptname="gcp_audit_logs.sh"
          src="../../$INTEGRATIONS_SETUP_DIR/$scriptname"
          cmd="chmod +x $scriptname && ./$scriptname"
          ../../.ci/scripts/remote_setup.sh -k "$AUDIT_LOGS_KEY" -s "$src" -h "$AUDIT_LOGS_PUBLIC_IP" -d "~/$scriptname" -c "$cmd"
      - name: Install CNVM integration
        id: cnvm
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        run: |
          poetry run python ./install_cnvm_integration.py
      - name: Deploy CNVM agent
        env:
          STACK_NAME: "${{ env.CNVM_STACK_NAME }}"
        run: |
          unset ENROLLMENT_TOKEN
          just deploy-cloudformation
      - name: Install CSPM GCP integration
        id: cspm-gcp-integration
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        run: |
          poetry run python ./install_cspm_gcp_integration.py
      - name: Deploy CSPM GCP agent
        id: cspm-gcp-agent
        working-directory: deploy/deployment-manager
        env:
          ACTOR: ${{ github.actor }}
        run: |
          # GCP labeling rules: only hyphens (-), underscores (_), lowercase
          # characters, and numbers are allowed, so convert github.actor to
          # lowercase and replace disallowed characters.
          GCP_LABEL=$(echo "$ACTOR" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9_-]/_/g')
          GCP_DEFAULT_TAGS="division=engineering,org=security,team=cloud-security-posture,project=test-environments,owner=$GCP_LABEL"
          . ./set_env.sh && ./deploy.sh && gcloud compute instances update "${DEPLOYMENT_NAME}" --update-labels "${GCP_DEFAULT_TAGS}" --zone="${GCP_ZONE}"
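      # e.g., an actor of "Some.User" becomes the label owner=some_user
      # (the '.' falls outside [a-z0-9_-] and is replaced with '_').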
      - name: Install CSPM Azure integration
        id: cspm-azure-integration
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        run: |
          poetry run python ./install_cspm_azure_integration.py
      - name: Deploy CSPM Azure agent
        id: cspm-azure-agent
        working-directory: deploy/azure
        env:
          AZURE_TAGS: ${{ env.AZURE_DEFAULT_TAGS }}
        run: ./install_agent_az_cli.sh
      - name: Check Asset Inventory supported version
        id: asset-inventory-version-check
        run: |
          MIN_VERSION="8.16.0"
          if [[ "$(echo -e "$MIN_VERSION\n$STACK_VERSION" | sort -V | head -n 1)" == "$MIN_VERSION" ]]; then
            echo "Stack version meets the requirement: $STACK_VERSION >= $MIN_VERSION."
            echo "asset_inventory_supported=true" >> $GITHUB_ENV
          else
            echo "Stack version is below the requirement: $STACK_VERSION < $MIN_VERSION."
            echo "asset_inventory_supported=false" >> $GITHUB_ENV
          fi
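      # The sort -V comparison treats the strings as versions: if MIN_VERSION
      # sorts first (or the two are equal), then STACK_VERSION >= MIN_VERSION.
      # e.g., STACK_VERSION=8.16.2 -> supported; 8.13.3 -> not supported.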
      - name: Install Azure Asset Inventory integration
        id: azure-asset-inventory-integration
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        if: env.asset_inventory_supported == 'true'
        run: |
          poetry run python ./install_azure_asset_inventory_integration.py
      - name: Deploy Azure Asset Inventory agent
        id: azure-asset-inventory-agent
        working-directory: deploy/azure
        if: env.asset_inventory_supported == 'true'
        env:
          AZURE_TAGS: ${{ env.AZURE_DEFAULT_TAGS }}
          DEPLOYMENT_NAME: "${{ env.DEPLOYMENT_NAME }}-inventory"
        run: ./install_agent_az_cli.sh
      - name: Install D4C integration
        id: kspm-d4c
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        run: |
          poetry run python ./install_d4c_integration.py
      - name: Install KSPM EKS integration
        id: kspm-eks
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        run: |
          poetry run python ./install_kspm_eks_integration.py
      - name: Deploy KSPM EKS agent
        env:
          S3_BUCKET: "${{ env.S3_BASE_BUCKET }}/${{ env.DEPLOYMENT_NAME }}_${{ env.TF_STATE_FOLDER }}"
        run: |
          aws eks --region ${{ env.AWS_REGION }} update-kubeconfig \
            --name $(terraform output -raw deployment_name) --alias eks-config
          # Double quotes so the command substitution is evaluated (single quotes
          # stored the literal text), and -w 0 so the base64 output stays on a
          # single line as GITHUB_ENV requires.
          echo "KUBE_CONFIG_DATA=$(base64 -w 0 ~/.kube/config)" >> $GITHUB_ENV
          aws s3 cp ~/.kube/config "${{ env.S3_BUCKET }}/kubeconfig"
          kubectl config use-context eks-config
          kubectl apply -f ../../${{ env.INTEGRATIONS_SETUP_DIR }}/kspm_d4c.yaml
      - name: Install KSPM Unmanaged integration
        id: kspm-unmanaged
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        run: |
          poetry run python ./install_kspm_unmanaged_integration.py
      - name: Deploy KSPM Unmanaged agent
        run: |
          scriptname="kspm_unmanaged.yaml"
          src="../../$INTEGRATIONS_SETUP_DIR/$scriptname"
          cmd="kubectl apply -f $scriptname"
          ../../.ci/scripts/remote_setup.sh -k "$EC2_KSPM_KEY" -s "$src" -h "$KSPM_PUBLIC_IP" -d "~/$scriptname" -c "$cmd"
      - name: Install CSPM integration
        id: cspm
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        run: |
          poetry run python ./install_cspm_integration.py
      - name: Deploy CSPM agent
        run: |
          scriptname="cspm-linux.sh"
          src="../../$INTEGRATIONS_SETUP_DIR/$scriptname"
          cmd="chmod +x $scriptname && ./$scriptname"
          ../../.ci/scripts/remote_setup.sh -k "$EC2_CSPM_KEY" -s "$src" -h "$CSPM_PUBLIC_IP" -d "~/$scriptname" -c "$cmd"
      - name: Install AWS Asset Inventory integration
        id: aws-asset-inventory
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        if: env.asset_inventory_supported == 'true'
        run: |
          poetry run python ./install_aws_asset_inventory_integration.py
      - name: Deploy AWS Asset Inventory agent
        if: env.asset_inventory_supported == 'true'
        run: |
          scriptname="aws-asset-inventory-linux.sh"
          src="../../$INTEGRATIONS_SETUP_DIR/$scriptname"
          cmd="chmod +x $scriptname && ./$scriptname"
          ../../.ci/scripts/remote_setup.sh -k "$EC2_ASSET_INV_KEY" -s "$src" -h "$ASSET_INV_PUBLIC_IP" -d "~/$scriptname" -c "$cmd"
      - name: Upload Integrations data
        if: always()
        env:
          S3_BUCKET: "${{ env.S3_BASE_BUCKET }}/${{ env.DEPLOYMENT_NAME }}_${{ env.TF_STATE_FOLDER }}"
          ASSET_INVENTORY_SUPPORTED: "${{ env.asset_inventory_supported }}"
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        run: |
          aws s3 cp "./kspm_unmanaged.yaml" "$S3_BUCKET/kspm_unmanaged.yaml"
          aws s3 cp "./kspm_d4c.yaml" "$S3_BUCKET/kspm_d4c.yaml"
          aws s3 cp "./kspm_eks.yaml" "$S3_BUCKET/kspm_eks.yaml"
          aws s3 cp "./cspm-linux.sh" "$S3_BUCKET/cspm-linux.sh"
          if [[ "${ASSET_INVENTORY_SUPPORTED}" == "true" ]]; then
            aws s3 cp "./aws-asset-inventory-linux.sh" "$S3_BUCKET/aws-asset-inventory-linux.sh"
          fi
          aws s3 cp "./state_data.json" "$S3_BUCKET/state_data.json"
      - name: Install Agentless integrations
        id: agentless
        if: env.TEST_AGENTLESS == 'true'
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        env:
          AZURE_CREDENTIALS: ${{ secrets.AZURE_CREDENTIALS }}
        run: |
          poetry run python ./install_agentless_integrations.py
      - name: Wait for agents to enroll
        id: wait-for-agents
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        run: |
          poetry run python ./agents_enrolled.py
      - name: Run Sanity checks
        if: ${{ success() && inputs.run-sanity-tests == true }}
        working-directory: ./tests
        run: |
          poetry run pytest -m "sanity" --alluredir=./allure/results/ --clean-alluredir --maxfail=4
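      # -m "sanity" selects only tests marked with @pytest.mark.sanity, and
      # --maxfail=4 aborts the run after four failures instead of exercising
      # the whole suite.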
      - name: Run UI Sanity checks (Kibana)
        uses: ./.github/actions/kibana-ftr
        if: ${{ success() && inputs.run-ui-sanity-tests == true }}
        with:
          test_kibana_url: ${{ env.TEST_KIBANA_URL }}
          test_es_url: ${{ env.TEST_ES_URL }}
          es_version: ${{ env.STACK_VERSION }}
          kibana_ref: ${{ inputs.kibana_ref }}
      - name: Create Slack Payload
        if: always()
        id: prepare-data
        working-directory: ./
        env:
          WORKFLOW: "${{ github.workflow }}"
          RUN_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
          GITHUB_ACTOR: "${{ github.actor }}"
          ESS_TYPE: ${{ inputs.serverless_mode }}
          JOB_STATUS: "${{ job.status }}"
          S3_BUCKET: "${{ env.S3_BUCKET_URL }}?region=${{ env.AWS_REGION }}&prefix=${{ env.DEPLOYMENT_NAME }}_${{ env.TF_STATE_FOLDER }}/"
        run: |
          python3 ./.ci/scripts/prepare_slack_data.py
      - name: Send Slack Notification
        uses: ./.github/actions/slack-notification
        if: always()
        continue-on-error: true
        with:
          vault-url: ${{ secrets.VAULT_ADDR }}
          vault-role-id: ${{ secrets.CSP_VAULT_ROLE_ID }}
          vault-secret-id: ${{ secrets.CSP_VAULT_SECRET_ID }}
          slack-payload: ${{ steps.prepare-data.outputs.payload }}