diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command index 282fb5a00853..c23f5c3af645 100644 --- a/.buildkite/hooks/pre-command +++ b/.buildkite/hooks/pre-command @@ -3,6 +3,7 @@ set -euo pipefail AWS_SERVICE_ACCOUNT_SECRET_PATH="kv/ci-shared/platform-ingest/aws_account_auth" +PRIVATE_CI_GCS_CREDENTIALS_PATH="kv/ci-shared/platform-ingest/gcp-platform-ingest-ci-service-account" retry() { local retries=$1 @@ -33,7 +34,7 @@ if [[ "$BUILDKITE_PIPELINE_SLUG" == "filebeat" || "$BUILDKITE_PIPELINE_SLUG" == fi fi -if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-libbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-packetbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-winlogbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-libbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-metricbeat" ]]; then +if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-libbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-packetbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-winlogbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-libbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-packetbeat" ]]; then source .buildkite/scripts/setenv.sh if [[ "${BUILDKITE_COMMAND}" =~ ^buildkite-agent ]]; then echo "Skipped pre-command when running the Upload pipeline" @@ -47,3 +48,10 @@ if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-metricbeat" && "$BUILDKITE_STEP BEATS_AWS_ACCESS_KEY=$(retry 5 vault kv get -field access_key ${AWS_SERVICE_ACCOUNT_SECRET_PATH}) export BEATS_AWS_ACCESS_KEY fi + +if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-packetbeat" ]]; then + if [[ "$BUILDKITE_STEP_KEY" == "extended-win-10-system-tests" || "$BUILDKITE_STEP_KEY" == "mandatory-win-2022-system-tests" ]]; then + PRIVATE_CI_GCS_CREDENTIALS_SECRET=$(retry 5 vault kv get -field plaintext -format=json ${PRIVATE_CI_GCS_CREDENTIALS_PATH}) + export PRIVATE_CI_GCS_CREDENTIALS_SECRET + fi +fi diff --git a/.buildkite/pull-requests.json b/.buildkite/pull-requests.json index 8018411a743a..669b70d6570f 100644 --- a/.buildkite/pull-requests.json +++ b/.buildkite/pull-requests.json @@ -223,6 +223,86 @@ "skip_target_branches": [ ], "skip_ci_on_only_changed": [ ], "always_require_ci_on_changed": ["^x-pack/metricbeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] + }, + { + "enabled": true, + "pipelineSlug": "beats-xpack-auditbeat", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [ ], + "set_commit_status": true, + "build_on_commit": true, + "build_on_comment": true, + "trigger_comment_regex": "^/test x-pack/auditbeat$", + "always_trigger_comment_regex": "^/test x-pack/auditbeat$", + "skip_ci_labels": [ ], + "skip_target_branches": [ ], + "skip_ci_on_only_changed": [ ], + "always_require_ci_on_changed": ["^x-pack/auditbeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] + }, + { + "enabled": true, + "pipelineSlug": "beats-xpack-dockerlogbeat", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [ ], + "set_commit_status": true, + "build_on_commit": true, + "build_on_comment": true, + "trigger_comment_regex": "^/test x-pack/dockerlogbeat$", + "always_trigger_comment_regex": "^/test x-pack/dockerlogbeat$", + "skip_ci_labels": [ ], + "skip_target_branches": [ ], + 
"skip_ci_on_only_changed": [ ], + "always_require_ci_on_changed": ["^x-pack/dockerlogbeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] + }, + { + "enabled": true, + "pipelineSlug": "beats-xpack-filebeat", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [ ], + "set_commit_status": true, + "build_on_commit": true, + "build_on_comment": true, + "trigger_comment_regex": "^/test x-pack/filebeat$", + "always_trigger_comment_regex": "^/test x-pack/filebeat$", + "skip_ci_labels": [ ], + "skip_target_branches": [ ], + "skip_ci_on_only_changed": [ ], + "always_require_ci_on_changed": ["^x-pack/filebeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] + }, + { + "enabled": true, + "pipelineSlug": "beats-xpack-heartbeat", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [ ], + "set_commit_status": true, + "build_on_commit": true, + "build_on_comment": true, + "trigger_comment_regex": "^/test x-pack/heartbeat$", + "always_trigger_comment_regex": "^/test x-pack/heartbeat$", + "skip_ci_labels": [ ], + "skip_target_branches": [ ], + "skip_ci_on_only_changed": [ ], + "always_require_ci_on_changed": ["^x-pack/heartbeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] + }, + { + "enabled": true, + "pipelineSlug": "beats-xpack-osquerybeat", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [ ], + "set_commit_status": true, + "build_on_commit": true, + "build_on_comment": true, + "trigger_comment_regex": "^/test x-pack/osquerybeat$", + "always_trigger_comment_regex": "^/test x-pack/osquerybeat$", + "skip_ci_labels": [ ], + "skip_target_branches": [ ], + "skip_ci_on_only_changed": [ ], + "always_require_ci_on_changed": ["^x-pack/osquerybeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] } ] } diff --git a/.buildkite/scripts/common.sh b/.buildkite/scripts/common.sh index ebb15c937dd2..d6a91a482436 100755 --- a/.buildkite/scripts/common.sh +++ b/.buildkite/scripts/common.sh @@ -22,6 +22,8 @@ XPACK_MODULE_PATTERN="^x-pack\\/[a-z0-9]+beat\\/module\\/([^\\/]+)\\/.*" [ -z "${run_xpack_libbeat_arm_tests+x}" ] && run_xpack_libbeat_arm_tests="$(buildkite-agent meta-data get run_xpack_libbeat_arm_tests --default "false")" [ -z "${run_xpack_metricbeat_aws_tests+x}" ] && run_xpack_metricbeat_aws_tests="$(buildkite-agent meta-data get run_xpack_metricbeat_aws_tests --default "false")" [ -z "${run_xpack_metricbeat_macos_tests+x}" ] && run_xpack_metricbeat_macos_tests="$(buildkite-agent meta-data get run_xpack_metricbeat_macos_tests --default "false")" +[ -z "${run_xpack_packetbeat_arm_tests+x}" ] && run_xpack_packetbeat_arm_tests="$(buildkite-agent meta-data get run_xpack_packetbeat_arm_tests --default "false")" +[ -z "${run_xpack_packetbeat_macos_tests+x}" ] && run_xpack_packetbeat_macos_tests="$(buildkite-agent meta-data get run_xpack_packetbeat_macos_tests --default "false")" metricbeat_changeset=( "^metricbeat/.*" @@ -105,6 +107,9 @@ case "${BUILDKITE_PIPELINE_SLUG}" in "beats-xpack-metricbeat") BEAT_CHANGESET_REFERENCE=${xpack_metricbeat_changeset[@]} ;; + "beats-xpack-packetbeat") + BEAT_CHANGESET_REFERENCE=${xpack_packetbeat_changeset[@]} + ;; *) echo "The changeset for the ${BUILDKITE_PIPELINE_SLUG} 
pipeline hasn't been defined yet." ;; diff --git a/.buildkite/scripts/generate_xpack_metricbeat_pipeline.sh b/.buildkite/scripts/generate_xpack_metricbeat_pipeline.sh index c9c65a5e7573..af116b17209d 100755 --- a/.buildkite/scripts/generate_xpack_metricbeat_pipeline.sh +++ b/.buildkite/scripts/generate_xpack_metricbeat_pipeline.sh @@ -31,8 +31,6 @@ steps: provider: "gcp" image: "${DEFAULT_UBUNTU_X86_64_IMAGE}" machineType: "${GCP_DEFAULT_MACHINE_TYPE}" - disk_size: 100 - disk_type: "pd-ssd" artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" - label: ":python: Python Integration Tests" @@ -42,8 +40,6 @@ steps: provider: "gcp" image: "${DEFAULT_UBUNTU_X86_64_IMAGE}" machineType: "${GCP_DEFAULT_MACHINE_TYPE}" - disk_size: 100 - disk_type: "pd-ssd" artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" - label: ":windows: Windows Unit Tests - {{matrix.image}}" @@ -106,8 +102,7 @@ else fi #TODO: replace by commented-out below condition when issues mentioned in the PR https://github.com/elastic/beats/pull/38081 are resolved -if [[ are_conditions_met_aws_tests || are_conditions_met_macos_tests ]]; then -# if [[ are_conditions_met_macos_tests ]]; then +if are_conditions_met_aws_tests || are_conditions_met_macos_tests ; then cat >> $pipelineName <<- YAML - group: "Extended Tests" @@ -140,8 +135,6 @@ if are_conditions_met_aws_tests; then provider: "gcp" image: "${DEFAULT_UBUNTU_X86_64_IMAGE}" machineType: "${GCP_DEFAULT_MACHINE_TYPE}" - disk_size: 100 - disk_type: "pd-ssd" artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" YAML @@ -166,6 +159,8 @@ if are_conditions_met_packaging; then provider: "gcp" image: "${IMAGE_UBUNTU_X86_64}" machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" env: PLATFORMS: "${PACKAGING_PLATFORMS}" diff --git a/.buildkite/scripts/generate_xpack_packetbeat_pipeline.sh b/.buildkite/scripts/generate_xpack_packetbeat_pipeline.sh new file mode 100644 index 000000000000..4eb2a1c3e049 --- /dev/null +++ b/.buildkite/scripts/generate_xpack_packetbeat_pipeline.sh @@ -0,0 +1,195 @@ +#!/usr/bin/env bash + +source .buildkite/scripts/common.sh + +set -euo pipefail + +pipelineName="pipeline.xpack-packetbeat-dynamic.yml" + +echo "Add the mandatory and extended tests without additional conditions into the pipeline" +if are_conditions_met_mandatory_tests; then + cat > $pipelineName <<- YAML + +steps: + + - group: "Mandatory Tests" + key: "mandatory-tests" + steps: + - label: ":linux: Ubuntu Unit Tests" + key: "mandatory-linux-unit-test" + command: "cd $BEATS_PROJECT_NAME && mage build unitTest" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + + - label: ":linux: Ubuntu System Tests" + key: "mandatory-linux-system-test" + command: "cd $BEATS_PROJECT_NAME && mage systemTest" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + + - label: ":rhel: RHEL-9 Unit Tests" + key: "mandatory-rhel9-unit-test" + command: ".buildkite/scripts/unit_tests.sh" + agents: + provider: "gcp" + image: "${IMAGE_RHEL9_X86_64}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + + + - label: ":windows: Windows Unit Tests - {{matrix.image}}" + command: ".buildkite/scripts/win_unit_tests.ps1" + key: "mandatory-win-unit-tests" + agents: + provider: "gcp" + image: "{{matrix.image}}" + machineType: "${GCP_WIN_MACHINE_TYPE}" + 
disk_size: 100 + disk_type: "pd-ssd" + matrix: + setup: + image: + - "${IMAGE_WIN_2016}" + - "${IMAGE_WIN_2022}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + + ## TODO: uncomment when the issue https://github.com/elastic/beats/issues/38142 is solved + # - label: ":windows: Windows 2022 System Tests" + # key: "mandatory-win-2022-system-tests" + # command: ".buildkite/scripts/win_unit_tests.ps1 systemtest" + # agents: + # provider: "gcp" + # image: "${IMAGE_WIN_2022}" + # machineType: "${GCP_WIN_MACHINE_TYPE}" + # disk_size: 100 + # disk_type: "pd-ssd" + # artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +## TODO: this condition will be changed in the Phase 3 of the Migration Plan https://docs.google.com/document/d/1IPNprVtcnHlem-uyGZM0zGzhfUuFAh4LeSl9JFHMSZQ/edit#heading=h.sltz78yy249h + + - group: "Extended Windows Tests" + key: "extended-win-tests" + steps: + + - label: ":windows: Windows Unit Tests - {{matrix.image}}" + command: ".buildkite/scripts/win_unit_tests.ps1" + key: "extended-win-unit-tests" + agents: + provider: "gcp" + image: "{{matrix.image}}" + machineType: "${GCP_WIN_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + matrix: + setup: + image: + - "${IMAGE_WIN_10}" + - "${IMAGE_WIN_11}" + - "${IMAGE_WIN_2019}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + + ## TODO: uncomment when the issue https://github.com/elastic/beats/issues/38142 is solved + # - label: ":windows: Windows 10 System Tests" + # key: "extended-win-10-system-tests" + # command: ".buildkite/scripts/win_unit_tests.ps1 systemtest" + # agents: + # provider: "gcp" + # image: "${IMAGE_WIN_10}" + # machineType: "${GCP_WIN_MACHINE_TYPE}" + # disk_size: 100 + # disk_type: "pd-ssd" + # artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +else + echo "The conditions don't match to requirements for generating pipeline steps." 
+ exit 0 +fi + +if are_conditions_met_arm_tests || are_conditions_met_macos_tests ; then + cat >> $pipelineName <<- YAML + + - group: "Extended Tests" + key: "extended-tests" + steps: + +YAML +fi + +if are_conditions_met_macos_tests; then + cat >> $pipelineName <<- YAML + + - label: ":mac: MacOS Unit Tests" + key: "extended-macos-unit-tests" + command: ".buildkite/scripts/unit_tests.sh" + agents: + provider: "orka" + imagePrefix: "${IMAGE_MACOS_X86_64}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +fi + +if are_conditions_met_arm_tests; then + cat >> $pipelineName <<- YAML + - label: ":linux: ARM Ubuntu Unit Tests" + key: "extended-arm64-unit-test" + command: "cd $BEATS_PROJECT_NAME && mage build unitTest" + agents: + provider: "aws" + imagePrefix: "${IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +fi + +echo "Check and add the Packaging into the pipeline" +if are_conditions_met_packaging; then + cat >> $pipelineName <<- YAML + + - wait: ~ + depends_on: + - step: "mandatory-tests" + allow_failure: false + + - group: "Packaging" # TODO: check conditions for future the main pipeline migration: https://github.com/elastic/beats/pull/28589 + key: "packaging" + steps: + - label: ":linux: Packaging Linux" + key: "packaging-linux" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + env: + PLATFORMS: "${PACKAGING_PLATFORMS}" + + - label: ":linux: Packaging ARM" + key: "packaging-arm" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "aws" + imagePrefix: "${IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" + env: + PLATFORMS: "${PACKAGING_ARM_PLATFORMS}" + PACKAGES: "docker" + +YAML +fi + +echo "--- Printing dynamic steps" #TODO: remove if the pipeline is public +cat $pipelineName + +echo "--- Loading dynamic steps" +buildkite-agent pipeline upload $pipelineName diff --git a/.buildkite/scripts/setenv.sh b/.buildkite/scripts/setenv.sh index 974886061d44..29a8a05446ec 100755 --- a/.buildkite/scripts/setenv.sh +++ b/.buildkite/scripts/setenv.sh @@ -11,9 +11,9 @@ SETUP_WIN_PYTHON_VERSION="3.11.0" NMAP_WIN_VERSION="7.12" # Earlier versions of NMap provide WinPcap (the winpcap packages don't install nicely because they pop-up a UI) GO_VERSION=$(cat .go-version) ASDF_MAGE_VERSION="1.15.0" -ASDF_TERRAFORM_VERSION="1.0.2" PACKAGING_PLATFORMS="+all linux/amd64 linux/arm64 windows/amd64 darwin/amd64 darwin/arm64" PACKAGING_ARM_PLATFORMS="linux/arm64" +ASDF_TERRAFORM_VERSION="1.0.2" AWS_REGION="eu-central-1" export SETUP_GVM_VERSION diff --git a/.buildkite/scripts/win_unit_tests.ps1 b/.buildkite/scripts/win_unit_tests.ps1 index b3c5c58fac0a..b61e4107c3cb 100644 --- a/.buildkite/scripts/win_unit_tests.ps1 +++ b/.buildkite/scripts/win_unit_tests.ps1 @@ -1,3 +1,7 @@ +param( + [string]$testType = "unittest" +) + $ErrorActionPreference = "Stop" # set -e $WorkFolder = $env:BEATS_PROJECT_NAME $WORKSPACE = Get-Location @@ -120,6 +124,23 @@ function withNmap($version) { } Start-Process -FilePath $nmapDownloadPath -ArgumentList "/S" -Wait } +function google_cloud_auth { + $tempFileName = "google-cloud-credentials.json" + $secretFileLocation = Join-Path $env:TEMP $tempFileName + $null = New-Item -ItemType File -Path $secretFileLocation + Set-Content -Path $secretFileLocation -Value $env:PRIVATE_CI_GCS_CREDENTIALS_SECRET + gcloud auth 
activate-service-account --key-file $secretFileLocation > $null 2>&1 + $env:GOOGLE_APPLICATION_CREDENTIALS = $secretFileLocation +} + +function google_cloud_auth_cleanup { + if (Test-Path $env:GOOGLE_APPLICATION_CREDENTIALS) { + Remove-Item $env:GOOGLE_APPLICATION_CREDENTIALS -Force + Remove-Item Env:\GOOGLE_APPLICATION_CREDENTIALS + } else { + Write-Host "No GCP credentials were added" + } +} fixCRLF @@ -142,10 +163,23 @@ $env:MAGEFILE_CACHE = $magefile New-Item -ItemType Directory -Force -Path "build" -if ($env:BUILDKITE_PIPELINE_SLUG -eq "beats-xpack-libbeat") { - mage -w reader/etw build goUnitTest -} else { - mage build unitTest +if ($testType -eq "unittest") { + if ($env:BUILDKITE_PIPELINE_SLUG -eq "beats-xpack-libbeat") { + mage -w reader/etw build goUnitTest + } else { + mage build unitTest + } +} +elseif ($testType -eq "systemtest") { + try { + google_cloud_auth + mage systemTest + } finally { + google_cloud_auth_cleanup + } +} +else { + Write-Host "Unknown test type. Please specify 'unittest' or 'systemtest'." } $EXITCODE=$LASTEXITCODE diff --git a/.buildkite/x-pack/pipeline.xpack.auditbeat.yml b/.buildkite/x-pack/pipeline.xpack.auditbeat.yml new file mode 100644 index 000000000000..2343eb6a4ddd --- /dev/null +++ b/.buildkite/x-pack/pipeline.xpack.auditbeat.yml @@ -0,0 +1,6 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-auditbeat" + +steps: + - label: "Example test" + command: echo "Hello!" diff --git a/.buildkite/x-pack/pipeline.xpack.dockerlogbeat.yml b/.buildkite/x-pack/pipeline.xpack.dockerlogbeat.yml new file mode 100644 index 000000000000..c4a0805615b1 --- /dev/null +++ b/.buildkite/x-pack/pipeline.xpack.dockerlogbeat.yml @@ -0,0 +1,6 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-dockerlogbeat" + +steps: + - label: "Example test" + command: echo "Hello!" diff --git a/.buildkite/x-pack/pipeline.xpack.filebeat.yml b/.buildkite/x-pack/pipeline.xpack.filebeat.yml new file mode 100644 index 000000000000..6d3d7d9daeee --- /dev/null +++ b/.buildkite/x-pack/pipeline.xpack.filebeat.yml @@ -0,0 +1,6 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-filebeat" + +steps: + - label: "Example test" + command: echo "Hello!" diff --git a/.buildkite/x-pack/pipeline.xpack.heartbeat.yml b/.buildkite/x-pack/pipeline.xpack.heartbeat.yml new file mode 100644 index 000000000000..65175d3b029a --- /dev/null +++ b/.buildkite/x-pack/pipeline.xpack.heartbeat.yml @@ -0,0 +1,6 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-heartbeat" + +steps: + - label: "Example test" + command: echo "Hello!" diff --git a/.buildkite/x-pack/pipeline.xpack.osquerybeat.yml b/.buildkite/x-pack/pipeline.xpack.osquerybeat.yml new file mode 100644 index 000000000000..22297e33ab59 --- /dev/null +++ b/.buildkite/x-pack/pipeline.xpack.osquerybeat.yml @@ -0,0 +1,6 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-osquerybeat" + +steps: + - label: "Example test" + command: echo "Hello!" 
diff --git a/.buildkite/x-pack/pipeline.xpack.packetbeat.yml b/.buildkite/x-pack/pipeline.xpack.packetbeat.yml index 34321b61161b..750b59e716d8 100644 --- a/.buildkite/x-pack/pipeline.xpack.packetbeat.yml +++ b/.buildkite/x-pack/pipeline.xpack.packetbeat.yml @@ -1,5 +1,61 @@ # yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-packetbeat" + +env: + IMAGE_UBUNTU_X86_64: "family/platform-ingest-beats-ubuntu-2204" + IMAGE_UBUNTU_ARM_64: "platform-ingest-beats-ubuntu-2204-aarch64" + IMAGE_RHEL9_X86_64: "family/platform-ingest-beats-rhel-9" + IMAGE_WIN_10: "family/general-windows-10" + IMAGE_WIN_11: "family/general-windows-11" + IMAGE_WIN_2016: "family/core-windows-2016" + IMAGE_WIN_2019: "family/core-windows-2019" + IMAGE_WIN_2022: "family/core-windows-2022" + IMAGE_MACOS_X86_64: "generic-13-ventura-x64" + GCP_DEFAULT_MACHINE_TYPE: "c2d-highcpu-8" + GCP_HI_PERF_MACHINE_TYPE: "c2d-highcpu-16" + GCP_WIN_MACHINE_TYPE: "n2-standard-8" + AWS_ARM_INSTANCE_TYPE: "t4g.xlarge" + BEATS_PROJECT_NAME: "x-pack/packetbeat" steps: - - label: "Example test" - command: echo "Hello!" + + - input: "Input Parameters" + key: "force-run-stages" + fields: + - select: "Packetbeat - run_xpack_packetbeat" + key: "run_xpack_packetbeat" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + - select: "Packetbeat - run_xpack_packetbeat_macos_tests" + key: "run_xpack_packetbeat_macos_tests" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + - select: "Packetbeat - run_xpack_packetbeat_arm_tests" + key: "run_xpack_packetbeat_arm_tests" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + + if: "build.source == 'ui'" + + - wait: ~ + if: "build.source == 'ui'" + allow_dependency_failure: false + + - label: ":linux: Load dynamic x-pack packetbeat pipeline" + key: "packetbeat-pipeline" + command: ".buildkite/scripts/generate_xpack_packetbeat_pipeline.sh" + notify: + - github_commit_status: + context: "${BEATS_PROJECT_NAME}: Load dynamic pipeline's steps" diff --git a/CHANGELOG.next.asciidoc b/CHANGELOG.next.asciidoc index 2e30fe66ec95..e789685680ad 100644 --- a/CHANGELOG.next.asciidoc +++ b/CHANGELOG.next.asciidoc @@ -107,6 +107,7 @@ fields added to events containing the Beats version. {pull}37553[37553] - Fix panics when parsing dereferencing invalid parsed url. {pull}34702[34702] - Fix setuid root when running under cgroups v2. {pull}37794[37794] - Adjust State loader to only retry when response code status is 5xx {pull}37981[37981] +- Reset prctl dumpable flag after cap drop. {pull}38269[38269] *Metricbeat* @@ -202,6 +203,8 @@ Setting environmental variable ELASTIC_NETINFO:false in Elastic Agent pod will d - Add parseDateInTZ value template for the HTTPJSON input {pull}37738[37738] - Support VPC endpoint for aws-s3 input SQS queue url. {pull}38189[38189] - Improve rate limit handling by HTTPJSON {issue}36207[36207] {pull}38161[38161] {pull}38237[38237] +- Add parseDateInTZ value template for the HTTPJSON input. {pull}37738[37738] +- Add support for complex event objects in the HTTP Endpoint input. {issue}37910[37910] {pull}38193[38193] *Auditbeat* @@ -228,6 +231,7 @@ Setting environmental variable ELASTIC_NETINFO:false in Elastic Agent pod will d - Add a `/inputs/` route to the HTTP monitoring endpoint that exposes metrics for each metricset instance. 
{pull}36971[36971] - Add linux IO metrics to system/process {pull}37213[37213] - Add new memory/cgroup metrics to Kibana module {pull}37232[37232] +- Add SSL support to mysql module {pull}37997[37997] *Metricbeat* diff --git a/catalog-info.yaml b/catalog-info.yaml index f52f80df3c71..7f99ab9ce83a 100644 --- a/catalog-info.yaml +++ b/catalog-info.yaml @@ -109,7 +109,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -155,7 +155,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -201,7 +201,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -247,7 +247,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -293,7 +293,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -339,7 +339,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -385,7 +385,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -430,7 +430,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: 
$schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -476,7 +476,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -497,9 +497,9 @@ spec: name: beats-xpack-winlogbeat description: "Beats x-pack winlogbeat pipeline" spec: - # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + branch_configuration: "main 7.17 8.*" pipeline_file: ".buildkite/x-pack/pipeline.xpack.winlogbeat.yml" - # maximum_timeout_in_minutes: 120 #TODO: uncomment after tests + maximum_timeout_in_minutes: 120 provider_settings: trigger_mode: none # don't trigger jobs from github activity build_pull_request_forks: false @@ -513,8 +513,8 @@ spec: cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" - # env: - # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + env: + ELASTIC_PR_COMMENTS_ENABLED: "true" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -522,7 +522,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -543,9 +543,9 @@ spec: name: beats-xpack-packetbeat description: "Beats x-pack packetbeat pipeline" spec: - # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + branch_configuration: "main 7.17 8.*" pipeline_file: ".buildkite/x-pack/pipeline.xpack.packetbeat.yml" - # maximum_timeout_in_minutes: 120 #TODO: uncomment after tests + maximum_timeout_in_minutes: 120 provider_settings: trigger_mode: none # don't trigger jobs from github activity build_pull_request_forks: false @@ -559,8 +559,8 @@ spec: cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" - # env: - # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + env: + ELASTIC_PR_COMMENTS_ENABLED: "true" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -568,7 +568,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -589,9 +589,9 @@ spec: name: beats-xpack-libbeat description: "Beats x-pack libbeat pipeline" spec: - # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + branch_configuration: "main 7.17 8.*" pipeline_file: 
".buildkite/x-pack/pipeline.xpack.libbeat.yml" - # maximum_timeout_in_minutes: 120 #TODO: uncomment after tests + maximum_timeout_in_minutes: 120 provider_settings: trigger_mode: none # don't trigger jobs from github activity build_pull_request_forks: false @@ -605,8 +605,8 @@ spec: cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" - # env: - # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + env: + ELASTIC_PR_COMMENTS_ENABLED: "true" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -614,7 +614,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -635,9 +635,9 @@ spec: name: beats-xpack-metricbeat description: "Beats x-pack metricbeat pipeline" spec: - # branch_configuration: "7.17" #TODO: uncomment after tests + branch_configuration: "main 7.17 8.*" pipeline_file: ".buildkite/x-pack/pipeline.xpack.metricbeat.yml" - maximum_timeout_in_minutes: 480 + maximum_timeout_in_minutes: 120 provider_settings: trigger_mode: none # don't trigger jobs from github activity build_pull_request_forks: false @@ -648,11 +648,11 @@ spec: build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) repository: elastic/beats cancel_intermediate_builds: true - cancel_intermediate_builds_branch_filter: "!7.17" + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" skip_intermediate_builds: true - skip_intermediate_builds_branch_filter: "!7.17" - # env: - # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + env: + ELASTIC_PR_COMMENTS_ENABLED: "true" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -660,7 +660,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -694,7 +694,7 @@ spec: everyone: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -728,3 +728,233 @@ spec: access_level: MANAGE_BUILD_AND_READ everyone: access_level: BUILD_AND_READ + +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-pipeline-beats-xpack-auditbeat + description: "Beats x-pack auditbeat pipeline" + links: + - title: Pipeline + url: https://buildkite.com/elastic/beats-xpack-auditbeat + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: 
buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: beats-xpack-auditbeat + description: "Beats x-pack auditbeat pipeline" + spec: + # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + pipeline_file: ".buildkite/x-pack/pipeline.xpack.auditbeat.yml" + maximum_timeout_in_minutes: 120 + provider_settings: + trigger_mode: none # don't trigger jobs from github activity + build_pull_request_forks: false + build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot + build_tags: true + filter_enabled: true + filter_condition: >- + build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) + repository: elastic/beats + cancel_intermediate_builds: true + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" + skip_intermediate_builds: true + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + # env: + # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: READ_ONLY + +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-pipeline-beats-xpack-dockerlogbeat + description: "Beats x-pack dockerlogbeat pipeline" + links: + - title: Pipeline + url: https://buildkite.com/elastic/beats-xpack-dockerlogbeat + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: beats-xpack-dockerlogbeat + description: "Beats x-pack dockerlogbeat pipeline" + spec: + # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + pipeline_file: ".buildkite/x-pack/pipeline.xpack.dockerlogbeat.yml" + maximum_timeout_in_minutes: 120 + provider_settings: + trigger_mode: none # don't trigger jobs from github activity + build_pull_request_forks: false + build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot + build_tags: true + filter_enabled: true + filter_condition: >- + build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) + repository: elastic/beats + cancel_intermediate_builds: true + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" + skip_intermediate_builds: true + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + # env: + # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: READ_ONLY + +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-pipeline-beats-xpack-filebeat + description: "Beats x-pack filebeat pipeline" + links: + - title: Pipeline + url: https://buildkite.com/elastic/beats-xpack-filebeat + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: beats-xpack-filebeat + description: "Beats x-pack filebeat pipeline" + spec: + # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + pipeline_file: 
".buildkite/x-pack/pipeline.xpack.filebeat.yml" + maximum_timeout_in_minutes: 120 + provider_settings: + trigger_mode: none # don't trigger jobs from github activity + build_pull_request_forks: false + build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot + build_tags: true + filter_enabled: true + filter_condition: >- + build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) + repository: elastic/beats + cancel_intermediate_builds: true + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" + skip_intermediate_builds: true + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + # env: + # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: READ_ONLY + +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-pipeline-beats-xpack-heartbeat + description: "Beats x-pack heartbeat pipeline" + links: + - title: Pipeline + url: https://buildkite.com/elastic/beats-xpack-heartbeat + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: beats-xpack-heartbeat + description: "Beats x-pack heartbeat pipeline" + spec: + # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + pipeline_file: ".buildkite/x-pack/pipeline.xpack.heartbeat.yml" + maximum_timeout_in_minutes: 120 + provider_settings: + trigger_mode: none # don't trigger jobs from github activity + build_pull_request_forks: false + build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot + build_tags: true + filter_enabled: true + filter_condition: >- + build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) + repository: elastic/beats + cancel_intermediate_builds: true + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" + skip_intermediate_builds: true + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + # env: + # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: READ_ONLY + +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-pipeline-beats-xpack-osquerybeat + description: "Beats x-pack osquerybeat pipeline" + links: + - title: Pipeline + url: https://buildkite.com/elastic/beats-xpack-osquerybeat + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: beats-xpack-osquerybeat + description: "Beats x-pack osquerybeat pipeline" + spec: + # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + pipeline_file: ".buildkite/x-pack/pipeline.xpack.osquerybeat.yml" + maximum_timeout_in_minutes: 120 + provider_settings: + trigger_mode: none # don't trigger jobs from github activity + build_pull_request_forks: false + build_pull_requests: true # requires filter_enabled and filter_condition settings 
as below when used with buildkite-pr-bot
+        build_tags: true
+        filter_enabled: true
+        filter_condition: >-
+          build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null)
+      repository: elastic/beats
+      cancel_intermediate_builds: true
+      cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*"
+      skip_intermediate_builds: true
+      skip_intermediate_builds_branch_filter: "!main !7.17 !8.*"
+      # env:
+      #   ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests
+      teams:
+        ingest-fp:
+          access_level: MANAGE_BUILD_AND_READ
+        everyone:
+          access_level: READ_ONLY
diff --git a/heartbeat/security/security.go b/heartbeat/security/security.go
index 8e15102f7b8d..597e3a5bda94 100644
--- a/heartbeat/security/security.go
+++ b/heartbeat/security/security.go
@@ -26,6 +26,7 @@ import (
 	"strconv"
 	"syscall"
 
+	"golang.org/x/sys/unix"
 	"kernel.org/pub/linux/libs/security/libcap/cap"
 )
 
@@ -46,6 +47,9 @@ func init() {
 	// The beat should use `getcap` at a later point to examine available capabilities
 	// rather than relying on errors from `setcap`
 	_ = setCapabilities()
+
+	// Make heartbeat dumpable so elastic-agent can access process metrics.
+	_ = setDumpable()
 }
 
 func setNodeProcAttr(localUserName string) error {
@@ -99,3 +103,13 @@ func setCapabilities() error {
 
 	return nil
 }
+
+// Enforce PR_SET_DUMPABLE=true to allow user-level access to /proc/<pid>/io.
+func setDumpable() error {
+	_, err := cap.Prctl(unix.PR_SET_DUMPABLE, 1)
+	if err != nil {
+		return fmt.Errorf("error setting dumpable flag via prctl: %w", err)
+	}
+
+	return nil
+}
diff --git a/libbeat/outputs/fileout/config.go b/libbeat/outputs/fileout/config.go
index e72a9f87d6fc..69af40e4289b 100644
--- a/libbeat/outputs/fileout/config.go
+++ b/libbeat/outputs/fileout/config.go
@@ -26,14 +26,14 @@ import (
 )
 
 type fileOutConfig struct {
-	Path            string           `config:"path"`
-	Filename        string           `config:"filename"`
-	RotateEveryKb   uint             `config:"rotate_every_kb" validate:"min=1"`
-	NumberOfFiles   uint             `config:"number_of_files"`
-	Codec           codec.Config     `config:"codec"`
-	Permissions     uint32           `config:"permissions"`
-	RotateOnStartup bool             `config:"rotate_on_startup"`
-	Queue           config.Namespace `config:"queue"`
+	Path            *PathFormatString `config:"path"`
+	Filename        string            `config:"filename"`
+	RotateEveryKb   uint              `config:"rotate_every_kb" validate:"min=1"`
+	NumberOfFiles   uint              `config:"number_of_files"`
+	Codec           codec.Config      `config:"codec"`
+	Permissions     uint32            `config:"permissions"`
+	RotateOnStartup bool              `config:"rotate_on_startup"`
+	Queue           config.Namespace  `config:"queue"`
 }
 
 func defaultConfig() fileOutConfig {
@@ -45,6 +45,18 @@ func defaultConfig() fileOutConfig {
 	}
 }
 
+func readConfig(cfg *config.C) (*fileOutConfig, error) {
+	foConfig := defaultConfig()
+	if err := cfg.Unpack(&foConfig); err != nil {
+		return nil, err
+	}
+
+	// disable bulk support in publisher pipeline
+	_ = cfg.SetInt("bulk_max_size", -1, -1)
+
+	return &foConfig, nil
+}
+
 func (c *fileOutConfig) Validate() error {
 	if c.NumberOfFiles < 2 || c.NumberOfFiles > file.MaxBackupsLimit {
 		return fmt.Errorf("the number_of_files to keep should be between 2 and %v",
diff --git a/libbeat/outputs/fileout/config_test.go b/libbeat/outputs/fileout/config_test.go
new file mode 100644
index 000000000000..7e149173f6df
--- /dev/null
+++ b/libbeat/outputs/fileout/config_test.go
@@ -0,0 +1,100 @@
+// Licensed to Elasticsearch B.V. under one or more contributor
+// license agreements.
See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package fileout + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/elastic/elastic-agent-libs/config" + "github.com/elastic/elastic-agent-libs/mapstr" +) + +func TestConfig(t *testing.T) { + for name, test := range map[string]struct { + config *config.C + useWindowsPath bool + assertion func(t *testing.T, config *fileOutConfig, err error) + }{ + "default config": { + config: config.MustNewConfigFrom([]byte(`{ }`)), + assertion: func(t *testing.T, actual *fileOutConfig, err error) { + expectedConfig := &fileOutConfig{ + NumberOfFiles: 7, + RotateEveryKb: 10 * 1024, + Permissions: 0600, + RotateOnStartup: true, + } + + assert.Equal(t, expectedConfig, actual) + assert.Nil(t, err) + }, + }, + "config given with posix path": { + config: config.MustNewConfigFrom(mapstr.M{ + "number_of_files": 10, + "rotate_every_kb": 5 * 1024, + "path": "/tmp/packetbeat/%{+yyyy-MM-dd-mm-ss-SSSSSS}", + "filename": "pb", + }), + assertion: func(t *testing.T, actual *fileOutConfig, err error) { + assert.Equal(t, uint(10), actual.NumberOfFiles) + assert.Equal(t, uint(5*1024), actual.RotateEveryKb) + assert.Equal(t, true, actual.RotateOnStartup) + assert.Equal(t, uint32(0600), actual.Permissions) + assert.Equal(t, "pb", actual.Filename) + + path, runErr := actual.Path.Run(time.Date(2024, 1, 2, 3, 4, 5, 67890, time.UTC)) + assert.Nil(t, runErr) + + assert.Equal(t, "/tmp/packetbeat/2024-01-02-04-05-000067", path) + assert.Nil(t, err) + }, + }, + "config given with windows path": { + useWindowsPath: true, + config: config.MustNewConfigFrom(mapstr.M{ + "number_of_files": 10, + "rotate_every_kb": 5 * 1024, + "path": "c:\\tmp\\packetbeat\\%{+yyyy-MM-dd-mm-ss-SSSSSS}", + "filename": "pb", + }), + assertion: func(t *testing.T, actual *fileOutConfig, err error) { + assert.Equal(t, uint(10), actual.NumberOfFiles) + assert.Equal(t, uint(5*1024), actual.RotateEveryKb) + assert.Equal(t, true, actual.RotateOnStartup) + assert.Equal(t, uint32(0600), actual.Permissions) + assert.Equal(t, "pb", actual.Filename) + + path, runErr := actual.Path.Run(time.Date(2024, 1, 2, 3, 4, 5, 67890, time.UTC)) + assert.Nil(t, runErr) + + assert.Equal(t, "c:\\tmp\\packetbeat\\2024-01-02-04-05-000067", path) + assert.Nil(t, err) + }, + }, + } { + t.Run(name, func(t *testing.T) { + isWindowsPath = test.useWindowsPath + cfg, err := readConfig(test.config) + test.assertion(t, cfg, err) + }) + } +} diff --git a/libbeat/outputs/fileout/docs/fileout.asciidoc b/libbeat/outputs/fileout/docs/fileout.asciidoc index 54dfdd0772aa..bb2a953ec75e 100644 --- a/libbeat/outputs/fileout/docs/fileout.asciidoc +++ b/libbeat/outputs/fileout/docs/fileout.asciidoc @@ -49,6 +49,14 @@ The default value is `true`. The path to the directory where the generated files will be saved. This option is mandatory. 
+The path may include the timestamp when the file output is initialized using the `+FORMAT` syntax where `FORMAT` is a +valid https://github.com/elastic/beats/blob/{doc-branch}/libbeat/common/dtfmt/doc.go[time format], +and enclosed with expansion braces: `%{+FORMAT}`. For example: + +``` +path: 'fileoutput-%{+yyyy.MM.dd}' +``` + ===== `filename` The name of the generated files. The default is set to the Beat name. For example, the files diff --git a/libbeat/outputs/fileout/file.go b/libbeat/outputs/fileout/file.go index 4ddc5955d6ef..34c57f29791f 100644 --- a/libbeat/outputs/fileout/file.go +++ b/libbeat/outputs/fileout/file.go @@ -52,20 +52,17 @@ func makeFileout( observer outputs.Observer, cfg *c.C, ) (outputs.Group, error) { - foConfig := defaultConfig() - if err := cfg.Unpack(&foConfig); err != nil { + foConfig, err := readConfig(cfg) + if err != nil { return outputs.Fail(err) } - // disable bulk support in publisher pipeline - _ = cfg.SetInt("bulk_max_size", -1, -1) - fo := &fileOutput{ log: logp.NewLogger("file"), beat: beat, observer: observer, } - if err := fo.init(beat, foConfig); err != nil { + if err = fo.init(beat, *foConfig); err != nil { return outputs.Fail(err) } @@ -74,10 +71,14 @@ func makeFileout( func (out *fileOutput) init(beat beat.Info, c fileOutConfig) error { var path string + configPath, runErr := c.Path.Run(time.Now().UTC()) + if runErr != nil { + return runErr + } if c.Filename != "" { - path = filepath.Join(c.Path, c.Filename) + path = filepath.Join(configPath, c.Filename) } else { - path = filepath.Join(c.Path, out.beat.Beat) + path = filepath.Join(configPath, out.beat.Beat) } out.filePath = path diff --git a/libbeat/outputs/fileout/pathformatstring.go b/libbeat/outputs/fileout/pathformatstring.go new file mode 100644 index 000000000000..acd2a7605fe6 --- /dev/null +++ b/libbeat/outputs/fileout/pathformatstring.go @@ -0,0 +1,66 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package fileout + +import ( + "os" + "strings" + "time" + + "github.com/elastic/beats/v7/libbeat/common/fmtstr" + + "github.com/elastic/beats/v7/libbeat/beat" +) + +var isWindowsPath = os.PathSeparator == '\\' + +// PathFormatString is a wrapper around EventFormatString for the +// handling paths with a format expression that has access to the timestamp format. +// It has special handling for paths, specifically for windows path separator +// which would be interpreted as an escape character. This formatter double escapes +// the path separator so it is properly interpreted by the fmtstr processor +type PathFormatString struct { + efs *fmtstr.EventFormatString +} + +// Run executes the format string returning a new expanded string or an error +// if execution or event field expansion fails. 
+func (fs *PathFormatString) Run(timestamp time.Time) (string, error) { + placeholderEvent := &beat.Event{ + Timestamp: timestamp, + } + return fs.efs.Run(placeholderEvent) +} + +// Unpack tries to initialize the PathFormatString from provided value +// (which must be a string). Unpack method satisfies go-ucfg.Unpacker interface +// required by config.C, in order to use PathFormatString with +// `common.(*Config).Unpack()`. +func (fs *PathFormatString) Unpack(v interface{}) error { + path, ok := v.(string) + if !ok { + return nil + } + + if isWindowsPath { + path = strings.ReplaceAll(path, "\\", "\\\\") + } + + fs.efs = &fmtstr.EventFormatString{} + return fs.efs.Unpack(path) +} diff --git a/libbeat/outputs/fileout/pathformatstring_test.go b/libbeat/outputs/fileout/pathformatstring_test.go new file mode 100644 index 000000000000..b8eee4e44eaa --- /dev/null +++ b/libbeat/outputs/fileout/pathformatstring_test.go @@ -0,0 +1,87 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package fileout + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestPathFormatString(t *testing.T) { + tests := []struct { + title string + useWindowsPath bool + format string + timestamp time.Time + expected string + }{ + { + "empty string", + false, + "", + time.Time{}, + "", + }, + { + "no fields configured", + false, + "format string", + time.Time{}, + "format string", + }, + { + "test timestamp formatter", + false, + "timestamp: %{+YYYY.MM.dd}", + time.Date(2015, 5, 1, 20, 12, 34, 0, time.UTC), + "timestamp: 2015.05.01", + }, + { + "test timestamp formatter with posix path", + false, + "/tmp/%{+YYYY.MM.dd}", + time.Date(2015, 5, 1, 20, 12, 34, 0, time.UTC), + "/tmp/2015.05.01", + }, + { + "test timestamp formatter with windows path", + true, + "C:\\tmp\\%{+YYYY.MM.dd}", + time.Date(2015, 5, 1, 20, 12, 34, 0, time.UTC), + "C:\\tmp\\2015.05.01", + }, + } + + for i, test := range tests { + t.Logf("test(%v): %v", i, test.title) + isWindowsPath = test.useWindowsPath + pfs := &PathFormatString{} + err := pfs.Unpack(test.format) + if err != nil { + t.Error(err) + continue + } + + actual, err := pfs.Run(test.timestamp) + + assert.NoError(t, err) + assert.Equal(t, test.expected, actual) + } +} diff --git a/metricbeat/docs/modules/mysql.asciidoc b/metricbeat/docs/modules/mysql.asciidoc index 0c5a793a29ae..8711359bf5f6 100644 --- a/metricbeat/docs/modules/mysql.asciidoc +++ b/metricbeat/docs/modules/mysql.asciidoc @@ -89,6 +89,18 @@ metricbeat.modules: # By setting raw to true, all raw fields from the status metricset will be added to the event. #raw: false + + # Optional SSL/TLS. By default is false. 
+ #ssl.enabled: true + + # List of root certificates for SSL/TLS server verification + #ssl.certificate_authorities: ["/etc/pki/root/ca.crt"] + + # Certificate for SSL/TLS client authentication + #ssl.certificate: "/etc/pki/client/cert.crt" + + # Client certificate key file + #ssl.key: "/etc/pki/client/cert.key" ---- [float] diff --git a/metricbeat/metricbeat.reference.yml b/metricbeat/metricbeat.reference.yml index 6659ca292769..3f9ccb0a9dbc 100644 --- a/metricbeat/metricbeat.reference.yml +++ b/metricbeat/metricbeat.reference.yml @@ -764,6 +764,18 @@ metricbeat.modules: # By setting raw to true, all raw fields from the status metricset will be added to the event. #raw: false + # Optional SSL/TLS. By default is false. + #ssl.enabled: true + + # List of root certificates for SSL/TLS server verification + #ssl.certificate_authorities: ["/etc/pki/root/ca.crt"] + + # Certificate for SSL/TLS client authentication + #ssl.certificate: "/etc/pki/client/cert.crt" + + # Client certificate key file + #ssl.key: "/etc/pki/client/cert.key" + #--------------------------------- NATS Module --------------------------------- - module: nats metricsets: diff --git a/metricbeat/module/mysql/_meta/Dockerfile b/metricbeat/module/mysql/_meta/Dockerfile index 2051c726595e..b701ad617ea3 100644 --- a/metricbeat/module/mysql/_meta/Dockerfile +++ b/metricbeat/module/mysql/_meta/Dockerfile @@ -5,4 +5,8 @@ ENV MYSQL_ROOT_PASSWORD test HEALTHCHECK --interval=1s --retries=90 CMD mysql -u root -p$MYSQL_ROOT_PASSWORD -h$HOSTNAME -P 3306 -e "SHOW STATUS" > /dev/null +COPY /certs/root-ca.pem /etc/certs/root-ca.pem +COPY /certs/server-cert.pem /etc/certs/server-cert.pem +COPY /certs/server-key.pem /etc/certs/server-key.pem + COPY test.cnf /etc/mysql/conf.d/test.cnf diff --git a/metricbeat/module/mysql/_meta/certs/client-cert.pem b/metricbeat/module/mysql/_meta/certs/client-cert.pem new file mode 100755 index 000000000000..df9c76e08626 --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/client-cert.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDDDCCAfQCAQEwDQYJKoZIhvcNAQELBQAwSjELMAkGA1UEBhMCVVMxEzARBgNV +BAgMCkNhbGlmb3JuaWExFDASBgNVBAcMC1NhbnRhIENsYXJhMRAwDgYDVQQDDAdm +YWtlLUNBMB4XDTI0MDIxNTIzNTA0MloXDTMzMTIyNDIzNTA0MlowTjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFDASBgNVBAcMC1NhbnRhIENsYXJh +MRQwEgYDVQQDDAtmYWtlLWNsaWVudDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC +AQoCggEBAIqHZbSUB1x/iW6DxaRlkFWjPuZ+F1wYTGvfpqnxZgZY1k5vSJTy3ETe +y3TelpEPBWEmsgHDx4bjuqeG+3my9dDEKEIYgXkfkfHREndVxPDfnRdfXPfp3qbm +wV2bdJnpSQzCg+lv8e8U+kMv0WcmwTuwlpVG0Rnb6vFdOs67/IIlBvI9sP5BKDYL +YFRxaoc8fLb8UMkfQ0BSmT4Rvmq5MSETh4re7OecV6pN0naEWhZf72mr/HiTAhb6 +xZJNSvNAzvdkQnhwt9aHemGQLRZD+4dduZYn27cwK4ySTZdyMoKn66HqMIfXPvr8 +LlICP4Gb8Df/JuUZVRbI13P+Xqujd8kCAwEAATANBgkqhkiG9w0BAQsFAAOCAQEA +gwA1+nZISC6QF9JtkOGrPpBZk6v1iy4iLsZSNaoinkB/FgesIpNrTFG0k6exSBV1 +pwQSmMVNSEsUOOjEq/Vk98014Kf8QVqfkdcujaBNPtxMqsocOO9Od78UuX5QdZXi +ayhkzrcPX4HTwjTqKFlJxb92rHrBx/GIWa68TeAjwbRiZmDASpVCEI2HnkBkFWTs +5Ux4wlC3JrnY3Jxb7QfDK94g9r5s1ljHeVki83cUYaI5TdY7F0uP+O6TvlhCPrjd +5708kRZJHnKThu3aE8HJYIbYhHocm9DszbnObd4SqECjfd6YNbREBhyaHJdCY/j2 +hm1zhBiW24dazs108uhFsQ== +-----END CERTIFICATE----- diff --git a/metricbeat/module/mysql/_meta/certs/client-key.pem b/metricbeat/module/mysql/_meta/certs/client-key.pem new file mode 100755 index 000000000000..33430372fd24 --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/client-key.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCKh2W0lAdcf4lu 
+g8WkZZBVoz7mfhdcGExr36ap8WYGWNZOb0iU8txE3st03paRDwVhJrIBw8eG47qn +hvt5svXQxChCGIF5H5Hx0RJ3VcTw350XX1z36d6m5sFdm3SZ6UkMwoPpb/HvFPpD +L9FnJsE7sJaVRtEZ2+rxXTrOu/yCJQbyPbD+QSg2C2BUcWqHPHy2/FDJH0NAUpk+ +Eb5quTEhE4eK3uznnFeqTdJ2hFoWX+9pq/x4kwIW+sWSTUrzQM73ZEJ4cLfWh3ph +kC0WQ/uHXbmWJ9u3MCuMkk2XcjKCp+uh6jCH1z76/C5SAj+Bm/A3/yblGVUWyNdz +/l6ro3fJAgMBAAECggEAEPRCAHQrA/k4c9oFBQoonHCMrNdDCuKO7NdsHYm1ucJi +5SnVxWQFTRkC59hrr1B6MTIUEGb6iyHhOOpqafI7B0xQnIlFBFLWsPSseMY6opvN +jTwew9k/xqfAg/E4F7OvXPRMAnSQ1LjZqcInE+Owe9qQjW/DvPFXS2fEgCOOA4vw +M6w6USf8UTsXBzMvRnDHMTQM0vfKNNSdopYDPeQc4YQ1A2AjkpYXZVWXFcFsE9zw +xFVZ9k6tP+gzk6shJjsbBoQ7qWwhdq1Q5tJ28FTaCVXDAp8l6yIFuZuI7r23O7+0 +ngxSejABJ3m9NmG0J7DPGU6zXhJW5nylWcSk5vwMkQKBgQDCWIRe4iSW0eGYBSe5 +hBoQgLe7aMAbsaCrHjTYQkKvI25YlfJ08OVU7oB/Bng/9AlpJlouGz67/W0PiRaz +jlP370p92IiwehUl9PkuVDpex4l2rDLCM1iVrPbxhbm/7+2nro2M/0/4iUyIK+Gr +Rpcqj2dQ3qarD+UmLXYPOoyRuQKBgQC2ec0sWyU67QuFaTemvTH8YFu68BfQqg6t +YQMc4+wj30ww0TZHFYVwyvR4agTOdFjwIUTERRN3EcFmlV5x+fGz/LfUdVYJj8B0 +lXakqeATsGJHngrdlyM+m+g+6JI1SUTshMa/xXVAUx8NZESOVE5JeZH6TD4/9Q3y +ijtithtekQKBgQCPeso/QrXAozLqCORLEjwr8tuygKNTzs/PhX1+K20P4BiXThyy +OScWjP5QyXX9wS0xdB8f6v1lzLO3xH3+EhXr9b4JKtO/dmImo7VTftuZHbde5cKT +nVTJK+kkZpW8HmZWZYgbkGJ6GuNlpP/2cycnRLgB/F8P66xBg06l75PYAQKBgGap +GhR1ZvnC+TNiocuuL5wkfhcrEsrzkfRbWwv68xSvgUcJvTa61etCU84XH4MjlBHt +NaoSjsPzelKDgLIxA5nWeXoPVYtlk8pDeI9lf0q0dmaCdOx8JnkH797Mq81M3nkO +rl6f8bpxyUuYeLV2muDdg5JFKNSEwwcMXCLJ/5XxAoGAKIkS02jWudDoBzubdFe/ +c5jSYufTZOmErnjnSKGkj9oZGVP6RYDhkHMPOxadO/4OLOKo6Phkg9yRmPG2tyKA ++ddgYP7zXEnsLxrjumoYTvcWgs1AHUUH4kA5SdImzYbSSfPW5h0KkvB+gYaukBGa +XHILry/59LkxU+nP1ZCVvt8= +-----END PRIVATE KEY----- diff --git a/metricbeat/module/mysql/_meta/certs/client-req.pem b/metricbeat/module/mysql/_meta/certs/client-req.pem new file mode 100755 index 000000000000..3295c803f8d3 --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/client-req.pem @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE REQUEST----- +MIICkzCCAXsCAQAwTjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWEx +FDASBgNVBAcMC1NhbnRhIENsYXJhMRQwEgYDVQQDDAtmYWtlLWNsaWVudDCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAIqHZbSUB1x/iW6DxaRlkFWjPuZ+ +F1wYTGvfpqnxZgZY1k5vSJTy3ETey3TelpEPBWEmsgHDx4bjuqeG+3my9dDEKEIY +gXkfkfHREndVxPDfnRdfXPfp3qbmwV2bdJnpSQzCg+lv8e8U+kMv0WcmwTuwlpVG +0Rnb6vFdOs67/IIlBvI9sP5BKDYLYFRxaoc8fLb8UMkfQ0BSmT4Rvmq5MSETh4re +7OecV6pN0naEWhZf72mr/HiTAhb6xZJNSvNAzvdkQnhwt9aHemGQLRZD+4dduZYn +27cwK4ySTZdyMoKn66HqMIfXPvr8LlICP4Gb8Df/JuUZVRbI13P+Xqujd8kCAwEA +AaAAMA0GCSqGSIb3DQEBCwUAA4IBAQBr6+WE3t0KdMpEBBC81IUHkXNB9Mf5EYKG +d1ev6jq1bi2jw6WqAGbqYp1W0awEjZJZcS2skXoy8QIFDNjznHPgKEXB9b98nj34 +TLpszCrlcQteWmzRCspwkhdrXNGE4Z4UMgN+xoh2P/dujK4kGH6HFcF1Fo4ajDUX +HT5vybjQuQlPDgt6Ufs+Pjotr5uCzLbIsFN1QG6gKVY90WAzPsa0XYN1ehMpkLsM +8vbVP0uRT6/VXTenbTtqqQ5Y70gmeiF/EssnQ9rM3vkGUW1A/9j23agLmlOVaCWw +HSN5HqrFUIlsLFIDDTgi7icW4Uk+7qdMSF7ooMOJIm27PGc49u4U +-----END CERTIFICATE REQUEST----- diff --git a/metricbeat/module/mysql/_meta/certs/root-ca-key.pem b/metricbeat/module/mysql/_meta/certs/root-ca-key.pem new file mode 100755 index 000000000000..2343e39b1499 --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/root-ca-key.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDSrYQWHfzCy/+N +Nzbg03pW/xnJ4wsEOeSuaYJfrkzU87pwlOFY57bhxe+3rektDOOziwjCxo4a8rKu +YmHRYKx4XdBtTjPnimRiwymSnemZdABWLNuJyvWen6iNJQqrcSesvobAtaQ265A9 +faRPn/Hjx5CH5x52hLhcpo6Yg4Ae6K2dnGbahFb1DI7Btfcf+PYiUau5DRiJiIpU +9K9hBbPmPuo0hsGiAYCJkTspdDMrFsBA6hNadamzsXy6AzB82Pu19nckR20kJVlG 
+Ioebg6mlHlcTV1qCsiWZBR/ghGGNHBp15EIXvIDpEJ4rcuy4AER4lXIdpG2RPD7Y ++Y7EGi0zAgMBAAECggEAU/SCuR+dHPGXdqqEq4aK78U7/SiFuNrrRxfTtRZsFsbD +yt6BiODaD9HFrCBZVjMXQHLM/HWMpq+Fxhl9mqcYQ+U6zHxIEeKkC3lzhTJ5p0XD +ZpP8rsYbKGm+jPSwck6m/V91qrEX7izkb6S0iGiYR+m8rnPLP3a3U3CqTZvFwErG +n7jk7caLZcT9+p7/TLlDIyx4ha4+7RRaL9OC1dNH8ADOkSHk/vaE6aU8J8PJ4YZg +QvNfsuo7FtDMq3OIkMAsHseuX90X8c3ZS7lNdCTRU7YuC1+8+l6xGs1Arjv1jqnd +9gIo6kh88Ng8zi4TkGLVAnfc55eXmB+f7PPN93fMeQKBgQD0uqDSsvPNnaY6DJIF +Gyz4qExyYH/h2QFT5M4bb0hQNIkP187JhBZw8Et2AvBtSBhs8dXfBxu736KRs8XG +b60iw2qXqo1XUEUO7R0VMO6NcA8Hk206X+p7ukn5RExzv2MurD+3f8QM8CypFA57 +UnSWdDCrOAh6WU5zfcz9woOM2QKBgQDcYWvqbV8XoyhJBG5PkG3UzYtOi/Es/fGH +qt03ZyyfYdCbhAknqftuj6ZzlMfVV3xOSXX+sdr0rLzammnRdlPJtJfjk8lUYa/i +0hy4eTHm7o1iZJfMS9rCMH9uTwyNGnb67u8kW16BuzaLbJMtd7IKtEG69U63abZX +t+zqmxGy6wKBgQCD43w+cNCxdA+cYx/ifpXK4DBqx5TDq0Zq5vkokd1/1AA1uJEp +yvSpIucYD1dxHZSESgR/sH4Czu/249JnMdI11OjCGdkYQBsngyPUQs2dDdIbvBj2 +h7B/w5KQMn2dN3yFL7Ea/FE0w87dxABV98b7OlzsOUNgZHbCCP8LluN8aQKBgGS3 +RTly2JWV5DBSjRNhn0A026h+/i6gs8RbyxOp3FPOwSaBlimBXr4telWyNg2DGPUy +T3Gh2L4fP4PsM9YdbLdvCEdiYA1nQ5m2ipeoE61Fcmn4LQOZ2xUKUwKXr9XAtYWC +stn7w9ooNApOCYkq/bw0myGVQG9EKag3D1g8nD8XAoGAZLJlDhlfFaWa7jy1VF/g +JWcsN/+BfTjBY6t3npxzg4pdi7lHhuAZ45PLnQMTIdWCkqgigt224kcbUy3b351u +lzoSiLatNXj5Q3on85ZNRaOMLqp0ueIzOLWvC+CRp46wXlwxTrPxghXatUBPsG47 +mO/mtw9gmaJ8UBW/SuxS24g= +-----END PRIVATE KEY----- diff --git a/metricbeat/module/mysql/_meta/certs/root-ca.pem b/metricbeat/module/mysql/_meta/certs/root-ca.pem new file mode 100755 index 000000000000..9b3e4f60fe8e --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/root-ca.pem @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgIUUp8x6W/bui3FjHLnJfIb7AsKBIwwDQYJKoZIhvcNAQEL +BQAwSjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFDASBgNVBAcM +C1NhbnRhIENsYXJhMRAwDgYDVQQDDAdmYWtlLUNBMB4XDTI0MDIxNTIzNTAzNVoX +DTMzMTIyNDIzNTAzNVowSjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3Ju +aWExFDASBgNVBAcMC1NhbnRhIENsYXJhMRAwDgYDVQQDDAdmYWtlLUNBMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0q2EFh38wsv/jTc24NN6Vv8ZyeML +BDnkrmmCX65M1PO6cJThWOe24cXvt63pLQzjs4sIwsaOGvKyrmJh0WCseF3QbU4z +54pkYsMpkp3pmXQAVizbicr1np+ojSUKq3EnrL6GwLWkNuuQPX2kT5/x48eQh+ce +doS4XKaOmIOAHuitnZxm2oRW9QyOwbX3H/j2IlGruQ0YiYiKVPSvYQWz5j7qNIbB +ogGAiZE7KXQzKxbAQOoTWnWps7F8ugMwfNj7tfZ3JEdtJCVZRiKHm4OppR5XE1da +grIlmQUf4IRhjRwadeRCF7yA6RCeK3LsuABEeJVyHaRtkTw+2PmOxBotMwIDAQAB +o1MwUTAdBgNVHQ4EFgQURA7Q9JPfB4mveB0vzmoqNJ2HSZUwHwYDVR0jBBgwFoAU +RA7Q9JPfB4mveB0vzmoqNJ2HSZUwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0B +AQsFAAOCAQEAB4NGJFZpzltHLqvInSU/EQxdIHgifihOFzsXTEXkdrmkfEw5puVL +fzg6qnLOunh3GAwLCnM0aIzDLS8WAS509Jwwidn7OtBpYV+jIzJrrTycWjAdvcHC +WToPTueXxwaAD3pCrus0w9H8egoQ1haNVmQm0OWcv3My82cNbZwViuQSCrky1srL +N5l7UM0gbXKeZjTGHIoTIjQJDgJT8PydsxpOZq7CcKRDBdF5nYMcUq8wltneb0Nh +7DuLLdxEM11XzIRT4GLRxT2xqwW7UpLfWpuo+niCvmNFY6SzyHFR1vFI3Kw1rYXh +3cbEtHtRvcNQg6Jp/zoHDcXMS3hDMeN2vQ== +-----END CERTIFICATE----- diff --git a/metricbeat/module/mysql/_meta/certs/server-cert.pem b/metricbeat/module/mysql/_meta/certs/server-cert.pem new file mode 100755 index 000000000000..1ca56e3f44f9 --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/server-cert.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDDDCCAfQCAQEwDQYJKoZIhvcNAQELBQAwSjELMAkGA1UEBhMCVVMxEzARBgNV +BAgMCkNhbGlmb3JuaWExFDASBgNVBAcMC1NhbnRhIENsYXJhMRAwDgYDVQQDDAdm +YWtlLUNBMB4XDTI0MDIxNTIzNTAzOFoXDTMzMTIyNDIzNTAzOFowTjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFDASBgNVBAcMC1NhbnRhIENsYXJh +MRQwEgYDVQQDDAtmYWtlLXNlcnZlcjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC 
+AQoCggEBAMuPqkUt/Ax9s/h5LPxXU0m6OAEp1InLbR6x//hGVgmIiQu5/Fg1VfmZ +YbwraXxs4JDfMUyK6bd/bk2o71I1pnLmoFmQvawDRxOqkA1NLpF2FJtk0eevkF1D +crC9T1SfrzlwrucqqUXowdprVXFFVbFQTXsSyD8Nv/MGzDgmDtmMXQ8sLVqjGIEM +akuPMbNCVNTVnd/53WMaDzopnam/NCJNDGp2RVhf+KuOWLTURXFYN6j1z+f/1BNa +4QW+WtofzYkAWEcvCc8zeXUhwL6xE5gDyq1NkQ/ejqQq+iIJLd1FUFOH1jPSgmW5 +3CiWih2Is6VA0hCzDirdFtAHTui/OekCAwEAATANBgkqhkiG9w0BAQsFAAOCAQEA +vdGGVxaeSEfOkx+D7uYCx0blnobJoclggQP3fOIpyrU/LCeka+F8dvFvuGJLvn3A +JOMZZHCVnK6jjJYHFeolRCxd9iULYHD+dkWDr6uhvNMfwIt7UzUmtbznHAaD+ays +X0H70Z9+jmr3uFkevRbFkvDZqzdRYi/12oPM+0Skra3ouYen6zAtPU0Hruc0jyBP +W7V6mMSmCUPKTOJRZgDEIEBvu43rwEbQUG0ayqF1sLv+D6hjFrFJ2gCxgVH/+C9E +h0NF2Kdpb+jECCu3yhQA536Ugi9k96zJqJonu9jP4ODXMTG2qmsdFFW1zyFb9DbV +bjUsiDE7bEumHY2NEfzr3A== +-----END CERTIFICATE----- diff --git a/metricbeat/module/mysql/_meta/certs/server-key.pem b/metricbeat/module/mysql/_meta/certs/server-key.pem new file mode 100755 index 000000000000..d1a7d286a1c3 --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/server-key.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDLj6pFLfwMfbP4 +eSz8V1NJujgBKdSJy20esf/4RlYJiIkLufxYNVX5mWG8K2l8bOCQ3zFMium3f25N +qO9SNaZy5qBZkL2sA0cTqpANTS6RdhSbZNHnr5BdQ3KwvU9Un685cK7nKqlF6MHa +a1VxRVWxUE17Esg/Db/zBsw4Jg7ZjF0PLC1aoxiBDGpLjzGzQlTU1Z3f+d1jGg86 +KZ2pvzQiTQxqdkVYX/irjli01EVxWDeo9c/n/9QTWuEFvlraH82JAFhHLwnPM3l1 +IcC+sROYA8qtTZEP3o6kKvoiCS3dRVBTh9Yz0oJludwoloodiLOlQNIQsw4q3RbQ +B07ovznpAgMBAAECggEADLAux9Me89ReBG3hLPVwfpb56LCny9L/QTuNHfecY0m8 +aRu1q/XfHwi9e9Ik6BmNQdp3ozLBcKujv3l5OWGYt27CrfKEsBUgOAyYoAugjHaU +wD7fipZ55CZRHs0eBcNSU70/Wa9iD7Z7Ztbr43yT49KCkdpQ2wVLYqWY0yMkJ9Eo +ZUJ8fL+yDMeJxnhQSIejK62TQI3FdMz+aNXA6AO0YiSfqagTS8GVNZQvZzvyxYS0 +DpiydzKSbS2RXkf3waClU5hDGwqhNxXa9bya/KrLvm4ag/VaV0O1M9jwFOKwfUGY +0SDELz/mxsOmGntTUbtuH7VSvnqkJHfACUcNkkIjAQKBgQD5pwIzrPnGrljDcFqu +OCRxhiRjgCNth4ObBbmj2n0BV5Uw33o1VlN/+GCfKcIQ1+tHOUrEtkwP5mMatUbf +4G4K/+bO3eWAf+ia5hkSVASbU0ui36iSkPWLYJr0oDx0N6Vw+ZK7oxqLGqW2dm4Y +Q1TFaIDd2wUGPYAuDaqPDHecCQKBgQDQvKXy9Ueh4iTbz3sH6Kp4wGN2BsjWWOVn +Hi4QoqnDoLrguhCe5vvNyxfayziu9hUKzP8kBHQOY/2xpKv+epPuw6hgaD0Mnh/w +UcWEqZs102y0zZcQISfG8TUoLHW31T87veB3YEVIB+8uZg1CWJ7aDKe8UmugVGV2 +k2sMG7fm4QKBgHq0z6w+lPZGs3I8QxXmmmMCH9iYHtGzDcigY8JZnZ+PQNEoxpR4 +vcnkdvlEORK2TfpP+qP9Rh16i7OQ7ikT0oKtjPCYuDkUpWudNS2BBlKh+kcvz1da +0JWVAhTCvXQR9cs1oB2B6YX9rv2j8DEUxxHQb6acBDgw+lOoe/CbnB6hAoGBAKxg +bcbjCcHFCF1BzT8tw8GuVzS7y5U/mkp64N26BunXzRwSa/FdnOpI4q07j9bkv2HJ +ApZS2yibKIFQFP01av8NMvpSer/1wThrvuqcSeG8dJQnB645QykGPrirZpdmki6a +0kijBvPCIaI2gpKcrqoxMz/Q7LJdn+C5Qvif11HhAoGAfai8GYFiXuShvf+8gjOt +qIsBMV3YexcX11qPD5vVJMCW1xLbpb9f3sPf8P31TB8tz5JA3aG24k8yURtgTA4Z +2I6Jo9vwMjAdOxHTalqMllDvBj5S5+cX38kGdcxdcbAiUHwIoXy6cjcGbeO/SesR +L1bbyZA45gpsWFxFr5V67G0= +-----END PRIVATE KEY----- diff --git a/metricbeat/module/mysql/_meta/certs/server-req.pem b/metricbeat/module/mysql/_meta/certs/server-req.pem new file mode 100755 index 000000000000..035ab7e2faf3 --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/server-req.pem @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE REQUEST----- +MIICkzCCAXsCAQAwTjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWEx +FDASBgNVBAcMC1NhbnRhIENsYXJhMRQwEgYDVQQDDAtmYWtlLXNlcnZlcjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMuPqkUt/Ax9s/h5LPxXU0m6OAEp +1InLbR6x//hGVgmIiQu5/Fg1VfmZYbwraXxs4JDfMUyK6bd/bk2o71I1pnLmoFmQ +vawDRxOqkA1NLpF2FJtk0eevkF1DcrC9T1SfrzlwrucqqUXowdprVXFFVbFQTXsS +yD8Nv/MGzDgmDtmMXQ8sLVqjGIEMakuPMbNCVNTVnd/53WMaDzopnam/NCJNDGp2 +RVhf+KuOWLTURXFYN6j1z+f/1BNa4QW+WtofzYkAWEcvCc8zeXUhwL6xE5gDyq1N 
+kQ/ejqQq+iIJLd1FUFOH1jPSgmW53CiWih2Is6VA0hCzDirdFtAHTui/OekCAwEA +AaAAMA0GCSqGSIb3DQEBCwUAA4IBAQAK3+eAfReXoGP3CQvTE/Bd6u+u5kG65stV +DONrBzhMQ4R36X+Q6q65qJ0rmvwZcUfkIauQzdNv9ZfCDT7pO1VtNT0R+H6+shz9 +JiwGOudAlFSt31Ps0+lDm6WjA6J1Nmr9N7XrsmfdW4z2n1UZSPS9mOZIj+PpUtQw +OzIwJ/+btS/RVO0cGGFkoFwhrYKilAbq+SsMxMVxPcXUP+xLFYn6FCNFbf5uBpLz +ZM7HBDh2uVfwsaptnY3v+EIELCsXsFm9uj4zG45fJmu4KARY6FAi9sEvfA1ieZuU +8hmovXhKq6eSU2fPoeurRV1gxuanuFObd39LRoCTy3fCnqTZFxXg +-----END CERTIFICATE REQUEST----- diff --git a/metricbeat/module/mysql/_meta/config.reference.yml b/metricbeat/module/mysql/_meta/config.reference.yml index 03880a5ad6ab..4e5cc470aca1 100644 --- a/metricbeat/module/mysql/_meta/config.reference.yml +++ b/metricbeat/module/mysql/_meta/config.reference.yml @@ -21,3 +21,15 @@ # By setting raw to true, all raw fields from the status metricset will be added to the event. #raw: false + + # Optional SSL/TLS. By default is false. + #ssl.enabled: true + + # List of root certificates for SSL/TLS server verification + #ssl.certificate_authorities: ["/etc/pki/root/ca.crt"] + + # Certificate for SSL/TLS client authentication + #ssl.certificate: "/etc/pki/client/cert.crt" + + # Client certificate key file + #ssl.key: "/etc/pki/client/cert.key" diff --git a/metricbeat/module/mysql/_meta/config.yml b/metricbeat/module/mysql/_meta/config.yml index 367b32e91736..a86258fca3b5 100644 --- a/metricbeat/module/mysql/_meta/config.yml +++ b/metricbeat/module/mysql/_meta/config.yml @@ -18,3 +18,15 @@ # Password of hosts. Empty by default. #password: secret + + # Optional SSL/TLS. By default is false. + #ssl.enabled: true + + # List of root certificates for SSL/TLS server verification + #ssl.certificate_authorities: ["/etc/pki/root/ca.crt"] + + # Certificate for SSL/TLS client authentication + #ssl.certificate: "/etc/pki/client/cert.crt" + + # Client certificate key file + #ssl.key: "/etc/pki/client/cert.key" \ No newline at end of file diff --git a/metricbeat/module/mysql/_meta/test.cnf b/metricbeat/module/mysql/_meta/test.cnf index f759a49631d3..24eec52dd05f 100644 --- a/metricbeat/module/mysql/_meta/test.cnf +++ b/metricbeat/module/mysql/_meta/test.cnf @@ -1,2 +1,6 @@ [mysqld] bind-address = 0.0.0.0 +require_secure_transport = OFF +ssl-ca = /etc/certs/root-ca.pem +ssl-cert = /etc/certs/server-cert.pem +ssl-key = /etc/certs/server-key.pem \ No newline at end of file diff --git a/metricbeat/module/mysql/config.go b/metricbeat/module/mysql/config.go new file mode 100644 index 000000000000..96704bef4799 --- /dev/null +++ b/metricbeat/module/mysql/config.go @@ -0,0 +1,32 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package mysql + +import ( + "crypto/tls" + + "github.com/elastic/elastic-agent-libs/transport/tlscommon" +) + +type Config struct { + Hosts []string `config:"hosts" validate:"required"` + Username string `config:"username"` + Password string `config:"password"` + TLS *tlscommon.Config `config:"ssl"` + TLSConfig *tls.Config +} diff --git a/metricbeat/module/mysql/docker-compose.yml b/metricbeat/module/mysql/docker-compose.yml index e112587fccdc..0644d9568ad1 100644 --- a/metricbeat/module/mysql/docker-compose.yml +++ b/metricbeat/module/mysql/docker-compose.yml @@ -2,10 +2,10 @@ version: '2.3' services: mysql: - image: docker.elastic.co/integrations-ci/beats-mysql:${MYSQL_VARIANT:-mysql}-${MYSQL_VERSION:-5.7.12}-1 + image: docker.elastic.co/integrations-ci/beats-mysql:${MYSQL_VARIANT:-mysql}-${MYSQL_VERSION:-8.0}-1 build: context: ./_meta args: - MYSQL_IMAGE: ${MYSQL_VARIANT:-mysql}:${MYSQL_VERSION:-5.7.12} + MYSQL_IMAGE: ${MYSQL_VARIANT:-mysql}:${MYSQL_VERSION:-8.0} ports: - 3306 diff --git a/metricbeat/module/mysql/galera_status/status.go b/metricbeat/module/mysql/galera_status/status.go index d1dc68cd0a25..6f27b8d4e8fc 100644 --- a/metricbeat/module/mysql/galera_status/status.go +++ b/metricbeat/module/mysql/galera_status/status.go @@ -42,7 +42,7 @@ func init() { // MetricSet for fetching Galera-MySQL server status type MetricSet struct { - mb.BaseMetricSet + *mysql.Metricset db *sql.DB } @@ -50,7 +50,13 @@ type MetricSet struct { // Loads query_mode config setting from the config file func New(base mb.BaseMetricSet) (mb.MetricSet, error) { cfgwarn.Experimental("The galera_status metricset is experimental.") - return &MetricSet{BaseMetricSet: base}, nil + + ms, err := mysql.NewMetricset(base) + if err != nil { + return nil, err + } + + return &MetricSet{Metricset: ms, db: nil}, nil } // Fetch methods implements the data gathering and data conversion to the right format @@ -58,7 +64,7 @@ func New(base mb.BaseMetricSet) (mb.MetricSet, error) { func (m *MetricSet) Fetch(reporter mb.ReporterV2) error { if m.db == nil { var err error - m.db, err = mysql.NewDB(m.HostData().URI) + m.db, err = mysql.NewDB(m.HostData().URI, m.Metricset.Config.TLSConfig) if err != nil { return fmt.Errorf("Galera-status fetch failed: %w", err) } diff --git a/metricbeat/module/mysql/mysql.go b/metricbeat/module/mysql/mysql.go index 35388a9a1bde..23c0f8dda106 100644 --- a/metricbeat/module/mysql/mysql.go +++ b/metricbeat/module/mysql/mysql.go @@ -21,14 +21,18 @@ Package mysql is Metricbeat module for MySQL server. package mysql import ( + "crypto/tls" "database/sql" "fmt" "github.com/elastic/beats/v7/metricbeat/mb" + "github.com/elastic/elastic-agent-libs/transport/tlscommon" "github.com/go-sql-driver/mysql" ) +const TLSConfigKey = "custom" + func init() { // Register the ModuleFactory function for the "mysql" module. if err := mb.Registry.AddModule("mysql", NewModule); err != nil { @@ -38,16 +42,37 @@ func init() { func NewModule(base mb.BaseModule) (mb.Module, error) { // Validate that at least one host has been specified. 
- config := struct { - Hosts []string `config:"hosts" validate:"required"` - }{} - if err := base.UnpackConfig(&config); err != nil { + var c Config + if err := base.UnpackConfig(&c); err != nil { return nil, err } return &base, nil } +type Metricset struct { + mb.BaseMetricSet + Config Config +} + +func NewMetricset(base mb.BaseMetricSet) (*Metricset, error) { + var c Config + if err := base.Module().UnpackConfig(&c); err != nil { + return nil, fmt.Errorf("could not read config: %w", err) + } + + if c.TLS.IsEnabled() { + tlsConfig, err := tlscommon.LoadTLSConfig(c.TLS) + if err != nil { + return nil, fmt.Errorf("could not load provided TLS configuration: %w", err) + } + + c.TLSConfig = tlsConfig.ToConfig() + } + + return &Metricset{Config: c, BaseMetricSet: base}, nil +} + // ParseDSN creates a DSN (data source name) string by parsing the host. // It validates the resulting DSN and returns an error if the DSN is invalid. // @@ -55,9 +80,11 @@ func NewModule(base mb.BaseModule) (mb.Module, error) { // Example: root:test@tcp(127.0.0.1:3306)/ func ParseDSN(mod mb.Module, host string) (mb.HostData, error) { c := struct { - Username string `config:"username"` - Password string `config:"password"` + Username string `config:"username"` + Password string `config:"password"` + TLS *tlscommon.Config `config:"ssl"` }{} + if err := mod.UnpackConfig(&c); err != nil { return mb.HostData{}, err } @@ -86,6 +113,10 @@ func ParseDSN(mod mb.Module, host string) (mb.HostData, error) { noCredentialsConfig.User = "" noCredentialsConfig.Passwd = "" + if c.TLS.IsEnabled() { + config.TLSConfig = TLSConfigKey + } + return mb.HostData{ URI: config.FormatDSN(), SanitizedURI: noCredentialsConfig.FormatDSN(), @@ -99,10 +130,18 @@ func ParseDSN(mod mb.Module, host string) (mb.HostData, error) { // must be valid, otherwise an error will be returned. 
// // DSN Format: [username[:password]@][protocol[(address)]]/ -func NewDB(dsn string) (*sql.DB, error) { +func NewDB(dsn string, tlsConfig *tls.Config) (*sql.DB, error) { + if tlsConfig != nil { + err := mysql.RegisterTLSConfig(TLSConfigKey, tlsConfig) + if err != nil { + return nil, fmt.Errorf("registering custom tls config failed: %w", err) + } + } + db, err := sql.Open("mysql", dsn) if err != nil { return nil, fmt.Errorf("sql open failed: %w", err) } + return db, nil } diff --git a/metricbeat/module/mysql/mysql_integration_test.go b/metricbeat/module/mysql/mysql_integration_test.go index 5713a5821490..2fc96475646e 100644 --- a/metricbeat/module/mysql/mysql_integration_test.go +++ b/metricbeat/module/mysql/mysql_integration_test.go @@ -20,6 +20,9 @@ package mysql import ( + "crypto/tls" + "crypto/x509" + "os" "testing" "github.com/stretchr/testify/assert" @@ -31,9 +34,58 @@ import ( func TestNewDB(t *testing.T) { service := compose.EnsureUp(t, "mysql") - db, err := NewDB(GetMySQLEnvDSN(service.Host())) + db, err := NewDB(GetMySQLEnvDSN(service.Host()), nil) assert.NoError(t, err) err = db.Ping() assert.NoError(t, err) } + +func loadTLSConfig(caCertPath, clientCertPath, clientKeyPath string) (*tls.Config, error) { + caCert, err := os.ReadFile(caCertPath) + if err != nil { + return nil, err + } + caCertPool := x509.NewCertPool() + caCertPool.AppendCertsFromPEM(caCert) + + cert, err := tls.LoadX509KeyPair(clientCertPath, clientKeyPath) + if err != nil { + return nil, err + } + + tlsConfig := &tls.Config{ + Certificates: []tls.Certificate{cert}, + RootCAs: caCertPool, + MinVersion: tls.VersionTLS12, + } + + return tlsConfig, nil +} + +func TestNewDBWithSSL(t *testing.T) { + service := compose.EnsureUp(t, "mysql") + + tlsConfig, err := loadTLSConfig("_meta/certs/root-ca.pem", "_meta/certs/client-cert.pem", "_meta/certs/client-key.pem") + tlsConfig.InsecureSkipVerify = true + assert.NoError(t, err) + + db, err := NewDB(GetMySQLEnvDSN(service.Host())+"?tls=custom", tlsConfig) + assert.NoError(t, err) + + err = db.Ping() + assert.NoError(t, err) + + // Check if the current connection is using SSL + var sslCipher, variableName, value string + err = db.QueryRow(`show status like 'Ssl_cipher'`).Scan(&variableName, &sslCipher) + assert.NoError(t, err) + + // If sslCipher is not empty, then SSL is being used for the connection + assert.NotEmpty(t, variableName) + assert.NotEmpty(t, sslCipher) + + err = db.QueryRow(`show variables like 'have_ssl'`).Scan(&variableName, &value) + assert.NoError(t, err) + assert.Equal(t, "YES", value) +} diff --git a/metricbeat/module/mysql/query/query.go b/metricbeat/module/mysql/query/query.go index 35881d764016..d7bbaaa4cd7b 100644 --- a/metricbeat/module/mysql/query/query.go +++ b/metricbeat/module/mysql/query/query.go @@ -25,13 +25,17 @@ package query import ( "context" + "crypto/tls" "fmt" + mysqlDriver "github.com/go-sql-driver/mysql" + "github.com/elastic/beats/v7/libbeat/common/cfgwarn" "github.com/elastic/beats/v7/metricbeat/helper/sql" "github.com/elastic/beats/v7/metricbeat/mb" "github.com/elastic/beats/v7/metricbeat/module/mysql" "github.com/elastic/elastic-agent-libs/mapstr" + "github.com/elastic/elastic-agent-libs/transport/tlscommon" ) func init() { @@ -57,8 +61,10 @@ type MetricSet struct { mb.BaseMetricSet db *sql.DbClient Config struct { - Queries []query `config:"queries" validate:"nonzero,required"` - Namespace string `config:"namespace" validate:"nonzero,required"` + Queries []query `config:"queries" validate:"nonzero,required"` + Namespace string 
`config:"namespace" validate:"nonzero,required"` + TLS *tlscommon.Config `config:"ssl"` + TLSConfig *tls.Config } } @@ -72,16 +78,31 @@ func New(base mb.BaseMetricSet) (mb.MetricSet, error) { return nil, err } + if b.Config.TLS.IsEnabled() { + tlsConfig, err := tlscommon.LoadTLSConfig(b.Config.TLS) + if err != nil { + return nil, fmt.Errorf("could not load provided TLS configuration: %w", err) + } + + b.Config.TLSConfig = tlsConfig.ToConfig() + } + return b, nil } // Fetch fetches status messages from a mysql host. func (m *MetricSet) Fetch(ctx context.Context, reporter mb.ReporterV2) error { if m.db == nil { + if m.Config.TLSConfig != nil { + err := mysqlDriver.RegisterTLSConfig(mysql.TLSConfigKey, m.Config.TLSConfig) + if err != nil { + return fmt.Errorf("registering custom tls config failed: %w", err) + } + } var err error m.db, err = sql.NewDBClient("mysql", m.HostData().URI, m.Logger()) if err != nil { - return fmt.Errorf("mysql-status fetch failed: %w", err) + return fmt.Errorf("mysql-query fetch failed: %w", err) } } diff --git a/metricbeat/module/mysql/status/status.go b/metricbeat/module/mysql/status/status.go index dd57f7e23c92..ac3e5b83a185 100644 --- a/metricbeat/module/mysql/status/status.go +++ b/metricbeat/module/mysql/status/status.go @@ -40,20 +40,25 @@ func init() { // MetricSet for fetching MySQL server status. type MetricSet struct { - mb.BaseMetricSet + *mysql.Metricset db *sql.DB } // New creates and returns a new MetricSet instance. func New(base mb.BaseMetricSet) (mb.MetricSet, error) { - return &MetricSet{BaseMetricSet: base}, nil + ms, err := mysql.NewMetricset(base) + if err != nil { + return nil, err + } + + return &MetricSet{Metricset: ms, db: nil}, nil } // Fetch fetches status messages from a mysql host. func (m *MetricSet) Fetch(reporter mb.ReporterV2) error { if m.db == nil { var err error - m.db, err = mysql.NewDB(m.HostData().URI) + m.db, err = mysql.NewDB(m.HostData().URI, m.Metricset.Config.TLSConfig) if err != nil { return fmt.Errorf("mysql-status fetch failed: %w", err) } diff --git a/metricbeat/modules.d/mysql.yml.disabled b/metricbeat/modules.d/mysql.yml.disabled index 2913f5af8bc8..27dcc1e59ea5 100644 --- a/metricbeat/modules.d/mysql.yml.disabled +++ b/metricbeat/modules.d/mysql.yml.disabled @@ -21,3 +21,15 @@ # Password of hosts. Empty by default. #password: secret + + # Optional SSL/TLS. By default is false. 
+ #ssl.enabled: true + + # List of root certificates for SSL/TLS server verification + #ssl.certificate_authorities: ["/etc/pki/root/ca.crt"] + + # Certificate for SSL/TLS client authentication + #ssl.certificate: "/etc/pki/client/cert.crt" + + # Client certificate key file + #ssl.key: "/etc/pki/client/cert.key" \ No newline at end of file diff --git a/testing/environments/snapshot.yml b/testing/environments/snapshot.yml index 859e94b06721..693e6d2f7486 100644 --- a/testing/environments/snapshot.yml +++ b/testing/environments/snapshot.yml @@ -3,7 +3,7 @@ version: '2.3' services: elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:8.14.0-b9699c81-SNAPSHOT + image: docker.elastic.co/elasticsearch/elasticsearch:8.14.0-a2d5464c-SNAPSHOT # When extend is used it merges healthcheck.tests, see: # https://github.com/docker/compose/issues/8962 # healthcheck: @@ -31,7 +31,7 @@ services: - "./docker/elasticsearch/users_roles:/usr/share/elasticsearch/config/users_roles" logstash: - image: docker.elastic.co/logstash/logstash:8.14.0-b9699c81-SNAPSHOT + image: docker.elastic.co/logstash/logstash:8.14.0-a2d5464c-SNAPSHOT healthcheck: test: ["CMD", "curl", "-f", "http://localhost:9600/_node/stats"] retries: 600 @@ -44,7 +44,7 @@ services: - 5055:5055 kibana: - image: docker.elastic.co/kibana/kibana:8.14.0-b9699c81-SNAPSHOT + image: docker.elastic.co/kibana/kibana:8.14.0-a2d5464c-SNAPSHOT environment: - "ELASTICSEARCH_USERNAME=kibana_system_user" - "ELASTICSEARCH_PASSWORD=testing" diff --git a/x-pack/filebeat/docs/inputs/input-http-endpoint.asciidoc b/x-pack/filebeat/docs/inputs/input-http-endpoint.asciidoc index b7a7ee06f70d..7f3050d1f686 100644 --- a/x-pack/filebeat/docs/inputs/input-http-endpoint.asciidoc +++ b/x-pack/filebeat/docs/inputs/input-http-endpoint.asciidoc @@ -227,6 +227,11 @@ The prefix for the signature. Certain webhooks prefix the HMAC signature with a By default the input expects the incoming POST to include a Content-Type of `application/json` to try to enforce the incoming data to be valid JSON. In certain scenarios when the source of the request is not able to do that, it can be overwritten with another value or set to null. +[float] +==== `program` + +The normal operation of the input treats the body either as a single event when the body is an object, or as a set of events when the body is an array. If the body should be handled differently, for example when a set of events is carried in an array field of an object and each element should become its own event, then a https://opensource.google.com/projects/cel[Common Expression Language (CEL)] program can be provided through this configuration field. No CEL extensions are provided beyond the functions in the CEL https://github.com/google/cel-spec/blob/master/doc/langdef.md#standard[standard library]. CEL https://pkg.go.dev/github.com/google/cel-go/cel#OptionalTypes[optional types] are supported.
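+
+For example, a webhook body that wraps a set of events in a `records` array, such as
+
+["source","json"]
+----
+{"requestId": "ed4acda5-034f-9f42-bba1-f29aea6d7d8f", "records": [{"data": "aGVsbG8="}, {"data": "aGVsbG8gd29ybGQ="}]}
+----
+
+could be split into one event per record with a configuration along these lines (the listener settings below are only placeholders; the program mirrors the one used in the input's tests):
+
+["source","yaml"]
+----
+filebeat.inputs:
+- type: http_endpoint
+  enabled: true
+  listen_address: localhost
+  listen_port: 8080
+  program: |
+    obj.records.map(r, {
+      "requestId": obj.requestId,
+      "event": r
+    })
+----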
+ [float] ==== `response_code` diff --git a/x-pack/filebeat/input/http_endpoint/config.go b/x-pack/filebeat/input/http_endpoint/config.go index 3b0c97741dee..1618dc907583 100644 --- a/x-pack/filebeat/input/http_endpoint/config.go +++ b/x-pack/filebeat/input/http_endpoint/config.go @@ -37,6 +37,7 @@ type config struct { URL string `config:"url" validate:"required"` Prefix string `config:"prefix"` ContentType string `config:"content_type"` + Program string `config:"program"` SecretHeader string `config:"secret.header"` SecretValue string `config:"secret.value"` HMACHeader string `config:"hmac.header"` diff --git a/x-pack/filebeat/input/http_endpoint/handler.go b/x-pack/filebeat/input/http_endpoint/handler.go index 0e2620b5b658..3d0948489ac8 100644 --- a/x-pack/filebeat/input/http_endpoint/handler.go +++ b/x-pack/filebeat/input/http_endpoint/handler.go @@ -12,10 +12,16 @@ import ( "io" "net" "net/http" + "reflect" "time" + "github.com/google/cel-go/cel" + "github.com/google/cel-go/checker/decls" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" "go.uber.org/zap" "go.uber.org/zap/zapcore" + "google.golang.org/protobuf/types/known/structpb" stateless "github.com/elastic/beats/v7/filebeat/input/v2/input-stateless" "github.com/elastic/beats/v7/libbeat/beat" @@ -24,6 +30,7 @@ import ( "github.com/elastic/beats/v7/x-pack/filebeat/input/internal/httplog" "github.com/elastic/elastic-agent-libs/logp" "github.com/elastic/elastic-agent-libs/mapstr" + "github.com/elastic/mito/lib" ) const headerContentEncoding = "Content-Encoding" @@ -43,6 +50,7 @@ type handler struct { reqLogger *zap.Logger host, scheme string + program *program messageField string responseCode int responseBody string @@ -80,7 +88,7 @@ func (h *handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { r.Body = io.NopCloser(&buf) } - objs, _, status, err := httpReadJSON(body) + objs, _, status, err := httpReadJSON(body, h.program) if err != nil { h.sendAPIErrorResponse(w, r, h.log, status, err) h.metrics.apiErrors.Add(1) @@ -218,22 +226,22 @@ func (h *handler) publishEvent(obj, headers mapstr.M) error { return nil } -func httpReadJSON(body io.Reader) (objs []mapstr.M, rawMessages []json.RawMessage, status int, err error) { +func httpReadJSON(body io.Reader, prg *program) (objs []mapstr.M, rawMessages []json.RawMessage, status int, err error) { if body == http.NoBody { return nil, nil, http.StatusNotAcceptable, errBodyEmpty } - obj, rawMessage, err := decodeJSON(body) + obj, rawMessage, err := decodeJSON(body, prg) if err != nil { return nil, nil, http.StatusBadRequest, err } return obj, rawMessage, http.StatusOK, err } -func decodeJSON(body io.Reader) (objs []mapstr.M, rawMessages []json.RawMessage, err error) { +func decodeJSON(body io.Reader, prg *program) (objs []mapstr.M, rawMessages []json.RawMessage, err error) { decoder := json.NewDecoder(body) for decoder.More() { var raw json.RawMessage - if err := decoder.Decode(&raw); err != nil { + if err = decoder.Decode(&raw); err != nil { if err == io.EOF { //nolint:errorlint // This will never be a wrapped error. 
break } @@ -241,9 +249,22 @@ func decodeJSON(body io.Reader) (objs []mapstr.M, rawMessages []json.RawMessage, } var obj interface{} - if err := newJSONDecoder(bytes.NewReader(raw)).Decode(&obj); err != nil { + if err = newJSONDecoder(bytes.NewReader(raw)).Decode(&obj); err != nil { return nil, nil, fmt.Errorf("malformed JSON object at stream position %d: %w", decoder.InputOffset(), err) } + + if prg != nil { + obj, err = prg.eval(obj) + if err != nil { + return nil, nil, err + } + // Re-marshal to ensure the raw bytes agree with the constructed object. + raw, err = json.Marshal(obj) + if err != nil { + return nil, nil, fmt.Errorf("failed to remarshal object: %w", err) + } + } + switch v := obj.(type) { case map[string]interface{}: objs = append(objs, v) @@ -265,6 +286,86 @@ func decodeJSON(body io.Reader) (objs []mapstr.M, rawMessages []json.RawMessage, return objs, rawMessages, nil } +type program struct { + prg cel.Program + ast *cel.Ast +} + +func newProgram(src string) (*program, error) { + if src == "" { + return nil, nil + } + + registry, err := types.NewRegistry() + if err != nil { + return nil, fmt.Errorf("failed to create env: %w", err) + } + env, err := cel.NewEnv( + cel.Declarations(decls.NewVar("obj", decls.Dyn)), + cel.OptionalTypes(cel.OptionalTypesVersion(lib.OptionalTypesVersion)), + cel.CustomTypeAdapter(&numberAdapter{registry}), + cel.CustomTypeProvider(registry), + ) + if err != nil { + return nil, fmt.Errorf("failed to create env: %w", err) + } + + ast, iss := env.Compile(src) + if iss.Err() != nil { + return nil, fmt.Errorf("failed compilation: %w", iss.Err()) + } + + prg, err := env.Program(ast) + if err != nil { + return nil, fmt.Errorf("failed program instantiation: %w", err) + } + return &program{prg: prg, ast: ast}, nil +} + +var _ types.Adapter = (*numberAdapter)(nil) + +type numberAdapter struct { + fallback types.Adapter +} + +func (a *numberAdapter) NativeToValue(value any) ref.Val { + if n, ok := value.(json.Number); ok { + var errs []error + i, err := n.Int64() + if err == nil { + return types.Int(i) + } + errs = append(errs, err) + f, err := n.Float64() + if err == nil { + return types.Double(f) + } + errs = append(errs, err) + return types.NewErr("%v", errors.Join(errs...)) + } + return a.fallback.NativeToValue(value) +} + +func (p *program) eval(obj interface{}) (interface{}, error) { + out, _, err := p.prg.Eval(map[string]interface{}{"obj": obj}) + if err != nil { + err = lib.DecoratedError{AST: p.ast, Err: err} + return nil, fmt.Errorf("failed eval: %w", err) + } + + v, err := out.ConvertToNative(reflect.TypeOf((*structpb.Value)(nil))) + if err != nil { + return nil, fmt.Errorf("failed proto conversion: %w", err) + } + switch v := v.(type) { + case *structpb.Value: + return v.AsInterface(), nil + default: + // This should never happen. 
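+ // ConvertToNative was asked for a *structpb.Value above, so reaching this case would indicate a bug in the conversion.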
+ return nil, fmt.Errorf("unexpected native conversion type: %T", v) + } +} + func decodeJSONArray(raw *bytes.Reader) (objs []mapstr.M, rawMessages []json.RawMessage, err error) { dec := newJSONDecoder(raw) token, err := dec.Token() diff --git a/x-pack/filebeat/input/http_endpoint/handler_test.go b/x-pack/filebeat/input/http_endpoint/handler_test.go index 6660508b15b4..cb911f8ab188 100644 --- a/x-pack/filebeat/input/http_endpoint/handler_test.go +++ b/x-pack/filebeat/input/http_endpoint/handler_test.go @@ -38,6 +38,7 @@ func Test_httpReadJSON(t *testing.T) { tests := []struct { name string body string + program string wantObjs []mapstr.M wantStatus int wantErr bool @@ -135,10 +136,43 @@ func Test_httpReadJSON(t *testing.T) { }, wantStatus: http.StatusOK, }, + { + name: "kinesis", + body: `{ + "requestId": "ed4acda5-034f-9f42-bba1-f29aea6d7d8f", + "timestamp": 1578090901599, + "records": [ + { + "data": "aGVsbG8=" + }, + { + "data": "aGVsbG8gd29ybGQ=" + } + ] +}`, + program: `obj.records.map(r, { + "requestId": obj.requestId, + "timestamp": string(obj.timestamp), // leave timestamp in unix milli for ingest to handle. + "event": r, + })`, + wantRawMessage: []json.RawMessage{ + []byte(`{"event":{"data":"aGVsbG8="},"requestId":"ed4acda5-034f-9f42-bba1-f29aea6d7d8f","timestamp":"1578090901599"}`), + []byte(`{"event":{"data":"aGVsbG8gd29ybGQ="},"requestId":"ed4acda5-034f-9f42-bba1-f29aea6d7d8f","timestamp":"1578090901599"}`), + }, + wantObjs: []mapstr.M{ + {"event": map[string]any{"data": "aGVsbG8="}, "requestId": "ed4acda5-034f-9f42-bba1-f29aea6d7d8f", "timestamp": "1578090901599"}, + {"event": map[string]any{"data": "aGVsbG8gd29ybGQ="}, "requestId": "ed4acda5-034f-9f42-bba1-f29aea6d7d8f", "timestamp": "1578090901599"}, + }, + wantStatus: http.StatusOK, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - gotObjs, rawMessages, gotStatus, err := httpReadJSON(strings.NewReader(tt.body)) + prg, err := newProgram(tt.program) + if err != nil { + t.Fatalf("failed to compile program: %v", err) + } + gotObjs, rawMessages, gotStatus, err := httpReadJSON(strings.NewReader(tt.body), prg) if (err != nil) != tt.wantErr { t.Errorf("httpReadJSON() error = %v, wantErr %v", err, tt.wantErr) return @@ -344,7 +378,7 @@ func Test_apiResponse(t *testing.T) { pub := new(publisher) metrics := newInputMetrics("") defer metrics.Close() - apiHandler := newHandler(ctx, tracerConfig(tc.name, tc.conf, *withTraces), pub, logp.NewLogger("http_endpoint.test"), metrics) + apiHandler := newHandler(ctx, tracerConfig(tc.name, tc.conf, *withTraces), nil, pub, logp.NewLogger("http_endpoint.test"), metrics) // Execute handler. 
respRec := httptest.NewRecorder() diff --git a/x-pack/filebeat/input/http_endpoint/input.go b/x-pack/filebeat/input/http_endpoint/input.go index ca648b697470..7d5055ebe653 100644 --- a/x-pack/filebeat/input/http_endpoint/input.go +++ b/x-pack/filebeat/input/http_endpoint/input.go @@ -131,6 +131,14 @@ func (p *pool) serve(ctx v2.Context, e *httpEndpoint, pub stateless.Publisher, m metrics.route.Set(u.Path) metrics.isTLS.Set(e.tlsConfig != nil) + var prg *program + if e.config.Program != "" { + prg, err = newProgram(e.config.Program) + if err != nil { + return err + } + } + p.mu.Lock() s, ok := p.servers[e.addr] if ok { @@ -149,7 +157,7 @@ func (p *pool) serve(ctx v2.Context, e *httpEndpoint, pub stateless.Publisher, m return err } log.Infof("Adding %s end point to server on %s", pattern, e.addr) - s.mux.Handle(pattern, newHandler(s.ctx, e.config, pub, log, metrics)) + s.mux.Handle(pattern, newHandler(s.ctx, e.config, prg, pub, log, metrics)) s.idOf[pattern] = ctx.ID p.mu.Unlock() <-s.ctx.Done() @@ -165,7 +173,7 @@ func (p *pool) serve(ctx v2.Context, e *httpEndpoint, pub stateless.Publisher, m srv: srv, } s.ctx, s.cancel = ctxtool.WithFunc(ctx.Cancelation, func() { srv.Close() }) - mux.Handle(pattern, newHandler(s.ctx, e.config, pub, log, metrics)) + mux.Handle(pattern, newHandler(s.ctx, e.config, prg, pub, log, metrics)) p.servers[e.addr] = s p.mu.Unlock() @@ -287,7 +295,7 @@ func (s *server) getErr() error { return s.err } -func newHandler(ctx context.Context, c config, pub stateless.Publisher, log *logp.Logger, metrics *inputMetrics) http.Handler { +func newHandler(ctx context.Context, c config, prg *program, pub stateless.Publisher, log *logp.Logger, metrics *inputMetrics) http.Handler { h := &handler{ log: log, publisher: pub, @@ -305,6 +313,7 @@ func newHandler(ctx context.Context, c config, pub stateless.Publisher, log *log hmacType: c.HMACType, hmacPrefix: c.HMACPrefix, }, + program: prg, messageField: c.Prefix, responseCode: c.ResponseCode, responseBody: c.ResponseBody, diff --git a/x-pack/metricbeat/metricbeat.reference.yml b/x-pack/metricbeat/metricbeat.reference.yml index 1e6abf11a604..f71e58904fd7 100644 --- a/x-pack/metricbeat/metricbeat.reference.yml +++ b/x-pack/metricbeat/metricbeat.reference.yml @@ -1172,6 +1172,18 @@ metricbeat.modules: # By setting raw to true, all raw fields from the status metricset will be added to the event. #raw: false + # Optional SSL/TLS. By default is false. + #ssl.enabled: true + + # List of root certificates for SSL/TLS server verification + #ssl.certificate_authorities: ["/etc/pki/root/ca.crt"] + + # Certificate for SSL/TLS client authentication + #ssl.certificate: "/etc/pki/client/cert.crt" + + # Client certificate key file + #ssl.key: "/etc/pki/client/cert.key" + #--------------------------------- NATS Module --------------------------------- - module: nats metricsets: diff --git a/x-pack/packetbeat/magefile.go b/x-pack/packetbeat/magefile.go index 03104ab9157e..357e5e235855 100644 --- a/x-pack/packetbeat/magefile.go +++ b/x-pack/packetbeat/magefile.go @@ -172,6 +172,13 @@ func SystemTest(ctx context.Context) error { return devtools.GoTest(ctx, args) } +func getBucketName() string { + if os.Getenv("BUILDKITE") == "true" { + return "ingest-buildkite-ci" + } + return "obs-ci-cache" +} + // getNpcapInstaller gets the installer from the Google Cloud Storage service. 
// // On Windows platforms, if getNpcapInstaller is invoked with the environment variables @@ -198,7 +205,8 @@ func getNpcapInstaller() error { return err } } + ciBucketName := getBucketName() fmt.Printf("getting %s from private cache\n", installer) - return sh.RunV("gsutil", "cp", "gs://obs-ci-cache/private/"+installer, dstPath) + return sh.RunV("gsutil", "cp", "gs://"+ciBucketName+"/private/"+installer, dstPath) }