diff --git a/.buildkite/auditbeat/auditbeat-pipeline.yml b/.buildkite/auditbeat/auditbeat-pipeline.yml index 798939bbf32..be03d7843b2 100644 --- a/.buildkite/auditbeat/auditbeat-pipeline.yml +++ b/.buildkite/auditbeat/auditbeat-pipeline.yml @@ -1,13 +1,18 @@ # yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json env: - IMAGE_UBUNTU_X86_64: "family/core-ubuntu-2204" - IMAGE_UBUNTU_ARM_64: "core-ubuntu-2004-aarch64" + BEATS_PROJECT_NAME: "auditbeat" + IMAGE_UBUNTU_X86_64: "family/platform-ingest-beats-ubuntu-2204" + AWS_IMAGE_UBUNTU_ARM_64: "platform-ingest-beats-ubuntu-2004-aarch64" IMAGE_WIN_2016: "family/core-windows-2016" IMAGE_WIN_2019: "family/core-windows-2019" IMAGE_WIN_2022: "family/core-windows-2022" - IMAGE_RHEL9: "family/core-rhel-9" + IMAGE_RHEL9: "family/platform-ingest-beats-rhel-9" IMAGE_MACOS_X86_64: "generic-13-ventura-x64" + GCP_DEFAULT_MACHINE_TYPE: "c2d-highcpu-8" + GCP_HI_PERF_MACHINE_TYPE: "c2d-highcpu-16" + GCP_WIN_MACHINE_TYPE: "n2-standard-8" + AWS_ARM_INSTANCE_TYPE: "t4g.xlarge" steps: - group: "Auditbeat Mandatory Testing" @@ -24,6 +29,7 @@ steps: agents: provider: "gcp" image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" artifact_paths: - "auditbeat/build/*.xml" - "auditbeat/build/*.json" @@ -37,19 +43,20 @@ steps: agents: provider: "gcp" image: "${IMAGE_RHEL9}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" artifact_paths: - "auditbeat/build/*.xml" - "auditbeat/build/*.json" - label: ":windows:-{{matrix.image}} Unit Tests" - command: ".buildkite/auditbeat/scripts/unit-tests-win.ps1" + command: ".buildkite/scripts/win_unit_tests.ps1" notify: - github_commit_status: context: "Auditbeat: windows/Unit Tests" agents: provider: "gcp" image: "{{matrix.image}}" - machine_type: "n2-standard-8" + machine_type: "${GCP_WIN_MACHINE_TYPE}" disk_size: 200 disk_type: "pd-ssd" matrix: @@ -72,6 +79,7 @@ steps: agents: provider: "gcp" image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" - group: "Extended Testing" key: "extended-tests" @@ -88,8 +96,8 @@ steps: context: "Auditbeat/Extended: Unit Tests ARM" agents: provider: "aws" - imagePrefix: "${IMAGE_UBUNTU_ARM_64}" - instanceType: "t4g.large" + imagePrefix: "${AWS_IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" artifact_paths: "auditbeat/build/*.xml" - label: ":mac: MacOS Unit Tests" @@ -112,14 +120,14 @@ steps: steps: - label: ":windows: Win 2019 Unit Tests" key: "win-extended-2019" - command: ".buildkite/auditbeat/scripts/unit-tests-win.ps1" + command: ".buildkite/scripts/win_unit_tests.ps1" notify: - github_commit_status: context: "Auditbeat/Extended: Win-2019 Unit Tests" agents: provider: "gcp" image: "${IMAGE_WIN_2019}" - machine_type: "n2-standard-8" + machine_type: "${GCP_WIN_MACHINE_TYPE}" disk_size: 200 disk_type: "pd-ssd" artifact_paths: @@ -134,4 +142,4 @@ steps: steps: - label: Package pipeline - commands: ".buildkite/auditbeat/scripts/package-step.sh" + commands: ".buildkite/scripts/packaging/package-step.sh" diff --git a/.buildkite/auditbeat/scripts/crosscompile.sh b/.buildkite/auditbeat/scripts/crosscompile.sh index 866d6be4223..da8452d5380 100755 --- a/.buildkite/auditbeat/scripts/crosscompile.sh +++ b/.buildkite/auditbeat/scripts/crosscompile.sh @@ -2,7 +2,5 @@ set -euo pipefail -source .buildkite/env-scripts/linux-env.sh - echo "--- Executing Crosscompile" make -C auditbeat crosscompile diff --git a/.buildkite/auditbeat/scripts/package.sh b/.buildkite/auditbeat/scripts/package.sh deleted file 
mode 100755 index 71872ca15a3..00000000000 --- a/.buildkite/auditbeat/scripts/package.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail - -source .buildkite/env-scripts/linux-env.sh - -echo "--- Docker Version: $(docker --version)" - -echo "--- Start Packaging" -cd auditbeat -umask 0022 -mage package - diff --git a/.buildkite/auditbeat/scripts/unit-tests-win.ps1 b/.buildkite/auditbeat/scripts/unit-tests-win.ps1 deleted file mode 100644 index 200627d518f..00000000000 --- a/.buildkite/auditbeat/scripts/unit-tests-win.ps1 +++ /dev/null @@ -1,51 +0,0 @@ -$ErrorActionPreference = "Stop" # set -e -$GoVersion = $env:GOLANG_VERSION # If Choco doesn't have the version specified in .go-version file, should be changed manually - -# Forcing to checkout again all the files with a correct autocrlf. -# Doing this here because we cannot set git clone options before. -function fixCRLF() { - Write-Host "--- Fixing CRLF in git checkout --" - git config core.autocrlf false - git rm --quiet --cached -r . - git reset --quiet --hard -} - -function withGolang() { - Write-Host "--- Install golang $GoVersion --" - choco install golang -y --version $GoVersion - - $choco = Convert-Path "$((Get-Command choco).Path)\..\.." - Import-Module "$choco\helpers\chocolateyProfile.psm1" - refreshenv - go version - go env -} - -function installGoDependencies() { - $installPackages = @( - "github.com/magefile/mage" - "github.com/elastic/go-licenser" - "golang.org/x/tools/cmd/goimports" - "github.com/jstemmer/go-junit-report" - "github.com/tebeka/go2xunit" - ) - foreach ($pkg in $installPackages) { - go install "$pkg" - } -} - -fixCRLF - -$ErrorActionPreference = "Continue" # set +e - -Set-Location -Path auditbeat -New-Item -ItemType Directory -Force -Path "build" -withGolang -installGoDependencies - -mage build unitTest - -$EXITCODE=$LASTEXITCODE -$ErrorActionPreference = "Stop" - -Exit $EXITCODE diff --git a/.buildkite/auditbeat/scripts/unit-tests.sh b/.buildkite/auditbeat/scripts/unit-tests.sh index c1f5685c77f..4b8e86243c9 100755 --- a/.buildkite/auditbeat/scripts/unit-tests.sh +++ b/.buildkite/auditbeat/scripts/unit-tests.sh @@ -2,11 +2,8 @@ set -euo pipefail -source .buildkite/env-scripts/linux-env.sh - echo "--- Running Unit Tests" sudo chmod -R go-w auditbeat/ -cd auditbeat umask 0022 -mage build unitTest +mage -d auditbeat build unitTest diff --git a/.buildkite/buildkite.yml b/.buildkite/buildkite.yml new file mode 100644 index 00000000000..4707707e07c --- /dev/null +++ b/.buildkite/buildkite.yml @@ -0,0 +1,44 @@ +projects: + - "auditbeat" + - "deploy/kubernetes" + - "filebeat" + - "heartbeat" + - "libbeat" + - "metricbeat" + - "packetbeat" + - "winlogbeat" + - "x-pack/auditbeat" + - "x-pack/dockerlogbeat" + - "x-pack/filebeat" + - "x-pack/functionbeat" + - "x-pack/heartbeat" + - "x-pack/libbeat" + - "x-pack/metricbeat" + - "x-pack/osquerybeat" + - "x-pack/packetbeat" + - "x-pack/winlogbeat" + +## Changeset macros that are defined here and used in each specific 3.0 pipeline. +changeset: + ci: + - "^Jenkinsfile" + - "^\\.ci/scripts/.*" + oss: + - "^go.mod" + - "^pytest.ini" + - "^dev-tools/.*" + - "^libbeat/.*" + - "^testing/.*" + xpack: + - "^go.mod" + - "^pytest.ini" + - "^dev-tools/.*" + - "^libbeat/.*" + - "^testing/.*" + - "^x-pack/libbeat/.*" + +disabled: + when: + labels: ## Skip the GitHub Pull Request builds if any of the given GitHub labels match with the assigned labels in the PR. + - skip-ci + draft: true ## Skip the GitHub Pull Request builds with Draft PRs. 
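Note on the changeset macros above: each entry is a regular expression tested against the paths a PR touches, and the shell helpers referenced elsewhere in this diff (for example are_files_changed in .buildkite/env-scripts/util.sh, whose body is not shown here) apply the same idea with grep. A minimal sketch of that gating logic, assuming the base branch is exposed as BUILDKITE_PULL_REQUEST_BASE_BRANCH; the helper shape is illustrative, not the exact upstream implementation:

```bash
#!/usr/bin/env bash
# Sketch: decide whether the "oss" changeset applies to this PR.
base="${BUILDKITE_PULL_REQUEST_BASE_BRANCH:-main}"
changed_files="$(git diff --name-only "origin/${base}...HEAD")"

# One alternation built from the "oss" entries defined above.
oss_changeset='^go.mod|^pytest.ini|^dev-tools/.*|^libbeat/.*|^testing/.*'

if grep -qE "${oss_changeset}" <<< "${changed_files}"; then
  echo "OSS-wide change detected: mandatory groups should run"
fi
```

The deleted per-beat package-step.sh scripts further down in this diff use the same pattern to decide whether to pipe generated steps into buildkite-agent pipeline upload.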
diff --git a/.buildkite/env-scripts/env.sh b/.buildkite/env-scripts/env.sh index 4dfc01bafc3..f28658a107d 100644 --- a/.buildkite/env-scripts/env.sh +++ b/.buildkite/env-scripts/env.sh @@ -1,19 +1,53 @@ #!/usr/bin/env bash -SETUP_GVM_VERSION="v0.5.1" +source .buildkite/env-scripts/util.sh + +DOCS_CHANGESET="^.*\.(asciidoc|md)$ +deploy/kubernetes/.*-kubernetes.yaml" +PACKAGING_CHANGESET="^dev-tools/packaging/ +^.go-version" + +REPO="beats" WORKSPACE="$(pwd)" BIN="${WORKSPACE}/bin" HW_TYPE="$(uname -m)" PLATFORM_TYPE="$(uname)" -REPO="beats" TMP_FOLDER="tmp.${REPO}" +SNAPSHOT="true" +PYTEST_ADDOPTS="" +OSS_MODULE_PATTERN="^[a-z0-9]+beat\\/module\\/([^\\/]+)\\/.*" +XPACK_MODULE_PATTERN="^x-pack\\/[a-z0-9]+beat\\/module\\/([^\\/]+)\\/.*" + +SETUP_GVM_VERSION="v0.5.1" +ASDF_MAGE_VERSION="1.14.0" +SETUP_WIN_PYTHON_VERSION="3.11.0" + +# Docker & DockerHub +DOCKER_COMPOSE_VERSION="1.21.0" DOCKER_REGISTRY="docker.elastic.co" -export SETUP_GVM_VERSION +ONLY_DOCS=$(changeset_applies "$DOCS_CHANGESET") +PACKAGING_CHANGES=$(changeset_applies "$PACKAGING_CHANGESET") +GO_MOD_CHANGES=$(changeset_applies "^go.mod") + +export REPO export WORKSPACE export BIN export HW_TYPE export PLATFORM_TYPE -export REPO export TMP_FOLDER +export SNAPSHOT +export PYTEST_ADDOPTS +export OSS_MODULE_PATTERN +export XPACK_MODULE_PATTERN + +export SETUP_GVM_VERSION +export ASDF_MAGE_VERSION +export SETUP_WIN_PYTHON_VERSION + +export DOCKER_COMPOSE_VERSION export DOCKER_REGISTRY + +export ONLY_DOCS +export PACKAGING_CHANGES +export GO_MOD_CHANGES diff --git a/.buildkite/env-scripts/linux-env.sh b/.buildkite/env-scripts/linux-env.sh deleted file mode 100644 index 1365aaace4a..00000000000 --- a/.buildkite/env-scripts/linux-env.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail - -source .buildkite/env-scripts/util.sh - -DEBIAN_FRONTEND="noninteractive" - -sudo mkdir -p /etc/needrestart -echo "\$nrconf{restart} = 'a';" | sudo tee -a /etc/needrestart/needrestart.conf > /dev/null - -if [[ $PLATFORM_TYPE == "Linux" ]]; then - # Remove this code once beats specific agent is set up - if grep -q 'Ubuntu' /etc/*release; then - export DEBIAN_FRONTEND - - echo "--- Ubuntu - Installing libs" - sudo apt-get update - sudo apt-get install -y libsystemd-dev - sudo apt install -y python3-pip - sudo apt-get install -y python3-venv - fi - - # Remove this code once beats specific agent is set up - if grep -q 'Red Hat' /etc/*release; then - echo "--- RHL - Installing libs" - sudo yum update -y - sudo yum install -y systemd-devel - sudo yum install -y python3-pip - sudo yum install -y python3 - pip3 install virtualenv - fi -fi - -if [[ $PLATFORM_TYPE == Darwin* ]]; then - echo "--- Setting larger ulimit on MacOS" - # To bypass file descriptor errors like "Too many open files error" on MacOS - ulimit -Sn 50000 - echo "--- ULIMIT: $(ulimit -n)" -fi - -echo "--- Setting up environment" -add_bin_path -with_go -with_mage diff --git a/.buildkite/env-scripts/util.sh b/.buildkite/env-scripts/util.sh index 6a5c36bcd04..68fd08a75df 100644 --- a/.buildkite/env-scripts/util.sh +++ b/.buildkite/env-scripts/util.sh @@ -9,11 +9,11 @@ add_bin_path() { } with_go() { - local go_version="${GOLANG_VERSION}" + local go_version="${GO_VERSION}" echo "Setting up the Go environment..." 
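Context for the with_go changes in this hunk: andrewkroh/gvm is a small binary that prints shell export statements for the requested Go version, and the eval applies them to the current shell. A rough sketch of the flow, with an illustrative Linux/amd64 asset name and the version taken from .go-version as the surrounding scripts do:

```bash
#!/usr/bin/env bash
# Sketch of the gvm flow driven by with_go (asset name illustrative).
BIN="$(pwd)/bin" && mkdir -p "${BIN}"
curl -sL -o "${BIN}/gvm" \
  "https://github.com/andrewkroh/gvm/releases/download/v0.5.1/gvm-Linux-amd64"
chmod +x "${BIN}/gvm"

# gvm prints GOROOT/PATH export lines for the requested Go version;
# eval makes them effective in this shell.
eval "$("${BIN}/gvm" "$(cat .go-version)")"
go version
```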
create_bin check_platform_architecture - retry 5 curl -sL -o ${BIN}/gvm "https://github.com/andrewkroh/gvm/releases/download/${SETUP_GVM_VERSION}/gvm-${PLATFORM_TYPE}-${arch_type}" + retry_with_count 5 curl -sL -o ${BIN}/gvm "https://github.com/andrewkroh/gvm/releases/download/${SETUP_GVM_VERSION}/gvm-${PLATFORM_TYPE}-${arch_type}" export PATH="${PATH}:${BIN}" chmod +x ${BIN}/gvm eval "$(gvm "$go_version")" @@ -60,7 +60,7 @@ check_platform_architecture() { esac } -retry() { +retry_with_count() { local retries=$1 shift local count=0 @@ -89,16 +89,16 @@ are_files_changed() { fi } -cleanup() { - echo "Deleting temporary files..." - rm -rf ${BIN}/${TMP_FOLDER}.* - echo "Done." +changeset_applies() { + local changeset=$1 + if are_files_changed "$changeset"; then + echo true + else + echo false + fi } -unset_secrets () { - for var in $(printenv | sed 's;=.*;;' | sort); do - if [[ "$var" == *_SECRET || "$var" == *_TOKEN ]]; then - unset "$var" - fi - done +set_git_config() { + git config user.name "${GITHUB_USERNAME_SECRET}" + git config user.email "${GITHUB_EMAIL_SECRET}" } diff --git a/.buildkite/filebeat/filebeat-pipeline.yml b/.buildkite/filebeat/filebeat-pipeline.yml index e811d286953..ae22629e6ec 100644 --- a/.buildkite/filebeat/filebeat-pipeline.yml +++ b/.buildkite/filebeat/filebeat-pipeline.yml @@ -1,12 +1,17 @@ # yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json env: - IMAGE_UBUNTU_X86_64: "family/core-ubuntu-2204" - IMAGE_UBUNTU_ARM_64: "core-ubuntu-2004-aarch64" + BEATS_PROJECT_NAME: "filebeat" + IMAGE_UBUNTU_X86_64: "family/platform-ingest-beats-ubuntu-2204" + AWS_IMAGE_UBUNTU_ARM_64: "platform-ingest-beats-ubuntu-2204-aarch64" IMAGE_WIN_2016: "family/core-windows-2016" IMAGE_WIN_2019: "family/core-windows-2019" IMAGE_WIN_2022: "family/core-windows-2022" IMAGE_MACOS_X86_64: "generic-13-ventura-x64" + GCP_DEFAULT_MACHINE_TYPE: "c2d-highcpu-8" + GCP_HI_PERF_MACHINE_TYPE: "c2d-highcpu-16" + GCP_WIN_MACHINE_TYPE: "n2-standard-8" + AWS_ARM_INSTANCE_TYPE: "m6g.xlarge" steps: - group: "Filebeat Mandatory Testing" @@ -23,7 +28,7 @@ steps: agents: provider: "gcp" image: "${IMAGE_UBUNTU_X86_64}" - machineType: "c2-standard-16" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" artifact_paths: - "filebeat/build/*.xml" - "filebeat/build/*.json" @@ -37,7 +42,7 @@ steps: agents: provider: "gcp" image: "${IMAGE_UBUNTU_X86_64}" - machineType: "c2-standard-16" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" artifact_paths: - "filebeat/build/*.xml" - "filebeat/build/*.json" @@ -49,22 +54,22 @@ steps: - github_commit_status: context: "Filebeat: Python Integration Tests" agents: - provider: "gcp" + provider: gcp image: "${IMAGE_UBUNTU_X86_64}" - machineType: "c2-standard-16" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" artifact_paths: - "filebeat/build/*.xml" - "filebeat/build/*.json" - label: ":windows:-{{matrix.image}} Unit Tests" - command: ".buildkite/filebeat/scripts/unit-tests-win.ps1" + command: ".buildkite/scripts/win_unit_tests.ps1" notify: - github_commit_status: context: "Filebeat: windows/Unit Tests" agents: provider: "gcp" image: "{{matrix.image}}" - machine_type: "n2-standard-8" + machine_type: "${GCP_WIN_MACHINE_TYPE}" disk_size: 200 disk_type: "pd-ssd" matrix: @@ -91,8 +96,8 @@ steps: context: "Filebeat/Extended: Unit Tests ARM" agents: provider: "aws" - imagePrefix: "${IMAGE_UBUNTU_ARM_64}" - instanceType: "t4g.large" + imagePrefix: "${AWS_IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" artifact_paths: 
"filebeat/build/*.xml" - label: ":mac: MacOS Unit Tests" @@ -115,14 +120,14 @@ steps: steps: - label: ":windows: Win 2019 Unit Tests" key: "win-extended-2019" - command: ".buildkite/filebeat/scripts/unit-tests-win.ps1" + command: ".buildkite/scripts/win_unit_tests.ps1" notify: - github_commit_status: context: "Filebeat/Extended: Win-2019 Unit Tests" agents: provider: "gcp" image: "${IMAGE_WIN_2019}" - machine_type: "n2-standard-8" + machine_type: "${GCP_WIN_MACHINE_TYPE}" disk_size: 200 disk_type: "pd-ssd" artifact_paths: @@ -137,4 +142,4 @@ steps: steps: - label: Package pipeline - commands: ".buildkite/filebeat/scripts/package-step.sh" + commands: ".buildkite/scripts/packaging/package-step.sh" diff --git a/.buildkite/filebeat/scripts/integration-gotests.sh b/.buildkite/filebeat/scripts/integration-gotests.sh index d64ce7c98eb..6de39ff8817 100755 --- a/.buildkite/filebeat/scripts/integration-gotests.sh +++ b/.buildkite/filebeat/scripts/integration-gotests.sh @@ -2,8 +2,6 @@ set -euo pipefail -source .buildkite/env-scripts/linux-env.sh - echo "--- Executing Integration Tests" sudo chmod -R go-w filebeat/ diff --git a/.buildkite/filebeat/scripts/integration-pytests.sh b/.buildkite/filebeat/scripts/integration-pytests.sh index b51e8ae18a6..9aff8695c35 100755 --- a/.buildkite/filebeat/scripts/integration-pytests.sh +++ b/.buildkite/filebeat/scripts/integration-pytests.sh @@ -2,8 +2,6 @@ set -euo pipefail -source .buildkite/env-scripts/linux-env.sh - echo "--- Executing Integration Tests" sudo chmod -R go-w filebeat/ diff --git a/.buildkite/filebeat/scripts/package-step.sh b/.buildkite/filebeat/scripts/package-step.sh deleted file mode 100755 index f8fa02db81d..00000000000 --- a/.buildkite/filebeat/scripts/package-step.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail - -source .buildkite/env-scripts/util.sh - -changeset="^filebeat/ -^go.mod -^pytest.ini -^dev-tools/ -^libbeat/ -^testing/ -^\.buildkite/filebeat/" - -if are_files_changed "$changeset"; then - bk_pipeline=$(cat <<-YAML - steps: - - label: ":ubuntu: Packaging Linux X86" - key: "package-linux-x86" - env: - PLATFORMS: "+all linux/amd64 linux/arm64 windows/amd64 darwin/amd64 darwin/arm64" - command: - - ".buildkite/filebeat/scripts/package.sh" - notify: - - github_commit_status: - context: "Filebeat/Packaging: Linux X86" - agents: - provider: "gcp" - image: "${IMAGE_UBUNTU_X86_64}" - - - label: ":linux: Packaging Linux ARM" - key: "package-linux-arm" - env: - PLATFORMS: "linux/arm64" - PACKAGES: "docker" - command: - - ".buildkite/filebeat/scripts/package.sh" - notify: - - github_commit_status: - context: "Filebeat/Packaging: ARM" - agents: - provider: "aws" - imagePrefix: "${IMAGE_UBUNTU_ARM_64}" - instanceType: "t4g.large" -YAML -) - echo "${bk_pipeline}" | buildkite-agent pipeline upload -else - buildkite-agent annotate "No required files changed. 
Skipped packaging" --style 'warning' --context 'ctx-warning' - exit 0 -fi diff --git a/.buildkite/filebeat/scripts/package.sh b/.buildkite/filebeat/scripts/package.sh deleted file mode 100755 index 0bb03250348..00000000000 --- a/.buildkite/filebeat/scripts/package.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail - -source .buildkite/env-scripts/linux-env.sh - -echo "--- Start Packaging" -cd filebeat -umask 0022 -mage package diff --git a/.buildkite/filebeat/scripts/unit-tests-win.ps1 b/.buildkite/filebeat/scripts/unit-tests-win.ps1 deleted file mode 100644 index 8990eb30a09..00000000000 --- a/.buildkite/filebeat/scripts/unit-tests-win.ps1 +++ /dev/null @@ -1,51 +0,0 @@ -$ErrorActionPreference = "Stop" # set -e -$GoVersion = $env:GOLANG_VERSION # If Choco doesn't have the version specified in .go-version file, should be changed manually - -# Forcing to checkout again all the files with a correct autocrlf. -# Doing this here because we cannot set git clone options before. -function fixCRLF() { - Write-Host "-- Fixing CRLF in git checkout --" - git config core.autocrlf false - git rm --quiet --cached -r . - git reset --quiet --hard -} - -function withGolang() { - Write-Host "-- Install golang $GoVersion --" - choco install golang -y --version $GoVersion - - $choco = Convert-Path "$((Get-Command choco).Path)\..\.." - Import-Module "$choco\helpers\chocolateyProfile.psm1" - refreshenv - go version - go env -} - -function installGoDependencies() { - $installPackages = @( - "github.com/magefile/mage" - "github.com/elastic/go-licenser" - "golang.org/x/tools/cmd/goimports" - "github.com/jstemmer/go-junit-report" - "github.com/tebeka/go2xunit" - ) - foreach ($pkg in $installPackages) { - go install "$pkg" - } -} - -fixCRLF - -$ErrorActionPreference = "Continue" # set +e - -Set-Location -Path filebeat -New-Item -ItemType Directory -Force -Path "build" -withGolang -installGoDependencies - -mage build unitTest - -$EXITCODE=$LASTEXITCODE -$ErrorActionPreference = "Stop" - -Exit $EXITCODE diff --git a/.buildkite/filebeat/scripts/unit-tests.sh b/.buildkite/filebeat/scripts/unit-tests.sh index 08ce9d4ea1c..2efb6b1ff8e 100755 --- a/.buildkite/filebeat/scripts/unit-tests.sh +++ b/.buildkite/filebeat/scripts/unit-tests.sh @@ -2,8 +2,6 @@ set -euo pipefail -source .buildkite/env-scripts/linux-env.sh - echo "--- Executing Unit Tests" sudo chmod -R go-w filebeat/ diff --git a/.buildkite/heartbeat/heartbeat-pipeline.yml b/.buildkite/heartbeat/heartbeat-pipeline.yml index bf645a2b295..2b5f6195f45 100644 --- a/.buildkite/heartbeat/heartbeat-pipeline.yml +++ b/.buildkite/heartbeat/heartbeat-pipeline.yml @@ -1,13 +1,18 @@ # yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json env: - IMAGE_UBUNTU_X86_64: "family/core-ubuntu-2204" - IMAGE_UBUNTU_ARM_64: "core-ubuntu-2004-aarch64" + BEATS_PROJECT_NAME: "heartbeat" + IMAGE_UBUNTU_X86_64: "family/platform-ingest-beats-ubuntu-2204" + AWS_IMAGE_UBUNTU_ARM_64: "platform-ingest-beats-ubuntu-2004-aarch64" IMAGE_WIN_2016: "family/core-windows-2016" IMAGE_WIN_2019: "family/core-windows-2019" IMAGE_WIN_2022: "family/core-windows-2022" - IMAGE_RHEL9: "family/core-rhel-9" + IMAGE_RHEL9: "family/platform-ingest-beats-rhel-9" IMAGE_MACOS_X86_64: "generic-13-ventura-x64" + GCP_DEFAULT_MACHINE_TYPE: "c2d-highcpu-8" + GCP_HI_PERF_MACHINE_TYPE: "c2d-highcpu-16" + GCP_WIN_MACHINE_TYPE: "n2-standard-8" + AWS_ARM_INSTANCE_TYPE: "t4g.xlarge" steps: - group: "Heartbeat Mandatory Testing" @@ -24,6 +29,7 
@@ steps: agents: provider: "gcp" image: "{{matrix.image}}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" matrix: setup: image: @@ -35,14 +41,14 @@ steps: - label: ":windows: Unit Tests / {{matrix.image}}" command: - - ".buildkite/heartbeat/scripts/unit-tests-win.ps1" + - ".buildkite/scripts/win_unit_tests.ps1" notify: - github_commit_status: context: "Heartbeat: windows/Unit Tests" agents: provider: "gcp" image: "{{matrix.image}}" - machine_type: "n2-standard-8" + machine_type: "${GCP_WIN_MACHINE_TYPE}" disk_type: "pd-ssd" matrix: setup: @@ -62,6 +68,7 @@ steps: agents: provider: "gcp" image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" artifact_paths: - "heartbeat/build/*.xml" - "heartbeat/build/*.json" @@ -75,6 +82,7 @@ steps: agents: provider: "gcp" image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" artifact_paths: - "heartbeat/build/*.xml" - "heartbeat/build/*.json" @@ -94,8 +102,8 @@ steps: context: "Heartbeat/Extended: Unit Tests ARM" agents: provider: "aws" - imagePrefix: "${IMAGE_UBUNTU_ARM_64}" - instanceType: "t4g.large" + imagePrefix: "${AWS_IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" artifact_paths: "heartbeat/build/*.xml" - label: ":mac: MacOS Unit Tests" @@ -118,14 +126,14 @@ steps: steps: - label: ":windows: Win 2019 Unit Tests" key: "win-extended-2019" - command: ".buildkite/heartbeat/scripts/unit-tests-win.ps1" + command: ".buildkite/scripts/win_unit_tests.ps1" notify: - github_commit_status: context: "Heartbeat/Extended: Win-2019 Unit Tests" agents: provider: "gcp" image: "${IMAGE_WIN_2019}" - machine_type: "n2-standard-8" + machine_type: "${GCP_WIN_MACHINE_TYPE}" disk_type: "pd-ssd" artifact_paths: - "heartbeat/build/*.xml" @@ -139,4 +147,4 @@ steps: steps: - label: Package pipeline - commands: ".buildkite/heartbeat/scripts/package-step.sh" + commands: ".buildkite/scripts/packaging/package-step.sh" diff --git a/.buildkite/heartbeat/scripts/integration-gotests.sh b/.buildkite/heartbeat/scripts/integration-gotests.sh index 8eab0e8b5d8..c50dbf45347 100755 --- a/.buildkite/heartbeat/scripts/integration-gotests.sh +++ b/.buildkite/heartbeat/scripts/integration-gotests.sh @@ -2,9 +2,6 @@ set -euo pipefail -# Remove when custom image is set up -source .buildkite/env-scripts/linux-env.sh - echo "--- Executing Integration Tests" # Remove when custom image is set up sudo chmod -R go-w heartbeat/ diff --git a/.buildkite/heartbeat/scripts/integration-pytests.sh b/.buildkite/heartbeat/scripts/integration-pytests.sh index 729df5ae6f6..5875b5460ed 100755 --- a/.buildkite/heartbeat/scripts/integration-pytests.sh +++ b/.buildkite/heartbeat/scripts/integration-pytests.sh @@ -2,9 +2,6 @@ set -euo pipefail -# Remove when custom image is set up -source .buildkite/env-scripts/linux-env.sh - echo "--- Executing Integration Tests" # Remove when custom image is set up sudo chmod -R go-w heartbeat/ diff --git a/.buildkite/heartbeat/scripts/package-step.sh b/.buildkite/heartbeat/scripts/package-step.sh deleted file mode 100755 index 03790edfa5f..00000000000 --- a/.buildkite/heartbeat/scripts/package-step.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail - -source .buildkite/env-scripts/util.sh - -changeset="^heartbeat/ -^go.mod -^pytest.ini -^dev-tools/ -^libbeat/ -^testing/ -^\.buildkite/heartbeat/" - -if are_files_changed "$changeset"; then - bk_pipeline=$(cat <<-YAML - steps: - - label: ":ubuntu: Packaging Linux X86" - key: "package-linux-x86" - env: - PLATFORMS: "+all linux/amd64 linux/arm64 
windows/amd64 darwin/amd64 darwin/arm64" - command: - - ".buildkite/heartbeat/scripts/package.sh" - notify: - - github_commit_status: - context: "heartbeat/Packaging: Linux X86" - agents: - provider: "gcp" - image: "${IMAGE_UBUNTU_X86_64}" - - - label: ":linux: Packaging Linux ARM" - key: "package-linux-arm" - env: - PLATFORMS: "linux/arm64" - PACKAGES: "docker" - command: - - ".buildkite/heartbeat/scripts/package.sh" - notify: - - github_commit_status: - context: "heartbeat/Packaging: ARM" - agents: - provider: "aws" - imagePrefix: "${IMAGE_UBUNTU_ARM_64}" - instanceType: "t4g.large" -YAML -) - echo "${bk_pipeline}" | buildkite-agent pipeline upload -else - buildkite-agent annotate "No required files changed. Skipped packaging" --style 'warning' --context 'ctx-warning' - exit 0 -fi diff --git a/.buildkite/heartbeat/scripts/package.sh b/.buildkite/heartbeat/scripts/package.sh deleted file mode 100755 index 7f51a6b5ca1..00000000000 --- a/.buildkite/heartbeat/scripts/package.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail - -source .buildkite/env-scripts/linux-env.sh - -echo "--- Docker Version: $(docker --version)" - -echo "--- Start Packaging" -cd heartbeat -umask 0022 -mage package - diff --git a/.buildkite/heartbeat/scripts/unit-tests-win.ps1 b/.buildkite/heartbeat/scripts/unit-tests-win.ps1 deleted file mode 100644 index 17282813e13..00000000000 --- a/.buildkite/heartbeat/scripts/unit-tests-win.ps1 +++ /dev/null @@ -1,51 +0,0 @@ -$ErrorActionPreference = "Stop" # set -e -$GoVersion = $env:GOLANG_VERSION # If Choco doesn't have the version specified in .go-version file, should be changed manually - -# Forcing to checkout again all the files with a correct autocrlf. -# Doing this here because we cannot set git clone options before. -function fixCRLF() { - Write-Host "--- Fixing CRLF in git checkout --" - git config core.autocrlf false - git rm --quiet --cached -r . - git reset --quiet --hard -} - -function withGolang() { - Write-Host "--- Install golang $GoVersion --" - choco install golang -y --version $GoVersion - - $choco = Convert-Path "$((Get-Command choco).Path)\..\.." 
- Import-Module "$choco\helpers\chocolateyProfile.psm1" - refreshenv - go version - go env -} - -function installGoDependencies() { - $installPackages = @( - "github.com/magefile/mage" - "github.com/elastic/go-licenser" - "golang.org/x/tools/cmd/goimports" - "github.com/jstemmer/go-junit-report" - "github.com/tebeka/go2xunit" - ) - foreach ($pkg in $installPackages) { - go install "$pkg" - } -} - -fixCRLF - -$ErrorActionPreference = "Continue" # set +e - -Set-Location -Path heartbeat -New-Item -ItemType Directory -Force -Path "build" -withGolang -installGoDependencies - -mage build unitTest - -$EXITCODE=$LASTEXITCODE -$ErrorActionPreference = "Stop" - -Exit $EXITCODE diff --git a/.buildkite/heartbeat/scripts/unit-tests.sh b/.buildkite/heartbeat/scripts/unit-tests.sh index 4b746da2d57..1d8de945788 100755 --- a/.buildkite/heartbeat/scripts/unit-tests.sh +++ b/.buildkite/heartbeat/scripts/unit-tests.sh @@ -2,9 +2,6 @@ set -euo pipefail -# Remove when custom image is set up -source .buildkite/env-scripts/linux-env.sh - echo "--- Running Unit Tests" # Remove when custom image is set up sudo chmod -R go-w heartbeat/ diff --git a/.buildkite/hooks/post-checkout b/.buildkite/hooks/post-checkout index b6cc7ad60bd..43881f6e2d8 100644 --- a/.buildkite/hooks/post-checkout +++ b/.buildkite/hooks/post-checkout @@ -8,7 +8,7 @@ checkout_merge() { local merge_branch=$3 if [[ -z "${target_branch}" ]]; then - echo "No pull request target branch" + echo "--- No pull request target branch" exit 1 fi @@ -24,9 +24,9 @@ checkout_merge() { git config user.name "github-merged-pr-post-checkout" git config user.email "auto-merge@buildkite" - git merge --no-edit "${BUILDKITE_COMMIT}" || { + git merge --no-edit "${pr_commit}" || { local merge_result=$? - echo "Merge failed: ${merge_result}" + echo "--- Merge failed: ${merge_result}" git merge --abort exit ${merge_result} } @@ -35,7 +35,7 @@ checkout_merge() { pull_request="${BUILDKITE_PULL_REQUEST:-false}" if [[ "${pull_request}" == "false" ]]; then - echo "Not a pull request, skipping" + echo "--- Not a pull request, skipping" exit 0 fi @@ -46,7 +46,7 @@ MERGE_BRANCH="pr_merge_${PR_ID}" checkout_merge "${TARGET_BRANCH}" "${PR_COMMIT}" "${MERGE_BRANCH}" -echo "Commit information" +echo "--- Commit information" git --no-pager log --format=%B -n 1 # Ensure buildkite groups are rendered diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command index 0ac7c51099c..2c69f322ad8 100644 --- a/.buildkite/hooks/pre-command +++ b/.buildkite/hooks/pre-command @@ -2,20 +2,88 @@ set -euo pipefail -if [[ "$BUILDKITE_PIPELINE_SLUG" == "filebeat" || "$BUILDKITE_PIPELINE_SLUG" == "auditbeat" || "$BUILDKITE_PIPELINE_SLUG" == "heartbeat" ]]; then +# Secrets must be redacted +# https://buildkite.com/docs/pipelines/managing-log-output#redacted-environment-variables +AWS_SERVICE_ACCOUNT_SECRET_PATH="kv/ci-shared/platform-ingest/aws_account_auth" +PRIVATE_CI_GCS_CREDENTIALS_PATH="kv/ci-shared/platform-ingest/gcp-platform-ingest-ci-service-account" +DOCKER_REGISTRY_SECRET_PATH="kv/ci-shared/platform-ingest/docker_registry_prod" +PRIVATE_CI_GCS_CREDENTIALS_PATH="kv/ci-shared/platform-ingest/private_ci_artifacts_gcs_credentials" +GITHUB_TOKEN_VAULT_PATH="kv/ci-shared/platform-ingest/github_token" + +retry() { + local retries=$1 + shift + local count=0 + until "$@"; do + exit=$? + wait=$((2 ** count)) + count=$((count + 1)) + if [ $count -lt "$retries" ]; then + >&2 echo "Retry $count/$retries exited $exit, retrying in $wait seconds..." 
+ sleep $wait + else + >&2 echo "Retry $count/$retries exited $exit, no more retries left." + return $exit + fi + done + return 0 +} + + +if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats" || "$BUILDKITE_PIPELINE_SLUG" == "filebeat" || "$BUILDKITE_PIPELINE_SLUG" == "auditbeat" || "$BUILDKITE_PIPELINE_SLUG" == "heartbeat" || "$BUILDKITE_PIPELINE_SLUG" == "deploy-k8s" ]]; then source .buildkite/env-scripts/env.sh source .buildkite/env-scripts/util.sh - source .buildkite/env-scripts/win-env.sh - if [[ -z "${GOLANG_VERSION-""}" ]]; then - export GOLANG_VERSION=$(cat "${WORKSPACE}/.go-version") + if [[ -z "${GO_VERSION-""}" ]]; then + export GO_VERSION=$(cat "${WORKSPACE}/.go-version") + fi + + if [[ "$BUILDKITE_STEP_KEY" == macos* ]]; then + ulimit -Sn 30000 + + echo "--- Setting up environment" + add_bin_path + with_go + with_mage + fi + + if [[ "$BUILDKITE_STEP_KEY" == package* ]]; then + export DOCKER_USERNAME_SECRET=$(retry_with_count 5 vault kv get -field user "${DOCKER_REGISTRY_SECRET_PATH}") + export DOCKER_PASSWORD_SECRET=$(retry_with_count 5 vault kv get -field password "${DOCKER_REGISTRY_SECRET_PATH}") + export GITHUB_TOKEN_SECRET=$(retry_with_count 5 vault kv get -field token ${GITHUB_TOKEN_VAULT_PATH}) + + docker login -u "${DOCKER_USERNAME_SECRET}" -p "${DOCKER_PASSWORD_SECRET}" "${DOCKER_REGISTRY}" 2>/dev/null + + github_username=$(retry_with_count 5 vault kv get -field username ${GITHUB_TOKEN_VAULT_PATH}) + github_email=$(retry_with_count 5 vault kv get -field email ${GITHUB_TOKEN_VAULT_PATH}) + + git config user.name "$github_username" + git config user.email "$github_email" fi fi -if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-libbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-packetbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-winlogbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-libbeat" ]]; then - source .buildkite/scripts/setenv.sh +if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-libbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-packetbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-winlogbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-libbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-packetbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-winlogbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-dockerlogbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-auditbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-filebeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-heartbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-osquerybeat" ]]; then if [[ "${BUILDKITE_COMMAND}" =~ ^buildkite-agent ]]; then echo "Skipped pre-command when running the Upload pipeline" exit 0 fi + source .buildkite/scripts/setenv.sh +fi + +if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-filebeat" ]]; then + if [[ "$BUILDKITE_STEP_KEY" == "extended-cloud-test" ]]; then + BEATS_AWS_SECRET_KEY=$(retry 5 vault kv get -field secret_key ${AWS_SERVICE_ACCOUNT_SECRET_PATH}) + export BEATS_AWS_SECRET_KEY + BEATS_AWS_ACCESS_KEY=$(retry 5 vault kv get -field access_key ${AWS_SERVICE_ACCOUNT_SECRET_PATH}) + export BEATS_AWS_ACCESS_KEY + fi +fi + + + +if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-packetbeat" ]]; then + if [[ "$BUILDKITE_STEP_KEY" == "extended-win-10-system-tests" || "$BUILDKITE_STEP_KEY" == "mandatory-win-2022-system-tests" ]]; then + PRIVATE_CI_GCS_CREDENTIALS_SECRET=$(retry 5 
vault kv get -field plaintext -format=json ${PRIVATE_CI_GCS_CREDENTIALS_PATH}) + export PRIVATE_CI_GCS_CREDENTIALS_SECRET + fi fi diff --git a/.buildkite/hooks/pre-exit b/.buildkite/hooks/pre-exit new file mode 100644 index 00000000000..d1ff6e0ac1c --- /dev/null +++ b/.buildkite/hooks/pre-exit @@ -0,0 +1,15 @@ +#!/usr/bin/env bash + +set -euo pipefail + +source .buildkite/hooks/scripts/util.sh + +if [[ "$BUILDKITE_PIPELINE_SLUG" == "filebeat" || "$BUILDKITE_PIPELINE_SLUG" == "auditbeat" || "$BUILDKITE_PIPELINE_SLUG" == "heartbeat" || "$BUILDKITE_PIPELINE_SLUG" == "deploy-k8s" ]]; then + if [[ "$BUILDKITE_STEP_KEY" == package* ]]; then + docker logout "${DOCKER_REGISTRY}" + fi + + # Ensure that any temporal files created during any step are removed + cleanup + unset_secrets +fi diff --git a/.buildkite/hooks/scripts/util.sh b/.buildkite/hooks/scripts/util.sh new file mode 100755 index 00000000000..07ab6cf4c9a --- /dev/null +++ b/.buildkite/hooks/scripts/util.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash + +set -euo pipefail + +unset_secrets () { + for var in $(printenv | sed 's;=.*;;' | sort); do + if [[ "$var" == *_SECRET || "$var" == *_TOKEN ]]; then + unset "$var" + fi + done +} + +google_cloud_logout_active_account() { + local active_account=$(gcloud auth list --filter=status:ACTIVE --format="value(account)" 2>/dev/null) + if [[ -n "$active_account" && -n "${GOOGLE_APPLICATION_CREDENTIALS+x}" ]]; then + echo "Logging out from GCP for active account" + gcloud auth revoke $active_account > /dev/null 2>&1 + else + echo "No active GCP accounts found." + fi + if [ -n "${GOOGLE_APPLICATION_CREDENTIALS+x}" ]; then + unset GOOGLE_APPLICATION_CREDENTIALS + cleanup + fi +} + +cleanup() { + echo "Deleting temporary files..." + if [[ -e "${BIN}/${TMP_FOLDER}" ]]; then + rm -rf "${BIN}/${TMP_FOLDER}.*" + fi + echo "Done." 
+} diff --git a/.buildkite/pipeline.py b/.buildkite/pipeline.py new file mode 100755 index 00000000000..95530b89628 --- /dev/null +++ b/.buildkite/pipeline.py @@ -0,0 +1,431 @@ +#!/usr/bin/env python3 +from typing import Any +from ruamel.yaml import YAML +import os +import subprocess +import fnmatch +import sys + + +class Agent: + """Buildkite Agent object""" + + def __init__(self, image: str, provider: str): + self.image: str = image + self.provider: str = provider + + def create_entity(self): + raise NotImplementedError("Not implemented yet") + + +class AWSAgent(Agent): + """AWS Agent object""" + + def __init__(self, image: str, instance_type: str = None): + super().__init__(image, "aws") + if instance_type is None: + self.instance_type: str = "t4g.large" + else: + self.instance_type = instance_type + + def create_entity(self) -> dict[str, str]: + return { + "provider": self.provider, + "imagePrefix": self.image, + "instanceType": self.instance_type, + } + + +class GCPAgent(Agent): + """GCP Agent object""" + + def __init__(self, image: str): + super().__init__(image, "gcp") + + def create_entity(self) -> dict[str, str]: + return { + "provider": self.provider, + "image": self.image, + } + + +class OrkaAgent(Agent): + """Orka Agent object""" + + def __init__(self, image: str): + super().__init__(image, "orka") + + def create_entity(self) -> dict[str, str]: + return { + "provider": self.provider, + "imagePrefix": self.image, + } + + +class Step: + """Buildkite Step object""" + + def __init__( + self, + name: str, + project: str, + category: str, + agent: Agent, + definition: dict[str, Any], + ): + self.command = definition.get("command", "") + self.env = definition.get("env", {}) + self.agent: Agent = agent + self.name: str = name + self.project: str = project + self.category: str = category + self.comment = "/test " + self.project + " " + self.name + self.label = self.name + + def __lt__(self, other): + return self.name < other.name + + def step_command(self) -> list[str]: + commands = [ + f"cd {self.project}", + self.command, + ] + return commands + + def create_entity(self) -> dict[str, Any]: + data = { + "label": f"{self.project} {self.name}", + "command": self.step_command(), + "notify": [ + { + "github_commit_status": { + "context": f"{self.project.title()}: {self.name}", + } + } + ], + "agents": self.agent.create_entity(), + "artifact_paths": [ + f"{self.project}/build/*.xml", + f"{self.project}/build/*.json", + ], + } + if self.env: + data["env"] = self.env + return data + + +class Group: + """Buildkite Group object""" + + def __init__(self, project: str, category: str, steps: list[Step]): + self.project: str = project + self.category: str = category + self.steps: list[Step] = steps + + def __lt__(self, other): + return self.project < other.project + + def create_entity(self) -> dict[str, Any]: + if len(self.steps) == 0: + return {} + + data = { + "group": f"{self.project} {self.category}", + "key": f"{self.project}-{self.category}", + "steps": [step.create_entity() for step in self.steps], + } + + return data + + +class GitHelper: + def __init__(self): + self.files: list[str] = [] + + def get_pr_changeset(self) -> list[str]: + base_branch = os.getenv("BUILDKITE_PULL_REQUEST_BASE_BRANCH", "main") + diff_command = ["git", "diff", "--name-only", "{}...HEAD".format(base_branch)] + result = subprocess.run(diff_command, stdout=subprocess.PIPE) + if result.returncode == 0: + self.files = result.stdout.decode().splitlines() + else: + print(f"Detecting changed files failed, exiting 
[{result.returncode}]") + exit(result.returncode) + return self.files + + +class BuildkitePipeline: + """Buildkite Pipeline object""" + + def __init__(self, groups: list[Group] = None): + if groups is None: + groups = [] + self.groups: list[Group] = groups + + def create_entity(self): + data = {"steps": [group.create_entity() for group in self.groups]} + return data + + +def is_pr() -> bool: + return os.getenv("BUILDKITE_PULL_REQUEST") != "false" + + +def group_comment(group: Group) -> bool: + comment = os.getenv("GITHUB_PR_TRIGGER_COMMENT") + if comment: + # the comment should be a subset of the values + # in .buildkite/pull-requests.json + # TODO: change /test + comment_prefix = "buildkite test" + if group.category == "mandatory": + # i.e: /test filebeat + return comment_prefix + " " + group.project in comment + else: + # i.e: test filebeat extended + return ( + comment_prefix + " " + group.project + " " + group.category in comment + ) + + +def filter_files_by_glob(files, patterns: list[str]): + for pattern in patterns: + # TODO: Support glob extended patterns: ^ and etc. + # Now it supports only linux glob syntax + if fnmatch.filter(files, pattern): + return True + return False + + +def is_in_pr_changeset( + project_changeset_filters: list[str], changeset: list[str] +) -> bool: + return filter_files_by_glob(changeset, project_changeset_filters) + + +def is_group_enabled( + group: Group, changeset_filters: list[str], changeset: list[str] +) -> bool: + if not is_pr(): + return True + + if ( + is_pr() + and is_in_pr_changeset(changeset_filters, changeset) + and group.category.startswith("mandatory") + ): + return True + + return group_comment(group) + + +def fetch_stage(name: str, stage, project: str, category: str) -> Step: + """Create a step given the yaml object.""" + + agent: Agent = None + if ("provider" not in stage) or stage["provider"] == "gcp": + agent = GCPAgent(image=stage["platform"]) + elif stage["provider"] == "aws": + agent = AWSAgent( + image=stage["platform"], + ) + elif stage["provider"] == "orka": + agent = OrkaAgent(image=stage["platform"]) + + return Step( + category=category, name=name, agent=agent, project=project, definition=stage + ) + + +def fetch_group(stages, project: str, category: str) -> Group: + """Create a group given the yaml object.""" + + steps = [] + + for stage in stages: + steps.append( + fetch_stage( + category=category, name=stage, project=project, stage=stages[stage] + ) + ) + + return Group(project=project, category=category, steps=steps) + + +def fetch_pr_pipeline(yaml: YAML) -> list[Group]: + git_helper = GitHelper() + changeset = git_helper.get_pr_changeset() + groups: list[Group] = [] + doc = pipeline_loader(yaml) + for project in doc["projects"]: + project_file = os.path.join(project, "buildkite.yml") + if not os.path.isfile(project_file): + continue + project_obj = project_loader(yaml, project_file) + group = fetch_group( + stages=project_obj["stages"]["mandatory"], + project=project, + category="mandatory", + ) + + if is_group_enabled(group, project_obj["when"]["changeset"], changeset): + groups.append(group) + + group = fetch_group( + stages=project_obj["stages"]["extended"], + project=project, + category="extended", + ) + + if is_group_enabled(group, project_obj["when"]["changeset"], changeset): + groups.append(group) + + # TODO: improve this merging lists + all_groups = [] + for group in groups: + all_groups.append(group) + + return all_groups + + +class PRComment: + command: str + group: str + project: str + step: str + + def 
__init__(self, comment: str):
+        words = comment.split()
+        self.command = words.pop(0) if words else ""
+        self.project = words.pop(0) if words else ""
+        self.group = words.pop(0) if words else ""
+        self.step = words.pop(0) if words else ""
+
+
+# A comment like "/test filebeat extended"
+# Returns a group of steps corresponding to the comment
+def fetch_pr_comment_group_pipeline(comment: PRComment, yaml: YAML) -> list[Group]:
+    groups = []
+    doc = pipeline_loader(yaml)
+    if comment.project in doc["projects"]:
+        project_file = os.path.join(comment.project, "buildkite.yml")
+        if not os.path.isfile(project_file):
+            raise FileNotFoundError(
+                "buildkite.yml not found in: " + "{}".format(comment.project)
+            )
+        project_obj = project_loader(yaml, project_file)
+        if not project_obj["stages"][comment.group]:
+            raise ValueError(
+                "Group not found in {} buildkite.yml: {}".format(
+                    comment.project, comment.group
+                )
+            )
+
+        group = fetch_group(
+            stages=project_obj["stages"][comment.group],
+            project=comment.project,
+            category="mandatory",
+        )
+        groups.append(group)
+
+    return groups
+
+
+# A comment like "/test filebeat extended unitTest-macos"
+def fetch_pr_comment_step_pipeline(comment: PRComment, yaml: YAML) -> list[Group]:
+    groups = []
+    doc = pipeline_loader(yaml)
+    if comment.project in doc["projects"]:
+        project_file = os.path.join(comment.project, "buildkite.yml")
+        if not os.path.isfile(project_file):
+            raise FileNotFoundError(
+                "buildkite.yml not found in: " + "{}".format(comment.project)
+            )
+        project_obj = project_loader(yaml, project_file)
+        if not project_obj["stages"][comment.group]:
+            raise ValueError(
+                "Group not found in {} buildkite.yml: {}".format(
+                    comment.project, comment.group
+                )
+            )
+        group = fetch_group(
+            stages=project_obj["stages"][comment.group],
+            project=comment.project,
+            category="mandatory",
+        )
+
+        filtered_steps = list(
+            filter(lambda step: step.name == comment.step, group.steps)
+        )
+
+        if not filtered_steps:
+            raise ValueError(
+                "Step {} not found in {} buildkite.yml".format(
+                    comment.step, comment.project
+                )
+            )
+        group.steps = filtered_steps
+        groups.append(group)
+
+    return groups
+
+
+def pr_comment_pipeline(pr_comment: PRComment, yaml: YAML) -> list[Group]:
+
+    if pr_comment.command == "/test":
+
+        # A comment like "/test" for a PR
+        # We rerun the PR pipeline
+        if not pr_comment.project:
+            return fetch_pr_pipeline(yaml)
+
+        # A comment like "/test filebeat"
+        # We don't know what group to run hence raise an error
+        if pr_comment.project and not pr_comment.group:
+            raise ValueError(
+                "Specify group or/and step for {}".format(pr_comment.project)
+            )
+
+        # A comment like "/test filebeat extended"
+        # We rerun the filebeat extended pipeline for the PR
+        if pr_comment.group and not pr_comment.step:
+            return fetch_pr_comment_group_pipeline(pr_comment, yaml)
+
+        # A comment like "/test filebeat extended unitTest-macos"
+        if pr_comment.step:
+            return fetch_pr_comment_step_pipeline(pr_comment, yaml)
+
+
+# TODO: validate unique stages!
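For context on how this generator is consumed: the updated .buildkite/pipeline.yml below swaps the placeholder step for .buildkite/scripts/generate_pipeline.sh. That script is not part of this excerpt, but since main() writes the rendered pipeline to stdout, the usual dynamic-pipeline wiring would look roughly like this (a sketch, not the actual script, which may also need to install ruamel.yaml first):

```bash
#!/usr/bin/env bash
set -euo pipefail

# Render the dynamic pipeline and hand it to the agent.
python3 .buildkite/pipeline.py | buildkite-agent pipeline upload
```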
+def main() -> None: + yaml = YAML(typ="safe") + all_groups = [] + if is_pr(): + if os.getenv("GITHUB_PR_TRIGGER_COMMENT"): + comment = PRComment(os.getenv("GITHUB_PR_TRIGGER_COMMENT")) + all_groups = pr_comment_pipeline(comment, yaml) + else: + all_groups = fetch_pr_pipeline(yaml) + # TODO what to load when not in PR + + # Produce the dynamic pipeline + print( + "# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json" + ) + yaml.dump(BuildkitePipeline(all_groups).create_entity(), sys.stdout) + + +def pipeline_loader(yaml: YAML = YAML(typ="safe")): + with open(".buildkite/buildkite.yml", "r", encoding="utf8") as file: + return yaml.load(file) + + +def project_loader(yaml: YAML = YAML(typ="safe"), project_file: str = ""): + with open(project_file, "r", encoding="utf8") as project_fp: + return yaml.load(project_fp) + + +if __name__ == "__main__": + + # pylint: disable=E1120 + main() diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 34321b61161..84f725981fe 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -1,5 +1,7 @@ # yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +env: + ASDF_MAGE_VERSION: '1.15.0' steps: - - label: "Example test" - command: echo "Hello!" + - label: "Generate dynamic pipeline" + command: ".buildkite/scripts/generate_pipeline.sh" diff --git a/.buildkite/pull-requests.json b/.buildkite/pull-requests.json index 8018411a743..8219bb2c9e9 100644 --- a/.buildkite/pull-requests.json +++ b/.buildkite/pull-requests.json @@ -9,8 +9,8 @@ "set_commit_status": true, "build_on_commit": true, "build_on_comment": true, - "trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))|^/test$", - "always_trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))|^/test$", + "trigger_comment_regex": "^/test .*$", + "always_trigger_comment_regex": "^/test .*$", "skip_ci_labels": [ ], "skip_target_branches": [ ], "skip_ci_on_only_changed": [ ], @@ -223,6 +223,86 @@ "skip_target_branches": [ ], "skip_ci_on_only_changed": [ ], "always_require_ci_on_changed": ["^x-pack/metricbeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] + }, + { + "enabled": true, + "pipelineSlug": "beats-xpack-auditbeat", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [ ], + "set_commit_status": true, + "build_on_commit": true, + "build_on_comment": true, + "trigger_comment_regex": "^/test x-pack/auditbeat$", + "always_trigger_comment_regex": "^/test x-pack/auditbeat$", + "skip_ci_labels": [ ], + "skip_target_branches": [ ], + "skip_ci_on_only_changed": [ ], + "always_require_ci_on_changed": ["^x-pack/auditbeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] + }, + { + "enabled": true, + "pipelineSlug": "beats-xpack-dockerlogbeat", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [ ], + "set_commit_status": true, + "build_on_commit": true, + "build_on_comment": true, + "trigger_comment_regex": "^/test x-pack/dockerlogbeat$", + "always_trigger_comment_regex": "^/test x-pack/dockerlogbeat$", + "skip_ci_labels": [ ], + "skip_target_branches": [ ], + "skip_ci_on_only_changed": [ ], + "always_require_ci_on_changed": ["^x-pack/dockerlogbeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", 
"^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] + }, + { + "enabled": true, + "pipelineSlug": "beats-xpack-filebeat", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [ ], + "set_commit_status": true, + "build_on_commit": true, + "build_on_comment": true, + "trigger_comment_regex": "^/test x-pack/filebeat$", + "always_trigger_comment_regex": "^/test x-pack/filebeat$", + "skip_ci_labels": [ ], + "skip_target_branches": [ ], + "skip_ci_on_only_changed": [ ], + "always_require_ci_on_changed": ["^x-pack/filebeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] + }, + { + "enabled": true, + "pipelineSlug": "beats-xpack-heartbeat", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [ ], + "set_commit_status": true, + "build_on_commit": true, + "build_on_comment": true, + "trigger_comment_regex": "^/test x-pack/heartbeat$", + "always_trigger_comment_regex": "^/test x-pack/heartbeat$", + "skip_ci_labels": [ ], + "skip_target_branches": [ ], + "skip_ci_on_only_changed": [ ], + "always_require_ci_on_changed": ["^x-pack/heartbeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] + }, + { + "enabled": true, + "pipelineSlug": "beats-xpack-osquerybeat", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [ ], + "set_commit_status": true, + "build_on_commit": true, + "build_on_comment": true, + "trigger_comment_regex": "^/test x-pack/osquerybeat$", + "always_trigger_comment_regex": "^/test x-pack/osquerybeat$", + "skip_ci_labels": [ ], + "skip_target_branches": [ ], + "skip_ci_on_only_changed": [ ], + "always_require_ci_on_changed": ["^x-pack/osquerybeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] } ] } diff --git a/.buildkite/pytest.ini b/.buildkite/pytest.ini new file mode 100644 index 00000000000..3eff7473d9f --- /dev/null +++ b/.buildkite/pytest.ini @@ -0,0 +1,11 @@ +[pytest] +junit_family=xunit1 + +addopts = --strict-markers +markers = + load: Load tests + tag(name): Tag tests with Go-like semantics + +# Ignore setup and teardown for the timeout +#timeout_func_only = True + diff --git a/.buildkite/scripts/cloud_tests.sh b/.buildkite/scripts/cloud_tests.sh new file mode 100755 index 00000000000..d96baf670a9 --- /dev/null +++ b/.buildkite/scripts/cloud_tests.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +# What Terraform Module will run +if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-metricbeat" ]]; then + export MODULE_DIR="x-pack/metricbeat/module/aws" +elif [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-filebeat" ]]; then + export MODULE_DIR="x-pack/filebeat/input/awss3/_meta/terraform" +fi + +source .buildkite/scripts/install_tools.sh + +set -euo pipefail + +trap 'teardown || true; unset_secrets' EXIT + +# Prepare the cloud resources using Terraform +startCloudTestEnv "${MODULE_DIR}" + +# Run tests +echo "--- Run Cloud Tests for $BEATS_PROJECT_NAME" +pushd "${BEATS_PROJECT_NAME}" > /dev/null + +mage build test + +popd > /dev/null diff --git a/.buildkite/scripts/common.sh b/.buildkite/scripts/common.sh index b797ec718aa..201fa8a42c7 100755 --- a/.buildkite/scripts/common.sh +++ b/.buildkite/scripts/common.sh @@ -9,14 +9,43 @@ arch_type="$(uname -m)" GITHUB_PR_TRIGGER_COMMENT=${GITHUB_PR_TRIGGER_COMMENT:-""} GITHUB_PR_LABELS=${GITHUB_PR_LABELS:-""} 
ONLY_DOCS=${ONLY_DOCS:-"true"} +OSS_MODULE_PATTERN="^[a-z0-9]+beat\\/module\\/([^\\/]+)\\/.*" +XPACK_MODULE_PATTERN="^x-pack\\/[a-z0-9]+beat\\/module\\/([^\\/]+)\\/.*" +# define if needed run the whole pipeline for the particular beat [ -z "${run_libbeat+x}" ] && run_libbeat="$(buildkite-agent meta-data get run_libbeat --default "false")" [ -z "${run_metricbeat+x}" ] && run_metricbeat="$(buildkite-agent meta-data get run_metricbeat --default "false")" [ -z "${run_packetbeat+x}" ] && run_packetbeat="$(buildkite-agent meta-data get run_packetbeat --default "false")" [ -z "${run_winlogbeat+x}" ] && run_winlogbeat="$(buildkite-agent meta-data get run_winlogbeat --default "false")" +[ -z "${run_xpack_libbeat+x}" ] && run_xpack_libbeat="$(buildkite-agent meta-data get run_xpack_libbeat --default "false")" +[ -z "${run_xpack_metricbeat+x}" ] && run_xpack_metricbeat="$(buildkite-agent meta-data get run_xpack_metricbeat --default "false")" +[ -z "${run_xpack_packetbeat+x}" ] && run_xpack_packetbeat="$(buildkite-agent meta-data get run_xpack_packetbeat --default "false")" +[ -z "${run_xpack_winlogbeat+x}" ] && run_xpack_winlogbeat="$(buildkite-agent meta-data get run_xpack_winlogbeat --default "false")" +[ -z "${run_xpack_auditbeat+x}" ] && run_xpack_auditbeat="$(buildkite-agent meta-data get run_xpack_auditbeat --default "false")" +[ -z "${run_xpack_filebeat+x}" ] && run_xpack_filebeat="$(buildkite-agent meta-data get run_xpack_filebeat --default "false")" +[ -z "${run_xpack_heartbeat+x}" ] && run_xpack_heartbeat="$(buildkite-agent meta-data get run_xpack_heartbeat --default "false")" +[ -z "${run_xpack_osquerybeat+x}" ] && run_xpack_osquerybeat="$(buildkite-agent meta-data get run_xpack_osquerybeat --default "false")" + +# define if needed run ARM platform-specific tests for the particular beat [ -z "${run_libbeat_arm_tests+x}" ] && run_libbeat_arm_tests="$(buildkite-agent meta-data get run_libbeat_arm_tests --default "false")" [ -z "${run_packetbeat_arm_tests+x}" ] && run_packetbeat_arm_tests="$(buildkite-agent meta-data get run_packetbeat_arm_tests --default "false")" +[ -z "${run_xpack_auditbeat_arm_tests+x}" ] && run_xpack_auditbeat_arm_tests="$(buildkite-agent meta-data get run_xpack_auditbeat_arm_tests --default "false")" +[ -z "${run_xpack_filebeat_arm_tests+x}" ] && run_xpack_filebeat_arm_tests="$(buildkite-agent meta-data get run_xpack_filebeat_arm_tests --default "false")" +[ -z "${run_xpack_libbeat_arm_tests+x}" ] && run_xpack_libbeat_arm_tests="$(buildkite-agent meta-data get run_xpack_libbeat_arm_tests --default "false")" +[ -z "${run_xpack_packetbeat_arm_tests+x}" ] && run_xpack_packetbeat_arm_tests="$(buildkite-agent meta-data get run_xpack_packetbeat_arm_tests --default "false")" + +# define if needed run MacOS platform-specific tests for the particular beat [ -z "${run_metricbeat_macos_tests+x}" ] && run_metricbeat_macos_tests="$(buildkite-agent meta-data get run_metricbeat_macos_tests --default "false")" [ -z "${run_packetbeat_macos_tests+x}" ] && run_packetbeat_macos_tests="$(buildkite-agent meta-data get run_packetbeat_macos_tests --default "false")" +[ -z "${run_xpack_auditbeat_macos_tests+x}" ] && run_xpack_auditbeat_macos_tests="$(buildkite-agent meta-data get run_xpack_auditbeat_macos_tests --default "false")" +[ -z "${run_xpack_filebeat_macos_tests+x}" ] && run_xpack_filebeat_macos_tests="$(buildkite-agent meta-data get run_xpack_filebeat_macos_tests --default "false")" +[ -z "${run_xpack_metricbeat_macos_tests+x}" ] && run_xpack_metricbeat_macos_tests="$(buildkite-agent 
meta-data get run_xpack_metricbeat_macos_tests --default "false")" +[ -z "${run_xpack_packetbeat_macos_tests+x}" ] && run_xpack_packetbeat_macos_tests="$(buildkite-agent meta-data get run_xpack_packetbeat_macos_tests --default "false")" +[ -z "${run_xpack_heartbeat_macos_tests+x}" ] && run_xpack_heartbeat_macos_tests="$(buildkite-agent meta-data get run_xpack_heartbeat_macos_tests --default "false")" +[ -z "${run_xpack_osquerybeat_macos_tests+x}" ] && run_xpack_osquerybeat_macos_tests="$(buildkite-agent meta-data get run_xpack_osquerybeat_macos_tests --default "false")" + +# define if needed run cloud-specific tests for the particular beat +[ -z "${run_xpack_metricbeat_aws_tests+x}" ] && run_xpack_metricbeat_aws_tests="$(buildkite-agent meta-data get run_xpack_metricbeat_aws_tests --default "false")" +[ -z "${run_xpack_filebeat_aws_tests+x}" ] && run_xpack_filebeat_aws_tests="$(buildkite-agent meta-data get run_xpack_filebeat_aws_tests --default "false")" metricbeat_changeset=( "^metricbeat/.*" @@ -34,6 +63,22 @@ winlogbeat_changeset=( "^winlogbeat/.*" ) +xpack_auditbeat_changeset=( + "^x-pack/auditbeat/.*" + ) + +xpack_dockerlogbeat_changeset=( + "^x-pack/dockerlogbeat/.*" + ) + +xpack_heartbeat_changeset=( + "^x-pack/heartbeat/.*" + ) + +xpack_filebeat_changeset=( + "^x-pack/filebeat/.*" + ) + xpack_libbeat_changeset=( "^x-pack/libbeat/.*" ) @@ -42,6 +87,10 @@ xpack_metricbeat_changeset=( "^x-pack/metricbeat/.*" ) +xpack_osquerybeat_changeset=( + "^x-pack/osquerybeat/.*" + ) + xpack_packetbeat_changeset=( "^x-pack/packetbeat/.*" ) @@ -81,6 +130,51 @@ packaging_changeset=( ".go-version" ) +case "${BUILDKITE_PIPELINE_SLUG}" in + "beats-metricbeat") + BEAT_CHANGESET_REFERENCE=${metricbeat_changeset[@]} + ;; + "beats-libbeat") + BEAT_CHANGESET_REFERENCE=${libbeat_changeset[@]} + ;; + "beats-packetbeat") + BEAT_CHANGESET_REFERENCE=${packetbeat_changeset[@]} + ;; + "beats-winlogbeat") + BEAT_CHANGESET_REFERENCE=${winlogbeat_changeset[@]} + ;; + "beats-xpack-auditbeat") + BEAT_CHANGESET_REFERENCE=${xpack_auditbeat_changeset[@]} + ;; + "beats-xpack-dockerlogbeat") + BEAT_CHANGESET_REFERENCE=${xpack_dockerlogbeat_changeset[@]} + ;; + "beats-xpack-filebeat") + BEAT_CHANGESET_REFERENCE=${xpack_filebeat_changeset[@]} + ;; + "beats-xpack-heartbeat") + BEAT_CHANGESET_REFERENCE=${xpack_heartbeat_changeset[@]} + ;; + "beats-xpack-libbeat") + BEAT_CHANGESET_REFERENCE=${xpack_libbeat_changeset[@]} + ;; + "beats-xpack-metricbeat") + BEAT_CHANGESET_REFERENCE=${xpack_metricbeat_changeset[@]} + ;; + "beats-xpack-osquerybeat") + BEAT_CHANGESET_REFERENCE=${xpack_osquerybeat_changeset[@]} + ;; + "beats-xpack-packetbeat") + BEAT_CHANGESET_REFERENCE=${xpack_packetbeat_changeset[@]} + ;; + "beats-xpack-winlogbeat") + BEAT_CHANGESET_REFERENCE=${xpack_winlogbeat_changeset[@]} + ;; + *) + echo "The changeset for the ${BUILDKITE_PIPELINE_SLUG} pipeline hasn't been defined yet." 
+ ;; +esac + check_and_set_beat_vars() { if [[ -n "$BEATS_PROJECT_NAME" && "$BEATS_PROJECT_NAME" == *"x-pack/"* ]]; then BEATS_XPACK_PROJECT_NAME=${BEATS_PROJECT_NAME//-/} #remove - @@ -89,8 +183,8 @@ check_and_set_beat_vars() { BEATS_GH_LABEL=${BEATS_XPACK_LABEL_PROJECT_NAME} TRIGGER_SPECIFIC_BEAT="run_${BEATS_XPACK_PROJECT_NAME}" TRIGGER_SPECIFIC_ARM_TESTS="run_${BEATS_XPACK_PROJECT_NAME}_arm_tests" + TRIGGER_SPECIFIC_AWS_TESTS="run_${BEATS_XPACK_PROJECT_NAME}_aws_tests" TRIGGER_SPECIFIC_MACOS_TESTS="run_${BEATS_XPACK_PROJECT_NAME}_macos_tests" - declare -n BEAT_CHANGESET_REFERENCE="${BEATS_XPACK_PROJECT_NAME}_changeset" echo "Beats project name is $BEATS_XPACK_PROJECT_NAME" mandatory_changeset=( "${BEAT_CHANGESET_REFERENCE[@]}" @@ -101,8 +195,8 @@ check_and_set_beat_vars() { BEATS_GH_LABEL=${BEATS_PROJECT_NAME} TRIGGER_SPECIFIC_BEAT="run_${BEATS_PROJECT_NAME}" TRIGGER_SPECIFIC_ARM_TESTS="run_${BEATS_PROJECT_NAME}_arm_tests" + TRIGGER_SPECIFIC_AWS_TESTS="run_${BEATS_PROJECT_NAME}_aws_tests" TRIGGER_SPECIFIC_MACOS_TESTS="run_${BEATS_PROJECT_NAME}_macos_tests" - declare -n BEAT_CHANGESET_REFERENCE="${BEATS_PROJECT_NAME}_changeset" echo "Beats project name is $BEATS_PROJECT_NAME" mandatory_changeset=( "${BEAT_CHANGESET_REFERENCE[@]}" @@ -113,8 +207,10 @@ check_and_set_beat_vars() { BEATS_GH_COMMENT="/test ${BEATS_PROJECT_NAME}" BEATS_GH_MACOS_COMMENT="${BEATS_GH_COMMENT} for macos" BEATS_GH_ARM_COMMENT="${BEATS_GH_COMMENT} for arm" - BAETS_GH_MACOS_LABEL="macOS" - BAETS_GH_ARM_LABEL="arm" + BEATS_GH_AWS_COMMENT="${BEATS_GH_COMMENT} for aws cloud" + BEATS_GH_MACOS_LABEL="macOS" + BEATS_GH_ARM_LABEL="arm" + BEATS_GH_AWS_LABEL="aws" } with_docker_compose() { @@ -127,6 +223,19 @@ with_docker_compose() { docker-compose version } +with_Terraform() { + echo "Setting up the Terraform environment..." + local path_to_file="${WORKSPACE}/terraform.zip" + create_workspace + check_platform_architeture + retry 5 curl -sSL -o ${path_to_file} "https://releases.hashicorp.com/terraform/${ASDF_TERRAFORM_VERSION}/terraform_${ASDF_TERRAFORM_VERSION}_${platform_type_lowercase}_${go_arch_type}.zip" + unzip -q ${path_to_file} -d ${BIN}/ + rm ${path_to_file} + chmod +x ${BIN}/terraform + export PATH="${BIN}:${PATH}" + terraform version +} + create_workspace() { if [[ ! 
-d "${BIN}" ]]; then mkdir -p "${BIN}" @@ -168,6 +277,8 @@ with_mage() { for pkg in "${install_packages[@]}"; do go install "${pkg}@latest" done + echo "Download modules to local cache" + retry 3 go mod download } with_go() { @@ -222,10 +333,10 @@ with_dependencies() { if [ "${platform_type}" == "Linux" ]; then if [ "${linuxType}" = "ubuntu" ]; then sudo apt-get update - sudo apt-get install -y libsystemd-dev libpcap-dev + sudo apt-get install -y libsystemd-dev libpcap-dev librpm-dev elif [ "${linuxType}" = "rhel" ]; then # sudo dnf update -y - sudo dnf install -y systemd-devel + sudo dnf install -y systemd-devel rpm-devel wget https://mirror.stream.centos.org/9-stream/CRB/${arch_type}/os/Packages/libpcap-devel-1.10.0-4.el9.${arch_type}.rpm #TODO: move this step to our own image sudo dnf install -y libpcap-devel-1.10.0-4.el9.${arch_type}.rpm #TODO: move this step to our own image fi @@ -283,9 +394,15 @@ are_paths_changed() { are_changed_only_paths() { local patterns=("${@}") - local changelist=() - local changed_files=$(git diff --name-only HEAD@{1} HEAD) - if [ -z "$changed_files" ] || grep -qE "$(IFS=\|; echo "${patterns[*]}")" <<< "$changed_files"; then + local changed_files=($(git diff --name-only HEAD@{1} HEAD)) + local matched_files=() + for pattern in "${patterns[@]}"; do + local matched=($(grep -E "${pattern}" <<< "${changed_files[@]}")) + if [ "${#matched[@]}" -gt 0 ]; then + matched_files+=("${matched[@]}") + fi + done + if [ "${#matched_files[@]}" -eq "${#changed_files[@]}" ] || [ "${#changed_files[@]}" -eq 0 ]; then return 0 fi return 1 @@ -300,8 +417,8 @@ are_conditions_met_mandatory_tests() { are_conditions_met_arm_tests() { if are_conditions_met_mandatory_tests; then #from https://github.com/elastic/beats/blob/c5e79a25d05d5bdfa9da4d187fe89523faa42afc/Jenkinsfile#L145-L171 - if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-libbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-packetbeat" ]]; then - if [[ "${GITHUB_PR_TRIGGER_COMMENT}" == "${BEATS_GH_ARM_COMMENT}" || "${GITHUB_PR_LABELS}" =~ "${BAETS_GH_ARM_LABEL}" || "${!TRIGGER_SPECIFIC_ARM_TESTS}" == "true" ]]; then + if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-libbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-packetbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-auditbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-filebeat" ]]; then + if [[ "${GITHUB_PR_TRIGGER_COMMENT}" == "${BEATS_GH_ARM_COMMENT}" || "${GITHUB_PR_LABELS}" =~ ${BEATS_GH_ARM_LABEL} || "${!TRIGGER_SPECIFIC_ARM_TESTS}" == "true" ]]; then return 0 fi fi @@ -311,8 +428,19 @@ are_conditions_met_arm_tests() { are_conditions_met_macos_tests() { if are_conditions_met_mandatory_tests; then #from https://github.com/elastic/beats/blob/c5e79a25d05d5bdfa9da4d187fe89523faa42afc/Jenkinsfile#L145-L171 - if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-packetbeat" ]]; then - if [[ "${GITHUB_PR_TRIGGER_COMMENT}" == "${BEATS_GH_MACOS_COMMENT}" || "${GITHUB_PR_LABELS}" =~ "${BAETS_GH_MACOS_LABEL}" || "${!TRIGGER_SPECIFIC_MACOS_TESTS}" == "true" ]]; then # from https://github.com/elastic/beats/blob/c5e79a25d05d5bdfa9da4d187fe89523faa42afc/metricbeat/Jenkinsfile.yml#L3-L12 + if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-packetbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-auditbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-filebeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-heartbeat" || 
"$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-osquerybeat" ]]; then + if [[ "${GITHUB_PR_TRIGGER_COMMENT}" == "${BEATS_GH_MACOS_COMMENT}" || "${GITHUB_PR_LABELS}" =~ ${BEATS_GH_MACOS_LABEL} || "${!TRIGGER_SPECIFIC_MACOS_TESTS}" == "true" ]]; then # from https://github.com/elastic/beats/blob/c5e79a25d05d5bdfa9da4d187fe89523faa42afc/metricbeat/Jenkinsfile.yml#L3-L12 + return 0 + fi + fi + fi + return 1 +} + +are_conditions_met_aws_tests() { + if are_conditions_met_mandatory_tests; then #from https://github.com/elastic/beats/blob/c5e79a25d05d5bdfa9da4d187fe89523faa42afc/Jenkinsfile#L145-L171 + if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-filebeat" ]]; then + if [[ "${GITHUB_PR_TRIGGER_COMMENT}" == "${BEATS_GH_AWS_COMMENT}" || "${GITHUB_PR_LABELS}" =~ ${BEATS_GH_AWS_LABEL} || "${!TRIGGER_SPECIFIC_AWS_TESTS}" == "true" ]]; then # from https://github.com/elastic/beats/blob/c5e79a25d05d5bdfa9da4d187fe89523faa42afc/metricbeat/Jenkinsfile.yml#L3-L12 return 0 fi fi @@ -336,19 +464,152 @@ config_git() { fi } +defineModuleFromTheChangeSet() { + # This method gathers the module name, if required, in order to run the ITs only if the changeset affects a specific module. + # For such, it's required to look for changes under the module folder and exclude anything else such as asciidoc and png files. + # This method defines and exports the MODULE variable with a particular module name or '' if changeset doesn't affect a specific module + local project_path=$1 + local project_path_transformed=$(echo "$project_path" | sed 's/\//\\\//g') + local project_path_exclussion="((?!^${project_path_transformed}\\/).)*\$" + local exclude=("^(${project_path_exclussion}|((?!\\/module\\/).)*\$|.*\\.asciidoc|.*\\.png)") + + if [[ "$project_path" == *"x-pack/"* ]]; then + local pattern=("$XPACK_MODULE_PATTERN") + else + local pattern=("$OSS_MODULE_PATTERN") + fi + local changed_modules="" + local module_dirs=$(find "$project_path/module" -mindepth 1 -maxdepth 1 -type d) + for module_dir in $module_dirs; do + if are_paths_changed $module_dir && ! are_changed_only_paths "${exclude[@]}"; then + if [[ -z "$changed_modules" ]]; then + changed_modules=$(basename "$module_dir") + else + changed_modules+=",$(basename "$module_dir")" + fi + fi + done + if [[ -z "$changed_modules" ]]; then # TODO: remove this condition and uncomment the line below when the issue https://github.com/elastic/ingest-dev/issues/2993 is solved + export MODULE="aws" + else + export MODULE="${changed_modules}" # TODO: remove this line and uncomment the line below when the issue https://github.com/elastic/ingest-dev/issues/2993 is solved + # export MODULE="${changed_modules}" # TODO: uncomment the line when the issue https://github.com/elastic/ingest-dev/issues/2993 is solved + fi +} + +terraformInit() { + local dir=$1 + echo "Terraform Init on $dir" + pushd "${dir}" > /dev/null + terraform init + popd > /dev/null +} + +withAWS() { + # This method gathers the masked AWS credentials from pre-command hook and sets the right AWS variable names. 
+  export AWS_ACCESS_KEY_ID=$BEATS_AWS_ACCESS_KEY
+  export AWS_SECRET_ACCESS_KEY=$BEATS_AWS_SECRET_KEY
+  export TEST_TAGS="${TEST_TAGS:+$TEST_TAGS,}aws"
+}
+
+startCloudTestEnv() {
+  local dir=$1
+  withAWS
+  echo "--- Run docker-compose services for emulated cloud env"
+  docker-compose -f .ci/jobs/docker-compose.yml up -d #TODO: move all docker-compose files from the .ci to .buildkite folder before switching to BK
+  with_Terraform
+  terraformInit "$dir"
+  export TF_VAR_BRANCH=$(echo "${BUILDKITE_BRANCH}" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9-]/-/g')
+  export TF_VAR_BUILD_ID="${BUILDKITE_BUILD_ID}"
+  export TF_VAR_CREATED_DATE=$(date +%s)
+  export TF_VAR_ENVIRONMENT="ci"
+  export TF_VAR_REPO="${REPO}"
+  pushd "${dir}" > /dev/null
+  terraform apply -auto-approve
+  popd > /dev/null
+}
+
+withNodeJSEnv() {
+  # HOME="${WORKSPACE}"
+  local version=$1
+  # local nvmPath="${HOME}/.nvm/versions/node/${version}/bin"
+  echo "Installing nvm"
+  curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.1/install.sh | bash
+  export NVM_DIR="$HOME/.nvm"
+  [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
+  echo "Installing the NodeJs version $version"
+  nvm install "$version"
+  # export PATH="${nvmPath}:${PATH}"
+  nvm use "$version"
+  node --version
+}
+
+installNodeJsDependencies() {
+  # Install dependencies to run browsers
+  if [ "${platform_type}" == "Linux" ]; then
+    sudo apt-get install -y \
+      libatk1.0-0 \
+      libatk-bridge2.0-0 \
+      libcups2 \
+      libxkbcommon0 \
+      libatspi2.0-0 \
+      libxcomposite1 \
+      libxdamage1 \
+      libxfixes3 \
+      libxrandr2 \
+      libgbm1 \
+      libpango-1.0-0 \
+      libcairo2 \
+      libasound2
+    if [ $? -ne 0 ]; then
+      echo "Error: Failed to install dependencies."
+      exit 1
+    else
+      echo "Dependencies installed successfully."
+    fi
+  elif [ "${platform_type}" == "Darwin" ]; then
+    echo "TBD"
+  else
+    echo "Unsupported platform type."
+    exit 1
+  fi
+}
+
+teardown() {
+  # Tear down resources after using them
+  echo "---Terraform Cleanup"
+  .ci/scripts/terraform-cleanup.sh "${MODULE_DIR}" #TODO: move all docker-compose files from the .ci to .buildkite folder before switching to BK
+
+  echo "---Docker Compose Cleanup"
+  docker-compose -f .ci/jobs/docker-compose.yml down -v #TODO: move all docker-compose files from the .ci to .buildkite folder before switching to BK
+}
+
+unset_secrets () {
+  for var in $(printenv | sed 's;=.*;;' | sort); do
+    if [[ "$var" == AWS_* || "$var" == BEATS_AWS_* ]]; then
+      unset "$var"
+    fi
+  done
+}
+
 if ! are_changed_only_paths "${docs_changeset[@]}" ; then
-  ONLY_DOCS="false"
+  export ONLY_DOCS="false"
   echo "Changes include files outside the docs_changeset variable. ONLY_DOCS=$ONLY_DOCS."
 else
   echo "All changes are related to DOCS. ONLY_DOCS=$ONLY_DOCS."
 fi

 if are_paths_changed "${go_mod_changeset[@]}" ; then
-  GO_MOD_CHANGES="true"
+  export GO_MOD_CHANGES="true"
 fi

 if are_paths_changed "${packaging_changeset[@]}" ; then
-  PACKAGING_CHANGES="true"
+  export PACKAGING_CHANGES="true"
+fi
+
+if [[ "$BUILDKITE_STEP_KEY" == "xpack-winlogbeat-pipeline" || "$BUILDKITE_STEP_KEY" == "xpack-metricbeat-pipeline" || "$BUILDKITE_STEP_KEY" == "xpack-dockerlogbeat-pipeline" || "$BUILDKITE_STEP_KEY" == "metricbeat-pipeline" || "$BUILDKITE_STEP_KEY" == "xpack-auditbeat-pipeline" ]]; then
+  # Set the MODULE env variable if possible; it must be defined before generating the pipeline's steps. It is used in multiple pipelines.
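
One more illustrative note: unset_secrets above walks the environment and drops anything matching AWS_* or BEATS_AWS_*. Its effect can be sanity-checked locally with a few throwaway variables (the names here are made up):

    export AWS_ACCESS_KEY_ID="fake" BEATS_AWS_SECRET_KEY="fake"
    for var in $(printenv | sed 's;=.*;;' | sort); do
      if [[ "$var" == AWS_* || "$var" == BEATS_AWS_* ]]; then
        unset "$var"
      fi
    done
    printenv | grep -E '^(AWS_|BEATS_AWS_)' || echo "no AWS secrets left in the environment"
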
+  defineModuleFromTheChangeSet "${BEATS_PROJECT_NAME}"
fi

check_and_set_beat_vars
diff --git a/.buildkite/scripts/generate_metricbeat_pipeline.sh b/.buildkite/scripts/generate_metricbeat_pipeline.sh
index e91896eb70c..477f8fb25a3 100755
--- a/.buildkite/scripts/generate_metricbeat_pipeline.sh
+++ b/.buildkite/scripts/generate_metricbeat_pipeline.sh
@@ -149,7 +149,7 @@ if are_conditions_met_packaging; then
         image: "${IMAGE_UBUNTU_X86_64}"
         machineType: "${GCP_HI_PERF_MACHINE_TYPE}"
       env:
-        PLATFORMS: "+all linux/amd64 linux/arm64 windows/amd64 darwin/amd64 darwin/arm64"
+        PLATFORMS: "${PACKAGING_PLATFORMS}"

     - label: ":linux: Packaging ARM"
       key: "packaging-arm"
@@ -159,7 +159,7 @@ if are_conditions_met_packaging; then
         imagePrefix: "${IMAGE_UBUNTU_ARM_64}"
         instanceType: "${AWS_ARM_INSTANCE_TYPE}"
       env:
-        PLATFORMS: "linux/arm64"
+        PLATFORMS: "${PACKAGING_ARM_PLATFORMS}"
         PACKAGES: "docker"

YAML
diff --git a/.buildkite/scripts/generate_packetbeat_pipeline.sh b/.buildkite/scripts/generate_packetbeat_pipeline.sh
index 89ea7a33e20..97bdb531a6b 100755
--- a/.buildkite/scripts/generate_packetbeat_pipeline.sh
+++ b/.buildkite/scripts/generate_packetbeat_pipeline.sh
@@ -92,7 +92,7 @@ else
   exit 0
fi

-if are_conditions_met_arm_tests && are_conditions_met_macos_tests; then
+if are_conditions_met_arm_tests || are_conditions_met_macos_tests; then
   cat >> $pipelineName <<- YAML

   - group: "Extended Tests"
@@ -150,7 +150,7 @@ if are_conditions_met_packaging; then
         image: "${IMAGE_UBUNTU_X86_64}"
         machineType: "${GCP_HI_PERF_MACHINE_TYPE}"
       env:
-        PLATFORMS: "+all linux/amd64 linux/arm64 windows/amd64 darwin/amd64 darwin/arm64"
+        PLATFORMS: "${PACKAGING_PLATFORMS}"

     - label: ":linux: Packaging ARM"
       key: "packaging-arm"
@@ -160,7 +160,7 @@ if are_conditions_met_packaging; then
         imagePrefix: "${IMAGE_UBUNTU_ARM_64}"
         instanceType: "${AWS_ARM_INSTANCE_TYPE}"
       env:
-        PLATFORMS: "linux/arm64"
+        PLATFORMS: "${PACKAGING_ARM_PLATFORMS}"
         PACKAGES: "docker"

YAML
diff --git a/.buildkite/scripts/generate_pipeline.sh b/.buildkite/scripts/generate_pipeline.sh
new file mode 100755
index 00000000000..877ea018f06
--- /dev/null
+++ b/.buildkite/scripts/generate_pipeline.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+echo "~~~ Install dependencies"
+python3 -mpip install --quiet "ruamel.yaml<0.18.0"
+# temporary solution until we have this in a base container
+curl -fsSL --retry-max-time 60 --retry 3 --retry-delay 5 -o /usr/bin/yq https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64
+chmod a+x /usr/bin/yq
+
+.buildkite/scripts/run_dynamic_pipeline_tests.sh
+
+echo "+++ Run pipeline generator in dry-run mode"
+python3 .buildkite/pipeline.py | yq .
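
The generator output is deliberately piped through yq first: if the rendered document is not valid YAML, the job fails here, before anything is scheduled; only then is the same output piped to the agent. The check can be reproduced locally (assuming a checkout containing .buildkite/pipeline.py and yq on PATH):

    python3 .buildkite/pipeline.py | yq . > /dev/null && echo "generated pipeline parses as YAML"
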
+
+echo "~~~ Upload pipeline"
+python3 .buildkite/pipeline.py | buildkite-agent pipeline upload
diff --git a/.buildkite/scripts/generate_winlogbeat_pipeline.sh b/.buildkite/scripts/generate_winlogbeat_pipeline.sh
index 1eb1b459c92..ce812016e79 100755
--- a/.buildkite/scripts/generate_winlogbeat_pipeline.sh
+++ b/.buildkite/scripts/generate_winlogbeat_pipeline.sh
@@ -96,7 +96,7 @@ if are_conditions_met_packaging; then
         image: "${IMAGE_UBUNTU_X86_64}"
         machineType: "${GCP_HI_PERF_MACHINE_TYPE}"
       env:
-        PLATFORMS: "+all linux/amd64 linux/arm64 windows/amd64 darwin/amd64 darwin/arm64"
+        PLATFORMS: "${PACKAGING_PLATFORMS}"

YAML
diff --git a/.buildkite/scripts/generate_xpack_auditbeat_pipeline.sh b/.buildkite/scripts/generate_xpack_auditbeat_pipeline.sh
new file mode 100755
index 00000000000..f29e6152e60
--- /dev/null
+++ b/.buildkite/scripts/generate_xpack_auditbeat_pipeline.sh
@@ -0,0 +1,166 @@
+#!/usr/bin/env bash
+
+source .buildkite/scripts/common.sh
+
+set -euo pipefail
+
+pipelineName="pipeline.xpack-auditbeat-dynamic.yml"
+
+echo "Add the mandatory and extended tests without additional conditions into the pipeline"
+if are_conditions_met_mandatory_tests; then
+  cat > $pipelineName <<- YAML
+
+steps:
+
+  - group: "Mandatory Tests"
+    key: "mandatory-tests"
+    steps:
+
+      - label: ":linux: Ubuntu Unit (MODULE) Tests"
+        key: "mandatory-linux-unit-test"
+        command: "cd $BEATS_PROJECT_NAME && mage build unitTest"
+        env:
+          MODULE: $MODULE
+        agents:
+          provider: "gcp"
+          image: "${IMAGE_UBUNTU_X86_64}"
+          machineType: "${GCP_DEFAULT_MACHINE_TYPE}"
+        artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml"
+
+      - label: ":rhel: RHEL-9 Unit Tests"
+        key: "mandatory-rhel9-unit-test"
+        command: "cd $BEATS_PROJECT_NAME && mage build unitTest"
+        agents:
+          provider: "gcp"
+          image: "${IMAGE_RHEL9_X86_64}"
+          machineType: "${GCP_DEFAULT_MACHINE_TYPE}"
+        artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*"
+
+
+      - label: ":windows: Windows Unit Tests - {{matrix.image}}"
+        command: ".buildkite/scripts/win_unit_tests.ps1"
+        key: "mandatory-win-unit-tests"
+        agents:
+          provider: "gcp"
+          image: "{{matrix.image}}"
+          machineType: "${GCP_WIN_MACHINE_TYPE}"
+          disk_size: 100
+          disk_type: "pd-ssd"
+        matrix:
+          setup:
+            image:
+              - "${IMAGE_WIN_2016}"
+              - "${IMAGE_WIN_2022}"
+        artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*"
+
+## TODO: this condition will be changed in the Phase 3 of the Migration Plan https://docs.google.com/document/d/1IPNprVtcnHlem-uyGZM0zGzhfUuFAh4LeSl9JFHMSZQ/edit#heading=h.sltz78yy249h
+
+  - group: "Extended Windows Tests"
+    key: "extended-win-tests"
+    steps:
+
+      - label: ":windows: Windows Unit Tests - {{matrix.image}}"
+        command: ".buildkite/scripts/win_unit_tests.ps1"
+        key: "extended-win-unit-tests"
+        agents:
+          provider: "gcp"
+          image: "{{matrix.image}}"
+          machineType: "${GCP_WIN_MACHINE_TYPE}"
+          disk_size: 100
+          disk_type: "pd-ssd"
+        matrix:
+          setup:
+            image:
+              - "${IMAGE_WIN_10}"
+              - "${IMAGE_WIN_11}"
+              - "${IMAGE_WIN_2019}"
+        artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*"
+
+
+YAML
+else
+  echo "The conditions don't match the requirements for generating pipeline steps."
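
A side note on the `cat <<- YAML` blocks used by every generator in this change: the trailing dash strips only leading tab characters, so the space-indented step definitions reach the file exactly as written, while the unindented closing delimiter still terminates the document. Reduced to a sketch (file name hypothetical):

pipelineName="pipeline.example-dynamic.yml"   # hypothetical name
cat > "$pipelineName" <<- YAML
steps:
  - label: ":linux: Example Unit Tests"
    command: "mage build unitTest"
YAML
# The spaces before "- label" survive; only leading tabs would be stripped by <<-.
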
+ exit 0 +fi + +if are_conditions_met_arm_tests || are_conditions_met_macos_tests ; then + cat >> $pipelineName <<- YAML + + - group: "Extended Tests" + key: "extended-tests" + steps: + +YAML +fi + +if are_conditions_met_macos_tests; then + cat >> $pipelineName <<- YAML + + - label: ":mac: MacOS Unit Tests" + key: "extended-macos-unit-tests" + command: ".buildkite/scripts/unit_tests.sh" + agents: + provider: "orka" + imagePrefix: "${IMAGE_MACOS_X86_64}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +fi + +if are_conditions_met_arm_tests; then + cat >> $pipelineName <<- YAML + - label: ":linux: ARM Ubuntu Unit Tests" + key: "extended-arm64-unit-test" + command: "cd $BEATS_PROJECT_NAME && mage build unitTest" + agents: + provider: "aws" + imagePrefix: "${IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +fi + +echo "Check and add the Packaging into the pipeline" +if are_conditions_met_packaging; then + cat >> $pipelineName <<- YAML + + - wait: ~ + depends_on: + - step: "mandatory-tests" + allow_failure: false + + - group: "Packaging" # TODO: check conditions for future the main pipeline migration: https://github.com/elastic/beats/pull/28589 + key: "packaging" + steps: + - label: ":linux: Packaging Linux" + key: "packaging-linux" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + env: + PLATFORMS: "${PACKAGING_PLATFORMS}" + + - label: ":linux: Packaging ARM" + key: "packaging-arm" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "aws" + imagePrefix: "${IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" + env: + PLATFORMS: "${PACKAGING_ARM_PLATFORMS}" + PACKAGES: "docker" + +YAML +fi + +echo "--- Printing dynamic steps" #TODO: remove if the pipeline is public +cat $pipelineName + +echo "--- Loading dynamic steps" +buildkite-agent pipeline upload $pipelineName diff --git a/.buildkite/scripts/generate_xpack_dockerlogbeat_pipeline.sh b/.buildkite/scripts/generate_xpack_dockerlogbeat_pipeline.sh new file mode 100755 index 00000000000..46e92f8ddf9 --- /dev/null +++ b/.buildkite/scripts/generate_xpack_dockerlogbeat_pipeline.sh @@ -0,0 +1,83 @@ +#!/usr/bin/env bash + +source .buildkite/scripts/common.sh + +set -euo pipefail + +pipelineName="pipeline.xpack-dockerlogbeat-dynamic.yml" + +echo "Add the mandatory and extended tests without additional conditions into the pipeline" +if are_conditions_met_mandatory_tests; then + cat > $pipelineName <<- YAML + +steps: + + - group: "Mandatory Tests" + key: "mandatory-tests" + steps: + - label: ":linux: Ubuntu Unit Tests" + key: "mandatory-linux-unit-test" + command: "cd $BEATS_PROJECT_NAME && mage build unitTest" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + + - label: ":go: Go Integration Tests" + key: "mandatory-int-test" + command: "cd $BEATS_PROJECT_NAME && mage goIntegTest" + env: + MODULE: $MODULE + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + +YAML +fi + +echo "Check and add the Packaging into the pipeline" +if are_conditions_met_packaging; then + cat >> $pipelineName <<- YAML + + - wait: ~ + depends_on: + - step: "mandatory-tests" + allow_failure: 
false + + - group: "Packaging" # TODO: check conditions for future the main pipeline migration: https://github.com/elastic/beats/pull/28589 + key: "packaging" + steps: + - label: ":linux: Packaging Linux" + key: "packaging-linux" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + env: + PLATFORMS: "${PACKAGING_PLATFORMS}" + + - label: ":linux: Packaging ARM" + key: "packaging-arm" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "aws" + imagePrefix: "${IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" + env: + PLATFORMS: "${PACKAGING_ARM_PLATFORMS}" + PACKAGES: "docker" + +YAML +fi + +echo "--- Printing dynamic steps" #TODO: remove if the pipeline is public +cat $pipelineName + +echo "--- Loading dynamic steps" +buildkite-agent pipeline upload $pipelineName diff --git a/.buildkite/scripts/generate_xpack_filebeat_pipeline.sh b/.buildkite/scripts/generate_xpack_filebeat_pipeline.sh new file mode 100755 index 00000000000..80d28770bf0 --- /dev/null +++ b/.buildkite/scripts/generate_xpack_filebeat_pipeline.sh @@ -0,0 +1,190 @@ +#!/usr/bin/env bash + +source .buildkite/scripts/common.sh + +set -euo pipefail + +pipelineName="pipeline.xpack-filebeat-dynamic.yml" + +echo "Add the mandatory and extended tests without additional conditions into the pipeline" +if are_conditions_met_mandatory_tests; then + cat > $pipelineName <<- YAML + +steps: + + - group: "Mandatory Tests" + key: "mandatory-tests" + steps: + - label: ":linux: Ubuntu Unit Tests" + key: "mandatory-linux-unit-test" + command: "cd $BEATS_PROJECT_NAME && mage build unitTest" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + + - label: ":go: Go Integration Tests" + key: "mandatory-int-test" + command: "cd $BEATS_PROJECT_NAME && mage goIntegTest" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + + - label: ":python: Python Integration Tests" + key: "mandatory-python-int-test" + command: "cd $BEATS_PROJECT_NAME && mage pythonIntegTest" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + + - label: ":windows: Windows Unit Tests - {{matrix.image}}" + command: ".buildkite/scripts/win_unit_tests.ps1" + key: "mandatory-win-unit-tests" + agents: + provider: "gcp" + image: "{{matrix.image}}" + machineType: "${GCP_WIN_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + matrix: + setup: + image: + - "${IMAGE_WIN_2016}" + - "${IMAGE_WIN_2022}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +## TODO: this condition will be changed in the Phase 3 of the Migration Plan https://docs.google.com/document/d/1IPNprVtcnHlem-uyGZM0zGzhfUuFAh4LeSl9JFHMSZQ/edit#heading=h.sltz78yy249h + + - group: "Extended Windows Tests" + key: "extended-win-tests" + steps: + + - label: ":windows: Windows Unit Tests - {{matrix.image}}" + command: ".buildkite/scripts/win_unit_tests.ps1" + key: "extended-win-unit-tests" + agents: + provider: "gcp" + image: "{{matrix.image}}" + machineType: "${GCP_WIN_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + matrix: + setup: + image: + - "${IMAGE_WIN_10}" + - 
"${IMAGE_WIN_11}" + - "${IMAGE_WIN_2019}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +else + echo "The conditions don't match to requirements for generating pipeline steps." + exit 0 +fi + +if are_conditions_met_arm_tests || are_conditions_met_macos_tests || are_conditions_met_aws_tests; then + cat >> $pipelineName <<- YAML + + - group: "Extended Tests" + key: "extended-tests" + steps: + +YAML +fi + +if are_conditions_met_macos_tests; then + cat >> $pipelineName <<- YAML + + - label: ":mac: MacOS Unit Tests" + key: "extended-macos-unit-tests" + command: ".buildkite/scripts/unit_tests.sh" + agents: + provider: "orka" + imagePrefix: "${IMAGE_MACOS_X86_64}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +fi + +if are_conditions_met_arm_tests; then + cat >> $pipelineName <<- YAML + - label: ":linux: ARM Ubuntu Unit Tests" + key: "extended-arm64-unit-test" + command: "cd $BEATS_PROJECT_NAME && mage build unitTest" + agents: + provider: "aws" + imagePrefix: "${IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +fi + +if are_conditions_met_aws_tests; then + cat >> $pipelineName <<- YAML + - label: ":linux: Cloud Tests" + key: "extended-cloud-test" + command: ".buildkite/scripts/cloud_tests.sh" + env: + MODULE: $MODULE + agents: + provider: "gcp" + image: "${DEFAULT_UBUNTU_X86_64_IMAGE}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +fi + +echo "Check and add the Packaging into the pipeline" +if are_conditions_met_packaging; then + cat >> $pipelineName <<- YAML + + - wait: ~ + depends_on: + - step: "mandatory-tests" + allow_failure: false + + - group: "Packaging" # TODO: check conditions for future the main pipeline migration: https://github.com/elastic/beats/pull/28589 + key: "packaging" + steps: + - label: ":linux: Packaging Linux" + key: "packaging-linux" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + env: + PLATFORMS: "${PACKAGING_PLATFORMS}" + + - label: ":linux: Packaging ARM" + key: "packaging-arm" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "aws" + imagePrefix: "${IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" + env: + PLATFORMS: "${PACKAGING_ARM_PLATFORMS}" + PACKAGES: "docker" + +YAML +fi + +echo "--- Printing dynamic steps" #TODO: remove if the pipeline is public +cat $pipelineName + +echo "--- Loading dynamic steps" +buildkite-agent pipeline upload $pipelineName diff --git a/.buildkite/scripts/generate_xpack_heartbeat_pipeline.sh b/.buildkite/scripts/generate_xpack_heartbeat_pipeline.sh new file mode 100755 index 00000000000..b51ca0ab3b9 --- /dev/null +++ b/.buildkite/scripts/generate_xpack_heartbeat_pipeline.sh @@ -0,0 +1,145 @@ +#!/usr/bin/env bash + +source .buildkite/scripts/common.sh + +set -euo pipefail + +pipelineName="pipeline.xpack-heartbeat-dynamic.yml" + +echo "Add the mandatory and extended tests without additional conditions into the pipeline" +if are_conditions_met_mandatory_tests; then + cat > $pipelineName <<- YAML + +steps: + + - group: "Mandatory Tests" + key: "mandatory-tests" + steps: + - label: ":linux: Ubuntu Unit Tests" + key: "mandatory-linux-unit-test" + command: ".buildkite/scripts/unit_tests.sh" + agents: + provider: "gcp" + image: 
"${DEFAULT_UBUNTU_X86_64_IMAGE}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + + - label: ":go: Go Integration Tests" + key: "mandatory-int-test" + command: ".buildkite/scripts/go_int_tests.sh" + agents: + provider: "gcp" + image: "${DEFAULT_UBUNTU_X86_64_IMAGE}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + +# ## TODO: there are windows test failures already reported +# ## https://github.com/elastic/beats/issues/23957 and https://github.com/elastic/beats/issues/23958 +# ## waiting for being fixed. + +# - label: ":windows: Windows Unit Tests - {{matrix.image}}" +# command: ".buildkite/scripts/win_unit_tests.ps1" +# key: "mandatory-win-unit-tests" +# agents: +# provider: "gcp" +# image: "{{matrix.image}}" +# machineType: "${GCP_WIN_MACHINE_TYPE}" +# disk_size: 100 +# disk_type: "pd-ssd" +# matrix: +# setup: +# image: +# - "${IMAGE_WIN_2016}" +# - "${IMAGE_WIN_2022}" +# artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +# ## TODO: this condition will be changed in the Phase 3 of the Migration Plan https://docs.google.com/document/d/1IPNprVtcnHlem-uyGZM0zGzhfUuFAh4LeSl9JFHMSZQ/edit#heading=h.sltz78yy249h + +# - group: "Extended Windows Tests" +# key: "extended-win-tests" +# steps: + +# - label: ":windows: Windows Unit Tests - {{matrix.image}}" +# command: ".buildkite/scripts/win_unit_tests.ps1" +# key: "extended-win-unit-tests" +# agents: +# provider: "gcp" +# image: "{{matrix.image}}" +# machineType: "${GCP_WIN_MACHINE_TYPE}" +# disk_size: 100 +# disk_type: "pd-ssd" +# matrix: +# setup: +# image: +# - "${IMAGE_WIN_10}" +# - "${IMAGE_WIN_11}" +# - "${IMAGE_WIN_2019}" +# artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +else + echo "The conditions don't match to requirements for generating pipeline steps." 
+ exit 0 +fi + +if are_conditions_met_macos_tests; then + cat >> $pipelineName <<- YAML + + - group: "Extended Tests" + key: "extended-tests" + steps: + + - label: ":mac: MacOS Unit Tests" + key: "extended-macos-unit-tests" + command: ".buildkite/scripts/unit_tests.sh" + agents: + provider: "orka" + imagePrefix: "${IMAGE_MACOS_X86_64}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +fi + +echo "Check and add the Packaging into the pipeline" +if are_conditions_met_packaging; then + cat >> $pipelineName <<- YAML + + - wait: ~ + depends_on: + - step: "mandatory-tests" + allow_failure: false + + - group: "Packaging" # TODO: check conditions for future the main pipeline migration: https://github.com/elastic/beats/pull/28589 + key: "packaging" + steps: + - label: ":linux: Packaging Linux" + key: "packaging-linux" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + env: + PLATFORMS: "${PACKAGING_PLATFORMS}" + + - label: ":linux: Packaging ARM" + key: "packaging-arm" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "aws" + imagePrefix: "${IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" + env: + PLATFORMS: "${PACKAGING_ARM_PLATFORMS}" + PACKAGES: "docker" + +YAML +fi + +echo "--- Printing dynamic steps" #TODO: remove if the pipeline is public +cat $pipelineName + +echo "--- Loading dynamic steps" +buildkite-agent pipeline upload $pipelineName diff --git a/.buildkite/scripts/generate_xpack_metricbeat_pipeline.sh b/.buildkite/scripts/generate_xpack_metricbeat_pipeline.sh new file mode 100755 index 00000000000..ddc3ce2c8f2 --- /dev/null +++ b/.buildkite/scripts/generate_xpack_metricbeat_pipeline.sh @@ -0,0 +1,191 @@ +#!/usr/bin/env bash + +source .buildkite/scripts/common.sh + +set -euo pipefail + +pipelineName="pipeline.xpack-metricbeat-dynamic.yml" + +echo "Add the mandatory and extended tests without additional conditions into the pipeline" +if are_conditions_met_mandatory_tests; then + cat > $pipelineName <<- YAML + +steps: + + - group: "Mandatory Tests" + key: "mandatory-tests" + steps: + - label: ":linux: Ubuntu Unit Tests" + key: "mandatory-linux-unit-test" + command: "cd $BEATS_PROJECT_NAME && mage build unitTest" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + + - label: ":go: Go Integration Tests" + key: "mandatory-int-test" + command: ".buildkite/scripts/go_int_tests.sh" + env: + MODULE: $MODULE + agents: + provider: "gcp" + image: "${DEFAULT_UBUNTU_X86_64_IMAGE}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + + - label: ":python: Python Integration Tests" + key: "mandatory-python-int-test" + command: ".buildkite/scripts/py_int_tests.sh" + env: + MODULE: $MODULE + agents: + provider: "gcp" + image: "${DEFAULT_UBUNTU_X86_64_IMAGE}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + + - label: ":windows: Windows Unit Tests - {{matrix.image}}" + command: ".buildkite/scripts/win_unit_tests.ps1" + key: "mandatory-win-unit-tests" + agents: + provider: "gcp" + image: "{{matrix.image}}" + machineType: "${GCP_WIN_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + matrix: + setup: + image: + - "${IMAGE_WIN_2016}" + - "${IMAGE_WIN_2022}" + artifact_paths: 
"${BEATS_PROJECT_NAME}/build/*.*" + +## TODO: this condition will be changed in the Phase 3 of the Migration Plan https://docs.google.com/document/d/1IPNprVtcnHlem-uyGZM0zGzhfUuFAh4LeSl9JFHMSZQ/edit#heading=h.sltz78yy249h + - group: "Extended Windows Tests" + key: "extended-win-tests" + steps: + - label: ":windows: Windows 10 Unit Tests" + key: "extended-win-10-unit-tests" + command: ".buildkite/scripts/win_unit_tests.ps1" + agents: + provider: "gcp" + image: "${IMAGE_WIN_10}" + machineType: "${GCP_WIN_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + + - label: ":windows: Windows 11 Unit Tests" + key: "extended-win-11-unit-tests" + command: ".buildkite/scripts/win_unit_tests.ps1" + agents: + provider: "gcp" + image: "${IMAGE_WIN_11}" + machineType: "${GCP_WIN_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + + - label: ":windows: Win 2019 Unit Tests" + key: "extended-win-2019-unit-tests" + command: ".buildkite/scripts/win_unit_tests.ps1" + agents: + provider: "gcp" + image: "${IMAGE_WIN_2019}" + machineType: "${GCP_WIN_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +else + echo "The conditions don't match to requirements for generating pipeline steps." + exit 0 +fi + +#TODO: replace by commented-out below condition when issues mentioned in the PR https://github.com/elastic/beats/pull/38081 are resolved +if are_conditions_met_aws_tests || are_conditions_met_macos_tests ; then + cat >> $pipelineName <<- YAML + + - group: "Extended Tests" + key: "extended-tests" + steps: + +YAML +fi + +if are_conditions_met_macos_tests; then + cat >> $pipelineName <<- YAML + + - label: ":mac: MacOS Unit Tests" + key: "extended-macos-unit-tests" + command: ".buildkite/scripts/unit_tests.sh" + agents: + provider: "orka" + imagePrefix: "${IMAGE_MACOS_X86_64}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +fi + +if are_conditions_met_aws_tests; then + cat >> $pipelineName <<- YAML + - label: ":linux: Cloud Tests" + key: "extended-cloud-test" + command: ".buildkite/scripts/cloud_tests.sh" + env: + MODULE: $MODULE + agents: + provider: "gcp" + image: "${DEFAULT_UBUNTU_X86_64_IMAGE}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +fi + +echo "Check and add the Packaging into the pipeline" +if are_conditions_met_packaging; then + cat >> $pipelineName <<- YAML + + - wait: ~ + depends_on: + - step: "mandatory-tests" + allow_failure: false + + - group: "Packaging" # TODO: check conditions for future the main pipeline migration: https://github.com/elastic/beats/pull/28589 + key: "packaging" + steps: + - label: ":linux: Packaging Linux" + key: "packaging-linux" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + env: + PLATFORMS: "${PACKAGING_PLATFORMS}" + + - label: ":linux: Packaging ARM" + key: "packaging-arm" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "aws" + imagePrefix: "${IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" + env: + PLATFORMS: "${PACKAGING_ARM_PLATFORMS}" + PACKAGES: "docker" + +YAML +fi + +echo "--- Printing dynamic steps" #TODO: remove if the pipeline is public +cat $pipelineName + +echo "--- Loading dynamic steps" +buildkite-agent pipeline upload 
diff --git a/.buildkite/scripts/generate_xpack_osquerybeat_pipeline.sh b/.buildkite/scripts/generate_xpack_osquerybeat_pipeline.sh
new file mode 100755
index 00000000000..d6887c4c382
--- /dev/null
+++ b/.buildkite/scripts/generate_xpack_osquerybeat_pipeline.sh
@@ -0,0 +1,130 @@
+#!/usr/bin/env bash
+
+source .buildkite/scripts/common.sh
+
+set -euo pipefail
+
+pipelineName="pipeline.xpack-osquerybeat-dynamic.yml"
+
+echo "Add the mandatory and extended tests without additional conditions into the pipeline"
+if are_conditions_met_mandatory_tests; then
+  cat > $pipelineName <<- YAML
+
+steps:
+
+  - group: "Mandatory Tests"
+    key: "mandatory-tests"
+    steps:
+      - label: ":linux: Ubuntu Unit Tests"
+        key: "mandatory-linux-unit-test"
+        command: "cd $BEATS_PROJECT_NAME && mage build unitTest"
+        agents:
+          provider: "gcp"
+          image: "${IMAGE_UBUNTU_X86_64}"
+          machineType: "${GCP_DEFAULT_MACHINE_TYPE}"
+        artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml"
+
+      - label: ":go: Go Integration Tests"
+        key: "mandatory-int-test"
+        command: "cd $BEATS_PROJECT_NAME && mage goIntegTest"
+        agents:
+          provider: "gcp"
+          image: "${IMAGE_UBUNTU_X86_64}"
+          machineType: "${GCP_HI_PERF_MACHINE_TYPE}"
+        artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml"
+
+      - label: ":windows: Windows Unit Tests - {{matrix.image}}"
+        command: ".buildkite/scripts/win_unit_tests.ps1"
+        key: "mandatory-win-unit-tests"
+        agents:
+          provider: "gcp"
+          image: "{{matrix.image}}"
+          machineType: "${GCP_WIN_MACHINE_TYPE}"
+          disk_size: 100
+          disk_type: "pd-ssd"
+        matrix:
+          setup:
+            image:
+              - "${IMAGE_WIN_2016}"
+              - "${IMAGE_WIN_2022}"
+        artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*"
+
+## TODO: this condition will be changed in the Phase 3 of the Migration Plan https://docs.google.com/document/d/1IPNprVtcnHlem-uyGZM0zGzhfUuFAh4LeSl9JFHMSZQ/edit#heading=h.sltz78yy249h
+
+  - group: "Extended Windows Tests"
+    key: "extended-win-tests"
+    steps:
+
+      - label: ":windows: Windows Unit Tests - {{matrix.image}}"
+        command: ".buildkite/scripts/win_unit_tests.ps1"
+        key: "extended-win-unit-tests"
+        agents:
+          provider: "gcp"
+          image: "{{matrix.image}}"
+          machineType: "${GCP_WIN_MACHINE_TYPE}"
+          disk_size: 100
+          disk_type: "pd-ssd"
+        matrix:
+          setup:
+            image:
+              - "${IMAGE_WIN_10}"
+              - "${IMAGE_WIN_11}"
+              - "${IMAGE_WIN_2019}"
+        artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*"
+
+YAML
+else
+  echo "The conditions don't match the requirements for generating pipeline steps."
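
Each of these generators assembles its YAML incrementally: `cat >` writes the mandatory groups, each later `cat >>` appends an optional group, and only the fully assembled file is handed to the agent, once. The skeleton of that flow, with all step content elided:

cat > "$pipelineName" <<- YAML
steps:
  - group: "Mandatory Tests"
YAML
if are_conditions_met_macos_tests; then
  cat >> "$pipelineName" <<- YAML
  - group: "Extended Tests"
YAML
fi
buildkite-agent pipeline upload "$pipelineName"
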
+ exit 0 +fi + +if are_conditions_met_macos_tests; then + cat >> $pipelineName <<- YAML + + - group: "Extended Tests" + key: "extended-tests" + steps: + + - label: ":mac: MacOS Unit Tests" + key: "extended-macos-unit-tests" + command: ".buildkite/scripts/unit_tests.sh" + agents: + provider: "orka" + imagePrefix: "${IMAGE_MACOS_X86_64}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +fi + +echo "Check and add the Packaging into the pipeline" +if are_conditions_met_packaging; then + cat >> $pipelineName <<- YAML + + - wait: ~ + depends_on: + - step: "mandatory-tests" + allow_failure: false + + - group: "Packaging" # TODO: check conditions for future the main pipeline migration: https://github.com/elastic/beats/pull/28589 + key: "packaging" + steps: + - label: ":linux: Packaging Linux" + key: "packaging-linux" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + env: + PLATFORMS: "${PACKAGING_PLATFORMS}" + +YAML +fi + +echo "--- Printing dynamic steps" #TODO: remove if the pipeline is public +cat $pipelineName + +echo "--- Loading dynamic steps" +buildkite-agent pipeline upload $pipelineName diff --git a/.buildkite/scripts/generate_xpack_packetbeat_pipeline.sh b/.buildkite/scripts/generate_xpack_packetbeat_pipeline.sh new file mode 100644 index 00000000000..4eb2a1c3e04 --- /dev/null +++ b/.buildkite/scripts/generate_xpack_packetbeat_pipeline.sh @@ -0,0 +1,195 @@ +#!/usr/bin/env bash + +source .buildkite/scripts/common.sh + +set -euo pipefail + +pipelineName="pipeline.xpack-packetbeat-dynamic.yml" + +echo "Add the mandatory and extended tests without additional conditions into the pipeline" +if are_conditions_met_mandatory_tests; then + cat > $pipelineName <<- YAML + +steps: + + - group: "Mandatory Tests" + key: "mandatory-tests" + steps: + - label: ":linux: Ubuntu Unit Tests" + key: "mandatory-linux-unit-test" + command: "cd $BEATS_PROJECT_NAME && mage build unitTest" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + + - label: ":linux: Ubuntu System Tests" + key: "mandatory-linux-system-test" + command: "cd $BEATS_PROJECT_NAME && mage systemTest" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + + - label: ":rhel: RHEL-9 Unit Tests" + key: "mandatory-rhel9-unit-test" + command: ".buildkite/scripts/unit_tests.sh" + agents: + provider: "gcp" + image: "${IMAGE_RHEL9_X86_64}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + + + - label: ":windows: Windows Unit Tests - {{matrix.image}}" + command: ".buildkite/scripts/win_unit_tests.ps1" + key: "mandatory-win-unit-tests" + agents: + provider: "gcp" + image: "{{matrix.image}}" + machineType: "${GCP_WIN_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + matrix: + setup: + image: + - "${IMAGE_WIN_2016}" + - "${IMAGE_WIN_2022}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + + ## TODO: uncomment when the issue https://github.com/elastic/beats/issues/38142 is solved + # - label: ":windows: Windows 2022 System Tests" + # key: "mandatory-win-2022-system-tests" + # command: ".buildkite/scripts/win_unit_tests.ps1 systemtest" + # agents: + # provider: "gcp" + # image: "${IMAGE_WIN_2022}" + # machineType: 
"${GCP_WIN_MACHINE_TYPE}" + # disk_size: 100 + # disk_type: "pd-ssd" + # artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +## TODO: this condition will be changed in the Phase 3 of the Migration Plan https://docs.google.com/document/d/1IPNprVtcnHlem-uyGZM0zGzhfUuFAh4LeSl9JFHMSZQ/edit#heading=h.sltz78yy249h + + - group: "Extended Windows Tests" + key: "extended-win-tests" + steps: + + - label: ":windows: Windows Unit Tests - {{matrix.image}}" + command: ".buildkite/scripts/win_unit_tests.ps1" + key: "extended-win-unit-tests" + agents: + provider: "gcp" + image: "{{matrix.image}}" + machineType: "${GCP_WIN_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + matrix: + setup: + image: + - "${IMAGE_WIN_10}" + - "${IMAGE_WIN_11}" + - "${IMAGE_WIN_2019}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + + ## TODO: uncomment when the issue https://github.com/elastic/beats/issues/38142 is solved + # - label: ":windows: Windows 10 System Tests" + # key: "extended-win-10-system-tests" + # command: ".buildkite/scripts/win_unit_tests.ps1 systemtest" + # agents: + # provider: "gcp" + # image: "${IMAGE_WIN_10}" + # machineType: "${GCP_WIN_MACHINE_TYPE}" + # disk_size: 100 + # disk_type: "pd-ssd" + # artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +else + echo "The conditions don't match to requirements for generating pipeline steps." + exit 0 +fi + +if are_conditions_met_arm_tests || are_conditions_met_macos_tests ; then + cat >> $pipelineName <<- YAML + + - group: "Extended Tests" + key: "extended-tests" + steps: + +YAML +fi + +if are_conditions_met_macos_tests; then + cat >> $pipelineName <<- YAML + + - label: ":mac: MacOS Unit Tests" + key: "extended-macos-unit-tests" + command: ".buildkite/scripts/unit_tests.sh" + agents: + provider: "orka" + imagePrefix: "${IMAGE_MACOS_X86_64}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +fi + +if are_conditions_met_arm_tests; then + cat >> $pipelineName <<- YAML + - label: ":linux: ARM Ubuntu Unit Tests" + key: "extended-arm64-unit-test" + command: "cd $BEATS_PROJECT_NAME && mage build unitTest" + agents: + provider: "aws" + imagePrefix: "${IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +fi + +echo "Check and add the Packaging into the pipeline" +if are_conditions_met_packaging; then + cat >> $pipelineName <<- YAML + + - wait: ~ + depends_on: + - step: "mandatory-tests" + allow_failure: false + + - group: "Packaging" # TODO: check conditions for future the main pipeline migration: https://github.com/elastic/beats/pull/28589 + key: "packaging" + steps: + - label: ":linux: Packaging Linux" + key: "packaging-linux" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + env: + PLATFORMS: "${PACKAGING_PLATFORMS}" + + - label: ":linux: Packaging ARM" + key: "packaging-arm" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "aws" + imagePrefix: "${IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" + env: + PLATFORMS: "${PACKAGING_ARM_PLATFORMS}" + PACKAGES: "docker" + +YAML +fi + +echo "--- Printing dynamic steps" #TODO: remove if the pipeline is public +cat $pipelineName + +echo "--- Loading dynamic steps" +buildkite-agent pipeline upload $pipelineName diff --git a/.buildkite/scripts/generate_xpack_winlogbeat_pipeline.sh 
diff --git a/.buildkite/scripts/generate_xpack_winlogbeat_pipeline.sh b/.buildkite/scripts/generate_xpack_winlogbeat_pipeline.sh
new file mode 100755
index 00000000000..108a70c1562
--- /dev/null
+++ b/.buildkite/scripts/generate_xpack_winlogbeat_pipeline.sh
@@ -0,0 +1,110 @@
+#!/usr/bin/env bash
+
+source .buildkite/scripts/common.sh
+
+set -euo pipefail
+
+pipelineName="pipeline.xpack-winlogbeat-dynamic.yml"
+
+echo "Add the mandatory and extended tests without additional conditions into the pipeline"
+if are_conditions_met_mandatory_tests; then
+  cat > $pipelineName <<- YAML
+
+steps:
+
+  - group: "Mandatory Tests"
+    key: "mandatory-tests"
+    steps:
+
+      - label: ":windows: Windows 2019 Unit (MODULE) Tests"
+        key: "mandatory-win-2019-unit-tests"
+        command: ".buildkite/scripts/win_unit_tests.ps1"
+        env:
+          MODULE: $MODULE
+        agents:
+          provider: "gcp"
+          image: "${IMAGE_WIN_2019}"
+          machine_type: "${GCP_WIN_MACHINE_TYPE}"
+          disk_size: 100
+          disk_type: "pd-ssd"
+        artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*"
+
+      - label: ":windows: Windows 2016/2022 Unit Tests - {{matrix.image}}"
+        command: ".buildkite/scripts/win_unit_tests.ps1"
+        key: "mandatory-win-unit-tests"
+        agents:
+          provider: "gcp"
+          image: "{{matrix.image}}"
+          machine_type: "${GCP_WIN_MACHINE_TYPE}"
+          disk_size: 100
+          disk_type: "pd-ssd"
+        matrix:
+          setup:
+            image:
+              - "${IMAGE_WIN_2016}"
+              - "${IMAGE_WIN_2022}"
+        artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*"
+
+# echo "Add the extended windows tests into the pipeline"
+# TODO: ADD conditions from the main pipeline
+
+  - group: "Extended Windows Tests"
+    key: "extended-win-tests"
+    steps:
+
+      - label: ":windows: Windows Unit Tests - {{matrix.image}}"
+        command: ".buildkite/scripts/win_unit_tests.ps1"
+        key: "extended-win-unit-tests"
+        agents:
+          provider: "gcp"
+          image: "{{matrix.image}}"
+          machineType: "${GCP_WIN_MACHINE_TYPE}"
+          disk_size: 100
+          disk_type: "pd-ssd"
+        matrix:
+          setup:
+            image:
+              - "${IMAGE_WIN_10}"
+              - "${IMAGE_WIN_11}"
+              - "${IMAGE_WIN_2019}"
+        artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*"
+
+YAML
+else
+  echo "The conditions don't match the requirements for generating pipeline steps."
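
The packaging group that these generators append behind a `wait` step deserves a note: with `allow_failure: false` on the dependency, packaging is only reached when every step in the "mandatory-tests" group has passed. As the generators emit it (keys here are placeholders):

cat >> "$pipelineName" <<- YAML
  - wait: ~
    depends_on:
      - step: "mandatory-tests"
        allow_failure: false
YAML
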
+ exit 0 +fi + +echo "Check and add the Packaging into the pipeline" +if are_conditions_met_packaging; then + cat >> $pipelineName <<- YAML + + - wait: ~ + depends_on: + - step: "mandatory-tests" + allow_failure: false + + - group: "Packaging" # TODO: check conditions for future the main pipeline migration: https://github.com/elastic/beats/pull/28589 + key: "packaging" + steps: + + - label: ":linux: Packaging Linux" + key: "packaging-linux" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + env: + PLATFORMS: "${PACKAGING_PLATFORMS}" + +YAML +fi + +echo "--- Printing dynamic steps" #TODO: remove if the pipeline is public +cat $pipelineName + +echo "--- Loading dynamic steps" +buildkite-agent pipeline upload $pipelineName diff --git a/.buildkite/scripts/install_tools.sh b/.buildkite/scripts/install_tools.sh index 80e70ae96c5..3d25cf8e5c9 100755 --- a/.buildkite/scripts/install_tools.sh +++ b/.buildkite/scripts/install_tools.sh @@ -41,6 +41,16 @@ with_mage with_python with_dependencies config_git + +if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-heartbeat" ]]; then + # Install NodeJS + withNodeJSEnv "${NODEJS_VERSION}" + installNodeJsDependencies + + echo "Install @elastic/synthetics" + npm i -g @elastic/synthetics +fi + mage dumpVariables #sudo command doesn't work at the "pre-command" hook because of another user environment (root with strange permissions) diff --git a/.buildkite/auditbeat/scripts/package-step.sh b/.buildkite/scripts/packaging/package-step.sh similarity index 55% rename from .buildkite/auditbeat/scripts/package-step.sh rename to .buildkite/scripts/packaging/package-step.sh index cb06895879a..9eddfafcfba 100755 --- a/.buildkite/auditbeat/scripts/package-step.sh +++ b/.buildkite/scripts/packaging/package-step.sh @@ -4,44 +4,45 @@ set -euo pipefail source .buildkite/env-scripts/util.sh -changeset="^auditbeat/ +changeset="^${BEATS_PROJECT_NAME}/ ^go.mod ^pytest.ini ^dev-tools/ ^libbeat/ ^testing/ -^\.buildkite/auditbeat/" +^\.buildkite/${BEATS_PROJECT_NAME}/" if are_files_changed "$changeset"; then bk_pipeline=$(cat <<-YAML steps: - - label: ":ubuntu: Packaging Linux X86" + - label: ":ubuntu: ${BEATS_PROJECT_NAME}/Packaging Linux X86" key: "package-linux-x86" env: PLATFORMS: "+all linux/amd64 linux/arm64 windows/amd64 darwin/amd64 darwin/arm64" - command: - - ".buildkite/auditbeat/scripts/package.sh" + SNAPSHOT: true + command: ".buildkite/scripts/packaging/package.sh" notify: - github_commit_status: - context: "Auditbeat/Packaging: Linux X86" + context: "${BEATS_PROJECT_NAME}/Packaging: Linux X86" agents: - provider: "gcp" + provider: gcp image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" - - label: ":linux: Packaging Linux ARM" + - label: ":linux: ${BEATS_PROJECT_NAME}/Packaging Linux ARM" key: "package-linux-arm" env: PLATFORMS: "linux/arm64" PACKAGES: "docker" - command: - - ".buildkite/auditbeat/scripts/package.sh" + SNAPSHOT: true + command: ".buildkite/scripts/packaging/package.sh" notify: - github_commit_status: - context: "Auditbeat/Packaging: ARM" + context: "${BEATS_PROJECT_NAME}/Packaging: ARM" agents: provider: "aws" - imagePrefix: "${IMAGE_UBUNTU_ARM_64}" - instanceType: "t4g.large" + imagePrefix: "${AWS_IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" YAML ) echo "${bk_pipeline}" | buildkite-agent pipeline upload diff --git a/.buildkite/scripts/packaging/package-util.sh 
b/.buildkite/scripts/packaging/package-util.sh new file mode 100755 index 00000000000..4a50457cc9c --- /dev/null +++ b/.buildkite/scripts/packaging/package-util.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +set -euo pipefail + +is_pr() { + if [[ $BUILDKITE_PULL_REQUEST != false ]]; then + return 0 + else + return 1 + fi +} + +define_tags() { + aliasVersion="${VERSION%.*}${IMG_POSTFIX}" + tags=("${BUILDKITE_COMMIT}") + + if is_pr; then + tags+=("pr-${GITHUB_PR_NUMBER}") + else + tags+=("${SOURCE_TAG}" "${aliasVersion}") + fi +} diff --git a/.buildkite/scripts/packaging/package.sh b/.buildkite/scripts/packaging/package.sh new file mode 100755 index 00000000000..5744ee0776b --- /dev/null +++ b/.buildkite/scripts/packaging/package.sh @@ -0,0 +1,51 @@ +#!/usr/bin/env bash + +set -euo pipefail + +source .buildkite/scripts/packaging/package-util.sh + +IMG_POSTFIX="-SNAPSHOT" +VARIANTS=("" "-ubi" "-oss") +VERSION="$(make get-version)" +SOURCE_TAG+="${VERSION}${IMG_POSTFIX}" +TARGET="observability-ci/${BEATS_PROJECT_NAME}" + +echo "--- Creating package" +mage -d "${BEATS_PROJECT_NAME}" package + +echo "--- Distribution list" +dir="${BEATS_PROJECT_NAME}/build/distributions" +buildkite-agent artifact upload "$dir/*.tar.gz;$dir/*.tar.gz.sha512" + +echo "--- Docker image list" +docker images + +define_tags + +targetSuffix="" +if [[ ${HW_TYPE} == "aarch64" || ${HW_TYPE} == "arm64" ]]; then + targetSuffix="-arm64" +fi + +for variant in "${VARIANTS[@]}"; do + source="beats/${BEATS_PROJECT_NAME}${variant}" + + for tag in "${tags[@]}"; do + targetTag=$tag${targetSuffix} + + sourceName="${DOCKER_REGISTRY}/${source}:${SOURCE_TAG}" + targetName="${DOCKER_REGISTRY}/${TARGET}:${targetTag}" + #TODO Remove following line once beats fully migrated to Buildkite and Jenkins builds will be disabled + #Avoid clashing with the Jenkins produced images + targetName="${targetName}-buildkite" + + if docker image inspect "${sourceName}" &>/dev/null; then + echo "--- Tag & Push with target: $targetName" + echo "Source name: $sourceName" + docker tag "$sourceName" "$targetName" + docker push "$targetName" + else + echo "Docker image ${sourceName} does not exist" + fi + done +done diff --git a/.buildkite/scripts/run_dynamic_pipeline_tests.sh b/.buildkite/scripts/run_dynamic_pipeline_tests.sh new file mode 100755 index 00000000000..8eb72d7a96b --- /dev/null +++ b/.buildkite/scripts/run_dynamic_pipeline_tests.sh @@ -0,0 +1,43 @@ +#!/usr/bin/env bash +# Run tests for the dynamic pipeline generator only if it's a PR and related files have been changed +# this will allow us to fail fast, if e.g. a PR has broken the generator + +set -euo pipefail + +are_paths_changed() { + local patterns=("${@}") + local changelist=() + for pattern in "${patterns[@]}"; do + changed_files=($(git diff --name-only HEAD@{1} HEAD | grep -E "$pattern")) + if [ "${#changed_files[@]}" -gt 0 ]; then + changelist+=("${changed_files[@]}") + fi + done + + if [ "${#changelist[@]}" -gt 0 ]; then + echo "Files changed:" + echo "${changelist[*]}" + return 0 + else + echo "No files changed within specified changeset:" + echo "${patterns[*]}" + return 1 + fi +} + +pipeline_generator_changeset=( + "^.buildkite/pipeline.py" + "^*/buildkite.yml" +) + +if ! are_paths_changed "${pipeline_generator_changeset[@]}" || [[ "${BUILDKITE_PULL_REQUEST}" == "false" ]]; then + echo "~~~ Skipping pipeline generator tests" + exit +fi + +echo "~~~ Running pipeline generator tests" + +python3 -mpip install --quiet "pytest" +pushd .buildkite +pytest . 
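
To make define_tags in package-util.sh concrete: with VERSION=8.14.0 and IMG_POSTFIX=-SNAPSHOT (illustrative values), `${VERSION%.*}` drops the patch segment, so aliasVersion becomes 8.14-SNAPSHOT; a non-PR build of commit abc123 would then be tagged abc123, 8.14.0-SNAPSHOT, and 8.14-SNAPSHOT, while a PR build gets abc123 and pr-<PR number> instead. The expansion at its core:

    VERSION="8.14.0"; IMG_POSTFIX="-SNAPSHOT"    # illustrative values
    echo "${VERSION%.*}${IMG_POSTFIX}"           # prints: 8.14-SNAPSHOT
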
+popd diff --git a/.buildkite/scripts/setenv.sh b/.buildkite/scripts/setenv.sh index 25121de212f..8da3796aa1e 100755 --- a/.buildkite/scripts/setenv.sh +++ b/.buildkite/scripts/setenv.sh @@ -1,13 +1,21 @@ #!/usr/bin/env bash set -euo pipefail - +REPO="beats" +TMP_FOLDER="tmp.${REPO}" +DOCKER_REGISTRY="docker.elastic.co" SETUP_GVM_VERSION="v0.5.1" DOCKER_COMPOSE_VERSION="1.21.0" DOCKER_COMPOSE_VERSION_AARCH64="v2.21.0" SETUP_WIN_PYTHON_VERSION="3.11.0" NMAP_WIN_VERSION="7.12" # Earlier versions of NMap provide WinPcap (the winpcap packages don't install nicely because they pop-up a UI) GO_VERSION=$(cat .go-version) +ASDF_MAGE_VERSION="1.15.0" +PACKAGING_PLATFORMS="+all linux/amd64 linux/arm64 windows/amd64 darwin/amd64 darwin/arm64" +PACKAGING_ARM_PLATFORMS="linux/arm64" +ASDF_TERRAFORM_VERSION="1.0.2" +AWS_REGION="eu-central-1" +NODEJS_VERSION="18.17.1" export SETUP_GVM_VERSION export DOCKER_COMPOSE_VERSION @@ -15,6 +23,15 @@ export DOCKER_COMPOSE_VERSION_AARCH64 export SETUP_WIN_PYTHON_VERSION export NMAP_WIN_VERSION export GO_VERSION +export ASDF_MAGE_VERSION +export PACKAGING_PLATFORMS +export PACKAGING_ARM_PLATFORMS +export REPO +export TMP_FOLDER +export DOCKER_REGISTRY +export ASDF_TERRAFORM_VERSION +export AWS_REGION +export NODEJS_VERSION exportVars() { local platform_type="$(uname)" @@ -41,10 +58,26 @@ exportVars() { fi } - -if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-metricbeat" ]]; then +if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-winlogbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-auditbeat" ]]; then exportVars export RACE_DETECTOR="true" export TEST_COVERAGE="true" export DOCKER_PULL="0" + export TEST_TAGS="${TEST_TAGS:+$TEST_TAGS,}oracle" +fi + +if [[ "$BUILDKITE_STEP_KEY" == "xpack-winlogbeat-pipeline" || "$BUILDKITE_STEP_KEY" == "xpack-metricbeat-pipeline" || "$BUILDKITE_STEP_KEY" == "xpack-dockerlogbeat-pipeline" || "$BUILDKITE_STEP_KEY" == "xpack-filebeat-pipeline" || "$BUILDKITE_STEP_KEY" == "metricbeat-pipeline" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-heartbeat" ]]; then + source .buildkite/scripts/common.sh + if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-heartbeat" ]]; then + export ELASTIC_SYNTHETICS_CAPABLE=true + else + # Set the MODULE env variable if possible, it should be defined before generating pipeline's steps. It is used in multiple pipelines. + defineModuleFromTheChangeSet "${BEATS_PROJECT_NAME}" + fi +fi + +if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-heartbeat" ]]; then + # Set the MODULE env variable if possible, it should be defined before generating pipeline's steps. It is used in multiple pipelines. 
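
A quick illustration of the `${TEST_TAGS:+$TEST_TAGS,}oracle` expansion setenv.sh uses to grow TEST_TAGS: the `:+` form emits the existing value plus a comma only when the variable is already non-empty, so the list never starts with a stray comma:

    TEST_TAGS=""
    TEST_TAGS="${TEST_TAGS:+$TEST_TAGS,}oracle"   # -> oracle
    TEST_TAGS="${TEST_TAGS:+$TEST_TAGS,}aws"      # -> oracle,aws
    echo "$TEST_TAGS"                             # prints: oracle,aws
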
+ source .buildkite/scripts/common.sh + defineModuleFromTheChangeSet "${BEATS_PROJECT_NAME}" fi diff --git a/.buildkite/scripts/win_unit_tests.ps1 b/.buildkite/scripts/win_unit_tests.ps1 index b3c5c58fac0..6c3af6e9321 100644 --- a/.buildkite/scripts/win_unit_tests.ps1 +++ b/.buildkite/scripts/win_unit_tests.ps1 @@ -1,3 +1,7 @@ +param( + [string]$testType = "unittest" +) + $ErrorActionPreference = "Stop" # set -e $WorkFolder = $env:BEATS_PROJECT_NAME $WORKSPACE = Get-Location @@ -120,6 +124,23 @@ function withNmap($version) { } Start-Process -FilePath $nmapDownloadPath -ArgumentList "/S" -Wait } +function google_cloud_auth { + $tempFileName = "google-cloud-credentials.json" + $secretFileLocation = Join-Path $env:TEMP $tempFileName + $null = New-Item -ItemType File -Path $secretFileLocation + Set-Content -Path $secretFileLocation -Value $env:PRIVATE_CI_GCS_CREDENTIALS_SECRET + gcloud auth activate-service-account --key-file $secretFileLocation > $null 2>&1 + $env:GOOGLE_APPLICATION_CREDENTIALS = $secretFileLocation +} + +function google_cloud_auth_cleanup { + if (Test-Path $env:GOOGLE_APPLICATION_CREDENTIALS) { + Remove-Item $env:GOOGLE_APPLICATION_CREDENTIALS -Force + Remove-Item Env:\GOOGLE_APPLICATION_CREDENTIALS + } else { + Write-Host "No GCP credentials were added" + } +} fixCRLF @@ -129,7 +150,7 @@ withPython $env:SETUP_WIN_PYTHON_VERSION withMinGW -if ($env:BUILDKITE_PIPELINE_SLUG -eq "beats-packetbeat") { +if ($env:BUILDKITE_PIPELINE_SLUG -eq "beats-packetbeat" -or $env:BUILDKITE_PIPELINE_SLUG -eq "beats-xpack-filebeat") { withNmap $env:NMAP_WIN_VERSION } @@ -142,10 +163,23 @@ $env:MAGEFILE_CACHE = $magefile New-Item -ItemType Directory -Force -Path "build" -if ($env:BUILDKITE_PIPELINE_SLUG -eq "beats-xpack-libbeat") { - mage -w reader/etw build goUnitTest -} else { - mage build unitTest +if ($testType -eq "unittest") { + if ($env:BUILDKITE_PIPELINE_SLUG -eq "beats-xpack-libbeat") { + mage -w reader/etw build goUnitTest + } else { + mage build unitTest + } +} +elseif ($testType -eq "systemtest") { + try { + google_cloud_auth + mage systemTest + } finally { + google_cloud_auth_cleanup + } +} +else { + Write-Host "Unknown test type. Please specify 'unittest' or 'systemtest'." 
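The testType dispatch above wraps mage systemTest in google_cloud_auth / google_cloud_auth_cleanup so the credentials file is always removed. For the POSIX scripts in this directory the same acquire/run/always-clean-up shape is usually expressed with a trap; a minimal sketch, assuming only that PRIVATE_CI_GCS_CREDENTIALS_SECRET is set and gcloud is installed:

    #!/usr/bin/env bash
    set -euo pipefail

    google_cloud_auth() {
      GOOGLE_APPLICATION_CREDENTIALS="$(mktemp)"
      export GOOGLE_APPLICATION_CREDENTIALS
      echo "${PRIVATE_CI_GCS_CREDENTIALS_SECRET}" > "${GOOGLE_APPLICATION_CREDENTIALS}"
      gcloud auth activate-service-account --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" > /dev/null 2>&1
    }

    cleanup() {
      # Runs on every exit path - the bash counterpart of the finally block above.
      if [[ -n "${GOOGLE_APPLICATION_CREDENTIALS:-}" ]]; then
        rm -f "${GOOGLE_APPLICATION_CREDENTIALS}"
      fi
    }
    trap cleanup EXIT

    google_cloud_auth
    mage systemTest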
} $EXITCODE=$LASTEXITCODE diff --git a/.buildkite/test_pipeline.py b/.buildkite/test_pipeline.py new file mode 100644 index 00000000000..75fd949ccc8 --- /dev/null +++ b/.buildkite/test_pipeline.py @@ -0,0 +1,71 @@ +import os + +import pytest +import pipeline + + +@pytest.fixture +def ubuntu2204_aws_agent(): + return { + "command": "fake-cmd", + "platform": "platform-ingest-beats-ubuntu-2204-aarch64", + "provider": "aws" + } + + +@pytest.fixture() +def fake_simple_group(): + return { + "unitTest": { + "command": "fake-cmd", + "platform": "family/platform-ingest-beats-ubuntu-2204", + }, + "integrationTest": { + "command": "fake-integration", + "platform": "family/platform-ingest-beats-ubuntu-2204", + "env": { + "FOO": "BAR", + }, + }, + } + + +def test_fetch_stage(ubuntu2204_aws_agent): + step = pipeline.fetch_stage("test", ubuntu2204_aws_agent, "fake", "fake-category") + assert step.create_entity() == { + "label": "fake test", + "command": ["cd fake", "fake-cmd"], + "notify": [ + { + "github_commit_status": { + "context": "Fake: test", + } + } + ], + "agents": { + "provider": "aws", + "imagePrefix": "platform-ingest-beats-ubuntu-2204-aarch64", + "instanceType": "t4g.large", + }, + "artifact_paths": [ + "fake/build/*.xml", + "fake/build/*.json", + ], + } + + +def test_fetch_group(fake_simple_group): + group = pipeline.fetch_group(fake_simple_group, "fake-project", "testing") + assert len(group.steps) == 2 + for step in group.steps: + assert "testing" == step.category + assert "gcp" == step.agent.provider + + assert group.steps[1].env.get("FOO") == "BAR" + + +def test_is_pr(): + os.environ["BUILDKITE_PULL_REQUEST"] = "1234" + assert pipeline.is_pr() is True + os.environ["BUILDKITE_PULL_REQUEST"] = "false" + assert pipeline.is_pr() is False diff --git a/.buildkite/x-pack/pipeline.xpack.auditbeat.yml b/.buildkite/x-pack/pipeline.xpack.auditbeat.yml new file mode 100644 index 00000000000..d88bf2a4ff0 --- /dev/null +++ b/.buildkite/x-pack/pipeline.xpack.auditbeat.yml @@ -0,0 +1,62 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-auditbeat" + +env: + IMAGE_UBUNTU_X86_64: "family/platform-ingest-beats-ubuntu-2204" + IMAGE_UBUNTU_ARM_64: "platform-ingest-beats-ubuntu-2204-aarch64" + DEFAULT_UBUNTU_X86_64_IMAGE: "family/core-ubuntu-2204" + IMAGE_RHEL9_X86_64: "family/platform-ingest-beats-rhel-9" + IMAGE_WIN_10: "family/general-windows-10" + IMAGE_WIN_11: "family/general-windows-11" + IMAGE_WIN_2016: "family/core-windows-2016" + IMAGE_WIN_2019: "family/core-windows-2019" + IMAGE_WIN_2022: "family/core-windows-2022" + IMAGE_MACOS_X86_64: "generic-13-ventura-x64" + GCP_DEFAULT_MACHINE_TYPE: "c2d-highcpu-8" + GCP_HI_PERF_MACHINE_TYPE: "c2d-highcpu-16" + GCP_WIN_MACHINE_TYPE: "n2-standard-8" + AWS_ARM_INSTANCE_TYPE: "t4g.xlarge" + BEATS_PROJECT_NAME: "x-pack/auditbeat" + +steps: + + - input: "Input Parameters" + key: "force-run-stages" + fields: + - select: "Auditbeat - run_xpack_auditbeat" + key: "run_xpack_auditbeat" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + - select: "Auditbeat - run_xpack_auditbeat_macos_tests" + key: "run_xpack_auditbeat_macos_tests" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + - select: "Auditbeat - run_xpack_auditbeat_arm_tests" + key: "run_xpack_auditbeat_arm_tests" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + + if: 
"build.source == 'ui'" + + - wait: ~ + if: "build.source == 'ui'" + allow_dependency_failure: false + + - label: ":linux: Load dynamic x-pack auditbeat pipeline" + key: "xpack-auditbeat-pipeline" + command: ".buildkite/scripts/generate_xpack_auditbeat_pipeline.sh" + notify: + - github_commit_status: + context: "${BEATS_PROJECT_NAME}: Load dynamic pipeline's steps" diff --git a/.buildkite/x-pack/pipeline.xpack.dockerlogbeat.yml b/.buildkite/x-pack/pipeline.xpack.dockerlogbeat.yml new file mode 100644 index 00000000000..bcc2610e175 --- /dev/null +++ b/.buildkite/x-pack/pipeline.xpack.dockerlogbeat.yml @@ -0,0 +1,38 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-dockerlogbeat" + +env: + IMAGE_UBUNTU_X86_64: "family/platform-ingest-beats-ubuntu-2204" + DEFAULT_UBUNTU_X86_64_IMAGE: "family/core-ubuntu-2204" + IMAGE_UBUNTU_ARM_64: "platform-ingest-beats-ubuntu-2204-aarch64" + GCP_DEFAULT_MACHINE_TYPE: "c2d-highcpu-8" + GCP_HI_PERF_MACHINE_TYPE: "c2d-highcpu-16" + AWS_ARM_INSTANCE_TYPE: "t4g.xlarge" + BEATS_PROJECT_NAME: "x-pack/dockerlogbeat" + +steps: + + - input: "Input Parameters" + key: "force-run-stages" + fields: + - select: "Dockerlogbeat - run_xpack_dockerlogbeat" + key: "run_xpack_dockerlogbeat" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + + if: "build.source == 'ui'" + + - wait: ~ + if: "build.source == 'ui'" + allow_dependency_failure: false + + - label: ":linux: Load dynamic x-pack dockerlogbeat pipeline" + key: "xpack-dockerlogbeat-pipeline" + command: ".buildkite/scripts/generate_xpack_dockerlogbeat_pipeline.sh" + notify: + - github_commit_status: + context: "${BEATS_PROJECT_NAME}: Load dynamic pipeline's steps" diff --git a/.buildkite/x-pack/pipeline.xpack.filebeat.yml b/.buildkite/x-pack/pipeline.xpack.filebeat.yml new file mode 100644 index 00000000000..a324353b65f --- /dev/null +++ b/.buildkite/x-pack/pipeline.xpack.filebeat.yml @@ -0,0 +1,69 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-filebeat" + +env: + IMAGE_UBUNTU_X86_64: "family/platform-ingest-beats-ubuntu-2204" + IMAGE_UBUNTU_ARM_64: "platform-ingest-beats-ubuntu-2204-aarch64" + DEFAULT_UBUNTU_X86_64_IMAGE: "family/core-ubuntu-2204" + IMAGE_WIN_10: "family/general-windows-10" + IMAGE_WIN_11: "family/general-windows-11" + IMAGE_WIN_2016: "family/core-windows-2016" + IMAGE_WIN_2019: "family/core-windows-2019" + IMAGE_WIN_2022: "family/core-windows-2022" + IMAGE_MACOS_X86_64: "generic-13-ventura-x64" + GCP_DEFAULT_MACHINE_TYPE: "c2d-highcpu-8" + GCP_HI_PERF_MACHINE_TYPE: "c2d-highcpu-16" + GCP_WIN_MACHINE_TYPE: "n2-standard-8" + AWS_ARM_INSTANCE_TYPE: "t4g.xlarge" + BEATS_PROJECT_NAME: "x-pack/filebeat" + +steps: + + - input: "Input Parameters" + key: "force-run-stages" + fields: + - select: "Filebeat - run_xpack_filebeat" + key: "run_xpack_filebeat" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + - select: "Filebeat - run_xpack_filebeat_macos_tests" + key: "run_xpack_filebeat_macos_tests" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + - select: "Filebeat - run_xpack_filebeat_arm_tests" + key: "run_xpack_filebeat_arm_tests" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + - select: "Filebeat - run_xpack_filebeat_aws_tests" + key: 
"run_xpack_filebeat_aws_tests" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + + if: "build.source == 'ui'" + + - wait: ~ + if: "build.source == 'ui'" + allow_dependency_failure: false + + - label: ":linux: Load dynamic x-pack filebeat pipeline" + key: "xpack-filebeat-pipeline" + command: ".buildkite/scripts/generate_xpack_filebeat_pipeline.sh" + notify: + - github_commit_status: + context: "${BEATS_PROJECT_NAME}: Load dynamic pipeline's steps" diff --git a/.buildkite/x-pack/pipeline.xpack.heartbeat.yml b/.buildkite/x-pack/pipeline.xpack.heartbeat.yml new file mode 100644 index 00000000000..2804e98996a --- /dev/null +++ b/.buildkite/x-pack/pipeline.xpack.heartbeat.yml @@ -0,0 +1,54 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-heartbeat" + + +env: + IMAGE_UBUNTU_X86_64: "family/platform-ingest-beats-ubuntu-2204" + IMAGE_UBUNTU_ARM_64: "platform-ingest-beats-ubuntu-2204-aarch64" + DEFAULT_UBUNTU_X86_64_IMAGE: "family/core-ubuntu-2204" + IMAGE_WIN_10: "family/general-windows-10" + IMAGE_WIN_11: "family/general-windows-11" + IMAGE_WIN_2016: "family/core-windows-2016" + IMAGE_WIN_2019: "family/core-windows-2019" + IMAGE_WIN_2022: "family/core-windows-2022" + IMAGE_MACOS_X86_64: "generic-13-ventura-x64" + GCP_DEFAULT_MACHINE_TYPE: "c2d-highcpu-8" + GCP_HI_PERF_MACHINE_TYPE: "c2d-highcpu-16" + GCP_WIN_MACHINE_TYPE: "n2-standard-8" + AWS_ARM_INSTANCE_TYPE: "t4g.xlarge" + BEATS_PROJECT_NAME: "x-pack/heartbeat" + +steps: + + - input: "Input Parameters" + key: "force-run-stages" + fields: + - select: "Heartbeat - run_xpack_heartbeat" + key: "run_xpack_heartbeat" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + - select: "Heartbeat - run_xpack_heartbeat_macos_tests" + key: "run_xpack_heartbeat_macos_tests" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + + if: "build.source == 'ui'" + + - wait: ~ + if: "build.source == 'ui'" + allow_dependency_failure: false + + - label: ":linux: Load dynamic x-pack heartbeat pipeline" + key: "xpack-heartbeat-pipeline" + command: ".buildkite/scripts/generate_xpack_heartbeat_pipeline.sh" + notify: + - github_commit_status: + context: "${BEATS_PROJECT_NAME}: Load dynamic pipeline's steps" diff --git a/.buildkite/x-pack/pipeline.xpack.libbeat.yml b/.buildkite/x-pack/pipeline.xpack.libbeat.yml index 01695fa4fb6..0c745b1a09d 100644 --- a/.buildkite/x-pack/pipeline.xpack.libbeat.yml +++ b/.buildkite/x-pack/pipeline.xpack.libbeat.yml @@ -20,7 +20,7 @@ steps: - input: "Input Parameters" key: "input-run-all-stages" fields: - - select: "Packetbeat - run_xpack_libbeat" + - select: "Libbeat - run_xpack_libbeat" key: "run_xpack_libbeat" options: - label: "True" @@ -28,7 +28,7 @@ steps: - label: "False" value: "false" default: "false" - - select: "Packetbeat - run_xpack_libbeat_arm_tests" + - select: "Libbeat - run_xpack_libbeat_arm_tests" key: "run_xpack_libbeat_arm_tests" options: - label: "True" @@ -42,8 +42,8 @@ steps: if: "build.source == 'ui'" allow_dependency_failure: false - - label: ":linux: Load dynamic packetbeat pipeline" - key: "packetbeat-pipeline" + - label: ":linux: Load dynamic x-pack libbeat pipeline" + key: "libbeat-pipeline" command: ".buildkite/scripts/generate_xpack_libbeat_pipeline.sh" notify: - github_commit_status: diff --git a/.buildkite/x-pack/pipeline.xpack.metricbeat.yml 
b/.buildkite/x-pack/pipeline.xpack.metricbeat.yml index 34321b61161..216f3134344 100644 --- a/.buildkite/x-pack/pipeline.xpack.metricbeat.yml +++ b/.buildkite/x-pack/pipeline.xpack.metricbeat.yml @@ -1,5 +1,61 @@ # yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-metricbeat" + +env: + IMAGE_UBUNTU_X86_64: "family/platform-ingest-beats-ubuntu-2204" + DEFAULT_UBUNTU_X86_64_IMAGE: "family/core-ubuntu-2204" + IMAGE_UBUNTU_ARM_64: "platform-ingest-beats-ubuntu-2204-aarch64" + IMAGE_WIN_10: "family/general-windows-10" + IMAGE_WIN_11: "family/general-windows-11" + IMAGE_WIN_2016: "family/core-windows-2016" + IMAGE_WIN_2019: "family/core-windows-2019" + IMAGE_WIN_2022: "family/core-windows-2022" + IMAGE_MACOS_X86_64: "generic-13-ventura-x64" + GCP_DEFAULT_MACHINE_TYPE: "c2d-highcpu-8" + GCP_HI_PERF_MACHINE_TYPE: "c2d-highcpu-16" + GCP_WIN_MACHINE_TYPE: "n2-standard-8" + AWS_ARM_INSTANCE_TYPE: "t4g.xlarge" + BEATS_PROJECT_NAME: "x-pack/metricbeat" steps: - - label: "Example test" - command: echo "Hello!" + + - input: "Input Parameters" + key: "force-run-stages" + fields: + - select: "Metricbeat - run_xpack_metricbeat" + key: "run_xpack_metricbeat" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + - select: "Metricbeat - run_xpack_metricbeat_macos_tests" + key: "run_xpack_metricbeat_macos_tests" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + - select: "Metricbeat - run_xpack_metricbeat_aws_tests" + key: "run_xpack_metricbeat_aws_tests" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + + if: "build.source == 'ui'" + + - wait: ~ + if: "build.source == 'ui'" + allow_dependency_failure: false + + - label: ":linux: Load dynamic x-pack metricbeat pipeline" + key: "xpack-metricbeat-pipeline" + command: ".buildkite/scripts/generate_xpack_metricbeat_pipeline.sh" + notify: + - github_commit_status: + context: "${BEATS_PROJECT_NAME}: Load dynamic pipeline's steps" diff --git a/.buildkite/x-pack/pipeline.xpack.osquerybeat.yml b/.buildkite/x-pack/pipeline.xpack.osquerybeat.yml new file mode 100644 index 00000000000..57726c03828 --- /dev/null +++ b/.buildkite/x-pack/pipeline.xpack.osquerybeat.yml @@ -0,0 +1,50 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-osquerybeat" + +env: + IMAGE_UBUNTU_X86_64: "family/platform-ingest-beats-ubuntu-2204" + IMAGE_WIN_10: "family/general-windows-10" + IMAGE_WIN_11: "family/general-windows-11" + IMAGE_WIN_2016: "family/core-windows-2016" + IMAGE_WIN_2019: "family/core-windows-2019" + IMAGE_WIN_2022: "family/core-windows-2022" + IMAGE_MACOS_X86_64: "generic-13-ventura-x64" + GCP_DEFAULT_MACHINE_TYPE: "c2d-highcpu-8" + GCP_HI_PERF_MACHINE_TYPE: "c2d-highcpu-16" + GCP_WIN_MACHINE_TYPE: "n2-standard-8" + BEATS_PROJECT_NAME: "x-pack/osquerybeat" + +steps: + + - input: "Input Parameters" + key: "force-run-stages" + fields: + - select: "Osquerybeat - run_xpack_osquerybeat" + key: "run_xpack_osquerybeat" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + - select: "Osquerybeat - run_xpack_osquerybeat_macos_tests" + key: "run_xpack_osquerybeat_macos_tests" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + + if: "build.source == 'ui'" + + - wait: ~ + if: "build.source == 
'ui'" + allow_dependency_failure: false + + - label: ":linux: Load dynamic x-pack osquerybeat pipeline" + key: "xpack-osquerybeat-pipeline" + command: ".buildkite/scripts/generate_xpack_osquerybeat_pipeline.sh" + notify: + - github_commit_status: + context: "${BEATS_PROJECT_NAME}: Load dynamic pipeline's steps" diff --git a/.buildkite/x-pack/pipeline.xpack.packetbeat.yml b/.buildkite/x-pack/pipeline.xpack.packetbeat.yml index 34321b61161..750b59e716d 100644 --- a/.buildkite/x-pack/pipeline.xpack.packetbeat.yml +++ b/.buildkite/x-pack/pipeline.xpack.packetbeat.yml @@ -1,5 +1,61 @@ # yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-packetbeat" + +env: + IMAGE_UBUNTU_X86_64: "family/platform-ingest-beats-ubuntu-2204" + IMAGE_UBUNTU_ARM_64: "platform-ingest-beats-ubuntu-2204-aarch64" + IMAGE_RHEL9_X86_64: "family/platform-ingest-beats-rhel-9" + IMAGE_WIN_10: "family/general-windows-10" + IMAGE_WIN_11: "family/general-windows-11" + IMAGE_WIN_2016: "family/core-windows-2016" + IMAGE_WIN_2019: "family/core-windows-2019" + IMAGE_WIN_2022: "family/core-windows-2022" + IMAGE_MACOS_X86_64: "generic-13-ventura-x64" + GCP_DEFAULT_MACHINE_TYPE: "c2d-highcpu-8" + GCP_HI_PERF_MACHINE_TYPE: "c2d-highcpu-16" + GCP_WIN_MACHINE_TYPE: "n2-standard-8" + AWS_ARM_INSTANCE_TYPE: "t4g.xlarge" + BEATS_PROJECT_NAME: "x-pack/packetbeat" steps: - - label: "Example test" - command: echo "Hello!" + + - input: "Input Parameters" + key: "force-run-stages" + fields: + - select: "Packetbeat - run_xpack_packetbeat" + key: "run_xpack_packetbeat" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + - select: "Packetbeat - run_xpack_packetbeat_macos_tests" + key: "run_xpack_packetbeat_macos_tests" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + - select: "Packetbeat - run_xpack_packetbeat_arm_tests" + key: "run_xpack_packetbeat_arm_tests" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + + if: "build.source == 'ui'" + + - wait: ~ + if: "build.source == 'ui'" + allow_dependency_failure: false + + - label: ":linux: Load dynamic x-pack packetbeat pipeline" + key: "packetbeat-pipeline" + command: ".buildkite/scripts/generate_xpack_packetbeat_pipeline.sh" + notify: + - github_commit_status: + context: "${BEATS_PROJECT_NAME}: Load dynamic pipeline's steps" diff --git a/.buildkite/x-pack/pipeline.xpack.winlogbeat.yml b/.buildkite/x-pack/pipeline.xpack.winlogbeat.yml index 34321b61161..5c8acefd698 100644 --- a/.buildkite/x-pack/pipeline.xpack.winlogbeat.yml +++ b/.buildkite/x-pack/pipeline.xpack.winlogbeat.yml @@ -1,5 +1,39 @@ # yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-winlogbeat" + +env: + IMAGE_UBUNTU_X86_64: "family/platform-ingest-beats-ubuntu-2204" + IMAGE_WIN_10: "family/general-windows-10" + IMAGE_WIN_11: "family/general-windows-11" + IMAGE_WIN_2016: "family/core-windows-2016" + IMAGE_WIN_2019: "family/core-windows-2019" + IMAGE_WIN_2022: "family/core-windows-2022" + GCP_HI_PERF_MACHINE_TYPE: "c2d-highcpu-16" + GCP_WIN_MACHINE_TYPE: "n2-standard-8" + BEATS_PROJECT_NAME: "x-pack/winlogbeat" steps: - - label: "Example test" - command: echo "Hello!" 
+ + - input: "Input Parameters" + key: "force-run-stages" + fields: + - select: "Winlogbeat - run_xpack_winlogbeat" + key: "run_xpack_winlogbeat" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + if: "build.source == 'ui'" + + - wait: ~ + if: "build.source == 'ui'" + allow_dependency_failure: false + + - label: ":linux: Load dynamic x-pack winlogbeat pipeline" + key: "xpack-winlogbeat-pipeline" + command: ".buildkite/scripts/generate_xpack_winlogbeat_pipeline.sh" + notify: + - github_commit_status: + context: "${BEATS_PROJECT_NAME}: Load dynamic pipeline's steps" diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index c10616bd3d6..e189cdbf51c 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -19,7 +19,7 @@ CHANGELOG* /NOTICE.txt @elastic/beats-tech-leads /.ci/ @elastic/elastic-agent-data-plane @elastic/ingest-eng-prod -/.github/ @elastic/elastic-agent-data-plane +/.github/ @elastic/ingest-eng-prod /auditbeat/ @elastic/sec-linux-platform /deploy/ @elastic/elastic-agent-data-plane /deploy/kubernetes @elastic/elastic-agent-data-plane @elastic/obs-cloudnative-monitoring @@ -68,6 +68,7 @@ CHANGELOG* /metricbeat/ @elastic/elastic-agent-data-plane /metricbeat/docs/ # Listed without an owner to avoid maintaining doc ownership for each input and module. /metricbeat/helper/kubernetes @elastic/obs-cloudnative-monitoring +/metricbeat/module/aerospike @elastic/obs-infraobs-integrations /metricbeat/module/apache @elastic/obs-infraobs-integrations /metricbeat/module/beat/ @elastic/stack-monitoring /metricbeat/module/ceph @elastic/obs-infraobs-integrations diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 304f3add387..7fcaca8ac9e 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -38,3 +38,19 @@ updates: # Skip github.com/elastic/mito because it requires documentation updates. - dependency-name: github.com/elastic/mito open-pull-requests-limit: 2 + + # GitHub actions + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + day: "sunday" + time: "22:00" + labels: + - automation + - dependabot + groups: + github-actions: + patterns: + - "*" + open-pull-requests-limit: 5 diff --git a/CHANGELOG-developer.next.asciidoc b/CHANGELOG-developer.next.asciidoc index 14901ead1bc..b617edfaf04 100644 --- a/CHANGELOG-developer.next.asciidoc +++ b/CHANGELOG-developer.next.asciidoc @@ -181,6 +181,7 @@ The list below covers the major changes between 7.0.0-rc2 and main only. - Elide retryable HTTP client construction in Filebeat HTTPJSON and CEL inputs if not needed. {pull}36916[36916] - Allow assignment of packetbeat protocols to interfaces. {issue}36574[36564] {pull}36852[36852] - Add Active Directory entity collector for Filebeat entity analytics. {pull}37854[37854] +- Make logs for empty and small files less noisy when using fingerprint file identity in filestream. {pull}38421[38421] ==== Deprecated diff --git a/CHANGELOG.next.asciidoc b/CHANGELOG.next.asciidoc index 36808ed17a1..e0653d6139f 100644 --- a/CHANGELOG.next.asciidoc +++ b/CHANGELOG.next.asciidoc @@ -100,16 +100,24 @@ fields added to events containing the Beats version. {pull}37553[37553] - Prevent HTTPJSON holding response bodies between executions. {issue}35219[35219] {pull}38116[38116] - Fix "failed processing S3 event for object key" error on aws-s3 input when key contains the "+" character {issue}38012[38012] {pull}38125[38125] - Fix duplicated addition of regexp extension in CEL input. 
{pull}38181[38181] +- Fix the incorrect values generated by the uri_parts processor. {pull}38216[38216] +- Fix HTTPJSON handling of empty object bodies in POST requests. {issue}33961[33961] {pull}38290[38290] +- Fix PEM key validation for CEL and HTTPJSON inputs. {pull}38405[38405] +- Fix filebeat gcs input panic {pull}38407[38407] +- Rename `activity_guid` to `activity_id` in ETW input events to suit other Windows inputs. {pull}38530[38530] *Heartbeat* - Fix panics when parsing dereferencing invalid parsed url. {pull}34702[34702] - Fix setuid root when running under cgroups v2. {pull}37794[37794] - Adjust State loader to only retry when response code status is 5xx {pull}37981[37981] +- Reset prctl dumpable flag after cap drop. {pull}38269[38269] *Metricbeat* +- Fix Azure Monitor 429 error by retrying the request. {pull}38294[38294] - Fix fields not being parsed correctly in postgresql/database {issue}25301[25301] {pull}37720[37720] +- rabbitmq/queue - Change the mapping type of `rabbitmq.queue.consumers.utilisation.pct` to `scaled_float` from `long` because the values fall within the range of `[0.0, 1.0]`. Previously, conversion to integer resulted in reporting either `0` or `1`. *Osquerybeat* @@ -144,6 +152,7 @@ Setting environmental variable ELASTIC_NETINFO:false in Elastic Agent pod will d - Upgrade go-sysinfo from 1.12.0 to 1.13.1. {pull}37996[37996] - Make `range` condition work with numeric values as strings. {pull}38080[38080] - Allow users to configure number of output workers (for outputs that support workers) with either `worker` or `workers`. {pull}38257[38257] +- Kafka output now validates the `topics` and `topic` configuration values {pull}38058[38058] *Auditbeat* @@ -152,6 +161,7 @@ Setting environmental variable ELASTIC_NETINFO:false in Elastic Agent pod will d *Filebeat* +- Add Saved Object name field to Kibana audit logs {pull}38307[38307] - Update SQL input documentation regarding Oracle DSNs {pull}37590[37590] - add documentation for decode_xml_wineventlog processor field mappings. {pull}32456[32456] - httpjson input: Add request tracing logger. {issue}32402[32402] {pull}32412[32412] @@ -199,6 +209,13 @@ Setting environmental variable ELASTIC_NETINFO:false in Elastic Agent pod will d - Update CEL mito extensions to v1.9.0 to add keys/values helper. {pull}37971[37971] - Add logging for cache processor file reads and writes. {pull}38052[38052] - Add parseDateInTZ value template for the HTTPJSON input {pull}37738[37738] +- Support VPC endpoint for aws-s3 input SQS queue url. {pull}38189[38189] +- Improve rate limit handling by HTTPJSON {issue}36207[36207] {pull}38161[38161] {pull}38237[38237] +- Add parseDateInTZ value template for the HTTPJSON input. {pull}37738[37738] +- Add support for complex event objects in the HTTP Endpoint input. {issue}37910[37910] {pull}38193[38193] +- Parse more fields from Elasticsearch slowlogs {pull}38295[38295] +- Update CEL mito extensions to v1.10.0 to add keys/values helper. {pull}38504[38504] +- Add support for Active Directory as an entity analytics provider. {pull}37919[37919] *Auditbeat* @@ -225,6 +242,7 @@ Setting environmental variable ELASTIC_NETINFO:false in Elastic Agent pod will d - Add a `/inputs/` route to the HTTP monitoring endpoint that exposes metrics for each metricset instance.
{pull}36971[36971] - Add linux IO metrics to system/process {pull}37213[37213] - Add new memory/cgroup metrics to Kibana module {pull}37232[37232] +- Add SSL support to mysql module {pull}37997[37997] *Metricbeat* @@ -263,6 +281,7 @@ Setting environmental variable ELASTIC_NETINFO:false in Elastic Agent pod will d *Filebeat* +- Deprecate `syslog` input in favor of `syslog` processor. {issue}37555[37555] {pull}38277[38277] *Heartbeat* diff --git a/NOTICE.txt b/NOTICE.txt index 2f66d290a43..e5cf2df78b6 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -12524,11 +12524,11 @@ Contents of probable licence file $GOMODCACHE/github.com/elastic/elastic-agent-a -------------------------------------------------------------------------------- Dependency : github.com/elastic/elastic-agent-client/v7 -Version: v7.8.0 +Version: v7.8.1 Licence type (autodetected): Elastic -------------------------------------------------------------------------------- -Contents of probable licence file $GOMODCACHE/github.com/elastic/elastic-agent-client/v7@v7.8.0/LICENSE.txt: +Contents of probable licence file $GOMODCACHE/github.com/elastic/elastic-agent-client/v7@v7.8.1/LICENSE.txt: ELASTIC LICENSE AGREEMENT @@ -15223,11 +15223,11 @@ Contents of probable licence file $GOMODCACHE/github.com/elastic/go-sysinfo@v1.1 -------------------------------------------------------------------------------- Dependency : github.com/elastic/go-ucfg -Version: v0.8.6 +Version: v0.8.7 Licence type (autodetected): Apache-2.0 -------------------------------------------------------------------------------- -Contents of probable licence file $GOMODCACHE/github.com/elastic/go-ucfg@v0.8.6/LICENSE: +Contents of probable licence file $GOMODCACHE/github.com/elastic/go-ucfg@v0.8.7/LICENSE: Apache License Version 2.0, January 2004 @@ -15434,11 +15434,11 @@ Contents of probable licence file $GOMODCACHE/github.com/elastic/go-ucfg@v0.8.6/ -------------------------------------------------------------------------------- Dependency : github.com/elastic/gosigar -Version: v0.14.2 +Version: v0.14.3 Licence type (autodetected): Apache-2.0 -------------------------------------------------------------------------------- -Contents of probable licence file $GOMODCACHE/github.com/elastic/gosigar@v0.14.2/LICENSE: +Contents of probable licence file $GOMODCACHE/github.com/elastic/gosigar@v0.14.3/LICENSE: Apache License Version 2.0, January 2004 @@ -15645,11 +15645,11 @@ limitations under the License. -------------------------------------------------------------------------------- Dependency : github.com/elastic/mito -Version: v1.9.0 +Version: v1.10.0 Licence type (autodetected): Apache-2.0 -------------------------------------------------------------------------------- -Contents of probable licence file $GOMODCACHE/github.com/elastic/mito@v1.9.0/LICENSE: +Contents of probable licence file $GOMODCACHE/github.com/elastic/mito@v1.10.0/LICENSE: Apache License @@ -20695,11 +20695,11 @@ SOFTWARE. 
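The NOTICE.txt hunks in this change are the mechanical fallout of go.mod version bumps; the file is generated rather than hand-edited. A sketch of the flow (the notice target name is an assumption based on the repository's Makefile conventions):

    # Confirm the resolved module version that NOTICE.txt should reflect...
    go list -m github.com/elastic/mito
    # ...then regenerate the file instead of editing entries by hand.
    make notice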
-------------------------------------------------------------------------------- Dependency : github.com/lestrrat-go/jwx/v2 -Version: v2.0.19 +Version: v2.0.21 Licence type (autodetected): MIT -------------------------------------------------------------------------------- -Contents of probable licence file $GOMODCACHE/github.com/lestrrat-go/jwx/v2@v2.0.19/LICENSE: +Contents of probable licence file $GOMODCACHE/github.com/lestrrat-go/jwx/v2@v2.0.21/LICENSE: The MIT License (MIT) @@ -22892,11 +22892,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- Dependency : github.com/stretchr/testify -Version: v1.8.4 +Version: v1.9.0 Licence type (autodetected): MIT -------------------------------------------------------------------------------- -Contents of probable licence file $GOMODCACHE/github.com/stretchr/testify@v1.8.4/LICENSE: +Contents of probable licence file $GOMODCACHE/github.com/stretchr/testify@v1.9.0/LICENSE: MIT License @@ -24984,11 +24984,11 @@ THE SOFTWARE. -------------------------------------------------------------------------------- Dependency : golang.org/x/crypto -Version: v0.17.0 +Version: v0.21.0 Licence type (autodetected): BSD-3-Clause -------------------------------------------------------------------------------- -Contents of probable licence file $GOMODCACHE/golang.org/x/crypto@v0.17.0/LICENSE: +Contents of probable licence file $GOMODCACHE/golang.org/x/crypto@v0.21.0/LICENSE: Copyright (c) 2009 The Go Authors. All rights reserved. @@ -25095,11 +25095,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- Dependency : golang.org/x/net -Version: v0.19.0 +Version: v0.21.0 Licence type (autodetected): BSD-3-Clause -------------------------------------------------------------------------------- -Contents of probable licence file $GOMODCACHE/golang.org/x/net@v0.19.0/LICENSE: +Contents of probable licence file $GOMODCACHE/golang.org/x/net@v0.21.0/LICENSE: Copyright (c) 2009 The Go Authors. All rights reserved. @@ -25206,11 +25206,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- Dependency : golang.org/x/sys -Version: v0.15.0 +Version: v0.18.0 Licence type (autodetected): BSD-3-Clause -------------------------------------------------------------------------------- -Contents of probable licence file $GOMODCACHE/golang.org/x/sys@v0.15.0/LICENSE: +Contents of probable licence file $GOMODCACHE/golang.org/x/sys@v0.18.0/LICENSE: Copyright (c) 2009 The Go Authors. All rights reserved. @@ -25852,11 +25852,11 @@ Contents of probable licence file $GOMODCACHE/google.golang.org/grpc@v1.58.3/LIC -------------------------------------------------------------------------------- Dependency : google.golang.org/protobuf -Version: v1.32.0 +Version: v1.33.0 Licence type (autodetected): BSD-3-Clause -------------------------------------------------------------------------------- -Contents of probable licence file $GOMODCACHE/google.golang.org/protobuf@v1.32.0/LICENSE: +Contents of probable licence file $GOMODCACHE/google.golang.org/protobuf@v1.33.0/LICENSE: Copyright (c) 2018 The Go Authors. All rights reserved. @@ -46531,11 +46531,11 @@ SOFTWARE. 
-------------------------------------------------------------------------------- Dependency : github.com/lestrrat-go/httprc -Version: v1.0.4 +Version: v1.0.5 Licence type (autodetected): MIT -------------------------------------------------------------------------------- -Contents of probable licence file $GOMODCACHE/github.com/lestrrat-go/httprc@v1.0.4/LICENSE: +Contents of probable licence file $GOMODCACHE/github.com/lestrrat-go/httprc@v1.0.5/LICENSE: MIT License @@ -50813,11 +50813,11 @@ SOFTWARE. -------------------------------------------------------------------------------- Dependency : github.com/stretchr/objx -Version: v0.5.0 +Version: v0.5.2 Licence type (autodetected): MIT -------------------------------------------------------------------------------- -Contents of probable licence file $GOMODCACHE/github.com/stretchr/objx@v0.5.0/LICENSE: +Contents of probable licence file $GOMODCACHE/github.com/stretchr/objx@v0.5.2/LICENSE: The MIT License @@ -53894,11 +53894,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- Dependency : golang.org/x/term -Version: v0.15.0 +Version: v0.18.0 Licence type (autodetected): BSD-3-Clause -------------------------------------------------------------------------------- -Contents of probable licence file $GOMODCACHE/golang.org/x/term@v0.15.0/LICENSE: +Contents of probable licence file $GOMODCACHE/golang.org/x/term@v0.18.0/LICENSE: Copyright (c) 2009 The Go Authors. All rights reserved. diff --git a/auditbeat/buildkite.yml b/auditbeat/buildkite.yml new file mode 100644 index 00000000000..2abf9d68407 --- /dev/null +++ b/auditbeat/buildkite.yml @@ -0,0 +1,56 @@ +when: + changeset: ## when PR contains any of those entries in the changeset + - "auditbeat/**" + - "@ci" ## special token regarding the changeset for the ci + - "@oss" ## special token regarding the changeset for the oss +stages: + # mandatory stage - it runs always for: + # - branches/tags + # - on PRs + # - GitHub comment /test auditbeat + # - GitHub label auditbeat + mandatory: + # NOTE: stage name should be unique! + unitTest: + command: "mage build unitTest" + platform: "family/platform-ingest-beats-ubuntu-2204" + crosscompile: + command: "make crosscompile" + platform: "family/platform-ingest-beats-ubuntu-2204" + env: + GOX_FLAGS: "-arch amd64" + unitTest-rhel-9: + command: "mage build unitTest" + platform: "family/platform-ingest-beats-rhel-9" + unitTest-windows-2022: + command: "mage build unitTest" + platform: "family/platform-ingest-beats-windows-2022" + unitTest-windows-2016: + command: "mage build unitTest" + platform: "family/platform-ingest-beats-windows-2016" + # optional stage - it runs on: + # - branches/tags + # - on PRs if: + # - GitHub comment /test auditbeat . i.e: /test auditbeat integTest + # - GitHub label . i.e: integTest or unitTest-arm or unitTest-macos ... + extended: + # NOTE: stage name should be unique! 
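The extended stages that follow are opt-in and are expanded into concrete Buildkite steps by the dynamic pipeline generator, which is exactly what .buildkite/test_pipeline.py exercises. To iterate on a stage definition locally, the same two commands the CI wrapper runs are enough (assuming python3 is on the PATH):

    python3 -m pip install --quiet pytest
    cd .buildkite && pytest .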
+ integTest: + command: "mage build integTest" + platform: "platform-ingest-beats-ubuntu-2204-aarch64" + provider: "aws" + integTest-arm: + command: "mage build integTest" + platform: "platform-ingest-beats-ubuntu-2204-aarch64" + provider: "aws" + unitTest-arm: + command: "mage build unitTest" + platform: "platform-ingest-beats-ubuntu-2204-aarch64" + provider: "aws" + unitTest-macos: + command: "mage build unitTest" + platform: "generic-13-ventura-x64" + provider: "orka" + unitTest-windows-2019: + command: "mage build unitTest" + platform: "family/core-windows-2019" diff --git a/auditbeat/module/file_integrity/exeobjparser_test.go b/auditbeat/module/file_integrity/exeobjparser_test.go index 0958305afb8..f8ca144e4ed 100644 --- a/auditbeat/module/file_integrity/exeobjparser_test.go +++ b/auditbeat/module/file_integrity/exeobjparser_test.go @@ -19,8 +19,11 @@ package file_integrity import ( + "errors" "fmt" + "io/fs" "math" + "os" "reflect" "strconv" "testing" @@ -44,6 +47,12 @@ func TestExeObjParser(t *testing.T) { t.Skip("skipping test on garbled PE file: see https://github.com/elastic/beats/issues/35705") } + if _, ci := os.LookupEnv("CI"); ci { + if _, err := os.Stat(target); err != nil && errors.Is(err, fs.ErrNotExist) { + t.Skip("skipping test because target binary was not found: see https://github.com/elastic/beats/issues/38211") + } + } + got := make(mapstr.M) err := exeObjParser(nil).Parse(got, target) if err != nil { diff --git a/auditbeat/module/file_integrity/metricset_test.go b/auditbeat/module/file_integrity/metricset_test.go index 4ad58aa89fa..2a6c33e1798 100644 --- a/auditbeat/module/file_integrity/metricset_test.go +++ b/auditbeat/module/file_integrity/metricset_test.go @@ -62,6 +62,7 @@ func TestData(t *testing.T) { func TestActions(t *testing.T) { skipOnCIForDarwinAMD64(t) + skipOnBuildkiteWindows(t) defer abtest.SetupDataDir(t)() @@ -154,6 +155,7 @@ func TestActions(t *testing.T) { func TestExcludedFiles(t *testing.T) { skipOnCIForDarwinAMD64(t) + skipOnBuildkiteWindows(t) defer abtest.SetupDataDir(t)() @@ -201,6 +203,7 @@ func TestIncludedExcludedFiles(t *testing.T) { skipOnCIForDarwinAMD64(t) + skipOnBuildkiteWindows(t) defer abtest.SetupDataDir(t)() @@ -949,3 +952,9 @@ func skipOnCIForDarwinAMD64(t testing.TB) { t.Skip("Skip test on CI for darwin/amd64") } } + +func skipOnBuildkiteWindows(t testing.TB) { + if os.Getenv("BUILDKITE") == "true" && runtime.GOOS == "windows" { + t.Skip("Skip on Buildkite Windows: Shortened TMP problem") + } +} diff --git a/catalog-info.yaml b/catalog-info.yaml index 5e0f94fd2df..f6e78e87b7c 100644 --- a/catalog-info.yaml +++ b/catalog-info.yaml @@ -53,9 +53,9 @@ spec: cancel_intermediate_builds_branch_filter: "!main !7.* !8.*" skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.* !8.*" - # TODO uncomment this environment variable when pipeline definition is updated - # env: - # ELASTIC_PR_COMMENTS_ENABLED: 'true' + env: + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -101,7 +101,8 @@ spec: skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.* !8.*" env: - ELASTIC_PR_COMMENTS_ENABLED: "true" + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -109,7 +110,7 @@
spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -147,7 +148,8 @@ spec: skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.* !8.*" env: - ELASTIC_PR_COMMENTS_ENABLED: "true" + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -155,7 +157,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -192,8 +194,9 @@ spec: cancel_intermediate_builds_branch_filter: "!main !7.* !8.*" skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.* !8.*" - # env: - # ELASTIC_PR_COMMENTS_ENABLED: "true" TODO: uncomment when pipeline is ready + env: + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -201,7 +204,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -239,7 +242,8 @@ spec: skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.* !8.*" env: - ELASTIC_PR_COMMENTS_ENABLED: "true" + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -247,7 +251,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -284,8 +288,9 @@ spec: cancel_intermediate_builds_branch_filter: "!main !7.* !8.*" skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.* !8.*" - # env: - # ELASTIC_PR_COMMENTS_ENABLED: "true" TODO: uncomment when pipeline is ready + env: + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -293,7 +298,7 @@ spec: access_level: READ_ONLY --- -#
yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -331,7 +336,8 @@ spec: skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" env: - ELASTIC_PR_COMMENTS_ENABLED: "true" + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -339,7 +345,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -377,7 +383,8 @@ spec: skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" env: - ELASTIC_PR_COMMENTS_ENABLED: "true" + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -385,7 +392,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -421,8 +428,9 @@ spec: cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" - # env: - # ELASTIC_PR_COMMENTS_ENABLED: "true" TODO: uncomment when pipeline is ready + env: + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -430,7 +438,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -468,7 +476,8 @@ spec: skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" env: - ELASTIC_PR_COMMENTS_ENABLED: "true" + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -476,7 +485,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -497,9 +506,9 @@ spec: name: beats-xpack-winlogbeat description: "Beats x-pack winlogbeat pipeline" spec: - # branch_configuration: "main 7.17 
8.*" #TODO: uncomment after tests + branch_configuration: "main 7.17 8.*" pipeline_file: ".buildkite/x-pack/pipeline.xpack.winlogbeat.yml" - # maximum_timeout_in_minutes: 120 #TODO: uncomment after tests + maximum_timeout_in_minutes: 120 provider_settings: trigger_mode: none # don't trigger jobs from github activity build_pull_request_forks: false @@ -513,8 +522,9 @@ spec: cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" - # env: - # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + env: + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -522,7 +532,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -543,9 +553,9 @@ spec: name: beats-xpack-packetbeat description: "Beats x-pack packetbeat pipeline" spec: - # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + branch_configuration: "main 7.17 8.*" pipeline_file: ".buildkite/x-pack/pipeline.xpack.packetbeat.yml" - # maximum_timeout_in_minutes: 120 #TODO: uncomment after tests + maximum_timeout_in_minutes: 120 provider_settings: trigger_mode: none # don't trigger jobs from github activity build_pull_request_forks: false @@ -559,8 +569,9 @@ spec: cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" - # env: - # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + env: + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -568,7 +579,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -589,9 +600,9 @@ spec: name: beats-xpack-libbeat description: "Beats x-pack libbeat pipeline" spec: - # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + branch_configuration: "main 7.17 8.*" pipeline_file: ".buildkite/x-pack/pipeline.xpack.libbeat.yml" - # maximum_timeout_in_minutes: 120 #TODO: uncomment after tests + maximum_timeout_in_minutes: 120 provider_settings: trigger_mode: none # don't trigger jobs from github activity build_pull_request_forks: false @@ -605,8 +616,9 @@ spec: cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" - # env: - # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + env: + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -614,7 
+626,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -635,9 +647,9 @@ spec: name: beats-xpack-metricbeat description: "Beats x-pack metricbeat pipeline" spec: - # branch_configuration: "7.17" #TODO: uncomment after tests + branch_configuration: "main 7.17 8.*" pipeline_file: ".buildkite/x-pack/pipeline.xpack.metricbeat.yml" - maximum_timeout_in_minutes: 480 + maximum_timeout_in_minutes: 120 provider_settings: trigger_mode: none # don't trigger jobs from github activity build_pull_request_forks: false @@ -648,11 +660,12 @@ spec: build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) repository: elastic/beats cancel_intermediate_builds: true - cancel_intermediate_builds_branch_filter: "!7.17" + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" skip_intermediate_builds: true - skip_intermediate_builds_branch_filter: "!7.17" - # env: - # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + env: + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -660,7 +673,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -693,3 +706,273 @@ spec: access_level: BUILD_AND_READ everyone: access_level: READ_ONLY +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-elastic-agent-binary-dra-7-17 + description: Buildkite pipeline for packaging Elastic Agent core binary and publish it to DRA + links: + - title: Pipeline + url: https://buildkite.com/elastic/buildkite-elastic-agent-binary-dra-7-17 + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: buildkite-elastic-agent-binary-dra-7-17 + description: Buildkite pipeline for packaging Elastic Agent core binary and publish it to DRA + spec: + pipeline_file: ".buildkite/x-pack/elastic-agent/.buildkite/pipeline.xpack.elastic-agent-binary-dra.yml" + provider_settings: + build_branches: true + build_pull_requests: true + cancel_deleted_branch_builds: true + filter_condition: 'build.branch == "7.17" || build.pull_request.base_branch == "7.17"' + filter_enabled: true + repository: elastic/beats + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: BUILD_AND_READ + +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + 
name: buildkite-pipeline-beats-xpack-auditbeat + description: "Beats x-pack auditbeat pipeline" + links: + - title: Pipeline + url: https://buildkite.com/elastic/beats-xpack-auditbeat + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: beats-xpack-auditbeat + description: "Beats x-pack auditbeat pipeline" + spec: + # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + pipeline_file: ".buildkite/x-pack/pipeline.xpack.auditbeat.yml" + maximum_timeout_in_minutes: 120 + provider_settings: + trigger_mode: none # don't trigger jobs from github activity + build_pull_request_forks: false + build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot + build_tags: true + filter_enabled: true + filter_condition: >- + build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) + repository: elastic/beats + cancel_intermediate_builds: true + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" + skip_intermediate_builds: true + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + env: + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: READ_ONLY + +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-pipeline-beats-xpack-dockerlogbeat + description: "Beats x-pack dockerlogbeat pipeline" + links: + - title: Pipeline + url: https://buildkite.com/elastic/beats-xpack-dockerlogbeat + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: beats-xpack-dockerlogbeat + description: "Beats x-pack dockerlogbeat pipeline" + spec: + # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + pipeline_file: ".buildkite/x-pack/pipeline.xpack.dockerlogbeat.yml" + maximum_timeout_in_minutes: 120 + provider_settings: + trigger_mode: none # don't trigger jobs from github activity + build_pull_request_forks: false + build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot + build_tags: true + filter_enabled: true + filter_condition: >- + build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) + repository: elastic/beats + cancel_intermediate_builds: true + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" + skip_intermediate_builds: true + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + env: + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: READ_ONLY + +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-pipeline-beats-xpack-filebeat + description: "Beats x-pack filebeat pipeline" + links: + - title: Pipeline + url: 
https://buildkite.com/elastic/beats-xpack-filebeat + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: beats-xpack-filebeat + description: "Beats x-pack filebeat pipeline" + spec: + # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + pipeline_file: ".buildkite/x-pack/pipeline.xpack.filebeat.yml" + maximum_timeout_in_minutes: 120 + provider_settings: + trigger_mode: none # don't trigger jobs from github activity + build_pull_request_forks: false + build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot + build_tags: true + filter_enabled: true + filter_condition: >- + build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) + repository: elastic/beats + cancel_intermediate_builds: true + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" + skip_intermediate_builds: true + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + env: + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: READ_ONLY + +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-pipeline-beats-xpack-heartbeat + description: "Beats x-pack heartbeat pipeline" + links: + - title: Pipeline + url: https://buildkite.com/elastic/beats-xpack-heartbeat + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: beats-xpack-heartbeat + description: "Beats x-pack heartbeat pipeline" + spec: + # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + pipeline_file: ".buildkite/x-pack/pipeline.xpack.heartbeat.yml" + maximum_timeout_in_minutes: 120 + provider_settings: + trigger_mode: none # don't trigger jobs from github activity + build_pull_request_forks: false + build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot + build_tags: true + filter_enabled: true + filter_condition: >- + build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) + repository: elastic/beats + cancel_intermediate_builds: true + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" + skip_intermediate_builds: true + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + env: + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: READ_ONLY + +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-pipeline-beats-xpack-osquerybeat + description: "Beats x-pack osquerybeat pipeline" + links: + - title: Pipeline + url: https://buildkite.com/elastic/beats-xpack-osquerybeat + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: 
buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: beats-xpack-osquerybeat + description: "Beats x-pack osquerybeat pipeline" + spec: + # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + pipeline_file: ".buildkite/x-pack/pipeline.xpack.osquerybeat.yml" + maximum_timeout_in_minutes: 120 + provider_settings: + trigger_mode: none # don't trigger jobs from github activity + build_pull_request_forks: false + build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot + build_tags: true + filter_enabled: true + filter_condition: >- + build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) + repository: elastic/beats + cancel_intermediate_builds: true + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" + skip_intermediate_builds: true + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + env: + # TODO set to true once https://github.com/elastic/ingest-dev/issues/3001 has been resolved + ELASTIC_PR_COMMENTS_ENABLED: "false" + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: READ_ONLY diff --git a/dev-tools/mage/crossbuild.go b/dev-tools/mage/crossbuild.go index f500611e5cc..94b972ea25e 100644 --- a/dev-tools/mage/crossbuild.go +++ b/dev-tools/mage/crossbuild.go @@ -327,7 +327,7 @@ func (b GolangCrossBuilder) Build() error { args = append(args, "--rm", - "--env", "GOFLAGS=-mod=readonly", + "--env", "GOFLAGS=-mod=readonly -buildvcs=false", "--env", "MAGEFILE_VERBOSE="+verbose, "--env", "MAGEFILE_TIMEOUT="+EnvOr("MAGEFILE_TIMEOUT", ""), "--env", fmt.Sprintf("SNAPSHOT=%v", Snapshot), diff --git a/filebeat/buildkite.yml b/filebeat/buildkite.yml new file mode 100644 index 00000000000..3fcabc5f1ce --- /dev/null +++ b/filebeat/buildkite.yml @@ -0,0 +1,60 @@ +when: + branches: true ## for all the branches + changeset: ## when PR contains any of those entries in the changeset + - "filebeat/**" + - "@ci" ## special token regarding the changeset for the ci + - "@oss" ## special token regarding the changeset for the oss +stages: + mandatory: + unitTest: + command: "mage unitTest" + platform: "family/platform-ingest-beats-ubuntu-2204" + goIntegTest: + command: "mage goIntegTest" + platform: "family/platform-ingest-beats-ubuntu-2204" + pythonIntegTest: + command: "mage pythonIntegTest" + platform: "family/platform-ingest-beats-ubuntu-2204" + unitTest-windows-2022: + command: "mage build unitTest" + platform: "family/platform-ingest-beats-windows-2022" + unitTest-windows-2016: + command: "mage build unitTest" + platform: "family/platform-ingest-beats-windows-2016" + + extended_win: + unitTest-windows-2019: + command: "mage build unitTest" + platform: "family/platform-ingest-beats-windows-2019" + unitTest-windows-11: + command: "mage build unitTest" + platform: "family/platform-ingest-beats-windows-11" + unitTest-windows-10: + command: "mage build unitTest" + platform: "family/platform-ingest-beats-windows-10" + extended: + unitTest-arm: + command: "mage build unitTest" + platform: "platform-ingest-beats-ubuntu-2204-aarch64" + provider: "aws" # move this inside the platform leaf + when: + comments: + - "/test filebeat for arm" + labels: + - "arm" + parameters: + - "armTest" + branches: true ## for all the branches + tags: true ## for all the tags + unitTest-macos: + command: ".buildkite/filebeat/scripts/unit-tests.sh" + platform: "generic-13-ventura-x64" + provider: "orka" + when: + comments: + - "/test filebeat 
for macos" + labels: + - "macOS" + parameters: + - "macosTest" + tags: true ## for all the tags diff --git a/filebeat/docs/fields.asciidoc b/filebeat/docs/fields.asciidoc index ddc887d246f..5b03cdfb0a9 100644 --- a/filebeat/docs/fields.asciidoc +++ b/filebeat/docs/fields.asciidoc @@ -50730,6 +50730,61 @@ type: keyword -- +*`elasticsearch.slowlog.user.realm`*:: ++ +-- +The authentication realm the user was authenticated against + +type: keyword + +example: default_file + +-- + +*`elasticsearch.slowlog.user.effective.realm`*:: ++ +-- +The authentication realm the effective user was authenticated against + +type: keyword + +example: default_file + +-- + +*`elasticsearch.slowlog.auth.type`*:: ++ +-- +The authentication type used to authenticate the user. One of TOKEN | REALM | API_KEY + +type: keyword + +example: REALM + +-- + +*`elasticsearch.slowlog.apikey.id`*:: ++ +-- +The id of the API key used + +type: keyword + +example: WzL_kb6VSvOhAq0twPvHOQ + +-- + +*`elasticsearch.slowlog.apikey.name`*:: ++ +-- +The name of the API key used + +type: keyword + +example: my-api-key + +-- + [[exported-fields-envoyproxy]] == Envoyproxy fields @@ -86793,6 +86848,17 @@ example: 6295bdd0-0a0e-11e7-825f-6748cda7d858 -- +*`kibana.saved_object.name`*:: ++ +-- +The name of the saved object associated with this event. + +type: keyword + +example: my-saved-object + +-- + *`kibana.add_to_spaces`*:: + -- diff --git a/filebeat/docs/inputs/input-syslog.asciidoc b/filebeat/docs/inputs/input-syslog.asciidoc index e43eabea378..3e0555d03b9 100644 --- a/filebeat/docs/inputs/input-syslog.asciidoc +++ b/filebeat/docs/inputs/input-syslog.asciidoc @@ -3,6 +3,10 @@ [id="{beatname_lc}-input-{type}"] === Syslog input +deprecated:[8.14.0] + +The syslog input is deprecated. Please use the <> processor for processing syslog messages. + ++++ Syslog ++++ diff --git a/filebeat/input/filestream/fswatch.go b/filebeat/input/filestream/fswatch.go index 454a5b428b0..c51d850bbd2 100644 --- a/filebeat/input/filestream/fswatch.go +++ b/filebeat/input/filestream/fswatch.go @@ -20,6 +20,7 @@ package filestream import ( "crypto/sha256" "encoding/hex" + "errors" "fmt" "hash" "io" @@ -45,6 +46,10 @@ const ( watcherDebugKey = "file_watcher" ) +var ( + errFileTooSmall = errors.New("file size is too small for ingestion") +) + type fileWatcherConfig struct { // Interval is the time between two scans. 
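// Each scan re-lists the configured paths and diffs the result against the
// previous snapshot to derive create/write/rename events, so a shorter
// check_interval picks up new files sooner at the cost of extra filesystem I/O.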
Interval time.Duration `config:"check_interval"` @@ -202,7 +207,7 @@ func (w *fileWatcher) watch(ctx unison.Canceler) { for path, fd := range newFilesByName { // no need to react on empty new files if fd.Info.Size() == 0 { - w.log.Warnf("file %q has no content yet, skipping", fd.Filename) + w.log.Debugf("file %q has no content yet, skipping", fd.Filename) delete(paths, path) continue } @@ -385,6 +390,10 @@ func (s *fileScanner) GetFiles() map[string]loginp.FileDescriptor { } fd, err := s.toFileDescriptor(&it) + if errors.Is(err, errFileTooSmall) { + s.log.Debugf("cannot start ingesting from file %q: %s", filename, err) + continue + } if err != nil { s.log.Warnf("cannot create a file descriptor for an ingest target %q: %s", filename, err) continue @@ -473,7 +482,7 @@ func (s *fileScanner) toFileDescriptor(it *ingestTarget) (fd loginp.FileDescript // we should not open the file if we know it's too small minSize := s.cfg.Fingerprint.Offset + s.cfg.Fingerprint.Length if fileSize < minSize { - return fd, fmt.Errorf("filesize of %q is %d bytes, expected at least %d bytes for fingerprinting", fd.Filename, fileSize, minSize) + return fd, fmt.Errorf("filesize of %q is %d bytes, expected at least %d bytes for fingerprinting: %w", fd.Filename, fileSize, minSize, errFileTooSmall) } file, err := os.Open(it.originalFilename) diff --git a/filebeat/input/filestream/fswatch_test.go b/filebeat/input/filestream/fswatch_test.go index 6c9d88b858e..3fab8bfd2bd 100644 --- a/filebeat/input/filestream/fswatch_test.go +++ b/filebeat/input/filestream/fswatch_test.go @@ -276,17 +276,20 @@ scanner: err := os.WriteFile(filename, nil, 0777) require.NoError(t, err) - t.Run("issues a warning in logs", func(t *testing.T) { - var lastWarning string + t.Run("issues a debug message in logs", func(t *testing.T) { expLogMsg := fmt.Sprintf("file %q has no content yet, skipping", filename) require.Eventually(t, func() bool { - logs := logp.ObserverLogs().FilterLevelExact(logp.WarnLevel.ZapLevel()).TakeAll() + logs := logp.ObserverLogs().FilterLevelExact(logp.DebugLevel.ZapLevel()).TakeAll() if len(logs) == 0 { return false } - lastWarning = logs[len(logs)-1].Message - return strings.Contains(lastWarning, expLogMsg) - }, 100*time.Millisecond, 10*time.Millisecond, "required a warning message %q but got %q", expLogMsg, lastWarning) + for _, l := range logs { + if strings.Contains(l.Message, expLogMsg) { + return true + } + } + return false + }, 100*time.Millisecond, 10*time.Millisecond, "required a debug message %q but never found", expLogMsg) }) t.Run("emits a create event once something is written to the empty file", func(t *testing.T) { @@ -797,6 +800,25 @@ scanner: }) } + t.Run("does not issue warnings when file is too small", func(t *testing.T) { + cfgStr := ` +scanner: + fingerprint: + enabled: true + offset: 0 + length: 1024 +` + logp.DevelopmentSetup(logp.ToObserverOutput()) + + // this file is 128 bytes long + paths := []string{filepath.Join(dir, undersizedBasename)} + s := createScannerWithConfig(t, paths, cfgStr) + files := s.GetFiles() + require.Empty(t, files) + logs := logp.ObserverLogs().FilterLevelExact(logp.WarnLevel.ZapLevel()).TakeAll() + require.Empty(t, logs, "there must be no warning logs for files too small") + }) + t.Run("returns error when creating scanner with a fingerprint too small", func(t *testing.T) { cfgStr := ` scanner: diff --git a/filebeat/input/syslog/input.go b/filebeat/input/syslog/input.go index 702472794dd..c91158c0f89 100644 --- a/filebeat/input/syslog/input.go +++ 
b/filebeat/input/syslog/input.go @@ -28,6 +28,7 @@ import ( "github.com/elastic/beats/v7/filebeat/input" "github.com/elastic/beats/v7/filebeat/inputsource" "github.com/elastic/beats/v7/libbeat/beat" + "github.com/elastic/beats/v7/libbeat/common/cfgwarn" conf "github.com/elastic/elastic-agent-libs/config" "github.com/elastic/elastic-agent-libs/logp" "github.com/elastic/elastic-agent-libs/mapstr" @@ -85,6 +86,8 @@ var ( "local6", "local7", } + + deprecatedNotificationOnce sync.Once ) func init() { @@ -112,6 +115,10 @@ func NewInput( ) (input.Input, error) { log := logp.NewLogger("syslog") + deprecatedNotificationOnce.Do(func() { + cfgwarn.Deprecate("", "Syslog input. Use Syslog processor instead.") + }) + out, err := outlet.Connect(cfg) if err != nil { return nil, err @@ -180,7 +187,7 @@ func GetCbByConfig(cfg config, forwarder *harvester.Forwarder, log *logp.Logger) case syslogFormatRFC5424: return func(data []byte, metadata inputsource.NetworkMetadata) { ev := parseAndCreateEvent5424(data, metadata, cfg.Timezone.Location(), log) - forwarder.Send(ev) + _ = forwarder.Send(ev) } case syslogFormatAuto: @@ -191,7 +198,7 @@ func GetCbByConfig(cfg config, forwarder *harvester.Forwarder, log *logp.Logger) } else { ev = parseAndCreateEvent3164(data, metadata, cfg.Timezone.Location(), log) } - forwarder.Send(ev) + _ = forwarder.Send(ev) } case syslogFormatRFC3164: break @@ -199,7 +206,7 @@ func GetCbByConfig(cfg config, forwarder *harvester.Forwarder, log *logp.Logger) return func(data []byte, metadata inputsource.NetworkMetadata) { ev := parseAndCreateEvent3164(data, metadata, cfg.Timezone.Location(), log) - forwarder.Send(ev) + _ = forwarder.Send(ev) } } diff --git a/filebeat/module/elasticsearch/fields.go b/filebeat/module/elasticsearch/fields.go index 525d0c50eac..4f27bd426ff 100644 --- a/filebeat/module/elasticsearch/fields.go +++ b/filebeat/module/elasticsearch/fields.go @@ -32,5 +32,5 @@ func init() { // AssetElasticsearch returns asset data. // This is the base64 encoded zlib format compressed contents of module/elasticsearch. 
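// NOTE: this file is generated from the module's _meta/fields.yml (typically
// via `mage update` or `make update`); the blob below therefore changes in
// lockstep with the fields.yml edit later in this diff and is never edited by hand.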
func AssetElasticsearch() string { - return "eJzUWltz2zb2f8+nOKOXfztj8y9f6taa2Z1plcRxprk0sp1tFQ/nCDyiEIEADYCS1U6++w5AShYpkrpsm+3qxSZxOb9zPwfEMUxp0QMSaCxnhlCzyTMAy62gHnRerL/vPAPQJAgN9SDGZwARGaZ5armSPfjnMwAo7wRvVJQJegYw5iQi0/NTjkFiQptE3c8uUre5VllavKmhUd5ufUumklRJknY1UtmgzNHTfBhrlcB8QprATgiEioFmbkBpHnOJlqLO2qb0iEnqRaQCCliQBG/I4nO02NeElq5lRI8D0jPOaH1dzt+UFnOlo034IjOWdJBlPGrk4Pb2+jmosYdZLKhHdpXM9OiVeHvDB3e/8vfjH6aP8WW8Pxr31IjmLSa0E5pIsSnp45o57SikiihoEceTMNzMetrPB/wju1nQzeSjvf3Xzz9dvu7+9Ga+J4adxdCMY/bx7Wvz29nuhLkzo3bK3tL89HqaYy5oRGiPLRl7zGWa2X3pt0nfU+cNvoHvruLn89Hth3H/7rvvfxywh1E/3kPuZoI6aiUfLYXup9aj6O5OsAhJYapVlDEb5s6/dfmGixqKYLSAItiAscimYBXwiKTl4wXMJ7wUfpZc+InGvXGPmh4yMraerSkfocTOBg8Ta9OgWBk8hirFh4zCtoBSRluERqvATrSyVhCgjCCiKEsFZ2gJIko1MXTrYY5achmbBov/HtN0D/m7kBs4GrHSi0bE/WLCUmzrePwW9WCcuNHykaAQU747KswibjdmrycpqMlJ6zsIXJAujVRZunFJx81aZiJnH3bCzUYa6oEmY4/AapQmVdqNAU/DMReVwFrmXlcsqc2cKxoJ3byt8N0kp5EcsJ2gBcVYprXDjFLJRaIyEyJjZEwYkeQUHQFmduKcIldfOEYu/OvKrPwx1iite2ZKSmJ+Rd275TKLSUqaorDwhyPQmQxxbaPiOV/QLLwy/f3FmKsv2CrHj6sKpEC8oXj4ZnMktxmEDy8GN/Dj++vl4m/XrWS1bo4GNDHiM4pASU/taRqboJQkvj0CoRiK0GUz+CaviRgKn92AG5NRtI7z22bZPe2zv9w0oUi2Wl7ZhvJFHlxlwHE+Q8EjLzSMkctNnyiAd1zNQmPMhHWudQD2zJAOdmPATf0/U8vHEfDx+kCjlXa8mVo+ozDimphVenEoaCXItIL+4Ga4HLEMVASp5pLxFAWMSKhKRihZxHCZvEKMEi47R9BxFYopHuH+QNTelavV0v4b1Kls+w51rcduK9HHrK02ItdKzHxJHmOdUdMjsazZNFzuy+vuXqIkt0r/f4JcHmAdWgQpaky2WIeLQ7cfrsHPJUu62Rg6fzjZu+3/8RnZVHI2Of3SqaXOZcTZFsO8zucUCSMvbHJptZnjWKnj0+7JZdA9CbrnziBLb8423lwcYqXLooxHrSzcSv6QEeSNTU0JWBbfx99/Dqeji7vB7N3kx4eunb+fvXr3yyGRNgdX4z7NqX6ZUvYwxL4g1AOmlRAf6nnbGWs4UtGidjEKjlU7SdFOKtWxWx8wJe2m2yY81phzbHVGLUk9xCjSZKrktgExKtOMAp4eQDjTfE9qznGLekEcQHAV2/clazbPD3almZAxGNeHckuPtiFELPN7gCmf0sIEai4pCkeLsJREQwetdu+RUoJQ1ofqUoKuK+d2KAVrGrIGzpZL1vqbrW1I7XkZtJyZxezwPQGu+uCqI0O2IBDs2BKlEzT1wqtS34LA/V56QmBSYnzMmStMrvo5iaAyuQ7TOq4aa4VWve4E0P3Wz4iu+sCUEHm7Ug90Tf1Z7iyhIdYIbSwUVoPYjsD6FSQrgi6LKh1xGefHAQSvcYYw49pmKCBBNuGyBbhhOhuFZpGMlAgtusbb8oT+Kj7gPWaGwJEALsEQUzIywJw7Ox6yFHIs4LGYrcCt5jL+CsB3wO2hbMU9J5yGmsYmTLVyBZDH/xciv3GYTeoa/ieKHgZoGpMm6YqxJ6aaobtSUQgSoSbDUH4t1GvyTlBPHXrBZwRq9JmYNa6/EQSY5kdfzie4AWNVmlLUzAwTaEyYSaEw+lqc5NS8vcjMVb4exI7SZ2nmcTZirAvKO2J8nxsG9N/f5jZe2AvpsdKJA/wUCmsgNodsqDRwDUKGrYLekRH3qzChMmt4lJ/YTElLEnUMrAWWhfkvoOSyChJaUboO+GvAvFEWBZDA1NlrBbRVvqcWZHPka/nSH3cZi9rPGnPJzSSorTI+z5JQZ7LBBZsZ2cKA74EcVI/k9d2bAk2WrnnbEaABzLd3Vp4qLi3ILBmRrkdrJ5owMqF1cgldlGkKHgcjv0I9wrgkzYIqeKo+thVqqAsaK0N2IdBnlyXmP1vEDoJVyn83yUEVOFtxWYzrO7H60m2btPogVBznqTduIDkhrEbGgwvZV4QpoBCqSDYoo6Ve+O9717JuTTgdNQZ1Li3FG23QDjBh5byOeU/HGf6UCzVa2LYKxWWmvwyS/5LlETWDWbVhIgpjqh67Hay4dyKCmCQVhbNiLEtRssXfX4NeeWrsBLLOwd9AnY0y3a7dhcpk/Gfq91e34f+4hhdVHv4GOm6Raz26ldxIz0pEy6eUAz/s7/FUv97s+vn2KdUhm1qNrFwdr9Hr9GCQf+J3sxxw5tpoNQbSWulyQvJ3KnowRlE6/6g9jqlyleej8oFpk0m3Hb54S2hzgE6ul6t+80Fu/bFtnWvVu8AqEMvNrqOMpUqpDcUSh1AbDK5KhLn6GgRX/M1ITwij0NBDq8gH9JC5frkoERslf3Z+fnl5eVor/kYUT/VeuDzdCbZ8xSl3yVf9I/cn4ULwogJrRHhy0e3uWAeupDRyDo37AfTRzdeqTsirSzWrynaOptiYoj3Q/7AT+lV4EGouVNwcifLx/FqEyTuGjXuVGyA6w9PuyQ/H3Yvj08ubk26ve9E7OT+6PDu7H16/ffkO7of53ax8i6AAETxkpBf3MJyFd68nn+/uYZiQ1Zz5G2AXwVnQPXb7Bt2L4PTifti99yX28Dz4LjH3R/4hzIU0PPfPrhGZcGuGJ5fnZ9+5V4uUzPD+yIVFm//jIfg7IsNfbl98+DW8efXibfjyxU3/1WoPfz/LDE/cfP/lY/jHp45H+6nT++NTJ0HLJiEKkT+OlDL2U6d3EnS/fPlyf/SfxG9XwVfSU1lDP/sJG3fo1rVRK+wx2bL2mnuNVexRatqCxLsct6u+p/ic5vtfL6wmfGfdbmL2hOIU2YbFjTfR24+UN5UWUgM3nmu0kaIfPdmT7pNltlEvLrwt0kb1V816Txje4EOvwDYcQs3btbyHy+yHkB6txjDH2YLwhZtWsANcjpVOcPPT+qFW8hRs2qwy7zq5bTKU89MDiObRaStZJ3xOUX7ZtAnA6X4AtMosryTt6l0bP6NJyKZ78uq3019+ml5+np/HNsaXVu4n+MqnyBL16+jP0W27C960+F6k2CHu1kxtkNuvGkOkWJasLiu6asHHeYpa6P07AAD//wFdWko=" + 
return "eJzUmt1z2zYSwN/zV+zo5doZmyd/1K09czeTKo7jNB9ubCeXKh7OClxRiECABkDJapv//QYgJYsUSX20zfX8klAEsL9dLHYXIPZhTLMzIIHGcmYINRs9AbDcCjqDzvny750nAJoEoaEziPEJQESGaZ5aruQZ/PsJAJRHgtcqygQ9ARhyEpE58032QWJCq0Ldn52lbnCtsrT4pUZGebjlIZlKUiVJ2sWbygBljR7bw1CrBKYj0gR2RCBUDDRxL5TmMZdoKeosDUoPmKTeRCqggAVJ8JosPkOLPU1o6VJG9HBNesIZLffL9RvTbKp0tIovMmNJB1nGo0YNbm8vn4EaesyiQz3ZRTLRgxfizQ2/fv+RXw1/GD/Ep/H2NO6pkeYNJrQRTaTYmPR+TZt2CqkiClrM8WgM17Je9rNr/oHdzOhm9MHe/ufVj6cvuz++nm7JsLEZmjkmH968NL8cbS6YOzdql+w9zTevlznkggaEdt+SsftcppndVn6b9b103rA28O1F/Gw6uH037L3/7vun1+x+0Iu3sLsZoY5axUdzo/um9RTdzQUWISlMtYoyZsN88a/tvrJEDUUwmEERbMBYZGOwCnhE0vLhDKYjXgo/cy18Q+N+cY+a7jMytl6tMR+gxM6KDiNr06DoGTyEKsX7jMK2gFKmLUKjVWBHWlkrCFBGEFGUpYIztAQRpZoYuv4wRS25jE2Dx3+PabqF/V3IDZyMWOlZI3GvaDA32zKPH6IexpkbLR8ICjHlm1NhFnG70no5SUFNTloeQeCMdOlNVaUbl3Rcq3kmcv5hR9yspKEz0GTsHliN0qRKu3fA03DIRSWwlrXXFU9qc+fKjISu3Vp818jNSA5sR2hBMZZp7ZhRKjlLVGZCZIyMCSOSnKI9wMyO3KLIpy8cIhf+50qr/DHWKK17ZkpKYr5H3W/zbhaTlDRFYbEe9kBnMsSlgYrnvEOz8crytzdjPn3BWjt+WFQgBfHKxMM3q29yn0F4d359A0+vLuedv132kkW/KRrQxIhPKAIlvbTHZmyEUpL4dg+EYihCl83gm7wmYih8dgNuTEbRMue3zbZ7HGd7u2lCkaz1vLIP5Z08XOWF03yCgkfeaBgjl6trogDvuJqFhpgJ65bWDuyZIR1spoBr+g9Tq8ce8OHyi0Yv7Xg3tXxCYcQ1Mav0bFdoJci0Qr9zLVyOmAcqglRzyXiKAgYkVCUjlDyiP09eIUYJl5096LgKxRSPcLcjtV/K1Wpp+wHqpmz9CHVbj816oo9Za31ELpWYeZc8xjqnpgdiWbNruNyX191niZLcKv3PBLncwTu0CFLUmKzxDheHbt9dgm9LlnSzM3R+c7Z3w//rM7Kx5Gx0+KVTK53LiLM1jnmZtykSRl7Y5NZqc8ehUvuH3YPToHsQdI+dQ5Z+OVr55WQXL50XZTxqVeFW8vuMIN/Y1JSAZfN9+PVVOB6cvL+evB09ve/a6dXkxdufd4m0OVzN8mlO9fOUsoUj9gShvmZaCfGuXreNWcOBima1nVFwrPpJinZUqY5d/4ApaVeXbcJjjbnGVmfUktRDjCJNpipuHYhRmWYU8HQHwZnmW0pzC7eoF8QOAhexfVuxZvX8YFOZCRmDcX0ot/RgG0LEPL8HmPIxzUygppKicDALS0k0dGi1Yw+UEoSyPlSXEnRdObdBKVizIWvQbN5laX+zdhtSe14GLWdmMdt9TICLHrjqyJAtBAQbbonSEZp641WlryFwf8+9IDApMT7kzBUmF71cRFBpXMe0zFXjrdA6rxsBur/lM6KLHjAlRL5dqQddmv4sXyyhIdaINhQKq0FsQ7BehWQh0GVRpSMu4/w4gOAlThAmXNsMBSTIRly2gBums0FoZslAidCi23hbntBfpQdcYWYInAjgEgwxJSMDzC1np0OWQs4CnsWsBbeay/grgG/A7VHWck8Jx6GmoQlTrVwB5Pn/QvIbx2xSt+F/lOgxQNOQNElXjD0q1YzuSkUhSISaDEP5taiX7J2gHjt6wScEavCZmDVufyMIMM2Pvtya4AaMVWlKUbMyTKAxYSaFwuhraZJL8/4iM1f5eogNrc/SzHM2MtYF5Q0Zr3LHgN7Vbe7jhb+QHiqdOODHUFiD2ByyobKBazAyrDX0hoq4v4oSKrOGR/mJzZi0JFGnwFJgmZn/ASWXVUhopXQ74K+BeaMsCiCBqfPXCrRVfk8tyObkS/nSH3cZi9q3GnLJzSiorTI+T5JQZ7JhCTYrskYBvwdyqJ7k5fvXBU2WLq22PUADmA/vvDxVXFqQWTIgXU9rR5owMqF1dgldlGkKHjuTX6AeYFyyZiEVvFQf24ppqAsaC0d2IdBnlznzn21ih2CV8t9NcqiCs5XLYly/E6sv3dZZqwdCxXGeeuMGkSPCamTcuZB9QZgCCqGKZIMyms8L/3XrWtb1CceDxqDOpaV4ZRu0ASYsFq9T3stxjj/mQg1mtq1CcZnpL0PyX7I8UTPMYhsmojCm6rHbzhP3VkQQk6SicFaMZSlKNvv7z6CfPDV0BlnW4G8wnY02XT+7M5XJ+M+c349uwP/zGZ5VdfgbzHGLXevpFnYjPSkJLZ9SXvvX/h5P9evNpp9vH1MdsrHVyMrV8ZK8zhlc55/4XSsHztw2Wg2BtFa6nJD8nYozGKIonX/UHsdUtcrzUfnAtMml2w5fvCe0LYBOPi8XveaD3Ppj27qlVb8EFoFYru46yixVSW0Ucw6hVhRclAhT9TUELvSbkB4RRqGh+1aTX9N95vbLRYnYaPmj4+PT09PDWvM3UjzWe+H8dCdY8xWnvEu+6O25fxIuBC8qsEbCg5Nud8M6cGGlgVvQuB2gj26+VnVGXlyqWVS2UzTFwBRtQf/DRvSL8CDUVKi4ORLl7/NrESbfMazcq1yB6PQPuwc/7HdP9g9Pbw66Z92Ts4PjvdOjo7v+5Zvnb+Gun9/NyocICojgPiM9u4P+JHz/cvT5/R30E7KaM38D7CQ4Crr7btygexIcntz1u3e+xO4fB98l5m7PP4S5kfrH/tltREbcmv7B6fHRd+6nWUqmf7fnwqLN/+MR/B2R/s+35+8+hjcvzt+Ez89vei8WY/j7WaZ/4Nr7Lx/93z51PO2nztlvnzoJWjYKUYj8caCUsZ86ZwdB98uXL3d7fyR+uwq+kp7KM/TKN1i5Q7c8G7XGHpItz17zXmMRe5Qat5D4JcftYt9TfE7z+19vrCa+o243MVuiuIlsY3Hvm+RtJ8q7Souoa/c+n9FGif7twZZyHz2zTXpx4W2WNk5/1a23xPAOH/oJbOMQato+y1ssme0I6cFqDHPOFsJz16xQB7gcKp3g6qf1Xb3kMdi0eWW+6+S2yVGOD3cQmkentWKd8TlF+WXTJoDD7QC0yiyvJO3qXRvfosnIpnvw4pfDn38cn36eHsc2xudWbmf4yqfIkvTL6M+Z2/YleNOy9iLFdlluzdKuc/9VQ4gUy5LFZUVXLfg4T9FW8hrueZVktt5ScwN46aUv1PP7aU1mabybthEvDYfkL4v9IfLFKF9bBydl9fP
7OnB/bcV/G7GqBLqYhgDeSu8aN29/On8Dv8O786evXsPv8PTqMvzp/GO9Ir7Rdvz+ckL1JtAKP1/cb396demG8/QN1tzkJtCGXCsfwVfIlm+jbcCWzPYx5ftjmrXx/DcAAP//7NJVlQ==" } diff --git a/filebeat/module/elasticsearch/slowlog/_meta/fields.yml b/filebeat/module/elasticsearch/slowlog/_meta/fields.yml index fa251b39789..0055a7df364 100644 --- a/filebeat/module/elasticsearch/slowlog/_meta/fields.yml +++ b/filebeat/module/elasticsearch/slowlog/_meta/fields.yml @@ -54,3 +54,23 @@ - name: source description: Source of document that was indexed type: keyword + - name: user.realm + description: The authentication realm the user was authenticated against + example: "default_file" + type: keyword + - name: user.effective.realm + description: The authentication realm the effective user was authenticated against + example: "default_file" + type: keyword + - name: auth.type + description: The authentication type used to authenticate the user. One of TOKEN | REALM | API_KEY + example: REALM + type: keyword + - name: apikey.id + description: The id of the API key used + example: "WzL_kb6VSvOhAq0twPvHOQ" + type: keyword + - name: apikey.name + description: The name of the API key used + example: "my-api-key" + type: keyword diff --git a/filebeat/module/elasticsearch/slowlog/ingest/pipeline-json.yml b/filebeat/module/elasticsearch/slowlog/ingest/pipeline-json.yml index 614c9f7aa43..8a3c8e4f6f0 100644 --- a/filebeat/module/elasticsearch/slowlog/ingest/pipeline-json.yml +++ b/filebeat/module/elasticsearch/slowlog/ingest/pipeline-json.yml @@ -10,3 +10,23 @@ processors: - pipeline: if: 'ctx.message.contains("ecs.version")' name: '{< IngestPipeline "pipeline-json-8" >}' + - rename: + field: auth.type + target_field: elasticsearch.slowlog.auth.type + ignore_missing: true + - rename: + field: user.realm + target_field: elasticsearch.slowlog.user.realm + ignore_missing: true + - rename: + field: user.effective.realm + target_field: elasticsearch.slowlog.user.effective.realm + ignore_missing: true + - rename: + field: apikey.id + target_field: elasticsearch.slowlog.user.apikey.id + ignore_missing: true + - rename: + field: apikey.name + target_field: elasticsearch.slowlog.user.apikey.name + ignore_missing: true diff --git a/filebeat/module/elasticsearch/slowlog/test/es814_index_indexing_slowlog-json.log b/filebeat/module/elasticsearch/slowlog/test/es814_index_indexing_slowlog-json.log new file mode 100644 index 00000000000..920951b8caf --- /dev/null +++ b/filebeat/module/elasticsearch/slowlog/test/es814_index_indexing_slowlog-json.log @@ -0,0 +1,4 @@ +{"@timestamp":"2024-03-13T10:34:33.289Z", "log.level": "WARN", "auth.type":"REALM","elasticsearch.slowlog.id":"2","elasticsearch.slowlog.message":"[my-index/stZSoQ12R56VZORRItBKjA]","elasticsearch.slowlog.source":"{\\\"indices\\\":{\\\"field_security\\\":{\\\"grant\\\":\\\"read\\\",\\\"except\\\":\\\"confidential\\\"},\\\"names\\\":[\\\"foo\\\",\\\"bar\\\"],\\\"privileges\\\":\\\"admin\\\",\\\"query\\\":\\\"example\\\",\\\"allow_restricted_indices\\\":true}}","elasticsearch.slowlog.took":"12.3ms","elasticsearch.slowlog.took_millis":"12","user.name":"elastic","user.realm":"reserved" , "ecs.version": "1.2.0","service.name":"ES_ECS","event.dataset":"elasticsearch.index_indexing_slowlog","process.thread.name":"elasticsearch[runTask-0][write][T#7]","log.logger":"index.indexing.slowlog.index","elasticsearch.cluster.uuid":"0d2MZYNKR7Wqr2U6Cvpp7g","elasticsearch.node.id":"a8BUD2RfQSu4aqtpePX7BA","elasticsearch.node.name":"runTask-0","elasticsearch.cluster.name":"runTask"} 
+{"@timestamp":"2024-03-13T10:34:36.139Z", "log.level": "WARN", "auth.type":"REALM","elasticsearch.slowlog.id":"3","elasticsearch.slowlog.message":"[my-index/stZSoQ12R56VZORRItBKjA]","elasticsearch.slowlog.source":"{\\\"indices\\\":{\\\"field_security\\\":{\\\"grant\\\":\\\"read\\\",\\\"except\\\":\\\"confidential\\\"},\\\"names\\\":[\\\"foo\\\",\\\"bar\\\"],\\\"privileges\\\":\\\"admin\\\",\\\"query\\\":\\\"example\\\",\\\"allow_restricted_indices\\\":true}}","elasticsearch.slowlog.took":"5.9ms","elasticsearch.slowlog.took_millis":"5","user.name":"elastic","user.realm":"reserved" , "ecs.version": "1.2.0","service.name":"ES_ECS","event.dataset":"elasticsearch.index_indexing_slowlog","process.thread.name":"elasticsearch[runTask-0][write][T#9]","log.logger":"index.indexing.slowlog.index","elasticsearch.cluster.uuid":"0d2MZYNKR7Wqr2U6Cvpp7g","elasticsearch.node.id":"a8BUD2RfQSu4aqtpePX7BA","elasticsearch.node.name":"runTask-0","elasticsearch.cluster.name":"runTask"} +{"@timestamp":"2024-03-13T10:34:37.257Z", "log.level": "WARN", "auth.type":"REALM","elasticsearch.slowlog.id":"4","elasticsearch.slowlog.message":"[my-index/stZSoQ12R56VZORRItBKjA]","elasticsearch.slowlog.source":"{\\\"indices\\\":{\\\"field_security\\\":{\\\"grant\\\":\\\"read\\\",\\\"except\\\":\\\"confidential\\\"},\\\"names\\\":[\\\"foo\\\",\\\"bar\\\"],\\\"privileges\\\":\\\"admin\\\",\\\"query\\\":\\\"example\\\",\\\"allow_restricted_indices\\\":true}}","elasticsearch.slowlog.took":"2.5ms","elasticsearch.slowlog.took_millis":"2","user.name":"elastic","user.realm":"reserved" , "ecs.version": "1.2.0","service.name":"ES_ECS","event.dataset":"elasticsearch.index_indexing_slowlog","process.thread.name":"elasticsearch[runTask-0][write][T#12]","log.logger":"index.indexing.slowlog.index","elasticsearch.cluster.uuid":"0d2MZYNKR7Wqr2U6Cvpp7g","elasticsearch.node.id":"a8BUD2RfQSu4aqtpePX7BA","elasticsearch.node.name":"runTask-0","elasticsearch.cluster.name":"runTask"} +{"@timestamp":"2024-03-13T10:34:38.373Z", "log.level": "WARN", "auth.type":"REALM","elasticsearch.slowlog.id":"5","elasticsearch.slowlog.message":"[my-index/stZSoQ12R56VZORRItBKjA]","elasticsearch.slowlog.source":"{\\\"indices\\\":{\\\"field_security\\\":{\\\"grant\\\":\\\"read\\\",\\\"except\\\":\\\"confidential\\\"},\\\"names\\\":[\\\"foo\\\",\\\"bar\\\"],\\\"privileges\\\":\\\"admin\\\",\\\"query\\\":\\\"example\\\",\\\"allow_restricted_indices\\\":true}}","elasticsearch.slowlog.took":"2.2ms","elasticsearch.slowlog.took_millis":"2","user.name":"elastic","user.realm":"reserved" , "ecs.version": "1.2.0","service.name":"ES_ECS","event.dataset":"elasticsearch.index_indexing_slowlog","process.thread.name":"elasticsearch[runTask-0][write][T#3]","log.logger":"index.indexing.slowlog.index","elasticsearch.cluster.uuid":"0d2MZYNKR7Wqr2U6Cvpp7g","elasticsearch.node.id":"a8BUD2RfQSu4aqtpePX7BA","elasticsearch.node.name":"runTask-0","elasticsearch.cluster.name":"runTask"} diff --git a/filebeat/module/elasticsearch/slowlog/test/es814_index_indexing_slowlog-json.log-expected.json b/filebeat/module/elasticsearch/slowlog/test/es814_index_indexing_slowlog-json.log-expected.json new file mode 100644 index 00000000000..b24a197b41c --- /dev/null +++ b/filebeat/module/elasticsearch/slowlog/test/es814_index_indexing_slowlog-json.log-expected.json @@ -0,0 +1,130 @@ +[ + { + "@timestamp": "2024-03-13T10:34:33.289Z", + "log.level": "WARN", + "log.offset": 0, + "event.type": "info", + "event.kind": "event", + "fileset.name": "slowlog", + "elasticsearch.slowlog.auth.type": "REALM", + 
"elasticsearch.slowlog.id": "2", + "elasticsearch.index.id": "stZSoQ12R56VZORRItBKjA", + "elasticsearch.index.name": "my-index", + "message": "[my-index/stZSoQ12R56VZORRItBKjA]", + "elasticsearch.slowlog.source": "{\\\"indices\\\":{\\\"field_security\\\":{\\\"grant\\\":\\\"read\\\",\\\"except\\\":\\\"confidential\\\"},\\\"names\\\":[\\\"foo\\\",\\\"bar\\\"],\\\"privileges\\\":\\\"admin\\\",\\\"query\\\":\\\"example\\\",\\\"allow_restricted_indices\\\":true}}", + "elasticsearch.slowlog.took": "12.3ms", + "host.id": "a8BUD2RfQSu4aqtpePX7BA", + "input.type": "log", + "event.category": "database", + "user.name": "elastic", + "elasticsearch.slowlog.user.realm": "reserved", + "ecs.version": "1.2.0", + "service.name": "ES_ECS", + "event.dataset": "elasticsearch.index_indexing_slowlog", + "event.duration": 12000000, + "event.module": "elasticsearch", + "process.thread.name": "elasticsearch[runTask-0][write][T#7]", + "log.logger": "index.indexing.slowlog.index", + "elasticsearch.cluster.uuid": "0d2MZYNKR7Wqr2U6Cvpp7g", + "elasticsearch.node.id": "a8BUD2RfQSu4aqtpePX7BA", + "elasticsearch.node.name": "runTask-0", + "elasticsearch.cluster.name": "runTask", + "service.type": "elasticsearch" + }, + { + "@timestamp": "2024-03-13T10:34:36.139Z", + "log.level": "WARN", + "log.offset": 980, + "event.type": "info", + "event.kind": "event", + "fileset.name": "slowlog", + "elasticsearch.slowlog.auth.type": "REALM", + "elasticsearch.slowlog.id": "3", + "elasticsearch.index.id": "stZSoQ12R56VZORRItBKjA", + "elasticsearch.index.name": "my-index", + "message": "[my-index/stZSoQ12R56VZORRItBKjA]", + "elasticsearch.slowlog.source": "{\\\"indices\\\":{\\\"field_security\\\":{\\\"grant\\\":\\\"read\\\",\\\"except\\\":\\\"confidential\\\"},\\\"names\\\":[\\\"foo\\\",\\\"bar\\\"],\\\"privileges\\\":\\\"admin\\\",\\\"query\\\":\\\"example\\\",\\\"allow_restricted_indices\\\":true}}", + "elasticsearch.slowlog.took": "5.9ms", + "host.id": "a8BUD2RfQSu4aqtpePX7BA", + "input.type": "log", + "event.category": "database", + "user.name": "elastic", + "elasticsearch.slowlog.user.realm": "reserved", + "ecs.version": "1.2.0", + "service.name": "ES_ECS", + "event.dataset": "elasticsearch.index_indexing_slowlog", + "event.duration": 5000000, + "event.module": "elasticsearch", + "process.thread.name": "elasticsearch[runTask-0][write][T#9]", + "log.logger": "index.indexing.slowlog.index", + "elasticsearch.cluster.uuid": "0d2MZYNKR7Wqr2U6Cvpp7g", + "elasticsearch.node.id": "a8BUD2RfQSu4aqtpePX7BA", + "elasticsearch.node.name": "runTask-0", + "elasticsearch.cluster.name": "runTask", + "service.type": "elasticsearch" + }, + { + "@timestamp": "2024-03-13T10:34:37.257Z", + "log.level": "WARN", + "log.offset": 1958, + "event.type": "info", + "event.kind": "event", + "fileset.name": "slowlog", + "elasticsearch.slowlog.auth.type": "REALM", + "elasticsearch.slowlog.id": "4", + "elasticsearch.index.id": "stZSoQ12R56VZORRItBKjA", + "elasticsearch.index.name": "my-index", + "message": "[my-index/stZSoQ12R56VZORRItBKjA]", + "elasticsearch.slowlog.source": "{\\\"indices\\\":{\\\"field_security\\\":{\\\"grant\\\":\\\"read\\\",\\\"except\\\":\\\"confidential\\\"},\\\"names\\\":[\\\"foo\\\",\\\"bar\\\"],\\\"privileges\\\":\\\"admin\\\",\\\"query\\\":\\\"example\\\",\\\"allow_restricted_indices\\\":true}}", + "elasticsearch.slowlog.took": "2.5ms", + "host.id": "a8BUD2RfQSu4aqtpePX7BA", + "input.type": "log", + "event.category": "database", + "user.name": "elastic", + "elasticsearch.slowlog.user.realm": "reserved", + "ecs.version": "1.2.0", + 
"service.name": "ES_ECS", + "event.dataset": "elasticsearch.index_indexing_slowlog", + "event.duration": 2000000, + "event.module": "elasticsearch", + "process.thread.name": "elasticsearch[runTask-0][write][T#12]", + "log.logger": "index.indexing.slowlog.index", + "elasticsearch.cluster.uuid": "0d2MZYNKR7Wqr2U6Cvpp7g", + "elasticsearch.node.id": "a8BUD2RfQSu4aqtpePX7BA", + "elasticsearch.node.name": "runTask-0", + "elasticsearch.cluster.name": "runTask", + "service.type": "elasticsearch" + }, + { + "@timestamp": "2024-03-13T10:34:38.373Z", + "log.level": "WARN", + "log.offset": 2937, + "event.type": "info", + "event.kind": "event", + "fileset.name": "slowlog", + "elasticsearch.slowlog.auth.type": "REALM", + "elasticsearch.slowlog.id": "5", + "elasticsearch.index.id": "stZSoQ12R56VZORRItBKjA", + "elasticsearch.index.name": "my-index", + "message": "[my-index/stZSoQ12R56VZORRItBKjA]", + "elasticsearch.slowlog.source": "{\\\"indices\\\":{\\\"field_security\\\":{\\\"grant\\\":\\\"read\\\",\\\"except\\\":\\\"confidential\\\"},\\\"names\\\":[\\\"foo\\\",\\\"bar\\\"],\\\"privileges\\\":\\\"admin\\\",\\\"query\\\":\\\"example\\\",\\\"allow_restricted_indices\\\":true}}", + "elasticsearch.slowlog.took": "2.2ms", + "host.id": "a8BUD2RfQSu4aqtpePX7BA", + "input.type": "log", + "event.category": "database", + "user.name": "elastic", + "elasticsearch.slowlog.user.realm": "reserved", + "ecs.version": "1.2.0", + "service.name": "ES_ECS", + "event.dataset": "elasticsearch.index_indexing_slowlog", + "event.duration": 2000000, + "event.module": "elasticsearch", + "process.thread.name": "elasticsearch[runTask-0][write][T#3]", + "log.logger": "index.indexing.slowlog.index", + "elasticsearch.cluster.uuid": "0d2MZYNKR7Wqr2U6Cvpp7g", + "elasticsearch.node.id": "a8BUD2RfQSu4aqtpePX7BA", + "elasticsearch.node.name": "runTask-0", + "elasticsearch.cluster.name": "runTask", + "service.type": "elasticsearch" + } +] diff --git a/filebeat/module/elasticsearch/slowlog/test/es814_index_search_slowlog-json.log b/filebeat/module/elasticsearch/slowlog/test/es814_index_search_slowlog-json.log new file mode 100644 index 00000000000..40e1a31906f --- /dev/null +++ b/filebeat/module/elasticsearch/slowlog/test/es814_index_search_slowlog-json.log @@ -0,0 +1,3 @@ +{"@timestamp":"2024-03-13T09:42:41.350Z", "log.level": "WARN", "elasticsearch.slowlog.id":null,"elasticsearch.slowlog.message":"[my-index][0]","elasticsearch.slowlog.search_type":"QUERY_THEN_FETCH","elasticsearch.slowlog.source":"{\\\"query\\\":{\\\"match_none\\\":{\\\"boost\\\":1.0}}}","elasticsearch.slowlog.stats":"[]","elasticsearch.slowlog.took":"7.7ms","elasticsearch.slowlog.took_millis":7,"elasticsearch.slowlog.total_hits":"0 hits","elasticsearch.slowlog.total_shards":1 , "ecs.version": "1.2.0","service.name":"ES_ECS","event.dataset":"elasticsearch.index_search_slowlog","process.thread.name":"elasticsearch[runTask-0][search][T#3]","log.logger":"index.search.slowlog.query","elasticsearch.cluster.uuid":"0d2MZYNKR7Wqr2U6Cvpp7g","elasticsearch.node.id":"a8BUD2RfQSu4aqtpePX7BA","elasticsearch.node.name":"runTask-0","elasticsearch.cluster.name":"runTask"} +{"@timestamp":"2024-03-13T09:43:56.663Z", "log.level": "WARN", 
"elasticsearch.slowlog.id":null,"elasticsearch.slowlog.message":"[my-index][0]","elasticsearch.slowlog.search_type":"QUERY_THEN_FETCH","elasticsearch.slowlog.source":"{\\\"query\\\":{\\\"match_none\\\":{\\\"boost\\\":1.0}}}","elasticsearch.slowlog.stats":"[]","elasticsearch.slowlog.took":"946.6micros","elasticsearch.slowlog.took_millis":0,"elasticsearch.slowlog.total_hits":"0 hits","elasticsearch.slowlog.total_shards":1 , "ecs.version": "1.2.0","service.name":"ES_ECS","event.dataset":"elasticsearch.index_search_slowlog","process.thread.name":"elasticsearch[runTask-0][search][T#6]","log.logger":"index.search.slowlog.query","elasticsearch.cluster.uuid":"0d2MZYNKR7Wqr2U6Cvpp7g","elasticsearch.node.id":"a8BUD2RfQSu4aqtpePX7BA","elasticsearch.node.name":"runTask-0","elasticsearch.cluster.name":"runTask"} +{"@timestamp":"2024-03-13T09:44:20.724Z", "log.level": "WARN", "auth.type":"REALM","elasticsearch.slowlog.id":null,"elasticsearch.slowlog.message":"[my-index][0]","elasticsearch.slowlog.search_type":"QUERY_THEN_FETCH","elasticsearch.slowlog.source":"{\\\"query\\\":{\\\"match_none\\\":{\\\"boost\\\":1.0}}}","elasticsearch.slowlog.stats":"[]","elasticsearch.slowlog.took":"509.5micros","elasticsearch.slowlog.took_millis":0,"elasticsearch.slowlog.total_hits":"0 hits","elasticsearch.slowlog.total_shards":1,"user.name":"elastic","user.realm":"reserved" , "ecs.version": "1.2.0","service.name":"ES_ECS","event.dataset":"elasticsearch.index_search_slowlog","process.thread.name":"elasticsearch[runTask-0][search][T#8]","log.logger":"index.search.slowlog.query","elasticsearch.cluster.uuid":"0d2MZYNKR7Wqr2U6Cvpp7g","elasticsearch.node.id":"a8BUD2RfQSu4aqtpePX7BA","elasticsearch.node.name":"runTask-0","elasticsearch.cluster.name":"runTask"} diff --git a/filebeat/module/elasticsearch/slowlog/test/es814_index_search_slowlog-json.log-expected.json b/filebeat/module/elasticsearch/slowlog/test/es814_index_search_slowlog-json.log-expected.json new file mode 100644 index 00000000000..651f6ce267f --- /dev/null +++ b/filebeat/module/elasticsearch/slowlog/test/es814_index_search_slowlog-json.log-expected.json @@ -0,0 +1,104 @@ +[ + { + "@timestamp": "2024-03-13T09:42:41.350Z", + "elasticsearch.cluster.name": "runTask", + "elasticsearch.cluster.uuid": "0d2MZYNKR7Wqr2U6Cvpp7g", + "elasticsearch.index.name": "my-index", + "elasticsearch.node.id": "a8BUD2RfQSu4aqtpePX7BA", + "elasticsearch.node.name": "runTask-0", + "elasticsearch.slowlog.id": null, + "elasticsearch.slowlog.search_type": "QUERY_THEN_FETCH", + "elasticsearch.slowlog.source": "{\\\"query\\\":{\\\"match_none\\\":{\\\"boost\\\":1.0}}}", + "elasticsearch.slowlog.took": "7.7ms", + "elasticsearch.slowlog.total_hits": "0 hits", + "elasticsearch.shard.id": "0", + "elasticsearch.slowlog.stats": "[]", + "elasticsearch.slowlog.total_shards": 1, + "event.dataset": "elasticsearch.index_search_slowlog", + "event.type": "info", + "event.kind": "event", + "fileset.name": "slowlog", + "host.id": "a8BUD2RfQSu4aqtpePX7BA", + "input.type": "log", + "log.level": "WARN", + "log.offset": 0, + "message": "[my-index][0]", + "service.type": "elasticsearch", + "event.category": "database", + "ecs.version": "1.2.0", + "service.name": "ES_ECS", + "event.duration": 7000000, + "event.module": "elasticsearch", + "process.thread.name": "elasticsearch[runTask-0][search][T#3]", + "log.logger": "index.search.slowlog.query" + }, + { + "@timestamp": "2024-03-13T09:43:56.663Z", + "elasticsearch.cluster.name": "runTask", + "elasticsearch.cluster.uuid": "0d2MZYNKR7Wqr2U6Cvpp7g", + 
"elasticsearch.index.name": "my-index", + "elasticsearch.node.id": "a8BUD2RfQSu4aqtpePX7BA", + "elasticsearch.node.name": "runTask-0", + "elasticsearch.slowlog.id": null, + "elasticsearch.slowlog.search_type": "QUERY_THEN_FETCH", + "elasticsearch.slowlog.source": "{\\\"query\\\":{\\\"match_none\\\":{\\\"boost\\\":1.0}}}", + "elasticsearch.slowlog.took": "946.6micros", + "elasticsearch.slowlog.total_hits": "0 hits", + "elasticsearch.shard.id": "0", + "elasticsearch.slowlog.total_shards": 1, + "elasticsearch.slowlog.stats": "[]", + "event.dataset": "elasticsearch.index_search_slowlog", + "event.type": "info", + "event.kind": "event", + "fileset.name": "slowlog", + "host.id": "a8BUD2RfQSu4aqtpePX7BA", + "input.type": "log", + "log.level": "WARN", + "log.offset": 869, + "message": "[my-index][0]", + "service.type": "elasticsearch", + "event.category": "database", + "ecs.version": "1.2.0", + "service.name": "ES_ECS", + "event.duration": 0, + "event.module": "elasticsearch", + "process.thread.name": "elasticsearch[runTask-0][search][T#6]", + "log.logger": "index.search.slowlog.query" + }, + { + "@timestamp": "2024-03-13T09:44:20.724Z", + "elasticsearch.cluster.name": "runTask", + "elasticsearch.cluster.uuid": "0d2MZYNKR7Wqr2U6Cvpp7g", + "elasticsearch.index.name": "my-index", + "elasticsearch.node.id": "a8BUD2RfQSu4aqtpePX7BA", + "elasticsearch.node.name": "runTask-0", + "elasticsearch.slowlog.id": null, + "elasticsearch.slowlog.search_type": "QUERY_THEN_FETCH", + "elasticsearch.slowlog.source": "{\\\"query\\\":{\\\"match_none\\\":{\\\"boost\\\":1.0}}}", + "elasticsearch.slowlog.took": "509.5micros", + "elasticsearch.slowlog.total_hits": "0 hits", + "elasticsearch.shard.id": "0", + "elasticsearch.slowlog.stats": "[]", + "elasticsearch.slowlog.total_shards": 1, + "event.type": "info", + "event.kind": "event", + "event.dataset": "elasticsearch.index_search_slowlog", + "fileset.name": "slowlog", + "host.id": "a8BUD2RfQSu4aqtpePX7BA", + "input.type": "log", + "log.level": "WARN", + "log.offset": 1744, + "message": "[my-index][0]", + "service.type": "elasticsearch", + "elasticsearch.slowlog.auth.type": "REALM", + "event.category": "database", + "user.name": "elastic", + "elasticsearch.slowlog.user.realm": "reserved", + "ecs.version": "1.2.0", + "service.name": "ES_ECS", + "event.duration": 0, + "event.module": "elasticsearch", + "process.thread.name": "elasticsearch[runTask-0][search][T#8]", + "log.logger": "index.search.slowlog.query" + } +] diff --git a/filebeat/module/iis/error/test/iis_error_url.log-expected.json b/filebeat/module/iis/error/test/iis_error_url.log-expected.json index cc721314175..88509d87dc5 100644 --- a/filebeat/module/iis/error/test/iis_error_url.log-expected.json +++ b/filebeat/module/iis/error/test/iis_error_url.log-expected.json @@ -39,7 +39,6 @@ "source.geo.region_name": "England", "source.ip": "81.2.69.145", "source.port": 12345, - "url.extension": "1", "url.original": "12.2.1", "url.path": "12.2.1" }, @@ -83,7 +82,6 @@ "source.geo.region_name": "England", "source.ip": "81.2.69.145", "source.port": 12345, - "url.extension": "/", "url.original": "./././././../../../../../../../../", "url.path": "./././././../../../../../../../../" }, @@ -343,4 +341,4 @@ "url.original": "/fee&fie=foe", "url.path": "/fee&fie=foe" } -] \ No newline at end of file +] diff --git a/filebeat/module/kibana/_meta/fields.yml b/filebeat/module/kibana/_meta/fields.yml index d4e664ade58..aed9252122c 100644 --- a/filebeat/module/kibana/_meta/fields.yml +++ b/filebeat/module/kibana/_meta/fields.yml @@ 
-27,6 +27,10 @@ description: "The id of the saved object associated with this event." example: "6295bdd0-0a0e-11e7-825f-6748cda7d858" type: keyword + - name: saved_object.name + description: "The name of the saved object associated with this event." + example: "my-saved-object" + type: keyword - name: add_to_spaces description: "The set of space ids that a saved object was shared to." example: "['default', 'marketing']" diff --git a/filebeat/module/kibana/audit/test/test-audit-814.log b/filebeat/module/kibana/audit/test/test-audit-814.log new file mode 100644 index 00000000000..97127ddcbf0 --- /dev/null +++ b/filebeat/module/kibana/audit/test/test-audit-814.log @@ -0,0 +1,5 @@ +{"event":{"action":"saved_object_create","category":["database"],"outcome":"unknown","type":["access"]},"kibana":{"saved_object":{"id":"fleet-default-settings","type":"ingest_manager_settings"}},"labels":{"application":"elastic/fleet"},"service":{"node":{"roles":["background_tasks","ui"]}},"ecs":{"version":"8.6.1"},"@timestamp":"2023-06-19T15:18:47.298+00:00","message":"User is accessing ingest_manager_settings [id=fleet-default-settings]","log":{"level":"INFO","logger":"plugins.security.audit.ecs"},"process":{"pid":7},"trace":{"id":"809d3449277aba205a3ac539d23dbf7e"},"transaction":{"id":"49a38064b0f1dc1e"}} +{"event":{"action":"saved_object_create","category":["database"],"outcome":"unknown","type":["access"]},"kibana":{"saved_object":{"id":"a09a5397-7b9a-5a73-a622-e29f4c635658","type":"ingest-outputs"}},"labels":{"application":"elastic/fleet"},"service":{"node":{"roles":["background_tasks","ui"]}},"ecs":{"version":"8.6.1"},"@timestamp":"2023-06-19T15:18:48.987+00:00","message":"User is accessing ingest-outputs [id=a09a5397-7b9a-5a73-a622-e29f4c635658]","log":{"level":"INFO","logger":"plugins.security.audit.ecs"},"process":{"pid":7},"trace":{"id":"809d3449277aba205a3ac539d23dbf7e"},"transaction":{"id":"49a38064b0f1dc1e"}} +{"event":{"action":"saved_object_create","category":["database"],"outcome":"unknown","type":["access"]},"kibana":{"saved_object":{"id":"synthetics","type":"epm-packages"}},"labels":{"application":"elastic/fleet"},"service":{"node":{"roles":["background_tasks","ui"]}},"ecs":{"version":"8.6.1"},"@timestamp":"2023-06-19T15:18:53.426+00:00","message":"User is accessing epm-packages [id=synthetics]","log":{"level":"INFO","logger":"plugins.security.audit.ecs"},"process":{"pid":7},"trace":{"id":"809d3449277aba205a3ac539d23dbf7e"},"transaction":{"id":"49a38064b0f1dc1e"}} +{"event":{"action":"http_request","category":["web"],"outcome":"unknown"},"http":{"request":{"method":"get"}},"url":{"domain":"kibana","path":"/api/features","port":5601,"scheme":"http"},"user":{"name":"elastic","roles":["superuser"]},"kibana":{"space_id":"default"},"trace":{"id":"e2792f3f-4cf1-4f6d-b4eb-5b491724c295"},"client":{"ip":"172.22.0.2"},"service":{"node":{"roles":["background_tasks","ui"]}},"ecs":{"version":"8.6.1"},"@timestamp":"2023-06-19T15:19:18.882+00:00","message":"User is requesting [/api/features] endpoint","log":{"level":"INFO","logger":"plugins.security.audit.ecs"},"process":{"pid":7},"transaction":{"id":"cf44f52888b9ec5a"}} +{"event":{"action":"saved_object_create","category":["database"],"outcome":"unknown","type":["access"]},"kibana":{"saved_object":{"id":"abcde-fghijk","type":"ingest_manager_settings","name":"fleet-object-name"}},"labels":{"application":"elastic/fleet"},"service":{"node":{"roles":["background_tasks","ui"]}},"ecs":{"version":"8.6.1"},"@timestamp":"2023-06-19T16:18:47.298+00:00","message":"User is 
accessing ingest_manager_settings [id=fleet-default-settings]","log":{"level":"INFO","logger":"plugins.security.audit.ecs"},"process":{"pid":7},"trace":{"id":"809d3449277aba205a3ac539d23dbf7e"},"transaction":{"id":"49a38064b0f1dc1e"}} diff --git a/filebeat/module/kibana/audit/test/test-audit-814.log-expected.json b/filebeat/module/kibana/audit/test/test-audit-814.log-expected.json new file mode 100644 index 00000000000..9ab233fea60 --- /dev/null +++ b/filebeat/module/kibana/audit/test/test-audit-814.log-expected.json @@ -0,0 +1,171 @@ +[ + { + "@timestamp": "2023-06-19T15:18:47.298+00:00", + "event.action": "saved_object_create", + "event.category": [ + "database" + ], + "event.dataset": "kibana.audit", + "event.kind": "event", + "event.module": "kibana", + "event.outcome": "unknown", + "event.timezone": "-02:00", + "event.type": [ + "access" + ], + "fileset.name": "audit", + "input.type": "log", + "kibana.saved_object.id": "fleet-default-settings", + "kibana.saved_object.type": "ingest_manager_settings", + "labels.application": "elastic/fleet", + "log.level": "INFO", + "log.logger": "plugins.security.audit.ecs", + "log.offset": 0, + "message": "User is accessing ingest_manager_settings [id=fleet-default-settings]", + "process.pid": 7, + "service.node.roles": [ + "background_tasks", + "ui" + ], + "service.type": "kibana", + "trace.id": "809d3449277aba205a3ac539d23dbf7e", + "transaction.id": "49a38064b0f1dc1e" + }, + { + "@timestamp": "2023-06-19T15:18:48.987+00:00", + "event.action": "saved_object_create", + "event.category": [ + "database" + ], + "event.dataset": "kibana.audit", + "event.kind": "event", + "event.module": "kibana", + "event.outcome": "unknown", + "event.timezone": "-02:00", + "event.type": [ + "access" + ], + "fileset.name": "audit", + "input.type": "log", + "kibana.saved_object.id": "a09a5397-7b9a-5a73-a622-e29f4c635658", + "kibana.saved_object.type": "ingest-outputs", + "labels.application": "elastic/fleet", + "log.level": "INFO", + "log.logger": "plugins.security.audit.ecs", + "log.offset": 616, + "message": "User is accessing ingest-outputs [id=a09a5397-7b9a-5a73-a622-e29f4c635658]", + "process.pid": 7, + "service.node.roles": [ + "background_tasks", + "ui" + ], + "service.type": "kibana", + "trace.id": "809d3449277aba205a3ac539d23dbf7e", + "transaction.id": "49a38064b0f1dc1e" + }, + { + "@timestamp": "2023-06-19T15:18:53.426+00:00", + "event.action": "saved_object_create", + "event.category": [ + "database" + ], + "event.dataset": "kibana.audit", + "event.kind": "event", + "event.module": "kibana", + "event.outcome": "unknown", + "event.timezone": "-02:00", + "event.type": [ + "access" + ], + "fileset.name": "audit", + "input.type": "log", + "kibana.saved_object.id": "synthetics", + "kibana.saved_object.type": "epm-packages", + "labels.application": "elastic/fleet", + "log.level": "INFO", + "log.logger": "plugins.security.audit.ecs", + "log.offset": 1242, + "message": "User is accessing epm-packages [id=synthetics]", + "process.pid": 7, + "service.node.roles": [ + "background_tasks", + "ui" + ], + "service.type": "kibana", + "trace.id": "809d3449277aba205a3ac539d23dbf7e", + "transaction.id": "49a38064b0f1dc1e" + }, + { + "@timestamp": "2023-06-19T15:19:18.882+00:00", + "client.ip": "172.22.0.2", + "event.action": "http_request", + "event.category": [ + "web" + ], + "event.dataset": "kibana.audit", + "event.kind": "event", + "event.module": "kibana", + "event.outcome": "unknown", + "event.timezone": "-02:00", + "fileset.name": "audit", + "http.request.method": "get", + 
"input.type": "log", + "kibana.space_id": "default", + "log.level": "INFO", + "log.logger": "plugins.security.audit.ecs", + "log.offset": 1812, + "message": "User is requesting [/api/features] endpoint", + "process.pid": 7, + "related.user": [ + "elastic" + ], + "service.node.roles": [ + "background_tasks", + "ui" + ], + "service.type": "kibana", + "trace.id": "e2792f3f-4cf1-4f6d-b4eb-5b491724c295", + "transaction.id": "cf44f52888b9ec5a", + "url.domain": "kibana", + "url.path": "/api/features", + "url.port": 5601, + "url.scheme": "http", + "user.name": "elastic", + "user.roles": [ + "superuser" + ] + }, + { + "@timestamp": "2023-06-19T16:18:47.298+00:00", + "event.action": "saved_object_create", + "event.category": [ + "database" + ], + "event.dataset": "kibana.audit", + "event.kind": "event", + "event.module": "kibana", + "event.outcome": "unknown", + "event.timezone": "-02:00", + "event.type": [ + "access" + ], + "fileset.name": "audit", + "input.type": "log", + "kibana.saved_object.id": "abcde-fghijk", + "kibana.saved_object.type": "ingest_manager_settings", + "kibana.saved_object.name": "fleet-object-name", + "labels.application": "elastic/fleet", + "log.level": "INFO", + "log.logger": "plugins.security.audit.ecs", + "log.offset": 2466, + "message": "User is accessing ingest_manager_settings [id=fleet-default-settings]", + "process.pid": 7, + "service.node.roles": [ + "background_tasks", + "ui" + ], + "service.type": "kibana", + "trace.id": "809d3449277aba205a3ac539d23dbf7e", + "transaction.id": "49a38064b0f1dc1e" + } +] diff --git a/filebeat/module/kibana/fields.go b/filebeat/module/kibana/fields.go index 504d1f6283e..fce968bbf78 100644 --- a/filebeat/module/kibana/fields.go +++ b/filebeat/module/kibana/fields.go @@ -32,5 +32,5 @@ func init() { // AssetKibana returns asset data. // This is the base64 encoded zlib format compressed contents of module/kibana. 
func AssetKibana() string { - return "eJy8ls9ymzAQxu9+ih1ffCmM7fhffMilzaHT6a23TodZowVUC4lIi928fUfguGBj4rhpOe7ut/v7hJAIYEvPa9jKDWocALBkRWsY1oHhAMCSInS0htTnBbnYyoKl0Wt4GADAQQtfjSgVDQASSUq4dZULQGNOa3BkdzKmUBtBoTWKXJUG4OeC1p5hb6xoSY5If8pSa8riEOkA8U9NAYmxUKB1UqfwpeZTJnXhoa6J2MZ0ThodSXFMnUz6lhF8/gQmAc4ISkf2RQTonIklMgnYS86AM+mAdqQ5hEeMM08gNSAz5QWDJVcqduBDUGr5VNKxlRRhA4B+YV5Ub2UyvaPZfLEMaHW/CSZTcRfgbL4IZtPFYjKbLGfj8XjYUJ6vbsttgTH1eB16s1K8mK3K+1wOO5kFJVgqfgMW7khEZvOTYg59aS+fLzgSeiXUyhtA0WUbg1bciHr9Qv4V5mJ6P98IMQ7GOKZgMqFlsJrOk2CxnK1igUuxmq+ud4BCRGyi6t26Xn5H7A3Uu0AKB5whA7bd7NGBy9CSADYXDHwfHbbE6AOMcrRbYqnT0Y/roQUpYooSa/J3JbeUGx/yjf8ZPJackWYZo0eMCmt2UpDtddDWwIvmbPPg4Yzp2z8bdDKe3Mz76gd5ibX6UG8GvpnXEqq8F/hRoWMZO0IbZ6f4lbzqDPtMxhkkpUqkUiSuYtfIckfXwytjtmXxZuhadoD9P5zpWW3zdj6DfmgkoHElg5KaXPO2O72bm2MZU9dKXOJ8laBNkfr/BN887BzrGJneb+7H0lrSXLf1J1MN0j07J8bO0fW5dZKqg1E33MX+oaWnMCMUZLuXN1H+r0WT6GvhrmzxOwAA//+jaglP" + return "eJy8ls1y2kAMx+88hYYLl9oDhK9wyKXNodPprbdOxyO8sr1lvevsytC8fWdtQm0wDqFpOUr6Sz8tQiiALT2vYSs3qHEAwJIVrWFYG4YDAEuK0NEaUu8X5GIrC5ZGr+FhAAAHLXw1olQ0AEgkKeHWlS8AjTmtwZHdyZhCbQSF1ihylRuAnwtae4a9saIlOSL9CUutKYuDpQPEf2oKSIyFAq2TOoUvNZ8yqQsPcU3ENqZz0uhIiqPrpNK3jODzJzAJcEZQOrIvIkDnTCyRScBecgacSQe0I80hPGKceQKpAZkpLxgsuVKxA2+CUsunko6ppAgbAPQL86L6VibTO5rNF8uAVvebYDIVdwHO5otgNl0sJrPJcjYej4cN5fnrtrotMKaeXoe+WSlemq3C+7ocdjILSrBU/AYs3JGIzOYnxRz60F4+H3Ak9EqolTeAoss2Bq24EfX6h/wrzMX0fr4RYhyMcUzBZELLYDWdJ8FiOVvFApdiNV/d2IE39fbgA96ni/w5qBIEdYLrgVGIiE1UDaPrhXXEnrUeWykccIYM2AbfowOXoSUBbC6wfh8dZnj0AUY52i2x1Onox/XQghQxRYk1+buSW8qNN/nE/wweS85Is4zRI0aFNTspyPZ20NbAi+ZsTvCwFPtGZYNOxpObeV/dIJdYq81yM/DNvJZQ5b3Ajwody9gR2jg7xa/k9Q91n8k4g6RUiVSKxFXsGlnu6Hp4Zcy2LN4MXcsOsP+HMz2LbZ4TZ9APDQc0bghQUpNr/j2fHhPNsoypazkucb5K0KZI/WHjk4edZR0j0/vV/VhaS5rrtH4z1SDdtXNi7Cxd760TV22MuuEu5g8tPYUZoSDb/byJ8meWJtGXwl2Z4ncAAAD//zz3P1Q=" } diff --git a/go.mod b/go.mod index fc7c97929b2..538eb2fbdce 100644 --- a/go.mod +++ b/go.mod @@ -69,7 +69,7 @@ require ( github.com/dustin/go-humanize v1.0.1 github.com/eapache/go-resiliency v1.2.0 github.com/eclipse/paho.mqtt.golang v1.3.5 - github.com/elastic/elastic-agent-client/v7 v7.8.0 + github.com/elastic/elastic-agent-client/v7 v7.8.1 github.com/elastic/go-concert v0.2.0 github.com/elastic/go-libaudit/v2 v2.5.0 github.com/elastic/go-licenser v0.4.1 @@ -79,8 +79,8 @@ require ( github.com/elastic/go-seccomp-bpf v1.4.0 github.com/elastic/go-structform v0.0.10 github.com/elastic/go-sysinfo v1.13.1 - github.com/elastic/go-ucfg v0.8.6 - github.com/elastic/gosigar v0.14.2 + github.com/elastic/go-ucfg v0.8.7 + github.com/elastic/gosigar v0.14.3 github.com/fatih/color v1.15.0 github.com/fearful-symmetry/gorapl v0.0.4 github.com/fsnotify/fsevents v0.1.1 @@ -139,7 +139,7 @@ require ( github.com/shopspring/decimal v1.3.1 // indirect github.com/spf13/cobra v1.7.0 github.com/spf13/pflag v1.0.5 - github.com/stretchr/testify v1.8.4 + github.com/stretchr/testify v1.9.0 github.com/tsg/go-daemon v0.0.0-20200207173439-e704b93fd89b github.com/ugorji/go/codec v1.1.8 github.com/urso/sderr v0.0.0-20210525210834-52b04e8f5c71 @@ -151,20 +151,20 @@ require ( go.uber.org/atomic v1.11.0 go.uber.org/multierr v1.11.0 go.uber.org/zap v1.26.0 - golang.org/x/crypto v0.17.0 + golang.org/x/crypto v0.21.0 golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 golang.org/x/mod v0.14.0 - golang.org/x/net v0.19.0 + golang.org/x/net v0.21.0 golang.org/x/oauth2 v0.10.0 golang.org/x/sync v0.5.0 - golang.org/x/sys v0.15.0 + golang.org/x/sys v0.18.0 golang.org/x/text v0.14.0 
golang.org/x/time v0.3.0 golang.org/x/tools v0.16.0 google.golang.org/api v0.128.0 google.golang.org/genproto v0.0.0-20230920204549-e6e6cdab5c13 // indirect google.golang.org/grpc v1.58.3 - google.golang.org/protobuf v1.32.0 + google.golang.org/protobuf v1.33.0 gopkg.in/inf.v0 v0.9.1 gopkg.in/jcmturner/aescts.v1 v1.0.1 // indirect gopkg.in/jcmturner/dnsutils.v1 v1.0.1 // indirect @@ -207,7 +207,7 @@ require ( github.com/elastic/elastic-agent-shipper-client v0.5.1-0.20230228231646-f04347b666f3 github.com/elastic/elastic-agent-system-metrics v0.9.2 github.com/elastic/go-elasticsearch/v8 v8.12.0 - github.com/elastic/mito v1.9.0 + github.com/elastic/mito v1.10.0 github.com/elastic/tk-btf v0.1.0 github.com/elastic/toutoumomoma v0.0.0-20221026030040-594ef30cb640 github.com/foxcpp/go-mockdns v0.0.0-20201212160233-ede2f9158d15 @@ -218,7 +218,7 @@ require ( github.com/gorilla/mux v1.8.0 github.com/gorilla/websocket v1.4.2 github.com/icholy/digest v0.1.22 - github.com/lestrrat-go/jwx/v2 v2.0.19 + github.com/lestrrat-go/jwx/v2 v2.0.21 github.com/otiai10/copy v1.12.0 github.com/pierrec/lz4/v4 v4.1.18 github.com/pkg/xattr v0.4.9 @@ -330,7 +330,7 @@ require ( github.com/kylelemons/godebug v1.1.0 // indirect github.com/lestrrat-go/blackmagic v1.0.2 // indirect github.com/lestrrat-go/httpcc v1.0.1 // indirect - github.com/lestrrat-go/httprc v1.0.4 // indirect + github.com/lestrrat-go/httprc v1.0.5 // indirect github.com/lestrrat-go/iter v1.0.2 // indirect github.com/lestrrat-go/option v1.0.1 // indirect github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect @@ -359,7 +359,7 @@ require ( github.com/shirou/gopsutil v3.21.11+incompatible // indirect github.com/sirupsen/logrus v1.9.0 // indirect github.com/stoewer/go-strcase v1.2.0 // indirect - github.com/stretchr/objx v0.5.0 // indirect + github.com/stretchr/objx v0.5.2 // indirect github.com/tklauser/go-sysconf v0.3.10 // indirect github.com/tklauser/numcpus v0.4.0 // indirect github.com/urso/diag v0.0.0-20200210123136-21b3cc8eb797 // indirect @@ -376,7 +376,7 @@ require ( go.opentelemetry.io/otel/metric v1.21.0 // indirect go.opentelemetry.io/otel/trace v1.21.0 // indirect golang.org/x/exp v0.0.0-20231127185646-65229373498e // indirect - golang.org/x/term v0.15.0 // indirect + golang.org/x/term v0.18.0 // indirect golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20231002182017-d307bd883b97 // indirect diff --git a/go.sum b/go.sum index f95a7973ed8..7623c476c43 100644 --- a/go.sum +++ b/go.sum @@ -675,8 +675,8 @@ github.com/elastic/ebpfevents v0.4.0 h1:M80eAeJnzvGQgU9cjJqkjFca9pjM3aq/TuZxJeom github.com/elastic/ebpfevents v0.4.0/go.mod h1:o21z5xup/9dK8u0Hg9bZRflSqqj1Zu5h2dg2hSTcUPQ= github.com/elastic/elastic-agent-autodiscover v0.6.7 h1:+KVjltN0rPsBrU8b156gV4lOTBgG/vt0efFCFARrf3g= github.com/elastic/elastic-agent-autodiscover v0.6.7/go.mod h1:hFeFqneS2r4jD0/QzGkrNk0YVdN0JGh7lCWdsH7zcI4= -github.com/elastic/elastic-agent-client/v7 v7.8.0 h1:GHFzDJIWpdgI0qDk5EcqbQJGvwTsl2E2vQK3/xe+MYQ= -github.com/elastic/elastic-agent-client/v7 v7.8.0/go.mod h1:ihtjqJzYiIltlRhNruaSSc0ogxIhqPD5hOMKq16cI1s= +github.com/elastic/elastic-agent-client/v7 v7.8.1 h1:J9wZc/0mUvSEok0X5iR5+n60Jgb+AWooKddb3XgPWqM= +github.com/elastic/elastic-agent-client/v7 v7.8.1/go.mod h1:axl1nkdqc84YRFkeJGD9jExKNPUrOrzf3DFo2m653nY= 
github.com/elastic/elastic-agent-libs v0.7.5 h1:4UMqB3BREvhwecYTs/L23oQp1hs/XUkcunPlmTZn5yg= github.com/elastic/elastic-agent-libs v0.7.5/go.mod h1:pGMj5myawdqu+xE+WKvM5FQzKQ/MonikkWOzoFTJxaU= github.com/elastic/elastic-agent-shipper-client v0.5.1-0.20230228231646-f04347b666f3 h1:sb+25XJn/JcC9/VL8HX4r4QXSUq4uTNzGS2kxOE7u1U= @@ -710,16 +710,16 @@ github.com/elastic/go-structform v0.0.10 h1:oy08o/Ih2hHTkNcRY/1HhaYvIp5z6t8si8gn github.com/elastic/go-structform v0.0.10/go.mod h1:CZWf9aIRYY5SuKSmOhtXScE5uQiLZNqAFnwKR4OrIM4= github.com/elastic/go-sysinfo v1.13.1 h1:U5Jlx6c/rLkR72O8wXXXo1abnGlWGJU/wbzNJ2AfQa4= github.com/elastic/go-sysinfo v1.13.1/go.mod h1:GKqR8bbMK/1ITnez9NIsIfXQr25aLhRJa7AfT8HpBFQ= -github.com/elastic/go-ucfg v0.8.6 h1:stUeyh2goTgGX+/wb9gzKvTv0YB0231LTpKUgCKj4U0= -github.com/elastic/go-ucfg v0.8.6/go.mod h1:4E8mPOLSUV9hQ7sgLEJ4bvt0KhMuDJa8joDT2QGAEKA= +github.com/elastic/go-ucfg v0.8.7 h1:/bKaN553LY3MsfEIz2XOEEs+tRw03TzJCARrnVPpOyc= +github.com/elastic/go-ucfg v0.8.7/go.mod h1:4E8mPOLSUV9hQ7sgLEJ4bvt0KhMuDJa8joDT2QGAEKA= github.com/elastic/go-windows v1.0.1 h1:AlYZOldA+UJ0/2nBuqWdo90GFCgG9xuyw9SYzGUtJm0= github.com/elastic/go-windows v1.0.1/go.mod h1:FoVvqWSun28vaDQPbj2Elfc0JahhPB7WQEGa3c814Ss= github.com/elastic/gopacket v1.1.20-0.20211202005954-d412fca7f83a h1:8WfL/X6fK11iyX5t3Dd9dDMMNqPfEZNc//JsWGIhEgQ= github.com/elastic/gopacket v1.1.20-0.20211202005954-d412fca7f83a/go.mod h1:riddUzxTSBpJXk3qBHtYr4qOhFhT6k/1c0E3qkQjQpA= -github.com/elastic/gosigar v0.14.2 h1:Dg80n8cr90OZ7x+bAax/QjoW/XqTI11RmA79ZwIm9/4= -github.com/elastic/gosigar v0.14.2/go.mod h1:iXRIGg2tLnu7LBdpqzyQfGDEidKCfWcCMS0WKyPWoMs= -github.com/elastic/mito v1.9.0 h1:gYB+0o5bhr5/XIlKuZOdeSFvWGTKpk0v73vsFRm98u8= -github.com/elastic/mito v1.9.0/go.mod h1:n7AvUVtYQQXb8fq87FI8z67TNzuhwBV3kHBkDT1qJYQ= +github.com/elastic/gosigar v0.14.3 h1:xwkKwPia+hSfg9GqrCUKYdId102m9qTJIIr7egmK/uo= +github.com/elastic/gosigar v0.14.3/go.mod h1:iXRIGg2tLnu7LBdpqzyQfGDEidKCfWcCMS0WKyPWoMs= +github.com/elastic/mito v1.10.0 h1:LhkzBXarU32zAf24k3HOWki0DIoxsNWbVYY6vyHk2RQ= +github.com/elastic/mito v1.10.0/go.mod h1:n7AvUVtYQQXb8fq87FI8z67TNzuhwBV3kHBkDT1qJYQ= github.com/elastic/ristretto v0.1.1-0.20220602190459-83b0895ca5b3 h1:ChPwRVv1RR4a0cxoGjKcyWjTEpxYfm5gydMIzo32cAw= github.com/elastic/ristretto v0.1.1-0.20220602190459-83b0895ca5b3/go.mod h1:RAy2GVV4sTWVlNMavv3xhLsk18rxhfhDnombTe6EF5c= github.com/elastic/sarama v1.19.1-0.20220310193331-ebc2b0d8eef3 h1:FzA0/n4iMt8ojGDGRoiFPSHFvvdVIvxOxyLtiFnrLBM= @@ -1379,12 +1379,12 @@ github.com/lestrrat-go/blackmagic v1.0.2 h1:Cg2gVSc9h7sz9NOByczrbUvLopQmXrfFx//N github.com/lestrrat-go/blackmagic v1.0.2/go.mod h1:UrEqBzIR2U6CnzVyUtfM6oZNMt/7O7Vohk2J0OGSAtU= github.com/lestrrat-go/httpcc v1.0.1 h1:ydWCStUeJLkpYyjLDHihupbn2tYmZ7m22BGkcvZZrIE= github.com/lestrrat-go/httpcc v1.0.1/go.mod h1:qiltp3Mt56+55GPVCbTdM9MlqhvzyuL6W/NMDA8vA5E= -github.com/lestrrat-go/httprc v1.0.4 h1:bAZymwoZQb+Oq8MEbyipag7iSq6YIga8Wj6GOiJGdI8= -github.com/lestrrat-go/httprc v1.0.4/go.mod h1:mwwz3JMTPBjHUkkDv/IGJ39aALInZLrhBp0X7KGUZlo= +github.com/lestrrat-go/httprc v1.0.5 h1:bsTfiH8xaKOJPrg1R+E3iE/AWZr/x0Phj9PBTG/OLUk= +github.com/lestrrat-go/httprc v1.0.5/go.mod h1:mwwz3JMTPBjHUkkDv/IGJ39aALInZLrhBp0X7KGUZlo= github.com/lestrrat-go/iter v1.0.2 h1:gMXo1q4c2pHmC3dn8LzRhJfP1ceCbgSiT9lUydIzltI= github.com/lestrrat-go/iter v1.0.2/go.mod h1:Momfcq3AnRlRjI5b5O8/G5/BvpzrhoFTZcn06fEOPt4= -github.com/lestrrat-go/jwx/v2 v2.0.19 
h1:ekv1qEZE6BVct89QA+pRF6+4pCpfVrOnEJnTnT4RXoY= -github.com/lestrrat-go/jwx/v2 v2.0.19/go.mod h1:l3im3coce1lL2cDeAjqmaR+Awx+X8Ih+2k8BuHNJ4CU= +github.com/lestrrat-go/jwx/v2 v2.0.21 h1:jAPKupy4uHgrHFEdjVjNkUgoBKtVDgrQPB/h55FHrR0= +github.com/lestrrat-go/jwx/v2 v2.0.21/go.mod h1:09mLW8zto6bWL9GbwnqAli+ArLf+5M33QLQPDggkUWM= github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNBEYU= github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= @@ -1833,8 +1833,9 @@ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.3.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v0.0.0-20180303142811-b89eecf5ca5d/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.0/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= @@ -1846,8 +1847,8 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= -github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= @@ -2067,8 +2068,8 @@ golang.org/x/crypto v0.0.0-20220511200225-c6db032c6c88/go.mod h1:IxCIyHEi3zRg3s0 golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= -golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= -golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= +golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod 
h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -2210,8 +2211,8 @@ golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= -golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= +golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190130055435-99b60b757ec1/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -2379,8 +2380,8 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= -golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= +golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= @@ -2390,8 +2391,8 @@ golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= -golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4= -golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= +golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8= +golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -2682,8 +2683,8 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= 
-google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= -google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
diff --git a/heartbeat/security/security.go b/heartbeat/security/security.go index 8e15102f7b8..597e3a5bda9 100644 --- a/heartbeat/security/security.go +++ b/heartbeat/security/security.go @@ -26,6 +26,7 @@ import ( "strconv" "syscall" + "golang.org/x/sys/unix" "kernel.org/pub/linux/libs/security/libcap/cap" ) @@ -46,6 +47,9 @@ func init() { // The beat should use `getcap` at a later point to examine available capabilities // rather than relying on errors from `setcap` _ = setCapabilities() + + // Make heartbeat dumpable so elastic-agent can access process metrics. + _ = setDumpable() } func setNodeProcAttr(localUserName string) error { @@ -99,3 +103,13 @@ func setCapabilities() error { return nil } + +// Enforce PR_SET_DUMPABLE=true to allow user-level access to /proc/<pid>/io. +func setDumpable() error { + _, err := cap.Prctl(unix.PR_SET_DUMPABLE, 1) + if err != nil { + return fmt.Errorf("error setting dumpable flag via prctl: %w", err) + } + + return nil +}
diff --git a/libbeat/docs/command-reference.asciidoc b/libbeat/docs/command-reference.asciidoc index 0c65e80dc6b..91daaf097be 100644 --- a/libbeat/docs/command-reference.asciidoc +++ b/libbeat/docs/command-reference.asciidoc @@ -336,8 +336,8 @@ If {kib} is not running on `localhost:5601`, you must also adjust the endif::no_dashboards[] [[template-subcommand]]*`template`*:: -Exports the index template to stdout. You can specify the `--es.version` and -`--index` flags to further define what gets exported. Furthermore you can export +Exports the index template to stdout. You can specify the `--es.version` +flag to further define what gets exported. Furthermore, you can export the template to a file instead of `stdout` by defining a directory via `--dir`. [[ilm-policy-subcommand]] @@ -388,10 +388,6 @@ endif::export_pipeline[] *`-h, --help`*:: Shows help for the `export` command. -*`--index BASE_NAME`*:: -When used with <<template-subcommand,`template`>>, sets the base name to use for -the index template. If this flag is not specified, the default base name is -+{beatname_lc}+.
*`--dir DIRNAME`*:: @@ -412,7 +408,7 @@ ifndef::no_dashboards[] ["source","sh",subs="attributes"] ----- {beatname_lc} export config -{beatname_lc} export template --es.version {version} --index myindexname +{beatname_lc} export template --es.version {version} {beatname_lc} export dashboard --id="a7b35890-8baa-11e8-9676-ef67484126fb" > dashboard.json ----- endif::no_dashboards[] @@ -421,7 +417,7 @@ ifdef::no_dashboards[] ["source","sh",subs="attributes"] ----- {beatname_lc} export config -{beatname_lc} export template --es.version {version} --index myindexname +{beatname_lc} export template --es.version {version} ----- endif::no_dashboards[] endif::serverless[] @@ -430,7 +426,7 @@ ifdef::serverless[] ["source","sh",subs="attributes"] ----- {beatname_lc} export config -{beatname_lc} export template --es.version {version} --index myindexname +{beatname_lc} export template --es.version {version} {beatname_lc} export function cloudwatch ----- endif::serverless[] diff --git a/libbeat/outputs/fileout/config.go b/libbeat/outputs/fileout/config.go index e72a9f87d6f..69af40e4289 100644 --- a/libbeat/outputs/fileout/config.go +++ b/libbeat/outputs/fileout/config.go @@ -26,14 +26,14 @@ import ( ) type fileOutConfig struct { - Path string `config:"path"` - Filename string `config:"filename"` - RotateEveryKb uint `config:"rotate_every_kb" validate:"min=1"` - NumberOfFiles uint `config:"number_of_files"` - Codec codec.Config `config:"codec"` - Permissions uint32 `config:"permissions"` - RotateOnStartup bool `config:"rotate_on_startup"` - Queue config.Namespace `config:"queue"` + Path *PathFormatString `config:"path"` + Filename string `config:"filename"` + RotateEveryKb uint `config:"rotate_every_kb" validate:"min=1"` + NumberOfFiles uint `config:"number_of_files"` + Codec codec.Config `config:"codec"` + Permissions uint32 `config:"permissions"` + RotateOnStartup bool `config:"rotate_on_startup"` + Queue config.Namespace `config:"queue"` } func defaultConfig() fileOutConfig { @@ -45,6 +45,18 @@ func defaultConfig() fileOutConfig { } } +func readConfig(cfg *config.C) (*fileOutConfig, error) { + foConfig := defaultConfig() + if err := cfg.Unpack(&foConfig); err != nil { + return nil, err + } + + // disable bulk support in publisher pipeline + _ = cfg.SetInt("bulk_max_size", -1, -1) + + return &foConfig, nil +} + func (c *fileOutConfig) Validate() error { if c.NumberOfFiles < 2 || c.NumberOfFiles > file.MaxBackupsLimit { return fmt.Errorf("the number_of_files to keep should be between 2 and %v", diff --git a/libbeat/outputs/fileout/config_test.go b/libbeat/outputs/fileout/config_test.go new file mode 100644 index 00000000000..7e149173f6d --- /dev/null +++ b/libbeat/outputs/fileout/config_test.go @@ -0,0 +1,100 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package fileout + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/elastic/elastic-agent-libs/config" + "github.com/elastic/elastic-agent-libs/mapstr" +) + +func TestConfig(t *testing.T) { + for name, test := range map[string]struct { + config *config.C + useWindowsPath bool + assertion func(t *testing.T, config *fileOutConfig, err error) + }{ + "default config": { + config: config.MustNewConfigFrom([]byte(`{ }`)), + assertion: func(t *testing.T, actual *fileOutConfig, err error) { + expectedConfig := &fileOutConfig{ + NumberOfFiles: 7, + RotateEveryKb: 10 * 1024, + Permissions: 0600, + RotateOnStartup: true, + } + + assert.Equal(t, expectedConfig, actual) + assert.Nil(t, err) + }, + }, + "config given with posix path": { + config: config.MustNewConfigFrom(mapstr.M{ + "number_of_files": 10, + "rotate_every_kb": 5 * 1024, + "path": "/tmp/packetbeat/%{+yyyy-MM-dd-mm-ss-SSSSSS}", + "filename": "pb", + }), + assertion: func(t *testing.T, actual *fileOutConfig, err error) { + assert.Equal(t, uint(10), actual.NumberOfFiles) + assert.Equal(t, uint(5*1024), actual.RotateEveryKb) + assert.Equal(t, true, actual.RotateOnStartup) + assert.Equal(t, uint32(0600), actual.Permissions) + assert.Equal(t, "pb", actual.Filename) + + path, runErr := actual.Path.Run(time.Date(2024, 1, 2, 3, 4, 5, 67890, time.UTC)) + assert.Nil(t, runErr) + + assert.Equal(t, "/tmp/packetbeat/2024-01-02-04-05-000067", path) + assert.Nil(t, err) + }, + }, + "config given with windows path": { + useWindowsPath: true, + config: config.MustNewConfigFrom(mapstr.M{ + "number_of_files": 10, + "rotate_every_kb": 5 * 1024, + "path": "c:\\tmp\\packetbeat\\%{+yyyy-MM-dd-mm-ss-SSSSSS}", + "filename": "pb", + }), + assertion: func(t *testing.T, actual *fileOutConfig, err error) { + assert.Equal(t, uint(10), actual.NumberOfFiles) + assert.Equal(t, uint(5*1024), actual.RotateEveryKb) + assert.Equal(t, true, actual.RotateOnStartup) + assert.Equal(t, uint32(0600), actual.Permissions) + assert.Equal(t, "pb", actual.Filename) + + path, runErr := actual.Path.Run(time.Date(2024, 1, 2, 3, 4, 5, 67890, time.UTC)) + assert.Nil(t, runErr) + + assert.Equal(t, "c:\\tmp\\packetbeat\\2024-01-02-04-05-000067", path) + assert.Nil(t, err) + }, + }, + } { + t.Run(name, func(t *testing.T) { + isWindowsPath = test.useWindowsPath + cfg, err := readConfig(test.config) + test.assertion(t, cfg, err) + }) + } +} diff --git a/libbeat/outputs/fileout/docs/fileout.asciidoc b/libbeat/outputs/fileout/docs/fileout.asciidoc index 54dfdd0772a..bb2a953ec75 100644 --- a/libbeat/outputs/fileout/docs/fileout.asciidoc +++ b/libbeat/outputs/fileout/docs/fileout.asciidoc @@ -49,6 +49,14 @@ The default value is `true`. The path to the directory where the generated files will be saved. This option is mandatory. +The path may include the timestamp when the file output is initialized using the `+FORMAT` syntax where `FORMAT` is a +valid https://github.com/elastic/beats/blob/{doc-branch}/libbeat/common/dtfmt/doc.go[time format], +and enclosed with expansion braces: `%{+FORMAT}`. For example: + +``` +path: 'fileoutput-%{+yyyy.MM.dd}' +``` + ===== `filename` The name of the generated files. The default is set to the Beat name. 
For example, the files
diff --git a/libbeat/outputs/fileout/file.go b/libbeat/outputs/fileout/file.go index 4ddc5955d6e..34c57f29791 100644 --- a/libbeat/outputs/fileout/file.go +++ b/libbeat/outputs/fileout/file.go @@ -52,20 +52,17 @@ func makeFileout( observer outputs.Observer, cfg *c.C, ) (outputs.Group, error) { - foConfig := defaultConfig() - if err := cfg.Unpack(&foConfig); err != nil { + foConfig, err := readConfig(cfg) + if err != nil { return outputs.Fail(err) } - // disable bulk support in publisher pipeline - _ = cfg.SetInt("bulk_max_size", -1, -1) - fo := &fileOutput{ log: logp.NewLogger("file"), beat: beat, observer: observer, } - if err := fo.init(beat, foConfig); err != nil { + if err = fo.init(beat, *foConfig); err != nil { return outputs.Fail(err) } @@ -74,10 +71,14 @@ func makeFileout( func (out *fileOutput) init(beat beat.Info, c fileOutConfig) error { var path string + configPath, runErr := c.Path.Run(time.Now().UTC()) + if runErr != nil { + return runErr + } if c.Filename != "" { - path = filepath.Join(c.Path, c.Filename) + path = filepath.Join(configPath, c.Filename) } else { - path = filepath.Join(c.Path, out.beat.Beat) + path = filepath.Join(configPath, out.beat.Beat) } out.filePath = path
diff --git a/libbeat/outputs/fileout/pathformatstring.go b/libbeat/outputs/fileout/pathformatstring.go new file mode 100644 index 00000000000..acd2a7605fe --- /dev/null +++ b/libbeat/outputs/fileout/pathformatstring.go @@ -0,0 +1,66 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package fileout + +import ( + "os" + "strings" + "time" + + "github.com/elastic/beats/v7/libbeat/common/fmtstr" + + "github.com/elastic/beats/v7/libbeat/beat" +) + +var isWindowsPath = os.PathSeparator == '\\' + +// PathFormatString is a wrapper around EventFormatString for +// handling paths with a format expression that has access to the timestamp format. +// It has special handling for paths, specifically for the Windows path separator, +// which would be interpreted as an escape character. This formatter double escapes +// the path separator so it is properly interpreted by the fmtstr processor. +type PathFormatString struct { + efs *fmtstr.EventFormatString +} + +// Run executes the format string returning a new expanded string or an error +// if execution or event field expansion fails. +func (fs *PathFormatString) Run(timestamp time.Time) (string, error) { + placeholderEvent := &beat.Event{ + Timestamp: timestamp, + } + return fs.efs.Run(placeholderEvent) } + +// Unpack tries to initialize the PathFormatString from provided value +// (which must be a string). Unpack method satisfies go-ucfg.Unpacker interface +// required by config.C, in order to use PathFormatString with +// `common.(*Config).Unpack()`.
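+//
+// A minimal usage sketch (illustrative only; the path value below is a
+// hypothetical example, not taken from this change):
+//
+//	var pfs PathFormatString
+//	_ = pfs.Unpack("/tmp/out-%{+yyyy.MM.dd}")
+//	path, _ := pfs.Run(time.Now().UTC()) // e.g. "/tmp/out-2024.01.02"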
+func (fs *PathFormatString) Unpack(v interface{}) error { + path, ok := v.(string) + if !ok { + return nil + } + + if isWindowsPath { + path = strings.ReplaceAll(path, "\\", "\\\\") + } + + fs.efs = &fmtstr.EventFormatString{} + return fs.efs.Unpack(path) +} diff --git a/libbeat/outputs/fileout/pathformatstring_test.go b/libbeat/outputs/fileout/pathformatstring_test.go new file mode 100644 index 00000000000..b8eee4e44ea --- /dev/null +++ b/libbeat/outputs/fileout/pathformatstring_test.go @@ -0,0 +1,87 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package fileout + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestPathFormatString(t *testing.T) { + tests := []struct { + title string + useWindowsPath bool + format string + timestamp time.Time + expected string + }{ + { + "empty string", + false, + "", + time.Time{}, + "", + }, + { + "no fields configured", + false, + "format string", + time.Time{}, + "format string", + }, + { + "test timestamp formatter", + false, + "timestamp: %{+YYYY.MM.dd}", + time.Date(2015, 5, 1, 20, 12, 34, 0, time.UTC), + "timestamp: 2015.05.01", + }, + { + "test timestamp formatter with posix path", + false, + "/tmp/%{+YYYY.MM.dd}", + time.Date(2015, 5, 1, 20, 12, 34, 0, time.UTC), + "/tmp/2015.05.01", + }, + { + "test timestamp formatter with windows path", + true, + "C:\\tmp\\%{+YYYY.MM.dd}", + time.Date(2015, 5, 1, 20, 12, 34, 0, time.UTC), + "C:\\tmp\\2015.05.01", + }, + } + + for i, test := range tests { + t.Logf("test(%v): %v", i, test.title) + isWindowsPath = test.useWindowsPath + pfs := &PathFormatString{} + err := pfs.Unpack(test.format) + if err != nil { + t.Error(err) + continue + } + + actual, err := pfs.Run(test.timestamp) + + assert.NoError(t, err) + assert.Equal(t, test.expected, actual) + } +} diff --git a/libbeat/outputs/kafka/config.go b/libbeat/outputs/kafka/config.go index 8fff8dad0d5..3e2c836a06f 100644 --- a/libbeat/outputs/kafka/config.go +++ b/libbeat/outputs/kafka/config.go @@ -22,6 +22,7 @@ import ( "fmt" "math" "math/rand" + "regexp" "strings" "time" @@ -31,6 +32,7 @@ import ( "github.com/elastic/beats/v7/libbeat/common/fmtstr" "github.com/elastic/beats/v7/libbeat/common/kafka" "github.com/elastic/beats/v7/libbeat/common/transport/kerberos" + "github.com/elastic/beats/v7/libbeat/management" "github.com/elastic/beats/v7/libbeat/outputs/codec" "github.com/elastic/elastic-agent-libs/config" "github.com/elastic/elastic-agent-libs/logp" @@ -77,6 +79,11 @@ type kafkaConfig struct { Sasl kafka.SaslConfig `config:"sasl"` EnableFAST bool `config:"enable_krb5_fast"` Queue config.Namespace `config:"queue"` + + // Currently only used for validation. Those values are later + // unpacked into temporary structs whenever they're necessary. 
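+	//
+	// For illustration, a minimal standalone configuration that sets `topic`
+	// could look like this (hypothetical values):
+	//
+	//   output.kafka:
+	//     hosts: ["localhost:9092"]
+	//     topic: "beats"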
+ Topic string `config:"topic"` + Topics []any `config:"topics"` } type metaConfig struct { @@ -102,6 +109,11 @@ var compressionModes = map[string]sarama.CompressionCodec{ "snappy": sarama.CompressionSnappy, } +// validTopicRegExp is used to validate the topic contains only valid characters +// when running under Elastic-Agent. The regexp is taken from: +// https://github.com/apache/kafka/blob/a126e3a622f2b7142f3543b9dbee54b6412ba9d8/clients/src/main/java/org/apache/kafka/common/internals/Topic.java#L33 +var validTopicRegExp = regexp.MustCompile("^[a-zA-Z0-9._-]+$") + func defaultConfig() kafkaConfig { return kafkaConfig{ Hosts: nil, @@ -169,6 +181,24 @@ func (c *kafkaConfig) Validate() error { return fmt.Errorf("compression_level must be between 0 and 9") } } + + if c.Topic == "" && len(c.Topics) == 0 { + return errors.New("either 'topic' or 'topics' must be defined") + } + + // When running under Elastic-Agent we do not support dynamic topic + // selection, so `topics` is not supported and `topic` is treated as a + // plain string. + if management.UnderAgent() { + if len(c.Topics) != 0 { + return errors.New("'topics' is not supported when running under Elastic-Agent") + } + + if !validTopicRegExp.MatchString(c.Topic) { + return fmt.Errorf("topic '%s' is invalid, it must match '[a-zA-Z0-9._-]'", c.Topic) + } + } + return nil }
diff --git a/libbeat/outputs/kafka/config_test.go b/libbeat/outputs/kafka/config_test.go index 25c0c5dce99..2435b274f6e 100644 --- a/libbeat/outputs/kafka/config_test.go +++ b/libbeat/outputs/kafka/config_test.go @@ -25,6 +25,7 @@ import ( "github.com/elastic/beats/v7/libbeat/beat" "github.com/elastic/beats/v7/libbeat/internal/testutil" + "github.com/elastic/beats/v7/libbeat/management" "github.com/elastic/elastic-agent-libs/config" "github.com/elastic/elastic-agent-libs/logp" "github.com/elastic/elastic-agent-libs/mapstr" ) func TestConfigAcceptValid(t *testing.T) { tests := map[string]mapstr.M{ - "default config is valid": mapstr.M{}, "lz4 with 0.11": mapstr.M{ "compression": "lz4", "version": "0.11", + "topic": "foo", }, "lz4 with 1.0": mapstr.M{ "compression": "lz4", "version": "1.0.0", + "topic": "foo", }, "Kerberos with keytab": mapstr.M{ + "topic": "foo", "kerberos": mapstr.M{ "auth_type": "keytab", "username": "elastic", @@ -52,6 +55,7 @@ }, }, "Kerberos with user and password pair": mapstr.M{ + "topic": "foo", "kerberos": mapstr.M{ "auth_type": "password", "username": "elastic", @@ -67,7 +71,9 @@ test := test t.Run(name, func(t *testing.T) { c := config.MustNewConfigFrom(test) - c.SetString("hosts", 0, "localhost") + if err := c.SetString("hosts", 0, "localhost"); err != nil { + t.Fatalf("could not set 'hosts' on config: %s", err) + } cfg, err := readConfig(c) if err != nil { t.Fatalf("Can not create test configuration: %v", err) @@ -89,13 +95,17 @@ func TestConfigInvalid(t *testing.T) { "realm": "ELASTIC", }, }, + // The default config does not set `topic` nor `topics`.
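+	// Unpacking such a config must therefore fail validation with
+	// "either 'topic' or 'topics' must be defined".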
+ "No topics or topic provided": mapstr.M{}, } for name, test := range tests { test := test t.Run(name, func(t *testing.T) { c := config.MustNewConfigFrom(test) - c.SetString("hosts", 0, "localhost") + if err := c.SetString("hosts", 0, "localhost"); err != nil { + t.Fatalf("could not set 'hosts' on config: %s", err) + } _, err := readConfig(c) if err == nil { t.Fatalf("Can create test configuration from invalid input") @@ -104,6 +114,84 @@ } } +func TestConfigUnderElasticAgent(t *testing.T) { + oldUnderAgent := management.UnderAgent() + t.Cleanup(func() { + // Restore the previous value + management.SetUnderAgent(oldUnderAgent) + }) + + management.SetUnderAgent(true) + + tests := []struct { + name string + cfg mapstr.M + expectError bool + }{ + { + name: "topic with all valid characters", + cfg: mapstr.M{ + "topic": "abcdefghijklmnopqrstuvxz-ABCDEFGHIJKLMNOPQRSTUVXZ_01234567890.", + }, + }, + { + name: "topics is provided", + cfg: mapstr.M{ + "topics": []string{"foo", "bar"}, + }, + expectError: true, + }, + { + name: "topic cannot contain invalid characters", + cfg: mapstr.M{ + "topic": "foo bar", + }, + expectError: true, + }, + { + name: "topic with invalid characters", + cfg: mapstr.M{ + "topic": "foo + bar", + }, + expectError: true, + }, + { + name: "topic with invalid characters from dynamic topic selection", + cfg: mapstr.M{ + "topic": "%{event.field}", + }, + expectError: true, + }, + + // The default config does not set `topic` nor `topics`. + { + name: "empty config is invalid", + cfg: mapstr.M{}, + expectError: true, + }, + } + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + c := config.MustNewConfigFrom(test.cfg) + if err := c.SetString("hosts", 0, "localhost"); err != nil { + t.Fatalf("could not set 'hosts' on config: %s", err) + } + + _, err := readConfig(c) + + if test.expectError && err == nil { + t.Fatalf("invalid configuration must not be created") + } + + if !test.expectError && err != nil { + t.Fatalf("could not create config: %s", err) + } + }) + } +} + func TestBackoffFunc(t *testing.T) { testutil.SeedPRNG(t) tests := map[int]backoffConfig{ @@ -178,6 +266,7 @@ func TestTopicSelection(t *testing.T) { for name, test := range cases { t.Run(name, func(t *testing.T) { + test := test selector, err := buildTopicSelector(config.MustNewConfigFrom(test.cfg)) if err != nil { t.Fatalf("Failed to parse configuration: %v", err)
diff --git a/libbeat/publisher/pipeline/ttl_batch.go b/libbeat/publisher/pipeline/ttl_batch.go index c374ac88d72..dcc2790f231 100644 --- a/libbeat/publisher/pipeline/ttl_batch.go +++ b/libbeat/publisher/pipeline/ttl_batch.go @@ -93,10 +93,14 @@ func (b *ttlBatch) Events() []publisher.Event { } func (b *ttlBatch) ACK() { + // Help the garbage collector clean up the event data a little faster + b.events = nil b.done() } func (b *ttlBatch) Drop() { + // Help the garbage collector clean up the event data a little faster + b.events = nil b.done() }
diff --git a/libbeat/publisher/pipeline/ttl_batch_test.go b/libbeat/publisher/pipeline/ttl_batch_test.go index a56f4b0fca1..5e277d5042c 100644 --- a/libbeat/publisher/pipeline/ttl_batch_test.go +++ b/libbeat/publisher/pipeline/ttl_batch_test.go @@ -18,6 +18,7 @@ package pipeline import ( + "fmt" "testing" "github.com/stretchr/testify/assert" @@ -91,6 +92,50 @@ func TestNestedBatchSplit(t *testing.T) { assert.True(t, doneWasCalled, "Original callback should be invoked when all children are") } +func TestBatchCallsDoneAndFreesEvents(t
*testing.T) { + doneCalled := false + batch := &ttlBatch{ + done: func() { doneCalled = true }, + events: []publisher.Event{{}}, + } + require.NotNil(t, batch.events, "Initial batch events must be non-nil") + batch.ACK() + require.Nil(t, batch.events, "Calling batch.ACK should clear the events array") + require.True(t, doneCalled, "Calling batch.ACK should invoke the done callback") + + doneCalled = false + batch.events = []publisher.Event{{}} + require.NotNil(t, batch.events, "Initial batch events must be non-nil") + batch.Drop() + require.Nil(t, batch.events, "Calling batch.Drop should clear the events array") + require.True(t, doneCalled, "Calling batch.Drop should invoke the done callback") +} + +func TestNewBatchFreesEvents(t *testing.T) { + queueBatch := &mockQueueBatch{} + _ = newBatch(nil, queueBatch, 0) + assert.Equal(t, 1, queueBatch.freeEntriesCalled, "Creating a new ttlBatch should call FreeEntries on the underlying queue.Batch") +} + +type mockQueueBatch struct { + freeEntriesCalled int +} + +func (b *mockQueueBatch) Count() int { + return 1 +} + +func (b *mockQueueBatch) Done() { +} + +func (b *mockQueueBatch) Entry(i int) interface{} { + return fmt.Sprintf("event %v", i) +} + +func (b *mockQueueBatch) FreeEntries() { + b.freeEntriesCalled++ +} + type mockRetryer struct { batches []*ttlBatch } diff --git a/libbeat/publisher/queue/memqueue/broker.go b/libbeat/publisher/queue/memqueue/broker.go index e1d0fd46c00..1455745961c 100644 --- a/libbeat/publisher/queue/memqueue/broker.go +++ b/libbeat/publisher/queue/memqueue/broker.go @@ -403,8 +403,12 @@ func (b *batch) Entry(i int) interface{} { } func (b *batch) FreeEntries() { - // Memory queue can't release event references until they're fully acknowledged, - // so do nothing. + // This signals that the event data has been copied out of the batch, and is + // safe to free from the queue buffer, so set all the event pointers to nil. + for i := 0; i < b.count; i++ { + index := (b.start + i) % len(b.queue.buf) + b.queue.buf[index].event = nil + } } func (b *batch) Done() { diff --git a/libbeat/publisher/queue/memqueue/queue_test.go b/libbeat/publisher/queue/memqueue/queue_test.go index 53f8da4b77c..637e7ccd4fb 100644 --- a/libbeat/publisher/queue/memqueue/queue_test.go +++ b/libbeat/publisher/queue/memqueue/queue_test.go @@ -438,6 +438,44 @@ func TestEntryIDs(t *testing.T) { }) } +func TestBatchFreeEntries(t *testing.T) { + const queueSize = 10 + const batchSize = 5 + // 1. Add 10 events to the queue, request two batches with 5 events each + // 2. Make sure the queue buffer has 10 non-nil events + // 3. Call FreeEntries on the second batch + // 4. Make sure only events 6-10 are nil + // 5. Call FreeEntries on the first batch + // 6. 
Make sure all events are nil + testQueue := NewQueue(nil, nil, Settings{Events: queueSize, MaxGetRequest: batchSize, FlushTimeout: time.Second}, 0) + producer := testQueue.Producer(queue.ProducerConfig{}) + for i := 0; i < queueSize; i++ { + _, ok := producer.Publish(i) + require.True(t, ok, "Queue publish must succeed") + } + batch1, err := testQueue.Get(batchSize) + require.NoError(t, err, "Queue read must succeed") + require.Equal(t, batchSize, batch1.Count(), "Returned batch size must match request") + batch2, err := testQueue.Get(batchSize) + require.NoError(t, err, "Queue read must succeed") + require.Equal(t, batchSize, batch2.Count(), "Returned batch size must match request") + // Slight concurrency subtlety: we check events are non-nil after the queue + // reads, since if we do it before we have no way to be sure the insert + // has been completed. + for i := 0; i < queueSize; i++ { + require.NotNil(t, testQueue.buf[i].event, "All queue events must be non-nil") + } + batch2.FreeEntries() + for i := 0; i < batchSize; i++ { + require.NotNilf(t, testQueue.buf[i].event, "Queue index %v: batch 1's events should be unaffected by calling FreeEntries on Batch 2", i) + require.Nilf(t, testQueue.buf[batchSize+i].event, "Queue index %v: batch 2's events should be nil after FreeEntries", batchSize+i) + } + batch1.FreeEntries() + for i := 0; i < queueSize; i++ { + require.Nilf(t, testQueue.buf[i].event, "Queue index %v: all events should be nil after calling FreeEntries on both batches", i) + } +} + // producerACKWaiter is a helper that can listen to queue producer callbacks // and wait on them from the test thread, so we can test the queue's asynchronous // behavior without relying on time.Sleep.
diff --git a/libbeat/publisher/queue/memqueue/runloop.go b/libbeat/publisher/queue/memqueue/runloop.go index 0f7788c6209..45ae3c0a1a2 100644 --- a/libbeat/publisher/queue/memqueue/runloop.go +++ b/libbeat/publisher/queue/memqueue/runloop.go @@ -187,13 +187,8 @@ func (l *runLoop) handleGetReply(req *getRequest) { } func (l *runLoop) handleDelete(count int) { - // Clear the internal event pointers so they can be garbage collected - for i := 0; i < count; i++ { - index := (l.bufPos + i) % len(l.broker.buf) - l.broker.buf[index].event = nil - } - - // Advance position and counters + // Advance position and counters. Event data was already cleared in + // batch.FreeEntries when the events were vended. l.bufPos = (l.bufPos + count) % len(l.broker.buf) l.eventCount -= count l.consumedCount -= count
diff --git a/metricbeat/docs/fields.asciidoc b/metricbeat/docs/fields.asciidoc index 435760b7406..6694528df13 100644 --- a/metricbeat/docs/fields.asciidoc +++ b/metricbeat/docs/fields.asciidoc @@ -57758,7 +57758,7 @@ type: long Fraction of the time (between 0.0 and 1.0) that the queue is able to immediately deliver messages to consumers. This can be less than 1.0 if consumers are limited by network congestion or prefetch count. -type: long +type: scaled_float format: percent
diff --git a/metricbeat/docs/modules/mysql.asciidoc b/metricbeat/docs/modules/mysql.asciidoc index 0c5a793a29a..8711359bf5f 100644 --- a/metricbeat/docs/modules/mysql.asciidoc +++ b/metricbeat/docs/modules/mysql.asciidoc @@ -89,6 +89,18 @@ metricbeat.modules: # By setting raw to true, all raw fields from the status metricset will be added to the event. #raw: false + + # Optional SSL/TLS. By default is false.
+ #ssl.enabled: true + + # List of root certificates for SSL/TLS server verification + #ssl.certificate_authorities: ["/etc/pki/root/ca.crt"] + + # Certificate for SSL/TLS client authentication + #ssl.certificate: "/etc/pki/client/cert.crt" + + # Client certificate key file + #ssl.key: "/etc/pki/client/cert.key" ---- [float] diff --git a/metricbeat/metricbeat.reference.yml b/metricbeat/metricbeat.reference.yml index 6659ca29276..3f9ccb0a9db 100644 --- a/metricbeat/metricbeat.reference.yml +++ b/metricbeat/metricbeat.reference.yml @@ -764,6 +764,18 @@ metricbeat.modules: # By setting raw to true, all raw fields from the status metricset will be added to the event. #raw: false + # Optional SSL/TLS. By default is false. + #ssl.enabled: true + + # List of root certificates for SSL/TLS server verification + #ssl.certificate_authorities: ["/etc/pki/root/ca.crt"] + + # Certificate for SSL/TLS client authentication + #ssl.certificate: "/etc/pki/client/cert.crt" + + # Client certificate key file + #ssl.key: "/etc/pki/client/cert.key" + #--------------------------------- NATS Module --------------------------------- - module: nats metricsets: diff --git a/metricbeat/module/elasticsearch/index/index.go b/metricbeat/module/elasticsearch/index/index.go index 050ad311c85..d32982ad7b6 100644 --- a/metricbeat/module/elasticsearch/index/index.go +++ b/metricbeat/module/elasticsearch/index/index.go @@ -41,8 +41,9 @@ const ( expandWildcards = "expand_wildcards=open" statsPath = "/_stats/" + statsMetrics + "?filter_path=indices&" + expandWildcards - bulkSuffix = ",bulk" - hiddenSuffix = ",hidden" + bulkSuffix = ",bulk" + hiddenSuffix = ",hidden" + allowClosedIndices = "&forbid_closed_indices=false" ) // MetricSet type defines all fields of the MetricSet @@ -107,6 +108,7 @@ func getServicePath(esVersion version.V) (string, error) { if !esVersion.LessThan(elasticsearch.BulkStatsAvailableVersion) { u.Path += bulkSuffix + u.RawQuery += allowClosedIndices } if !esVersion.LessThan(elasticsearch.ExpandWildcardsHiddenAvailableVersion) { diff --git a/metricbeat/module/elasticsearch/index/index_test.go b/metricbeat/module/elasticsearch/index/index_test.go index f4ec196c3aa..4b7f46e23b6 100644 --- a/metricbeat/module/elasticsearch/index/index_test.go +++ b/metricbeat/module/elasticsearch/index/index_test.go @@ -29,7 +29,7 @@ import ( func TestGetServiceURIExpectedPath(t *testing.T) { path770 := strings.Replace(statsPath, expandWildcards, expandWildcards+hiddenSuffix, 1) - path800 := strings.Replace(path770, statsMetrics, statsMetrics+bulkSuffix, 1) + path800 := strings.Replace(path770, statsMetrics, statsMetrics+bulkSuffix, 1) + allowClosedIndices tests := map[string]struct { esVersion *version.V @@ -65,6 +65,7 @@ func TestGetServiceURIExpectedPath(t *testing.T) { func TestGetServiceURIMultipleCalls(t *testing.T) { path := strings.Replace(statsPath, expandWildcards, expandWildcards+hiddenSuffix, 1) path = strings.Replace(path, statsMetrics, statsMetrics+bulkSuffix, 1) + path += allowClosedIndices err := quick.Check(func(r uint) bool { numCalls := 2 + (r % 10) // between 2 and 11 diff --git a/metricbeat/module/elasticsearch/index_summary/index_summary.go b/metricbeat/module/elasticsearch/index_summary/index_summary.go index c74b744f238..d3a91b44247 100644 --- a/metricbeat/module/elasticsearch/index_summary/index_summary.go +++ b/metricbeat/module/elasticsearch/index_summary/index_summary.go @@ -19,10 +19,13 @@ package index_summary import ( "fmt" + "net/url" "github.com/elastic/beats/v7/metricbeat/mb" 
"github.com/elastic/beats/v7/metricbeat/mb/parse" "github.com/elastic/beats/v7/metricbeat/module/elasticsearch" + + "github.com/elastic/elastic-agent-libs/version" ) // init registers the MetricSet with the central registry. @@ -36,6 +39,8 @@ func init() { const ( statsPath = "/_stats" + + allowClosedIndices = "forbid_closed_indices=false" ) var ( @@ -70,15 +75,43 @@ func (m *MetricSet) Fetch(r mb.ReporterV2) error { return nil } - content, err := m.HTTP.FetchContent() + info, err := elasticsearch.GetInfo(m.HTTP, m.HostData().SanitizedURI+statsPath) if err != nil { + return fmt.Errorf("failed to get info from Elasticsearch: %w", err) + } + + if err := m.updateServicePath(*info.Version.Number); err != nil { return err } - info, err := elasticsearch.GetInfo(m.HTTP, m.HostData().SanitizedURI+statsPath) + content, err := m.HTTP.FetchContent() if err != nil { - return fmt.Errorf("failed to get info from Elasticsearch: %w", err) + return err } return eventMapping(r, info, content, m.XPackEnabled) } + +func (m *MetricSet) updateServicePath(esVersion version.V) error { + p, err := getServicePath(esVersion) + if err != nil { + return err + } + + m.SetServiceURI(p) + return nil +} + +func getServicePath(esVersion version.V) (string, error) { + currPath := statsPath + u, err := url.Parse(currPath) + if err != nil { + return "", err + } + + if !esVersion.LessThan(elasticsearch.BulkStatsAvailableVersion) { + u.RawQuery += allowClosedIndices + } + + return u.String(), nil +} diff --git a/metricbeat/module/logstash/fields.go b/metricbeat/module/logstash/fields.go index 1cdf0ce0e6a..d1550b9555c 100644 --- a/metricbeat/module/logstash/fields.go +++ b/metricbeat/module/logstash/fields.go @@ -32,5 +32,5 @@ func init() { // AssetLogstash returns asset data. // This is the base64 encoded zlib format compressed contents of module/logstash. 
func AssetLogstash() string { - return "eJzEWU2v4zQU3b9fcdU1E4kFmy4QC0AMAsFqNghFbnKb+D0nzvijw+PXo7RNGyf+bNwhiy6S+Jxzbef63tMP8Ibve2C8kYrI9gVAUcVwD7vfrrd2LwA1ykrQQVHe7+H7FwCA6TF0vNYMXwAEMiQS99CQFwCJStG+kXv4aycl230Du1apYff3+KzlQpUV74+02cORMDmOP1Jktdyf0T9ATzq86yqlIkqeHwGo92FkEVwP1zvzofPhinYoFemG25NpNGGUyNndgah2D7sfbiN2K7DXU7eCmYuwCZmP77Az7rswXDhzrBbJUGqJdUn78vCuUK5edUVqRjxNcNHzGovzLBevp67osCsiSAw9Hfnn6XKcHJMSPYxLOL7TUcboksotwi/ACTvx4gl7JTdtENpnEHvRUSywJgquVT6OJdhEUmtBxlSRdRGunB7sZc7YtBZa0zqD6tu9Fd5EdEIhKc+x8rd7S8iJaqADMtrjepf2KBXWqwF8236uBp0t4TFO6pKcUJAGnbnFBuwDnxN8+90yO5vYrrwF3jXhsqgGXczVFy6mm5KvJsSv46tNyILotn1sy/n4DqoGTapKFVqSBsue9DznITXGdVZVxPC4vxB/jL44DdyjLD9rrkjZ0UpYAw0HmxBwEUU4qRsBvIpcoYfCn5P0ujugKPmxREaGsYgZUFBeuyKNm4+UORnvFeky5rVrqVrBlWJYOzfsE7UnKVjP+7mUvo//v+bdJeN2KgpeobykpQFFhb2K7BZsUnxoE+NnjRqvBU1Zcb2J0AFmbaAwuYGaSobCqF7SNGJhQ1kxtLaS7QGOG46jHowPHhmRilYSiajaomJaKhS2mXjD9y9crGMb5QUrKEtvPV2L8WaDPV0u9TnIL203dKgErWSRrGWux1FPL+cOFgfFfWWzVQIrJX4186GLTZo2GIcWOxSElRsUHIiq2lLSf91lMON94xz/hYs3FO7SxztY4CBQYq/OPdhTShfHDENwguYg45ubQeydWTpOI8jgDylHvYN1Y3VdTCZ+eMXKVXwZoStaPYZ3W0guU72GcUgx/owIizc62lw6/z0ooZdPPXlsvH7hUsEKdFsLLlGcaIWrbjuD3Ju3+smCbbcjwbuZAny/fvodPvZHnphH3R9I6OMI6BkvW+hg1A3uHOpv36YSzY7gX7hI8X9eKODjj85zze6JPXKc2UxuE7W+l37r4et95JfjkwTxRqxJZD11wGuexykNqYVUUz1S9wo3YI5HwDqdZnOsayYitu1PZ2Tr9oTIVV952BGBResbr499xTvaN9dpgHPLg6Lwqlrb3rll/aFVw1NlHSlTKJytcS5tP19pkrQFTfwIlR5H3hy9Kc+snXoT/Nm1ntUtzxef3SMMY4fwId5djyeMIYUo193kbAk7lkfGncbhAvYpqHm0eu1tEynH0sbZ3iZv1Knm3pdxAcQEAemedkIkEOdJp4UEkWHBRq/aVBWMEx53ljcwPWIGR9DFWbcmUM31gbmrYNt/oybA4j/S+xWTxAPWDyQdVhkgsrgsZ+v5+caG3Se3E0ZlLo87BFETAMtJOPtxmbuGsWHICh7sHyDrujk7gQipEFW7JwJFV7SJuJdFGrRsy2dRBBqFCLRIZy+Q6iBh/Z81F9d8kGt7TYm/jEjREJ0dFr1ZGski0s0fwEJMBGzwpIFHZyIi7Cd+TwsxjzHdm6a+Ka+9dFiX95uCxFrY4o/bWcOrM0sNhDmPcxM1mGUEjh3l88+YI6FMi1wnrtTVWE5u9ee8f+9bcP4LAAD//53osjw=" + return 
"eJzEWk2P6zQU3c+vuOqaF4kFmy4QC0AMAsHqbRCKPM5t4hknzvNHH8OvR2mbNk5sx27cIYtZJPE551479r2n8wne8H0PXNRKE9U8AWimOe5h99vl1u4JoEJFJes1E90evn8CABgfQysqw/EJQCJHonAPNXkCUKg162q1h792SvHdN7BrtO53fw/PGiF1SUV3YPUeDoSrYfyBIa/U/oT+CTrS4k1XqTTR6vQIQL/3A4sUpr/cmQ6dDtesRaVJ21+fjKMJZ0RN7vZEN3vY/XAdsVuAvR7bBcxUhEvIdHyLrXXfh+HDmWI1SPrSKKxK1pUv7xrV4lVfpHbEY4KLTlRYnLJcvB7bosW2iCCx9LTkn4fL8XKMSkw/TOHwTss4Z3Mqv4iwAC/syItH7LTatEBYl0HsWUcxwxophNH5OOZgI0llJBm2iqyTcOEMYM/3jE1zYQyrMqi+3lvgjURHlIqJHDN/vTeHHKl61iNnHS5XaYdKY7UYILatZ9qbbBseF6QqyRElqdG7t7iAQ+BTgm+/m+/ONrZv34LgnAhV0N4UU/WFj+mq5MOEhHV8WEJmRNfl45rO+1cQ7Q2hVBdGkRrLjnQi5yE1xHVSVcTw+L+QcIyhOC3cgyq/GKFJ2TIqnYGuB5sQcBFFOKobAIKKfKGvhT8l6Uz7grIUhxI56YcipkfJROWLNC4fKTkZ7hXpMqa1a6kbKbTmWHkX7AO1JylY5v1USt/G/19598m4nopSUFTnbalHSbHTkd2CS0oIbWT8YtDgpaApqTCbCD1gzgYKkxuosWQorOolTSMWLpQFQ+Mq2e7guOJ46sH44JETpRlVSCRtCsqN0ihdmXjD969CLmMb5K1WUI7eerxm4+0Ge7x86nOQn9tuaFFLRlWRrGWqx1NPz3MHs4PiNrPZKoGFkrCa6dDZIk0bjH2DLUrCyw0KXoimTanYv/4ymIuu9o7/KuQbSn/pExwssZeosNOnHuwhpYsnw7CaoCnI8OZmEHdnlo5TS9KHQ8pR72BVO10Xm0m8vCL1FV9W6JrR+/CuEylUqtcwDCmGPwPC7I2W1efOfw9amvnTwD42XL8IpWEBuq0FVyiPjOKi284g9+qtfnZgu+1ICC6mFb5fP/8Oz91BJO6j/g9k7eNY0TNcrtDBqhv8e2i4fRtLNDdCeOIixf95poDnH73nmtsTu+c4c5ncNmp1K/2Ww5frKCwnJAnijVibyHnqQNA8j1O6phZSTfVI3QvcFXM8AtbrNNtjfZmIWLY/nZCdyxMiZ33hYUcEFq1vuJ47KlrW1Zc0wKnlQVkEVS1t79yy/jC6FqmyDoxrlN7WOJe2ny80SdpWTfwIlQFH3h69aZ9ZOvU2+KNrPadbni8+t0e4jr2GD/HuejxhDClEue42Z0P4oTxw4TUOZ7APQc2jNWhv20g5pjbO9rZ5o041/7qMCyAmCEj3tBMigThPOi0kiAwLNnrVtqrVOOF+Z3kD0z1mcARdnHVrA1XCvHB/Fez6bdQGmP1GertiNvEV6weSDqsMEFlclpP1/Hhjw+2Tuwmjdq6AOwRRCYB5Ek5+XOauYWgYHgZOSU8o0+8fuoOnRxQZ1ZzEdBJJVXobpQ3w/XCKjtl7UAjZc7TaOULWL9bbA0ZIhaiuLREoupdJxD1PVG9UUz6KYqVFjECL9HRXDjlImP9H5eJyEuRaXuORX0YczhB9Lsy68jSSWaSbP4CZmAjY1RoD7s1ERNgP/J5mYu5jurXLXV1eXJR1XcFvChLPUMcvI27W9dmZbA2Eews5G3V1l5HIBfE2LvnOmANh3Mhc5ZAydGgktjqzwX/scOD8FwAA//81Lx/S" } diff --git a/metricbeat/module/logstash/node_stats/_meta/fields.yml b/metricbeat/module/logstash/node_stats/_meta/fields.yml index 8e37a165659..202907fdc29 100644 --- a/metricbeat/module/logstash/node_stats/_meta/fields.yml +++ b/metricbeat/module/logstash/node_stats/_meta/fields.yml @@ -161,6 +161,17 @@ type: long - name: max_queue_size_in_bytes type: long + - name: capacity + type: group + fields: + - name: max_queue_size_in_bytes + type: long + - name: max_unread_events + type: long + - name: page_capacity_in_bytes + type: long + - name: queue_size_in_bytes + type: long - name: events type: group fields: diff --git a/metricbeat/module/mysql/_meta/Dockerfile b/metricbeat/module/mysql/_meta/Dockerfile index 2051c726595..b701ad617ea 100644 --- a/metricbeat/module/mysql/_meta/Dockerfile +++ b/metricbeat/module/mysql/_meta/Dockerfile @@ -5,4 +5,8 @@ ENV MYSQL_ROOT_PASSWORD test HEALTHCHECK --interval=1s --retries=90 CMD mysql -u root -p$MYSQL_ROOT_PASSWORD -h$HOSTNAME -P 3306 -e "SHOW STATUS" > /dev/null +COPY /certs/root-ca.pem /etc/certs/root-ca.pem +COPY /certs/server-cert.pem /etc/certs/server-cert.pem +COPY /certs/server-key.pem /etc/certs/server-key.pem + COPY test.cnf /etc/mysql/conf.d/test.cnf diff --git a/metricbeat/module/mysql/_meta/certs/client-cert.pem b/metricbeat/module/mysql/_meta/certs/client-cert.pem new file mode 100755 index 00000000000..df9c76e0862 --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/client-cert.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDDDCCAfQCAQEwDQYJKoZIhvcNAQELBQAwSjELMAkGA1UEBhMCVVMxEzARBgNV +BAgMCkNhbGlmb3JuaWExFDASBgNVBAcMC1NhbnRhIENsYXJhMRAwDgYDVQQDDAdm +YWtlLUNBMB4XDTI0MDIxNTIzNTA0MloXDTMzMTIyNDIzNTA0MlowTjELMAkGA1UE 
+BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFDASBgNVBAcMC1NhbnRhIENsYXJh +MRQwEgYDVQQDDAtmYWtlLWNsaWVudDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC +AQoCggEBAIqHZbSUB1x/iW6DxaRlkFWjPuZ+F1wYTGvfpqnxZgZY1k5vSJTy3ETe +y3TelpEPBWEmsgHDx4bjuqeG+3my9dDEKEIYgXkfkfHREndVxPDfnRdfXPfp3qbm +wV2bdJnpSQzCg+lv8e8U+kMv0WcmwTuwlpVG0Rnb6vFdOs67/IIlBvI9sP5BKDYL +YFRxaoc8fLb8UMkfQ0BSmT4Rvmq5MSETh4re7OecV6pN0naEWhZf72mr/HiTAhb6 +xZJNSvNAzvdkQnhwt9aHemGQLRZD+4dduZYn27cwK4ySTZdyMoKn66HqMIfXPvr8 +LlICP4Gb8Df/JuUZVRbI13P+Xqujd8kCAwEAATANBgkqhkiG9w0BAQsFAAOCAQEA +gwA1+nZISC6QF9JtkOGrPpBZk6v1iy4iLsZSNaoinkB/FgesIpNrTFG0k6exSBV1 +pwQSmMVNSEsUOOjEq/Vk98014Kf8QVqfkdcujaBNPtxMqsocOO9Od78UuX5QdZXi +ayhkzrcPX4HTwjTqKFlJxb92rHrBx/GIWa68TeAjwbRiZmDASpVCEI2HnkBkFWTs +5Ux4wlC3JrnY3Jxb7QfDK94g9r5s1ljHeVki83cUYaI5TdY7F0uP+O6TvlhCPrjd +5708kRZJHnKThu3aE8HJYIbYhHocm9DszbnObd4SqECjfd6YNbREBhyaHJdCY/j2 +hm1zhBiW24dazs108uhFsQ== +-----END CERTIFICATE----- diff --git a/metricbeat/module/mysql/_meta/certs/client-key.pem b/metricbeat/module/mysql/_meta/certs/client-key.pem new file mode 100755 index 00000000000..33430372fd2 --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/client-key.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCKh2W0lAdcf4lu +g8WkZZBVoz7mfhdcGExr36ap8WYGWNZOb0iU8txE3st03paRDwVhJrIBw8eG47qn +hvt5svXQxChCGIF5H5Hx0RJ3VcTw350XX1z36d6m5sFdm3SZ6UkMwoPpb/HvFPpD +L9FnJsE7sJaVRtEZ2+rxXTrOu/yCJQbyPbD+QSg2C2BUcWqHPHy2/FDJH0NAUpk+ +Eb5quTEhE4eK3uznnFeqTdJ2hFoWX+9pq/x4kwIW+sWSTUrzQM73ZEJ4cLfWh3ph +kC0WQ/uHXbmWJ9u3MCuMkk2XcjKCp+uh6jCH1z76/C5SAj+Bm/A3/yblGVUWyNdz +/l6ro3fJAgMBAAECggEAEPRCAHQrA/k4c9oFBQoonHCMrNdDCuKO7NdsHYm1ucJi +5SnVxWQFTRkC59hrr1B6MTIUEGb6iyHhOOpqafI7B0xQnIlFBFLWsPSseMY6opvN +jTwew9k/xqfAg/E4F7OvXPRMAnSQ1LjZqcInE+Owe9qQjW/DvPFXS2fEgCOOA4vw +M6w6USf8UTsXBzMvRnDHMTQM0vfKNNSdopYDPeQc4YQ1A2AjkpYXZVWXFcFsE9zw +xFVZ9k6tP+gzk6shJjsbBoQ7qWwhdq1Q5tJ28FTaCVXDAp8l6yIFuZuI7r23O7+0 +ngxSejABJ3m9NmG0J7DPGU6zXhJW5nylWcSk5vwMkQKBgQDCWIRe4iSW0eGYBSe5 +hBoQgLe7aMAbsaCrHjTYQkKvI25YlfJ08OVU7oB/Bng/9AlpJlouGz67/W0PiRaz +jlP370p92IiwehUl9PkuVDpex4l2rDLCM1iVrPbxhbm/7+2nro2M/0/4iUyIK+Gr +Rpcqj2dQ3qarD+UmLXYPOoyRuQKBgQC2ec0sWyU67QuFaTemvTH8YFu68BfQqg6t +YQMc4+wj30ww0TZHFYVwyvR4agTOdFjwIUTERRN3EcFmlV5x+fGz/LfUdVYJj8B0 +lXakqeATsGJHngrdlyM+m+g+6JI1SUTshMa/xXVAUx8NZESOVE5JeZH6TD4/9Q3y +ijtithtekQKBgQCPeso/QrXAozLqCORLEjwr8tuygKNTzs/PhX1+K20P4BiXThyy +OScWjP5QyXX9wS0xdB8f6v1lzLO3xH3+EhXr9b4JKtO/dmImo7VTftuZHbde5cKT +nVTJK+kkZpW8HmZWZYgbkGJ6GuNlpP/2cycnRLgB/F8P66xBg06l75PYAQKBgGap +GhR1ZvnC+TNiocuuL5wkfhcrEsrzkfRbWwv68xSvgUcJvTa61etCU84XH4MjlBHt +NaoSjsPzelKDgLIxA5nWeXoPVYtlk8pDeI9lf0q0dmaCdOx8JnkH797Mq81M3nkO +rl6f8bpxyUuYeLV2muDdg5JFKNSEwwcMXCLJ/5XxAoGAKIkS02jWudDoBzubdFe/ +c5jSYufTZOmErnjnSKGkj9oZGVP6RYDhkHMPOxadO/4OLOKo6Phkg9yRmPG2tyKA ++ddgYP7zXEnsLxrjumoYTvcWgs1AHUUH4kA5SdImzYbSSfPW5h0KkvB+gYaukBGa +XHILry/59LkxU+nP1ZCVvt8= +-----END PRIVATE KEY----- diff --git a/metricbeat/module/mysql/_meta/certs/client-req.pem b/metricbeat/module/mysql/_meta/certs/client-req.pem new file mode 100755 index 00000000000..3295c803f8d --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/client-req.pem @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE REQUEST----- +MIICkzCCAXsCAQAwTjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWEx +FDASBgNVBAcMC1NhbnRhIENsYXJhMRQwEgYDVQQDDAtmYWtlLWNsaWVudDCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAIqHZbSUB1x/iW6DxaRlkFWjPuZ+ +F1wYTGvfpqnxZgZY1k5vSJTy3ETey3TelpEPBWEmsgHDx4bjuqeG+3my9dDEKEIY +gXkfkfHREndVxPDfnRdfXPfp3qbmwV2bdJnpSQzCg+lv8e8U+kMv0WcmwTuwlpVG 
+0Rnb6vFdOs67/IIlBvI9sP5BKDYLYFRxaoc8fLb8UMkfQ0BSmT4Rvmq5MSETh4re +7OecV6pN0naEWhZf72mr/HiTAhb6xZJNSvNAzvdkQnhwt9aHemGQLRZD+4dduZYn +27cwK4ySTZdyMoKn66HqMIfXPvr8LlICP4Gb8Df/JuUZVRbI13P+Xqujd8kCAwEA +AaAAMA0GCSqGSIb3DQEBCwUAA4IBAQBr6+WE3t0KdMpEBBC81IUHkXNB9Mf5EYKG +d1ev6jq1bi2jw6WqAGbqYp1W0awEjZJZcS2skXoy8QIFDNjznHPgKEXB9b98nj34 +TLpszCrlcQteWmzRCspwkhdrXNGE4Z4UMgN+xoh2P/dujK4kGH6HFcF1Fo4ajDUX +HT5vybjQuQlPDgt6Ufs+Pjotr5uCzLbIsFN1QG6gKVY90WAzPsa0XYN1ehMpkLsM +8vbVP0uRT6/VXTenbTtqqQ5Y70gmeiF/EssnQ9rM3vkGUW1A/9j23agLmlOVaCWw +HSN5HqrFUIlsLFIDDTgi7icW4Uk+7qdMSF7ooMOJIm27PGc49u4U +-----END CERTIFICATE REQUEST----- diff --git a/metricbeat/module/mysql/_meta/certs/root-ca-key.pem b/metricbeat/module/mysql/_meta/certs/root-ca-key.pem new file mode 100755 index 00000000000..2343e39b149 --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/root-ca-key.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDSrYQWHfzCy/+N +Nzbg03pW/xnJ4wsEOeSuaYJfrkzU87pwlOFY57bhxe+3rektDOOziwjCxo4a8rKu +YmHRYKx4XdBtTjPnimRiwymSnemZdABWLNuJyvWen6iNJQqrcSesvobAtaQ265A9 +faRPn/Hjx5CH5x52hLhcpo6Yg4Ae6K2dnGbahFb1DI7Btfcf+PYiUau5DRiJiIpU +9K9hBbPmPuo0hsGiAYCJkTspdDMrFsBA6hNadamzsXy6AzB82Pu19nckR20kJVlG +Ioebg6mlHlcTV1qCsiWZBR/ghGGNHBp15EIXvIDpEJ4rcuy4AER4lXIdpG2RPD7Y ++Y7EGi0zAgMBAAECggEAU/SCuR+dHPGXdqqEq4aK78U7/SiFuNrrRxfTtRZsFsbD +yt6BiODaD9HFrCBZVjMXQHLM/HWMpq+Fxhl9mqcYQ+U6zHxIEeKkC3lzhTJ5p0XD +ZpP8rsYbKGm+jPSwck6m/V91qrEX7izkb6S0iGiYR+m8rnPLP3a3U3CqTZvFwErG +n7jk7caLZcT9+p7/TLlDIyx4ha4+7RRaL9OC1dNH8ADOkSHk/vaE6aU8J8PJ4YZg +QvNfsuo7FtDMq3OIkMAsHseuX90X8c3ZS7lNdCTRU7YuC1+8+l6xGs1Arjv1jqnd +9gIo6kh88Ng8zi4TkGLVAnfc55eXmB+f7PPN93fMeQKBgQD0uqDSsvPNnaY6DJIF +Gyz4qExyYH/h2QFT5M4bb0hQNIkP187JhBZw8Et2AvBtSBhs8dXfBxu736KRs8XG +b60iw2qXqo1XUEUO7R0VMO6NcA8Hk206X+p7ukn5RExzv2MurD+3f8QM8CypFA57 +UnSWdDCrOAh6WU5zfcz9woOM2QKBgQDcYWvqbV8XoyhJBG5PkG3UzYtOi/Es/fGH +qt03ZyyfYdCbhAknqftuj6ZzlMfVV3xOSXX+sdr0rLzammnRdlPJtJfjk8lUYa/i +0hy4eTHm7o1iZJfMS9rCMH9uTwyNGnb67u8kW16BuzaLbJMtd7IKtEG69U63abZX +t+zqmxGy6wKBgQCD43w+cNCxdA+cYx/ifpXK4DBqx5TDq0Zq5vkokd1/1AA1uJEp +yvSpIucYD1dxHZSESgR/sH4Czu/249JnMdI11OjCGdkYQBsngyPUQs2dDdIbvBj2 +h7B/w5KQMn2dN3yFL7Ea/FE0w87dxABV98b7OlzsOUNgZHbCCP8LluN8aQKBgGS3 +RTly2JWV5DBSjRNhn0A026h+/i6gs8RbyxOp3FPOwSaBlimBXr4telWyNg2DGPUy +T3Gh2L4fP4PsM9YdbLdvCEdiYA1nQ5m2ipeoE61Fcmn4LQOZ2xUKUwKXr9XAtYWC +stn7w9ooNApOCYkq/bw0myGVQG9EKag3D1g8nD8XAoGAZLJlDhlfFaWa7jy1VF/g +JWcsN/+BfTjBY6t3npxzg4pdi7lHhuAZ45PLnQMTIdWCkqgigt224kcbUy3b351u +lzoSiLatNXj5Q3on85ZNRaOMLqp0ueIzOLWvC+CRp46wXlwxTrPxghXatUBPsG47 +mO/mtw9gmaJ8UBW/SuxS24g= +-----END PRIVATE KEY----- diff --git a/metricbeat/module/mysql/_meta/certs/root-ca.pem b/metricbeat/module/mysql/_meta/certs/root-ca.pem new file mode 100755 index 00000000000..9b3e4f60fe8 --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/root-ca.pem @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgIUUp8x6W/bui3FjHLnJfIb7AsKBIwwDQYJKoZIhvcNAQEL +BQAwSjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFDASBgNVBAcM +C1NhbnRhIENsYXJhMRAwDgYDVQQDDAdmYWtlLUNBMB4XDTI0MDIxNTIzNTAzNVoX +DTMzMTIyNDIzNTAzNVowSjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3Ju +aWExFDASBgNVBAcMC1NhbnRhIENsYXJhMRAwDgYDVQQDDAdmYWtlLUNBMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0q2EFh38wsv/jTc24NN6Vv8ZyeML +BDnkrmmCX65M1PO6cJThWOe24cXvt63pLQzjs4sIwsaOGvKyrmJh0WCseF3QbU4z +54pkYsMpkp3pmXQAVizbicr1np+ojSUKq3EnrL6GwLWkNuuQPX2kT5/x48eQh+ce +doS4XKaOmIOAHuitnZxm2oRW9QyOwbX3H/j2IlGruQ0YiYiKVPSvYQWz5j7qNIbB +ogGAiZE7KXQzKxbAQOoTWnWps7F8ugMwfNj7tfZ3JEdtJCVZRiKHm4OppR5XE1da 
+grIlmQUf4IRhjRwadeRCF7yA6RCeK3LsuABEeJVyHaRtkTw+2PmOxBotMwIDAQAB +o1MwUTAdBgNVHQ4EFgQURA7Q9JPfB4mveB0vzmoqNJ2HSZUwHwYDVR0jBBgwFoAU +RA7Q9JPfB4mveB0vzmoqNJ2HSZUwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0B +AQsFAAOCAQEAB4NGJFZpzltHLqvInSU/EQxdIHgifihOFzsXTEXkdrmkfEw5puVL +fzg6qnLOunh3GAwLCnM0aIzDLS8WAS509Jwwidn7OtBpYV+jIzJrrTycWjAdvcHC +WToPTueXxwaAD3pCrus0w9H8egoQ1haNVmQm0OWcv3My82cNbZwViuQSCrky1srL +N5l7UM0gbXKeZjTGHIoTIjQJDgJT8PydsxpOZq7CcKRDBdF5nYMcUq8wltneb0Nh +7DuLLdxEM11XzIRT4GLRxT2xqwW7UpLfWpuo+niCvmNFY6SzyHFR1vFI3Kw1rYXh +3cbEtHtRvcNQg6Jp/zoHDcXMS3hDMeN2vQ== +-----END CERTIFICATE----- diff --git a/metricbeat/module/mysql/_meta/certs/server-cert.pem b/metricbeat/module/mysql/_meta/certs/server-cert.pem new file mode 100755 index 00000000000..1ca56e3f44f --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/server-cert.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDDDCCAfQCAQEwDQYJKoZIhvcNAQELBQAwSjELMAkGA1UEBhMCVVMxEzARBgNV +BAgMCkNhbGlmb3JuaWExFDASBgNVBAcMC1NhbnRhIENsYXJhMRAwDgYDVQQDDAdm +YWtlLUNBMB4XDTI0MDIxNTIzNTAzOFoXDTMzMTIyNDIzNTAzOFowTjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFDASBgNVBAcMC1NhbnRhIENsYXJh +MRQwEgYDVQQDDAtmYWtlLXNlcnZlcjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC +AQoCggEBAMuPqkUt/Ax9s/h5LPxXU0m6OAEp1InLbR6x//hGVgmIiQu5/Fg1VfmZ +YbwraXxs4JDfMUyK6bd/bk2o71I1pnLmoFmQvawDRxOqkA1NLpF2FJtk0eevkF1D +crC9T1SfrzlwrucqqUXowdprVXFFVbFQTXsSyD8Nv/MGzDgmDtmMXQ8sLVqjGIEM +akuPMbNCVNTVnd/53WMaDzopnam/NCJNDGp2RVhf+KuOWLTURXFYN6j1z+f/1BNa +4QW+WtofzYkAWEcvCc8zeXUhwL6xE5gDyq1NkQ/ejqQq+iIJLd1FUFOH1jPSgmW5 +3CiWih2Is6VA0hCzDirdFtAHTui/OekCAwEAATANBgkqhkiG9w0BAQsFAAOCAQEA +vdGGVxaeSEfOkx+D7uYCx0blnobJoclggQP3fOIpyrU/LCeka+F8dvFvuGJLvn3A +JOMZZHCVnK6jjJYHFeolRCxd9iULYHD+dkWDr6uhvNMfwIt7UzUmtbznHAaD+ays +X0H70Z9+jmr3uFkevRbFkvDZqzdRYi/12oPM+0Skra3ouYen6zAtPU0Hruc0jyBP +W7V6mMSmCUPKTOJRZgDEIEBvu43rwEbQUG0ayqF1sLv+D6hjFrFJ2gCxgVH/+C9E +h0NF2Kdpb+jECCu3yhQA536Ugi9k96zJqJonu9jP4ODXMTG2qmsdFFW1zyFb9DbV +bjUsiDE7bEumHY2NEfzr3A== +-----END CERTIFICATE----- diff --git a/metricbeat/module/mysql/_meta/certs/server-key.pem b/metricbeat/module/mysql/_meta/certs/server-key.pem new file mode 100755 index 00000000000..d1a7d286a1c --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/server-key.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDLj6pFLfwMfbP4 +eSz8V1NJujgBKdSJy20esf/4RlYJiIkLufxYNVX5mWG8K2l8bOCQ3zFMium3f25N +qO9SNaZy5qBZkL2sA0cTqpANTS6RdhSbZNHnr5BdQ3KwvU9Un685cK7nKqlF6MHa +a1VxRVWxUE17Esg/Db/zBsw4Jg7ZjF0PLC1aoxiBDGpLjzGzQlTU1Z3f+d1jGg86 +KZ2pvzQiTQxqdkVYX/irjli01EVxWDeo9c/n/9QTWuEFvlraH82JAFhHLwnPM3l1 +IcC+sROYA8qtTZEP3o6kKvoiCS3dRVBTh9Yz0oJludwoloodiLOlQNIQsw4q3RbQ +B07ovznpAgMBAAECggEADLAux9Me89ReBG3hLPVwfpb56LCny9L/QTuNHfecY0m8 +aRu1q/XfHwi9e9Ik6BmNQdp3ozLBcKujv3l5OWGYt27CrfKEsBUgOAyYoAugjHaU +wD7fipZ55CZRHs0eBcNSU70/Wa9iD7Z7Ztbr43yT49KCkdpQ2wVLYqWY0yMkJ9Eo +ZUJ8fL+yDMeJxnhQSIejK62TQI3FdMz+aNXA6AO0YiSfqagTS8GVNZQvZzvyxYS0 +DpiydzKSbS2RXkf3waClU5hDGwqhNxXa9bya/KrLvm4ag/VaV0O1M9jwFOKwfUGY +0SDELz/mxsOmGntTUbtuH7VSvnqkJHfACUcNkkIjAQKBgQD5pwIzrPnGrljDcFqu +OCRxhiRjgCNth4ObBbmj2n0BV5Uw33o1VlN/+GCfKcIQ1+tHOUrEtkwP5mMatUbf +4G4K/+bO3eWAf+ia5hkSVASbU0ui36iSkPWLYJr0oDx0N6Vw+ZK7oxqLGqW2dm4Y +Q1TFaIDd2wUGPYAuDaqPDHecCQKBgQDQvKXy9Ueh4iTbz3sH6Kp4wGN2BsjWWOVn +Hi4QoqnDoLrguhCe5vvNyxfayziu9hUKzP8kBHQOY/2xpKv+epPuw6hgaD0Mnh/w +UcWEqZs102y0zZcQISfG8TUoLHW31T87veB3YEVIB+8uZg1CWJ7aDKe8UmugVGV2 +k2sMG7fm4QKBgHq0z6w+lPZGs3I8QxXmmmMCH9iYHtGzDcigY8JZnZ+PQNEoxpR4 +vcnkdvlEORK2TfpP+qP9Rh16i7OQ7ikT0oKtjPCYuDkUpWudNS2BBlKh+kcvz1da 
+0JWVAhTCvXQR9cs1oB2B6YX9rv2j8DEUxxHQb6acBDgw+lOoe/CbnB6hAoGBAKxg +bcbjCcHFCF1BzT8tw8GuVzS7y5U/mkp64N26BunXzRwSa/FdnOpI4q07j9bkv2HJ +ApZS2yibKIFQFP01av8NMvpSer/1wThrvuqcSeG8dJQnB645QykGPrirZpdmki6a +0kijBvPCIaI2gpKcrqoxMz/Q7LJdn+C5Qvif11HhAoGAfai8GYFiXuShvf+8gjOt +qIsBMV3YexcX11qPD5vVJMCW1xLbpb9f3sPf8P31TB8tz5JA3aG24k8yURtgTA4Z +2I6Jo9vwMjAdOxHTalqMllDvBj5S5+cX38kGdcxdcbAiUHwIoXy6cjcGbeO/SesR +L1bbyZA45gpsWFxFr5V67G0= +-----END PRIVATE KEY----- diff --git a/metricbeat/module/mysql/_meta/certs/server-req.pem b/metricbeat/module/mysql/_meta/certs/server-req.pem new file mode 100755 index 00000000000..035ab7e2faf --- /dev/null +++ b/metricbeat/module/mysql/_meta/certs/server-req.pem @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE REQUEST----- +MIICkzCCAXsCAQAwTjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWEx +FDASBgNVBAcMC1NhbnRhIENsYXJhMRQwEgYDVQQDDAtmYWtlLXNlcnZlcjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMuPqkUt/Ax9s/h5LPxXU0m6OAEp +1InLbR6x//hGVgmIiQu5/Fg1VfmZYbwraXxs4JDfMUyK6bd/bk2o71I1pnLmoFmQ +vawDRxOqkA1NLpF2FJtk0eevkF1DcrC9T1SfrzlwrucqqUXowdprVXFFVbFQTXsS +yD8Nv/MGzDgmDtmMXQ8sLVqjGIEMakuPMbNCVNTVnd/53WMaDzopnam/NCJNDGp2 +RVhf+KuOWLTURXFYN6j1z+f/1BNa4QW+WtofzYkAWEcvCc8zeXUhwL6xE5gDyq1N +kQ/ejqQq+iIJLd1FUFOH1jPSgmW53CiWih2Is6VA0hCzDirdFtAHTui/OekCAwEA +AaAAMA0GCSqGSIb3DQEBCwUAA4IBAQAK3+eAfReXoGP3CQvTE/Bd6u+u5kG65stV +DONrBzhMQ4R36X+Q6q65qJ0rmvwZcUfkIauQzdNv9ZfCDT7pO1VtNT0R+H6+shz9 +JiwGOudAlFSt31Ps0+lDm6WjA6J1Nmr9N7XrsmfdW4z2n1UZSPS9mOZIj+PpUtQw +OzIwJ/+btS/RVO0cGGFkoFwhrYKilAbq+SsMxMVxPcXUP+xLFYn6FCNFbf5uBpLz +ZM7HBDh2uVfwsaptnY3v+EIELCsXsFm9uj4zG45fJmu4KARY6FAi9sEvfA1ieZuU +8hmovXhKq6eSU2fPoeurRV1gxuanuFObd39LRoCTy3fCnqTZFxXg +-----END CERTIFICATE REQUEST----- diff --git a/metricbeat/module/mysql/_meta/config.reference.yml b/metricbeat/module/mysql/_meta/config.reference.yml index 03880a5ad6a..4e5cc470aca 100644 --- a/metricbeat/module/mysql/_meta/config.reference.yml +++ b/metricbeat/module/mysql/_meta/config.reference.yml @@ -21,3 +21,15 @@ # By setting raw to true, all raw fields from the status metricset will be added to the event. #raw: false + + # Optional SSL/TLS. By default is false. + #ssl.enabled: true + + # List of root certificates for SSL/TLS server verification + #ssl.certificate_authorities: ["/etc/pki/root/ca.crt"] + + # Certificate for SSL/TLS client authentication + #ssl.certificate: "/etc/pki/client/cert.crt" + + # Client certificate key file + #ssl.key: "/etc/pki/client/cert.key" diff --git a/metricbeat/module/mysql/_meta/config.yml b/metricbeat/module/mysql/_meta/config.yml index 367b32e9173..a86258fca3b 100644 --- a/metricbeat/module/mysql/_meta/config.yml +++ b/metricbeat/module/mysql/_meta/config.yml @@ -18,3 +18,15 @@ # Password of hosts. Empty by default. #password: secret + + # Optional SSL/TLS. By default is false. 
+ #ssl.enabled: true + + # List of root certificates for SSL/TLS server verification + #ssl.certificate_authorities: ["/etc/pki/root/ca.crt"] + + # Certificate for SSL/TLS client authentication + #ssl.certificate: "/etc/pki/client/cert.crt" + + # Client certificate key file + #ssl.key: "/etc/pki/client/cert.key" \ No newline at end of file diff --git a/metricbeat/module/mysql/_meta/test.cnf b/metricbeat/module/mysql/_meta/test.cnf index f759a49631d..24eec52dd05 100644 --- a/metricbeat/module/mysql/_meta/test.cnf +++ b/metricbeat/module/mysql/_meta/test.cnf @@ -1,2 +1,6 @@ [mysqld] bind-address = 0.0.0.0 +require_secure_transport = OFF +ssl-ca = /etc/certs/root-ca.pem +ssl-cert = /etc/certs/server-cert.pem +ssl-key = /etc/certs/server-key.pem \ No newline at end of file diff --git a/metricbeat/module/mysql/config.go b/metricbeat/module/mysql/config.go new file mode 100644 index 00000000000..96704bef479 --- /dev/null +++ b/metricbeat/module/mysql/config.go @@ -0,0 +1,32 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package mysql + +import ( + "crypto/tls" + + "github.com/elastic/elastic-agent-libs/transport/tlscommon" +) + +type Config struct { + Hosts []string `config:"hosts" validate:"required"` + Username string `config:"username"` + Password string `config:"password"` + TLS *tlscommon.Config `config:"ssl"` + TLSConfig *tls.Config +} diff --git a/metricbeat/module/mysql/docker-compose.yml b/metricbeat/module/mysql/docker-compose.yml index e112587fccd..0644d9568ad 100644 --- a/metricbeat/module/mysql/docker-compose.yml +++ b/metricbeat/module/mysql/docker-compose.yml @@ -2,10 +2,10 @@ version: '2.3' services: mysql: - image: docker.elastic.co/integrations-ci/beats-mysql:${MYSQL_VARIANT:-mysql}-${MYSQL_VERSION:-5.7.12}-1 + image: docker.elastic.co/integrations-ci/beats-mysql:${MYSQL_VARIANT:-mysql}-${MYSQL_VERSION:-8.0}-1 build: context: ./_meta args: - MYSQL_IMAGE: ${MYSQL_VARIANT:-mysql}:${MYSQL_VERSION:-5.7.12} + MYSQL_IMAGE: ${MYSQL_VARIANT:-mysql}:${MYSQL_VERSION:-8.0} ports: - 3306 diff --git a/metricbeat/module/mysql/galera_status/status.go b/metricbeat/module/mysql/galera_status/status.go index d1dc68cd0a2..6f27b8d4e8f 100644 --- a/metricbeat/module/mysql/galera_status/status.go +++ b/metricbeat/module/mysql/galera_status/status.go @@ -42,7 +42,7 @@ func init() { // MetricSet for fetching Galera-MySQL server status type MetricSet struct { - mb.BaseMetricSet + *mysql.Metricset db *sql.DB } @@ -50,7 +50,13 @@ type MetricSet struct { // Loads query_mode config setting from the config file func New(base mb.BaseMetricSet) (mb.MetricSet, error) { cfgwarn.Experimental("The galera_status metricset is experimental.") - return &MetricSet{BaseMetricSet: base}, nil + + ms, err := mysql.NewMetricset(base) + if err != nil { + return nil, err + } + + return &MetricSet{Metricset: ms, db: nil}, nil } // Fetch methods implements the data gathering and data conversion to the right format @@ -58,7 +64,7 @@ func New(base mb.BaseMetricSet) (mb.MetricSet, error) { func (m *MetricSet) Fetch(reporter mb.ReporterV2) error { if m.db == nil { var err error - m.db, err = mysql.NewDB(m.HostData().URI) + m.db, err = mysql.NewDB(m.HostData().URI, m.Metricset.Config.TLSConfig) if err != nil { return fmt.Errorf("Galera-status fetch failed: %w", err) } diff --git a/metricbeat/module/mysql/mysql.go b/metricbeat/module/mysql/mysql.go index 35388a9a1bd..23c0f8dda10 100644 --- a/metricbeat/module/mysql/mysql.go +++ b/metricbeat/module/mysql/mysql.go @@ -21,14 +21,18 @@ Package mysql is Metricbeat module for MySQL server. package mysql import ( + "crypto/tls" "database/sql" "fmt" "github.com/elastic/beats/v7/metricbeat/mb" + "github.com/elastic/elastic-agent-libs/transport/tlscommon" "github.com/go-sql-driver/mysql" ) +const TLSConfigKey = "custom" + func init() { // Register the ModuleFactory function for the "mysql" module. if err := mb.Registry.AddModule("mysql", NewModule); err != nil { @@ -38,16 +42,37 @@ func init() { func NewModule(base mb.BaseModule) (mb.Module, error) { // Validate that at least one host has been specified. 
- config := struct { - Hosts []string `config:"hosts" validate:"required"` - }{} - if err := base.UnpackConfig(&config); err != nil { + var c Config + if err := base.UnpackConfig(&c); err != nil { return nil, err } return &base, nil } +type Metricset struct { + mb.BaseMetricSet + Config Config +} + +func NewMetricset(base mb.BaseMetricSet) (*Metricset, error) { + var c Config + if err := base.Module().UnpackConfig(&c); err != nil { + return nil, fmt.Errorf("could not read config: %w", err) + } + + if c.TLS.IsEnabled() { + tlsConfig, err := tlscommon.LoadTLSConfig(c.TLS) + if err != nil { + return nil, fmt.Errorf("could not load provided TLS configuration: %w", err) + } + + c.TLSConfig = tlsConfig.ToConfig() + } + + return &Metricset{Config: c, BaseMetricSet: base}, nil +} + // ParseDSN creates a DSN (data source name) string by parsing the host. // It validates the resulting DSN and returns an error if the DSN is invalid. // @@ -55,9 +80,11 @@ func NewModule(base mb.BaseModule) (mb.Module, error) { // Example: root:test@tcp(127.0.0.1:3306)/ func ParseDSN(mod mb.Module, host string) (mb.HostData, error) { c := struct { - Username string `config:"username"` - Password string `config:"password"` + Username string `config:"username"` + Password string `config:"password"` + TLS *tlscommon.Config `config:"ssl"` }{} + if err := mod.UnpackConfig(&c); err != nil { return mb.HostData{}, err } @@ -86,6 +113,10 @@ func ParseDSN(mod mb.Module, host string) (mb.HostData, error) { noCredentialsConfig.User = "" noCredentialsConfig.Passwd = "" + if c.TLS.IsEnabled() { + config.TLSConfig = TLSConfigKey + } + return mb.HostData{ URI: config.FormatDSN(), SanitizedURI: noCredentialsConfig.FormatDSN(), @@ -99,10 +130,18 @@ func ParseDSN(mod mb.Module, host string) (mb.HostData, error) { // must be valid, otherwise an error will be returned. 
//
// DSN Format: [username[:password]@][protocol[(address)]]/
-func NewDB(dsn string) (*sql.DB, error) {
+func NewDB(dsn string, tlsConfig *tls.Config) (*sql.DB, error) {
+	if tlsConfig != nil {
+		err := mysql.RegisterTLSConfig(TLSConfigKey, tlsConfig)
+		if err != nil {
+			return nil, fmt.Errorf("registering custom tls config failed: %w", err)
+		}
+	}
+
 	db, err := sql.Open("mysql", dsn)
 	if err != nil {
 		return nil, fmt.Errorf("sql open failed: %w", err)
 	}
+
 	return db, nil
 }
diff --git a/metricbeat/module/mysql/mysql_integration_test.go b/metricbeat/module/mysql/mysql_integration_test.go
index 5713a582149..2fc96475646 100644
--- a/metricbeat/module/mysql/mysql_integration_test.go
+++ b/metricbeat/module/mysql/mysql_integration_test.go
@@ -20,6 +20,9 @@ package mysql
 import (
+	"crypto/tls"
+	"crypto/x509"
+	"os"
 	"testing"

 	"github.com/stretchr/testify/assert"
@@ -31,9 +34,58 @@ import (
 func TestNewDB(t *testing.T) {
 	service := compose.EnsureUp(t, "mysql")

-	db, err := NewDB(GetMySQLEnvDSN(service.Host()))
+	db, err := NewDB(GetMySQLEnvDSN(service.Host()), nil)
 	assert.NoError(t, err)

 	err = db.Ping()
 	assert.NoError(t, err)
 }
+
+func loadTLSConfig(caCertPath, clientCertPath, clientKeyPath string) (*tls.Config, error) {
+	caCert, err := os.ReadFile(caCertPath)
+	if err != nil {
+		return nil, err
+	}
+	caCertPool := x509.NewCertPool()
+	caCertPool.AppendCertsFromPEM(caCert)
+
+	cert, err := tls.LoadX509KeyPair(clientCertPath, clientKeyPath)
+	if err != nil {
+		return nil, err
+	}
+
+	tlsConfig := &tls.Config{
+		Certificates: []tls.Certificate{cert},
+		RootCAs:      caCertPool,
+		MinVersion:   tls.VersionTLS12,
+	}
+
+	return tlsConfig, nil
+}
+
+func TestNewDBWithSSL(t *testing.T) {
+	service := compose.EnsureUp(t, "mysql")
+
+	// Check the error before dereferencing tlsConfig; a failed load would
+	// otherwise panic on the field assignment below.
+	tlsConfig, err := loadTLSConfig("_meta/certs/root-ca.pem", "_meta/certs/client-cert.pem", "_meta/certs/client-key.pem")
+	assert.NoError(t, err)
+	tlsConfig.InsecureSkipVerify = true
+
+	db, err := NewDB(GetMySQLEnvDSN(service.Host())+"?tls=custom", tlsConfig)
+	assert.NoError(t, err)
+
+	err = db.Ping()
+	assert.NoError(t, err)
+
+	// Check if the current connection is using SSL
+	var sslCipher, variableName, value string
+	err = db.QueryRow(`show status like 'Ssl_cipher'`).Scan(&variableName, &sslCipher)
+	assert.NoError(t, err)
+
+	// If sslCipher is not empty, then SSL is being used for the connection
+	assert.NotEmpty(t, variableName)
+	assert.NotEmpty(t, sslCipher)
+
+	err = db.QueryRow(`show variables like 'have_ssl'`).Scan(&variableName, &value)
+	assert.NoError(t, err)
+	assert.Equal(t, "YES", value)
+}
diff --git a/metricbeat/module/mysql/query/query.go b/metricbeat/module/mysql/query/query.go
index 35881d76401..d7bbaaa4cd7 100644
--- a/metricbeat/module/mysql/query/query.go
+++ b/metricbeat/module/mysql/query/query.go
@@ -25,13 +25,17 @@ package query
 import (
 	"context"
+	"crypto/tls"
 	"fmt"

+	mysqlDriver "github.com/go-sql-driver/mysql"
+
 	"github.com/elastic/beats/v7/libbeat/common/cfgwarn"
 	"github.com/elastic/beats/v7/metricbeat/helper/sql"
 	"github.com/elastic/beats/v7/metricbeat/mb"
 	"github.com/elastic/beats/v7/metricbeat/module/mysql"
 	"github.com/elastic/elastic-agent-libs/mapstr"
+	"github.com/elastic/elastic-agent-libs/transport/tlscommon"
 )

 func init() {
@@ -57,8 +61,10 @@ type MetricSet struct {
 	mb.BaseMetricSet
 	db     *sql.DbClient
 	Config struct {
-		Queries   []query `config:"queries" validate:"nonzero,required"`
-		Namespace string  `config:"namespace" validate:"nonzero,required"`
+		Queries   []query           `config:"queries" validate:"nonzero,required"`
+ Namespace string `config:"namespace" validate:"nonzero,required"` + TLS *tlscommon.Config `config:"ssl"` + TLSConfig *tls.Config } } @@ -72,16 +78,31 @@ func New(base mb.BaseMetricSet) (mb.MetricSet, error) { return nil, err } + if b.Config.TLS.IsEnabled() { + tlsConfig, err := tlscommon.LoadTLSConfig(b.Config.TLS) + if err != nil { + return nil, fmt.Errorf("could not load provided TLS configuration: %w", err) + } + + b.Config.TLSConfig = tlsConfig.ToConfig() + } + return b, nil } // Fetch fetches status messages from a mysql host. func (m *MetricSet) Fetch(ctx context.Context, reporter mb.ReporterV2) error { if m.db == nil { + if m.Config.TLSConfig != nil { + err := mysqlDriver.RegisterTLSConfig(mysql.TLSConfigKey, m.Config.TLSConfig) + if err != nil { + return fmt.Errorf("registering custom tls config failed: %w", err) + } + } var err error m.db, err = sql.NewDBClient("mysql", m.HostData().URI, m.Logger()) if err != nil { - return fmt.Errorf("mysql-status fetch failed: %w", err) + return fmt.Errorf("mysql-query fetch failed: %w", err) } } diff --git a/metricbeat/module/mysql/status/status.go b/metricbeat/module/mysql/status/status.go index dd57f7e23c9..ac3e5b83a18 100644 --- a/metricbeat/module/mysql/status/status.go +++ b/metricbeat/module/mysql/status/status.go @@ -40,20 +40,25 @@ func init() { // MetricSet for fetching MySQL server status. type MetricSet struct { - mb.BaseMetricSet + *mysql.Metricset db *sql.DB } // New creates and returns a new MetricSet instance. func New(base mb.BaseMetricSet) (mb.MetricSet, error) { - return &MetricSet{BaseMetricSet: base}, nil + ms, err := mysql.NewMetricset(base) + if err != nil { + return nil, err + } + + return &MetricSet{Metricset: ms, db: nil}, nil } // Fetch fetches status messages from a mysql host. func (m *MetricSet) Fetch(reporter mb.ReporterV2) error { if m.db == nil { var err error - m.db, err = mysql.NewDB(m.HostData().URI) + m.db, err = mysql.NewDB(m.HostData().URI, m.Metricset.Config.TLSConfig) if err != nil { return fmt.Errorf("mysql-status fetch failed: %w", err) } diff --git a/metricbeat/module/rabbitmq/fields.go b/metricbeat/module/rabbitmq/fields.go index 95345f6b335..a2d590d8109 100644 --- a/metricbeat/module/rabbitmq/fields.go +++ b/metricbeat/module/rabbitmq/fields.go @@ -32,5 +32,5 @@ func init() { // AssetRabbitmq returns asset data. // This is the base64 encoded zlib format compressed contents of module/rabbitmq. 
func AssetRabbitmq() string { - return "eJzsW1+PG7kNf99PQezL5oDNIH3dhwLXvev1HjbINbn2oSgWtER7VGukiaixd1r0uxeUxv9n1vbuOLm28UOA2DPk70dSFElp38Kc2jsIOJmYWH2+AogmWrqD6z+nrx5+ub4C0MQqmDoa7+7g91cAAKufofK6sXQFEMgSMt3BDK8AmGI0bsZ38LdrZnt9C9dljPX1368Apoas5rsk5y04rGgHgXxiW4uk4Ju6+6YHw66kbWmL0nNcf7sSN6d26YPe+r5XaP78xYTYoAWRlKTC0sQSnHdvv/94//PPoEoMqCIFBmKFNWlABuPgvjjAo7xzpETNAahtjkcg9UrZNfvqs2+YbTDy784Pw/Y5Akg+n0rK5vFTiCVtgXyJwZ5z4gYoWoO890uNsdyEUdH3cmVmATOPGJp9GxzhOUo8bNNrmMKZ7OSVosd9ryL2K1NIkPphOq/7o+UEJ8i7o+N97zU9g5cjxhHD+34Tzklyv1JVonNk982R9VrvZi9YU001oSCraiUcvNtbYc+CeazwaUQ8FT6Zqqn6cKG1fkn6VHzTgBWNiO6hQ1ZTqAyzmVgCNv9MGQmzNnhjHEzaSPwdRA+OZj4ajN0aVtaQi1zsSZ76UGG8y+/1MhHEI2bStu7Jov02HEyNL9L8kcKCQhKaU/kkonGkYWEQAi0oMMEP7z/egg9gIsPPHwC1DsQMZrr9BEzRWAmFAEtk0IZxYkn3k6iJQjEukw90ER7ORyD3HBUf+lmcH86dM0TiM2YbUWGy2TPqUM0pPirfuFgwubHUvl8nkqyBQYSfmkR2UAVSZBa0HyvjIVspeBG6mpw2BxjGA9fJPxWbV/Gy7kwKzvPmNqaLObPDda4v8+7wWAe/MJp0Xz3zioSVqi+uSZmpIb1dPe+WOCs09CTb7oxe00f0yPitdRGfG2peXWH/jzcQugmyI/USnHhvCd15CP9aUixlrYS04W38wE1YmAXJmk57UyCOGPYrphUubKJ/1GRpoA5/Fbbt4LAWJgRZk05qK4xGobUtLEty4HxKGBSkcxrYuY2LFBzay0BdLTUwvNZ0C6agAhQ6sbIwMIFUtC3UzcQaLklLkTppAbvk81/QQC5LDyoQiie2iW+g93KoiBlnxEVH/dG4Im0DI2V/uBdhklVWmraMfG3ctVga3Rpu55sU/jiXXRWVWokIvonGzfq90cdEU0RjuQhDLenUetyneowR/MkvoWpUuRtgndq3xkEGXCJD/k1LawRMyjst+UTeqyQLyVboIjBWtRWuKUIXaE9k6Jv4BZ3lm3idWroDb73WU8Lja7jKN3FMXw1MbM4uD/beP7800IbnxTQQFYet88uj4wfDcxCpwDUqglVHf37bvsFnTWXiqCg/eOMiYIRlaTq/izpAi6HKe9bMg59Oz4c91UX0cWCnOh/pH42l9TM+MOACjZWKYmBsowvZRUfS/iuThukehH7FM1W4pho102yagp/uwdeUN79h/YGURVORHjVWfrrP3oaN+LODwvhCrPhYotOWCl+Te8QYqapjgYtZUY0FNoVL1iIWc4CLGUSzV0qcAmtMRx6iykoGjRUI9eUMI9KPGiZBGDUzYsSsepUTXxJGCdaYvtnA2qywYeXivUuoT1FxHAATzceNi6RfxB4NiKR7dOpJ9QnEW6cuQbx16jhx0T0+cVF9nPgymEgXYJ7kHqWetY+eBbLy16SBDGx0p2RgR7xSUfVlSrKKKh/aMYoygZyGC2MifsjwRO5AmbtW74gNFqmijU8XqpEekhKIAR1jGk5yZ8sSFwQTIiddixgrdf8o3crnxgTS2e8sXXbC+ByJgNVvioM2Og0COi6nUeFZwdEHehx9O91wWXfH+wTSZjsNvuqCPD0GCc8xuOMv+xPwitJITkx6IuBXTJ0PAa4uEvRqqoNXo/ZbDwcH6D8Gi26WNBHz0BJPQEbsvN4P6pdc0/CA5bunfBgryW1wKB+IQVMklQa5TkOTzo1h0nYg+zGlQXBhnKan4h++CQ7tRQM5kPJB837c5nl0ggEdjON4L5gfVjB308EWyuPovq4V+/GFxhXpoZEQfb+gIOlmeEHCEk00biYQRXsvLPZqTvELjWakIpE1Csid4oGs0aEaMW8cgErlyTEc416RSSlb3h84EakPSu5XBKzoyhIPZ6z7cXj2kHVfwLcD2G8HsP+fB7D0pGzDZnFRrIY3euBNOjUqkcEvHYXH2ujvvl1+HUgKSeROVijgvbTF4uSb0Dhn3OzmFiZNhApbCYjrf3HrlHGzW3jgWTrU+/c1mOmuP+SZMnhn2LhZAb/I96sWAQOB9SqdJnsH4rhIIbmDc3MmT6gmBHLRtqD90q3jkcv8v1gCJvgNC/4beehmYMWEWVOlO6AVPj3WwfhgYnuxwn+lACwtyHLa1De2iR64qYcv5CnvuKko8IXqs438I+qbaKzhFMtFrU4Gshqq1BTU4f2zY1VIyN37KiJle4Y3E4pLaSjfFe9S+/C74t13OU52Qi5VUNGDqSrSBiNJ6JA1klLXTWr0WxThU2kYFDoJLEucos+JfInn9XM5YE1lJGAnLTiKSx/m8sCMOOMNUAeaUlRlPvQ9cjid6slRPfyxqXL5jbrNTZZDNXd+aUnPSG8s8CYbTFMdy4HEuAfzEqfnO2fnW4gueLEhmebSQ5Bs/+i7rVOCL1/46b2GPoDx4gZXB1cxMu7LGX83Gi/thT7Lpz1MCq+WIuyAOQv5l/PN0AK+4NUfCmw4khv3GPeTZJHtDXKtZkOqQ58zwZu02aNdYsuykN7lPTSgY3FlfooHk1flQ3vexH74JOAouT+kM/4UfGms3+0aaaMQRrtTB0Bmr0wqfVINsyZ9C8Yp26S73hxRzW+hJKxTKl/dLQSOoVGxCUPTxDS6lqU8bvWw70DZlnk7IA9H1OlaTDKB4VWbY5wiMBFSdzO07hKFPIz/Ohy2BldnkVhX6aVfkD06PViPBCYU8bSpwoHkrzdAyFC+TRCepfetv9ztL3PQDDeYyjdWS76/SYtLfpDC+iZSqIyTnDnQ4I38l3plnkLuw/7RpLGDgItGZWi6dVgZdVNc/ScAAP//Lecs9w==" + return 
"eJzsW0tvHLnxv+tTFHSRDcgN7VWHP7B/7Wbjgwxv7E0OQTCoIWu6GbHJNos9o0mQ7x4U2fPu1mikHnuTeA4G3M2u+tWD9SL1Dh5oeQsBp1MT6y8XANFES7dw+af06P7XywsATayCaaLx7hb+7wIAYPUaaq9bSxcAgSwh0y2UeAHAFKNxJd/CXy+Z7eU1XFYxNpd/uwCYGbKabxOdd+Cwph0E8ovLRigF3zbdkx4Mu5S2qc0rz3H9dEXugZYLH/TW816i+fdnE2KLFoRSogoLEytw3r378dPd+/egKgyoIgUGYoUNaUAG4+CuOMCjvHOkhM0BqG0Zj0DqpbKr9tVvXzHbYOTfnRfD+jkCSH6fK8rq8TOIFW2BfInCnjLiBihag7z3psFYbdyo6Pu4NmXALEcM7b4Ojsg5ij9si9cyhROlk0+KHvO9SrDfmEKC1A/Ted3vLc8wgnw7Ot4PXtMTeDliHNG97zbunCj3M1UVOkd2Xx2Zr/WufMGeauspBdlVK+Lg3d4OexLMpMbHEfHU+Gjqtu7Dhdb6Benn4psFrGlEdPcdsoZCbZjN1BKw+UeKSJi5wRvjYLqMxG8henBU+mgwdntYWUMucrFHeeZDjfE2f9criSAeMZIum54o2q/DwdD4Is6fKMwpJKI5lE8jGkca5gYh0JwCE/z04dM1+AAmMrz/CKh1IGYws+0VMENjxRUCLJBBG8apJd0vREMUinEl+UhnkcP5COSeEsWHfilOd+fOGELxCbWNyDDp7Al2qB4oTpRvXSyY3FhsP6wDSebAIMSfG0R2UAVSZOa07yvjIVsxeBG6hpw2BxjGA9fRfy42r+J5zZkYnGbNbUxnM2aH61Rb5uwwaYKfG026r555RcBK1Rc3pMzMkN6unndLnBUaepS0W9Jr+ogeGr+3LuJLS+2rK+z/8gZCt0EyUq+AU+8toTsN4V8qipXslZAS3sYO3Ia5mZPs6ZSbAnHEsF8xrXBhG/1Ek6WBOvxV2Ladw1qYEmROOrGtMRqF1i5hUZED51PAoCCd00DmNi5ScGjPA3W11cDwmtM1mIIKUOhEyyKBCaSiXULTTq3hirQUqdMlYBd8/gMayEXlQQVCscS24BvovTLUxIwlcdGJPjGuSGlgpOgPd0JMosqK05aSL427FE2jW8PtbJPcHx8kq6JSKxLBt9G4st8afZJoimgsF2GoJZ1Zj/uiHpMI/ugXULeq2nWwju074yADrpAhv9PSGgGT8k5LPJHvaolCkgpdBMa6sSJr8tA52mdK6Nv4FY3l23iZWroDa73WUiLHtzCVb+OYthqY2JxcHux9f3ppoA0/FLNAVBy2zi/3jp8MP4BQBW5QEaw6+tPb9g0+a2oTR0X50RsXASMsKtPZXdgBWgx1zlmlBz+bnQ57povo40CmOh3pH4yl9RofGHCOxkpFMTC20YVk0ZG4/8akYbYHoZ9xqQrX1qNGmk1T8Msd+IZy8hvmH0hZNDXpUX3ll7tsbdiQP9kpjC9Ei5MKnbZU+IbcBGOkuokFzsuiHgtscpfMRTTmAOclRLNXSjwH1piGPESVmQwqKxDq8ylGqB9VTIIwamTEiJn1Kia+xI0SrDFts4G12WHDzMV652CfvOI4ACZ6GNcvEn8he9QhEu/RRU+snyH40qlzCL506rjgwnt8wYX1ccEXwUQ6g+SJ7lHRM/fRo0Bm/powkIGNbpQM7IhVaqq/TklWU+3DcoyiTCCn4cKYiO8zPKE7UOau2Ttig0WqaOPjmWqk+8QEYkDHmIaT3OmywjnBlMhJ1yLKSt0/SrfypTWBdLY7S5edMD4lRMD6dyWDNjoNAjpZnicKlwVHH2gyejrdyLLujvcFSMl2FnzdOXlaBgnPMbjjb/tn4BWmkZyo9JmAXzF1PgS4ukjQy6kJXo3ab90fHKD/HCy6MnEi5qEtnoCM2Hl9GOQvsablAc13q3wYK8htcCgfiEFTJJUGuU5Dm86NYbrsQPZjSoPgwjhNj8XffRsc2rM6ciDlg+Z9v83z6AQDOhjH8Z4xPqxg7oaDLZTH0X1bLfbjC60r0qKREP04pyDhZnhDwgJNNK4UiMK9FxZ79UDxK41mpCKRPQrIHeOBqNGhGjFuHIBK5ckxHONekUkhW74fOBFpDkruVzis8MoUD2es+3548pB1n8D3A9jvB7D/mwew9Khsy2Z+VqyGN3zgTTo1qpDBLxyFSWP02++XXweCQiK5ExUK+CBtsRj5KrTOGVdeXcO0jVDjUhzi8p+8dMq48hruuUyHev+6BDPbtYesqYJ3ho0rC/hVnq9aBAwE1qt0muwdiOEihWQOzs2ZrFBtCOSiXYL2C7f2R67y/2IFmOC3LPivZNHVwI4JZVunO6A1Pk6aYHwwcXm2wn/FACzNyXJK6hvdRA/cNsMX8pR33NYU+Ez12Yb+EfZtNNZw8uWiUf1AWKElPek7PZVXxpWTGarowy38cHMzMH5pKKjDm2rH6pWQ+/yV70oihzdTigtpPW+Km9Ro/FDcvM0eteOcqdaKHkxdkzYYSZyMrJHgu25no99SBnyuDINCJy5oiZOfOqEvnr9el13b1EZce7oER3Hhw4MsKIkz3gBNoBlFVeXj4SPH2KnyHNUXPrV1LtRRL3M75lA9OL+wpEvSGw28yQrT1MRqIITuwTzHOfvOKfsWojNegUiqOfe4JOs/+i7JivPlq0G9F9YHMJ5d4erg0kbGfT7l73rjua3Qp/mU7aREW1KEHTAnIf96thnawGe8JESBDUdy4x74fpYosp1K12w2QnXocyR4k8oCtAtcsmykm5xtAzoWU+ZVPBi8ah+Wp832h88Mjgr3/+k2QHK+dADQZY2UKESi3fkEILNXJhVJqdpZC30NxinbplvhHFE9XENF2KRQvrqFCBxDq2IbhuaOacgtW3ncOmPfgJKWedshD4fZ6QJNUoHhVUNknCIwEVIfNLTvkgh5bP9tZNgacZ0kxLqer/yc7NE5w3p4MKWIz5s/HFD+dqOGDOX7rOFJ8b53orudaHaa4VZU+dZqifdXaXPJCymsryKF2jiJmQOt4Mh/01fleeU+7J9NGlAIuGhUhqaXDmujroqLfwcAAP//9NY4nA==" } diff --git a/metricbeat/module/rabbitmq/queue/_meta/data.json b/metricbeat/module/rabbitmq/queue/_meta/data.json index 51f8696f24b..42992de9967 100644 --- a/metricbeat/module/rabbitmq/queue/_meta/data.json +++ b/metricbeat/module/rabbitmq/queue/_meta/data.json @@ -20,7 +20,7 @@ "consumers": { "count": 3, "utilisation": { - "pct": 0 + "pct": 0.7 } }, "disk": { @@ -68,4 +68,4 @@ "address": "127.0.0.1:55555", "type": "rabbitmq" } -} \ No 
newline at end of file
+}
diff --git a/metricbeat/module/rabbitmq/queue/_meta/fields.yml b/metricbeat/module/rabbitmq/queue/_meta/fields.yml
index 93a496d5926..0376de9d367 100644
--- a/metricbeat/module/rabbitmq/queue/_meta/fields.yml
+++ b/metricbeat/module/rabbitmq/queue/_meta/fields.yml
@@ -45,7 +45,8 @@
       description: >
         Number of consumers.
     - name: consumers.utilisation.pct
-      type: long
+      type: scaled_float
+      scaling_factor: 100
       format: percent
       description: >
         Fraction of the time (between 0.0 and 1.0) that the queue is able to immediately deliver messages to consumers. This can be less than 1.0 if consumers are limited by network congestion or prefetch count.
diff --git a/metricbeat/module/rabbitmq/queue/data.go b/metricbeat/module/rabbitmq/queue/data.go
index 682abcbc194..35a4a772c0e 100644
--- a/metricbeat/module/rabbitmq/queue/data.go
+++ b/metricbeat/module/rabbitmq/queue/data.go
@@ -43,7 +43,7 @@ var (
 		"consumers": s.Object{
 			"count": c.Int("consumers"),
 			"utilisation": s.Object{
-				"pct": c.Int("consumer_utilisation", s.IgnoreAllErrors),
+				"pct": c.Float("consumer_utilisation", s.IgnoreAllErrors),
 			},
 		},
 		"messages": s.Object{
diff --git a/metricbeat/module/rabbitmq/queue/queue_test.go b/metricbeat/module/rabbitmq/queue/queue_test.go
index 4e0d08aba6f..41f0fd61b20 100644
--- a/metricbeat/module/rabbitmq/queue/queue_test.go
+++ b/metricbeat/module/rabbitmq/queue/queue_test.go
@@ -55,7 +55,7 @@ func TestFetchEventContents(t *testing.T) {
 	consumers := event["consumers"].(mapstr.M)
 	utilisation := consumers["utilisation"].(mapstr.M)
 	assert.EqualValues(t, 3, consumers["count"])
-	assert.EqualValues(t, 0.7, utilisation["pct"])
+	assert.Equal(t, 0.7, utilisation["pct"])

 	memory := event["memory"].(mapstr.M)
 	assert.EqualValues(t, 232720, memory["bytes"])
diff --git a/metricbeat/modules.d/mysql.yml.disabled b/metricbeat/modules.d/mysql.yml.disabled
index 2913f5af8bc..27dcc1e59ea 100644
--- a/metricbeat/modules.d/mysql.yml.disabled
+++ b/metricbeat/modules.d/mysql.yml.disabled
@@ -21,3 +21,15 @@
   # Password of hosts. Empty by default.
   #password: secret
+
+  # Optional SSL/TLS. By default is false.
+  #ssl.enabled: true
+
+  # List of root certificates for SSL/TLS server verification
+  #ssl.certificate_authorities: ["/etc/pki/root/ca.crt"]
+
+  # Certificate for SSL/TLS client authentication
+  #ssl.certificate: "/etc/pki/client/cert.crt"
+
+  # Client certificate key file
+  #ssl.key: "/etc/pki/client/cert.key"
\ No newline at end of file
diff --git a/packetbeat/config/config.go b/packetbeat/config/config.go
index 7d579af635b..427df6cd117 100644
--- a/packetbeat/config/config.go
+++ b/packetbeat/config/config.go
@@ -144,6 +144,8 @@ type Flows struct {
 	KeepNull bool `config:"keep_null"`
 	// Index is used to overwrite the index where flows are published
 	Index string `config:"index"`
+	// EnableDeltaFlowReports, when enabled, reports flow network stats (bytes, packets) as delta values
+	EnableDeltaFlowReports bool `config:"enable_delta_flow_reports"`
 }

 type ProtocolCommon struct {
diff --git a/packetbeat/docs/packetbeat-options.asciidoc b/packetbeat/docs/packetbeat-options.asciidoc
index c48b4a1b01d..aaa598b612c 100644
--- a/packetbeat/docs/packetbeat-options.asciidoc
+++ b/packetbeat/docs/packetbeat-options.asciidoc
@@ -461,6 +461,12 @@
 in time. Periodical reporting can be disabled by setting the value to -1. If
 disabled, flows are still reported once being timed out. The default value is
 10s.
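For orientation, here is a minimal sketch of a flows configuration exercising these reporting settings together with the new `enable_delta_flow_reports` option documented in the section that follows (the values shown are illustrative, not recommendations):

["source","yaml"]
----
packetbeat.flows:
  # Report a flow as ended once it has been inactive for this long.
  timeout: 30s
  # Emit periodic reports for live flows; -1 disables periodic reporting.
  period: 10s
  # Report network.bytes and network.packets as per-period deltas
  # rather than cumulative totals.
  enable_delta_flow_reports: true
----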
+[float]
+==== `enable_delta_flow_reports`
+
+Configure `network.bytes` and `network.packets` to be reported as delta
+values instead of cumulative sums for each flow period. The default value is false.
+
 [float]
 [[packetbeat-configuration-flows-fields]]
 ==== `fields`
diff --git a/packetbeat/flows/flows.go b/packetbeat/flows/flows.go
index b7b52217529..9df019af2d0 100644
--- a/packetbeat/flows/flows.go
+++ b/packetbeat/flows/flows.go
@@ -71,7 +71,7 @@ func NewFlows(pub Reporter, watcher *procs.ProcessesWatcher, config *config.Flow
 	counter := &counterReg{}

-	worker, err := newFlowsWorker(pub, watcher, table, counter, timeout, period)
+	worker, err := newFlowsWorker(pub, watcher, table, counter, timeout, period, config.EnableDeltaFlowReports)
 	if err != nil {
 		logp.Err("failed to configure flows processing intervals: %v", err)
 		return nil, err
diff --git a/packetbeat/flows/worker.go b/packetbeat/flows/worker.go
index e3a2008a059..46f7c0ca418 100644
--- a/packetbeat/flows/worker.go
+++ b/packetbeat/flows/worker.go
@@ -127,7 +127,7 @@ func (w *worker) periodically(tick time.Duration, fn func() error) {
 // reporting will be done at flow lifetime end.
 // Flows are published via the pub Reporter after being enriched with process information
 // by watcher.
-func newFlowsWorker(pub Reporter, watcher *procs.ProcessesWatcher, table *flowMetaTable, counters *counterReg, timeout, period time.Duration) (*worker, error) {
+func newFlowsWorker(pub Reporter, watcher *procs.ProcessesWatcher, table *flowMetaTable, counters *counterReg, timeout, period time.Duration, enableDeltaFlowReports bool) (*worker, error) {
 	if timeout < time.Second {
 		return nil, ErrInvalidTimeout
 	}
@@ -161,10 +161,11 @@ func newFlowsWorker(pub Reporter, watcher *procs.ProcessesWatcher, table *flowMe
 	defaultBatchSize := 1024

 	processor := &flowsProcessor{
-		table:    table,
-		watcher:  watcher,
-		counters: counters,
-		timeout:  timeout,
+		table:                    table,
+		watcher:                  watcher,
+		counters:                 counters,
+		timeout:                  timeout,
+		enableDeltaFlowReporting: enableDeltaFlowReports,
 	}
 	processor.spool.init(pub, defaultBatchSize)
@@ -221,11 +222,12 @@ func makeWorker(processor *flowsProcessor, tick time.Duration, timeout, period i
 }

 type flowsProcessor struct {
-	spool    spool
-	watcher  *procs.ProcessesWatcher
-	table    *flowMetaTable
-	counters *counterReg
-	timeout  time.Duration
+	spool                    spool
+	watcher                  *procs.ProcessesWatcher
+	table                    *flowMetaTable
+	counters                 *counterReg
+	timeout                  time.Duration
+	enableDeltaFlowReporting bool
 }

 func (fw *flowsProcessor) execute(w *worker, checkTimeout, handleReports, lastReport bool) {
@@ -281,13 +283,13 @@ func (fw *flowsProcessor) execute(w *worker, checkTimeout, handleReports, lastRe
 }

 func (fw *flowsProcessor) report(w *worker, ts time.Time, flow *biFlow, isOver bool, intNames, uintNames, floatNames []string) {
-	event := createEvent(fw.watcher, ts, flow, isOver, intNames, uintNames, floatNames)
+	event := createEvent(fw.watcher, ts, flow, isOver, intNames, uintNames, floatNames, fw.enableDeltaFlowReporting)

 	debugf("add event: %v", event)
 	fw.spool.publish(event)
 }

-func createEvent(watcher *procs.ProcessesWatcher, ts time.Time, f *biFlow, isOver bool, intNames, uintNames, floatNames []string) beat.Event {
+func createEvent(watcher *procs.ProcessesWatcher, ts time.Time, f *biFlow, isOver bool, intNames, uintNames, floatNames []string, enableDeltaFlowReporting bool) beat.Event {
 	timestamp := ts

 	event := mapstr.M{
@@ -418,7 +420,7 @@ func createEvent(watcher *procs.ProcessesWatcher, ts time.Time, f *biFlow, isOve
 	var totalBytes, totalPackets uint64
 	if f.stats[0] != nil { // Source stats.
-		stats := encodeStats(f.stats[0], intNames, uintNames, floatNames)
+		stats := encodeStats(f.stats[0], intNames, uintNames, floatNames, enableDeltaFlowReporting)
 		for k, v := range stats {
 			switch k {
 			case "icmpV4TypeCode":
@@ -449,7 +451,7 @@ func createEvent(watcher *procs.ProcessesWatcher, ts time.Time, f *biFlow, isOve
 	}
 	if f.stats[1] != nil { // Destination stats.
-		stats := encodeStats(f.stats[1], intNames, uintNames, floatNames)
+		stats := encodeStats(f.stats[1], intNames, uintNames, floatNames, enableDeltaFlowReporting)
 		for k, v := range stats {
 			switch k {
 			case "icmpV4TypeCode", "icmpV6TypeCode":
@@ -533,7 +535,7 @@ func formatHardwareAddr(addr net.HardwareAddr) string {
 	return string(buf)
 }

-func encodeStats(stats *flowStats, ints, uints, floats []string) map[string]interface{} {
+func encodeStats(stats *flowStats, ints, uints, floats []string, enableDeltaFlowReporting bool) map[string]interface{} {
 	report := make(map[string]interface{})

 	i := 0
@@ -551,6 +553,12 @@ func encodeStats(stats *flowStats, ints, uints, floats []string) map[string]inte
 	for m := mask; m != 0; m >>= 1 {
 		if (m & 1) == 1 {
 			report[uints[i]] = stats.uints[i]
+			if enableDeltaFlowReporting && (uints[i] == "bytes" || uints[i] == "packets") {
+				// If delta flow reporting is enabled, reset bytes and packets at each period.
+				// Only the bytes and packets received during the flow period will be reported.
+				// This should be thread-safe as it is called under the flowMetaTable lock.
+				stats.uints[i] = 0
+			}
 		}
 		i++
 	}
diff --git a/packetbeat/flows/worker_test.go b/packetbeat/flows/worker_test.go
index ef0104adc92..d6e371cad87 100644
--- a/packetbeat/flows/worker_test.go
+++ b/packetbeat/flows/worker_test.go
@@ -21,16 +21,17 @@ import (
 	"encoding/json"
 	"flag"
 	"os"
+	"reflect"
 	"testing"
 	"time"

-	"github.com/elastic/go-lookslike/isdef"
-
-	"github.com/elastic/go-lookslike"
+	"gotest.tools/assert"

 	"github.com/elastic/beats/v7/libbeat/common"
 	"github.com/elastic/beats/v7/packetbeat/procs"
 	"github.com/elastic/elastic-agent-libs/logp"
+	"github.com/elastic/go-lookslike"
+	"github.com/elastic/go-lookslike/isdef"
 )

 // Use `go test -data` to update sample event files.
@@ -65,7 +66,7 @@ func TestCreateEvent(t *testing.T) {
 	}
 	bif.stats[0] = &flowStats{uintFlags: []uint8{1, 1}, uints: []uint64{10, 1}}
 	bif.stats[1] = &flowStats{uintFlags: []uint8{1, 1}, uints: []uint64{460, 2}}
-	event := createEvent(&procs.ProcessesWatcher{}, time.Now(), bif, true, nil, []string{"bytes", "packets"}, nil)
+	event := createEvent(&procs.ProcessesWatcher{}, time.Now(), bif, true, nil, []string{"bytes", "packets"}, nil, false)

 	// Validate the contents of the event.
 	validate := lookslike.MustCompile(map[string]interface{}{
@@ -116,7 +117,7 @@ func TestCreateEvent(t *testing.T) {
 	// Write the event to disk if -data is used.
 	if *dataFlag {
-		event.Fields.Put("@timestamp", common.Time(end)) //nolint:errcheck // Never fails.
+ event.Fields.Put("@timestamp", common.Time(end)) output, err := json.MarshalIndent(&event.Fields, "", " ") if err != nil { t.Fatal(err) @@ -126,4 +127,34 @@ func TestCreateEvent(t *testing.T) { t.Fatal(err) } } + + // when enableDeltaFlowReporting is true, the flow stats should be reset + expectbiFlow := &biFlow{ + id: id.rawFlowID, + killed: 1, + createTS: start, + ts: end, + dir: flowDirForward, + } + expectbiFlow.stats[0] = &flowStats{uintFlags: []uint8{1, 1}, uints: []uint64{0, 0}} + expectbiFlow.stats[1] = &flowStats{uintFlags: []uint8{1, 1}, uints: []uint64{0, 0}} + + // Assert the biflow is not 0 before the test + assert.Assert(t, !reflect.DeepEqual(expectbiFlow.stats[0].uints, bif.stats[0].uints)) + assert.Assert(t, !reflect.DeepEqual(expectbiFlow.stats[1].uints, bif.stats[1].uints)) + + event = createEvent(&procs.ProcessesWatcher{}, time.Now(), bif, true, nil, []string{"bytes", "packets"}, nil, true) + result = validate(event.Fields) + if errs := result.Errors(); len(errs) > 0 { + for _, err := range errs { + t.Error(err) + } + t.FailNow() + } + + // Assert the biflow is 0 after the test + assert.DeepEqual(t, expectbiFlow.stats[0].uintFlags, bif.stats[0].uintFlags) + assert.DeepEqual(t, expectbiFlow.stats[0].uints, bif.stats[0].uints) + assert.DeepEqual(t, expectbiFlow.stats[1].uintFlags, bif.stats[1].uintFlags) + assert.DeepEqual(t, expectbiFlow.stats[1].uints, bif.stats[1].uints) } diff --git a/testing/environments/snapshot.yml b/testing/environments/snapshot.yml index fd3c6007409..940d2e82782 100644 --- a/testing/environments/snapshot.yml +++ b/testing/environments/snapshot.yml @@ -3,7 +3,7 @@ version: '2.3' services: elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:8.14.0-74a79bf3-SNAPSHOT + image: docker.elastic.co/elasticsearch/elasticsearch:8.14.0-14d688f1-SNAPSHOT # When extend is used it merges healthcheck.tests, see: # https://github.com/docker/compose/issues/8962 # healthcheck: @@ -31,7 +31,7 @@ services: - "./docker/elasticsearch/users_roles:/usr/share/elasticsearch/config/users_roles" logstash: - image: docker.elastic.co/logstash/logstash:8.14.0-74a79bf3-SNAPSHOT + image: docker.elastic.co/logstash/logstash:8.14.0-14d688f1-SNAPSHOT healthcheck: test: ["CMD", "curl", "-f", "http://localhost:9600/_node/stats"] retries: 600 @@ -44,7 +44,7 @@ services: - 5055:5055 kibana: - image: docker.elastic.co/kibana/kibana:8.14.0-74a79bf3-SNAPSHOT + image: docker.elastic.co/kibana/kibana:8.14.0-14d688f1-SNAPSHOT environment: - "ELASTICSEARCH_USERNAME=kibana_system_user" - "ELASTICSEARCH_PASSWORD=testing" diff --git a/x-pack/filebeat/docs/inputs/input-cel.asciidoc b/x-pack/filebeat/docs/inputs/input-cel.asciidoc index 684ceb7aa18..7be120941c9 100644 --- a/x-pack/filebeat/docs/inputs/input-cel.asciidoc +++ b/x-pack/filebeat/docs/inputs/input-cel.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] :type: cel -:mito_version: v1.9.0 +:mito_version: v1.10.0 :mito_docs: https://pkg.go.dev/github.com/elastic/mito@{mito_version} [id="{beatname_lc}-input-{type}"] @@ -171,7 +171,9 @@ As noted above the `cel` input provides functions, macros, and global variables * {mito_docs}/lib#Crypto[Crypto] ** {mito_docs}/lib#hdr-Base64[Base64] +** {mito_docs}/lib#hdr-Base64_Decode[Base64 Decode] ** {mito_docs}/lib#hdr-Base64_Raw[Base64 Raw] +** {mito_docs}/lib#hdr-Base64_Raw_Decode[Base64 Raw Decode] ** {mito_docs}/lib#hdr-Hex[Hex] ** {mito_docs}/lib#hdr-MD5[MD5] ** {mito_docs}/lib#hdr-SHA_1[SHA-1] diff --git a/x-pack/filebeat/docs/inputs/input-entity-analytics.asciidoc 
b/x-pack/filebeat/docs/inputs/input-entity-analytics.asciidoc
index 86143f727bc..676925f7176 100644
--- a/x-pack/filebeat/docs/inputs/input-entity-analytics.asciidoc
+++ b/x-pack/filebeat/docs/inputs/input-entity-analytics.asciidoc
@@ -16,6 +16,7 @@ external identity providers. The following identity providers are supported:

+- <<provider-activedirectory,Active Directory>>
 - <<provider-azure-ad,Azure Active Directory>>
 - <<provider-okta,Okta>>

@@ -27,7 +28,7 @@ the <<{beatname_lc}-input-{type}-common-options>> described later.
 [float]
 ==== `provider`

-The identity provider. Must be one of: `azure-ad` or `okta`.
+The identity provider. Must be one of: `activedirectory`, `azure-ad`, or `okta`.

 [id="{beatname_lc}-input-{type}-common-options"]
 include::../../../../filebeat/docs/inputs/input-common-options.asciidoc[]

@@ -35,6 +36,178 @@ include::../../../../filebeat/docs/inputs/input-common-options.asciidoc[]
 [float]
 === Providers

+[id="provider-activedirectory"]
+==== Active Directory (`activedirectory`)
+
+The `activedirectory` provider allows the input to retrieve users, with group
+memberships, from Active Directory.
+
+[float]
+==== Setup
+
+A user with appropriate permissions must be set up in the Active Directory
+Server Manager in order for the provider to function properly.
+
+[float]
+==== How It Works
+
+[float]
+===== Overview
+
+The Active Directory provider periodically queries the Active Directory server,
+retrieving updates for users and groups, updates its internal cache of user and
+group metadata and group membership information, and ships updated user metadata
+to Elasticsearch.
+
+Fetching and shipping updates occurs in one of two processes: *full
+synchronizations* and *incremental updates*. Full synchronizations will send the
+entire list of users and group membership in state, along with write markers to indicate
+the start and end of the synchronization event. Incremental updates will only
+send data for users that changed during that event. A change to a user can take
+several forms: the user's metadata changed, the user was added or modified, or
+the user's group membership changed.
+
+[float]
+===== Sending User and Device Metadata to Elasticsearch
+
+During a full synchronization, all users and groups stored in state will be sent
+to the output, while incremental updates will only send users and groups that have been
+updated. Full synchronizations will be bounded on either side by write marker
+documents, which will look something like this:
+
+["source","json",subs="attributes"]
+----
+{
+    "@timestamp": "2022-11-04T09:57:19.786056-05:00",
+    "event": {
+        "action": "started",
+        "start": "2022-11-04T09:57:19.786056-05:00"
+    },
+    "labels": {
+        "identity_source": "activedirectory-1"
+    }
+}
+----
+
+User documents will show the current state of the user.
+
+Example user document:
+
+["source","json",subs="attributes"]
+----
+{
+    "@timestamp": "2024-02-05T06:37:40.876026-05:00",
+    "event": {
+        "action": "user-discovered"
+    },
+    "activedirectory": {
+        "id": "CN=Guest,CN=Users,DC=testserver,DC=local",
+        "user": {
+            "accountExpires": "2185-07-21T23:34:33.709551516Z",
+            "badPasswordTime": "0",
+            "badPwdCount": "0",
+            "cn": "Guest",
+            "codePage": "0",
+            "countryCode": "0",
+            "dSCorePropagationData": [
+                "2024-01-22T06:37:40Z",
+                "1601-01-01T00:00:01Z"
+            ],
+            "description": "Built-in account for guest access to the computer/domain",
+            "distinguishedName": "CN=Guest,CN=Users,DC=testserver,DC=local",
+            "instanceType": "4",
+            "isCriticalSystemObject": true,
+            "lastLogoff": "0",
+            "lastLogon": "2185-07-21T23:34:33.709551616Z",
+            "logonCount": "0",
+            "memberOf": "CN=Guests,CN=Builtin,DC=testserver,DC=local",
+            "name": "Guest",
+            "objectCategory": "CN=Person,CN=Schema,CN=Configuration,DC=testserver,DC=local",
+            "objectClass": [
+                "top",
+                "person",
+                "organizationalPerson",
+                "user"
+            ],
+            "objectGUID": "hSt/40XJQU6cf+J2XoYMHw==",
+            "objectSid": "AQUAAAAAAAUVAAAA0JU2Fq1k30YZ7UPx9QEAAA==",
+            "primaryGroupID": "514",
+            "pwdLastSet": "2185-07-21T23:34:33.709551616Z",
+            "sAMAccountName": "Guest",
+            "sAMAccountType": "805306368",
+            "uSNChanged": "8197",
+            "uSNCreated": "8197",
+            "userAccountControl": "66082",
+            "whenChanged": "2024-01-22T06:36:59Z",
+            "whenCreated": "2024-01-22T06:36:59Z"
+        },
+        "whenChanged": "2024-01-22T06:36:59Z"
+    },
+    "user": {
+        "id": "CN=Guest,CN=Users,DC=testserver,DC=local"
+    },
+    "labels": {
+        "identity_source": "activedirectory-1"
+    }
+}
+----
+
+[float]
+==== Configuration
+
+Example configuration:
+
+["source","yaml",subs="attributes"]
+----
+{beatname_lc}.inputs:
+- type: entity-analytics
+  enabled: true
+  id: activedirectory-1
+  provider: activedirectory
+  sync_interval: "12h"
+  update_interval: "30m"
+  ad_url: "ldaps://host.domain.tld"
+  ad_base_dn: "CN=Users,DC=SERVER,DC=DOMAIN"
+  ad_user: "USERNAME"
+  ad_password: "PASSWORD"
+----
+
+The `activedirectory` provider supports the following configuration:
+
+[float]
+===== `ad_url`
+
+The Active Directory server URL. Field is required.
+
+[float]
+===== `ad_base_dn`
+
+The Active Directory Base Distinguished Name. Field is required.
+
+[float]
+===== `ad_user`
+
+The client user name. Used for authentication. The user must have Active Directory read access. Field is required.
+
+[float]
+===== `ad_password`
+
+The client's password, used for authentication. Field is required.
+
+[float]
+===== `sync_interval`
+
+The interval in which full synchronizations should occur. The interval must be
+longer than the update interval (`update_interval`). Expressed as a duration
+string (e.g., 1m, 3h, 24h). Defaults to `24h` (24 hours).
+
+[float]
+===== `update_interval`
+
+The interval in which incremental updates should occur. The interval must be
+shorter than the full synchronization interval (`sync_interval`). Expressed as a
+duration string (e.g., 1m, 3h, 24h). Defaults to `15m` (15 minutes).
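For readers unfamiliar with LDAP, here is a rough sketch of the kind of query the provider issues against the configured server and base DN, using the same `go-ldap` library the provider code below imports. The bind credentials mirror the example configuration above; the search filter and attribute list are hypothetical, not the provider's actual query:

["source","go"]
----
package main

import (
	"fmt"
	"log"

	"github.com/go-ldap/ldap/v3"
)

func main() {
	// Dial the server named by ad_url.
	conn, err := ldap.DialURL("ldaps://host.domain.tld")
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// Authenticate with ad_user/ad_password.
	if err := conn.Bind("USERNAME", "PASSWORD"); err != nil {
		log.Fatal(err)
	}

	// Search for user entries under ad_base_dn (hypothetical filter).
	req := ldap.NewSearchRequest(
		"CN=Users,DC=SERVER,DC=DOMAIN",
		ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false,
		"(objectClass=user)",
		[]string{"distinguishedName", "whenChanged", "memberOf"},
		nil,
	)
	res, err := conn.Search(req)
	if err != nil {
		log.Fatal(err)
	}
	for _, entry := range res.Entries {
		fmt.Println(entry.DN)
	}
}
----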
+
 [id="provider-azure-ad"]
 ==== Azure Active Directory (`azure-ad`)
diff --git a/x-pack/filebeat/docs/inputs/input-etw.asciidoc b/x-pack/filebeat/docs/inputs/input-etw.asciidoc
index 27dadaca2b7..c072542cf5a 100644
--- a/x-pack/filebeat/docs/inputs/input-etw.asciidoc
+++ b/x-pack/filebeat/docs/inputs/input-etw.asciidoc
@@ -99,7 +99,7 @@ Multiple providers example:
     provider.name: Microsoft-Windows-DNSServer
     session_name: DNSServer-Analytical
     trace_level: verbose
-    match_any_keyword: 0xfffffffffffffffff
+    match_any_keyword: 0xffffffffffffffff
     match_all_keyword: 0
 - type: etw
   id: etw-security
@@ -107,7 +107,7 @@ Multiple providers example:
     provider.name: Microsoft-Windows-Security-Auditing
     session_name: Security-Auditing
     trace_level: warning
-    match_any_keyword: 0xfffffffffffffffff
+    match_any_keyword: 0xffffffffffffffff
     match_all_keyword: 0
 ----
@@ -145,14 +145,14 @@ using the provider ID prefixed by 'Elastic-'.
 ==== `trace_level`

 Defines the filtering level for events based on severity. Valid options include
-critical, error, warning, informational, and verbose.
+critical, error, warning, information, and verbose.

 [float]
 ==== `match_any_keyword`

 An 8-byte bitmask used for filtering events from specific provider subcomponents
 based on keyword matching. Any matching keyword will enable the event to be
-written. Default value is `0xfffffffffffffffff` so it matches every available
+written. Default value is `0xffffffffffffffff` so it matches every available
 keyword.

 Run `logman query providers "<provider.name>"` to list the available keywords
diff --git a/x-pack/filebeat/docs/inputs/input-http-endpoint.asciidoc b/x-pack/filebeat/docs/inputs/input-http-endpoint.asciidoc
index b7a7ee06f70..a669eae489a 100644
--- a/x-pack/filebeat/docs/inputs/input-http-endpoint.asciidoc
+++ b/x-pack/filebeat/docs/inputs/input-http-endpoint.asciidoc
@@ -227,6 +227,11 @@ The prefix for the signature. Certain webhooks prefix the HMAC signature with a

 By default the input expects the incoming POST to include a Content-Type of `application/json` to try to enforce the incoming data to be valid JSON. In certain scenarios when the source of the request is not able to do that, it can be overwritten with another value or set to null.

+[float]
+==== `program`
+
+The normal operation of the input treats the body either as a single event, when the body is an object, or as a set of events, when the body is an array. If the body should be handled differently, for example when a set of events is carried in an array field of an object, a https://opensource.google.com/projects/cel[Common Expression Language (CEL)] program can be provided through this configuration field. No CEL extensions are provided beyond the functions in the CEL https://github.com/google/cel-spec/blob/master/doc/langdef.md#standard[standard library]. CEL https://pkg.go.dev/github.com/google/cel-go/cel#OptionalTypes[optional types] are supported.
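As a concrete sketch of `program`, assume a webhook posts a body such as `{"requestId": "r1", "records": [...]}` and each element of `records` should become its own event; the field names here are hypothetical, not part of the input's contract:

["source","yaml"]
----
filebeat.inputs:
- type: http_endpoint
  enabled: true
  listen_address: 0.0.0.0
  listen_port: 8080
  # Map each record to one event, copying the request ID onto each.
  program: |
    obj.records.map(r, {
      "requestId": obj.requestId,
      "record": r,
    })
----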
+ [float] ==== `response_code` diff --git a/x-pack/filebeat/input/awss3/input.go b/x-pack/filebeat/input/awss3/input.go index 0b33ae042f9..5fc1c1f0491 100644 --- a/x-pack/filebeat/input/awss3/input.go +++ b/x-pack/filebeat/input/awss3/input.go @@ -317,18 +317,20 @@ func (in *s3Input) createS3Lister(ctx v2.Context, cancelCtx context.Context, cli return s3Poller, nil } -var errBadQueueURL = errors.New("QueueURL is not in format: https://sqs.{REGION_ENDPOINT}.{ENDPOINT}/{ACCOUNT_NUMBER}/{QUEUE_NAME}") +var errBadQueueURL = errors.New("QueueURL is not in format: https://sqs.{REGION_ENDPOINT}.{ENDPOINT}/{ACCOUNT_NUMBER}/{QUEUE_NAME} or https://{VPC_ENDPOINT}.sqs.{REGION_ENDPOINT}.vpce.{ENDPOINT}/{ACCOUNT_NUMBER}/{QUEUE_NAME}") func getRegionFromQueueURL(queueURL string, endpoint, defaultRegion string) (region string, err error) { // get region from queueURL - // Example: https://sqs.us-east-1.amazonaws.com/627959692251/test-s3-logs + // Example for sqs queue: https://sqs.us-east-1.amazonaws.com/12345678912/test-s3-logs + // Example for vpce: https://vpce-test.sqs.us-east-1.vpce.amazonaws.com/12345678912/sqs-queue u, err := url.Parse(queueURL) if err != nil { return "", fmt.Errorf(queueURL + " is not a valid URL") } if (u.Scheme == "https" || u.Scheme == "http") && u.Host != "" { queueHostSplit := strings.SplitN(u.Host, ".", 3) - if len(queueHostSplit) == 3 { + // check for sqs queue url + if len(queueHostSplit) == 3 && queueHostSplit[0] == "sqs" { if queueHostSplit[2] == endpoint || (endpoint == "" && strings.HasPrefix(queueHostSplit[2], "amazonaws.")) { region = queueHostSplit[1] if defaultRegion != "" && region != defaultRegion { @@ -336,7 +338,21 @@ func getRegionFromQueueURL(queueURL string, endpoint, defaultRegion string) (reg } return region, nil } - } else if defaultRegion != "" { + } + + // check for vpce url + queueHostSplitVPC := strings.SplitN(u.Host, ".", 5) + if len(queueHostSplitVPC) == 5 && queueHostSplitVPC[1] == "sqs" { + if queueHostSplitVPC[4] == endpoint || (endpoint == "" && strings.HasPrefix(queueHostSplitVPC[4], "amazonaws.")) { + region = queueHostSplitVPC[2] + if defaultRegion != "" && region != defaultRegion { + return defaultRegion, regionMismatchError{queueURLRegion: region, defaultRegion: defaultRegion} + } + return region, nil + } + } + + if defaultRegion != "" { return defaultRegion, nil } } diff --git a/x-pack/filebeat/input/awss3/input_test.go b/x-pack/filebeat/input/awss3/input_test.go index 8a195eb3084..abc9f5c9a6a 100644 --- a/x-pack/filebeat/input/awss3/input_test.go +++ b/x-pack/filebeat/input/awss3/input_test.go @@ -5,7 +5,6 @@ package awss3 import ( - "errors" "testing" "github.com/stretchr/testify/assert" @@ -76,40 +75,16 @@ func TestGetRegionFromQueueURL(t *testing.T) { wantErr: errBadQueueURL, }, { - name: "abc.xyz_and_domain_with_different_endpoint", - queueURL: "https://sqs.us-east-1.abc.xyz/627959692251/test-s3-logs", - endpoint: "googlecloud.com", - wantErr: errBadQueueURL, - }, - { - name: "mismatch_regions_no_default", - queueURL: "https://sqs.us-east-1.amazonaws.com/627959692251/test-s3-logs", + name: "vpce_endpoint", + queueURL: "https://vpce-test.sqs.us-east-2.vpce.amazonaws.com/12345678912/sqs-queue", deflt: "", - want: "us-east-1", - }, - { - name: "mismatch_regions", - queueURL: "https://sqs.us-east-1.amazonaws.com/627959692251/test-s3-logs", - deflt: "ap-west-1", - want: "ap-west-1", - wantErr: regionMismatchError{queueURLRegion: "us-east-1", defaultRegion: "ap-west-1"}, - }, - { - name: "localstack", - queueURL: 
"http://localhost:4566/000000000000/filebeat-s3-integtest-d9clk9", - deflt: "localstack", - want: "localstack", + want: "us-east-2", }, { - name: "localstack_sns", - queueURL: "http://localhost:4566/000000000000/filebeat-s3-integtest-sns-d9clk9", - deflt: "localstack_sns", - want: "localstack_sns", - }, - { - name: "invalid_queue_url", - queueURL: ":foo", - wantErr: errors.New(":foo is not a valid URL"), + name: "vpce_endpoint_with_endpoint", + queueURL: "https://vpce-test.sqs.us-east-1.vpce.amazonaws.com/12345678912/sqs-queue", + endpoint: "amazonaws.com", + want: "us-east-1", }, } diff --git a/x-pack/filebeat/input/cel/config_auth.go b/x-pack/filebeat/input/cel/config_auth.go index d6b35d633e6..ac187f4ffa1 100644 --- a/x-pack/filebeat/input/cel/config_auth.go +++ b/x-pack/filebeat/input/cel/config_auth.go @@ -6,7 +6,6 @@ package cel import ( "context" - "crypto/x509" "encoding/json" "errors" "fmt" @@ -341,7 +340,10 @@ func (o *oAuth2Config) validateOktaProvider() error { } // jwk_pem if o.OktaJWKPEM != "" { - _, err := x509.ParsePKCS1PrivateKey([]byte(o.OktaJWKPEM)) + _, err := pemPKCS8PrivateKey([]byte(o.OktaJWKPEM)) + if err != nil { + return fmt.Errorf("okta validation error: %w", err) + } return err } // jwk_file diff --git a/x-pack/filebeat/input/cel/config_okta_auth.go b/x-pack/filebeat/input/cel/config_okta_auth.go index 74366afd3d5..0f18b12e66c 100644 --- a/x-pack/filebeat/input/cel/config_okta_auth.go +++ b/x-pack/filebeat/input/cel/config_okta_auth.go @@ -12,6 +12,7 @@ import ( "encoding/base64" "encoding/json" "encoding/pem" + "errors" "fmt" "math/big" "net/http" @@ -160,17 +161,24 @@ func (i *base64int) UnmarshalJSON(b []byte) error { } func generateOktaJWTPEM(pemdata string, cnf *oauth2.Config) (string, error) { - blk, rest := pem.Decode([]byte(pemdata)) - if rest := bytes.TrimSpace(rest); len(rest) != 0 { - return "", fmt.Errorf("PEM text has trailing data: %s", rest) - } - key, err := x509.ParsePKCS8PrivateKey(blk.Bytes) + key, err := pemPKCS8PrivateKey([]byte(pemdata)) if err != nil { return "", err } return signJWT(cnf, key) } +func pemPKCS8PrivateKey(pemdata []byte) (any, error) { + blk, rest := pem.Decode(pemdata) + if rest := bytes.TrimSpace(rest); len(rest) != 0 { + return nil, fmt.Errorf("PEM text has trailing data: %d bytes", len(rest)) + } + if blk == nil { + return nil, errors.New("no PEM data") + } + return x509.ParsePKCS8PrivateKey(blk.Bytes) +} + // signJWT creates a JWT token using required claims and sign it with the // private key. func signJWT(cnf *oauth2.Config, key any) (string, error) { @@ -182,7 +190,7 @@ func signJWT(cnf *oauth2.Config, key any) (string, error) { Expiration(now.Add(time.Hour)). 
Build() if err != nil { - return "", err + return "", fmt.Errorf("failed to create token: %w", err) } signedToken, err := jwt.Sign(tok, jwt.WithKey(jwa.RS256, key)) if err != nil { diff --git a/x-pack/filebeat/input/cel/config_test.go b/x-pack/filebeat/input/cel/config_test.go index 7acf74df08c..0a686df099c 100644 --- a/x-pack/filebeat/input/cel/config_test.go +++ b/x-pack/filebeat/input/cel/config_test.go @@ -539,6 +539,47 @@ var oAuth2ValidationTests = []struct { }, }, }, + { + name: "okta_successful_pem_oauth2_validation", + input: map[string]interface{}{ + "auth.oauth2": map[string]interface{}{ + "provider": "okta", + "client.id": "a_client_id", + "token_url": "localhost", + "scopes": []string{"foo"}, + "okta.jwk_pem": ` +-----BEGIN PRIVATE KEY----- +MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCOuef3HMRhohVT +5kSoAJgV+atpDjkwTwkOq+ImnbBlv75GaApG90w8VpjXjhqN/1KJmwfyrKiquiMq +OPu+o/672Dys5rUAaWSbT7wRF1GjLDDZrM0GHRdV4DGxM/LKI8I5yE1Mx3EzV+D5 +ZLmcRc5U4oEoMwtGpr0zRZ7uUr6a28UQwcUsVIPItc1/9rERlo1WTv8dcaj4ECC3 +2Sc0y/F+9XqwJvLd4Uv6ckzP0Sv4tbDA+7jpD9MneAIUiZ4LVj2cwbBd+YRY6jXx +MkevcCSmSX60clBY1cIFkw1DYHqtdHEwAQcQHLGMoi72xRP2qrdzIPsaTKVYoHVo +WA9vADdHAgMBAAECggEAIlx7jjCsztyYyeQsL05FTzUWoWo9NnYwtgmHnshkCXsK +MiUmJEOxZO1sSqj5l6oakupyFWigCspZYPbrFNCiqVK7+NxqQzkccY/WtT6p9uDS +ufUyPwCN96zMCd952lSVlBe3FH8Hr9a+YQxw60CbFjCZ67WuR0opTsi6JKJjJSDb +TQQZ4qJR97D05I1TgfmO+VO7G/0/dDaNHnnlYz0AnOgZPSyvrU2G5cYye4842EMB +ng81xjHD+xp55JNui/xYkhmYspYhrB2KlEjkKb08OInUjBeaLEAgA1r9yOHsfV/3 +DQzDPRO9iuqx5BfJhdIqUB1aifrye+sbxt9uMBtUgQKBgQDVdfO3GYT+ZycOQG9P +QtdMn6uiSddchVCGFpk331u6M6yafCKjI/MlJDl29B+8R5sVsttwo8/qnV/xd3cn +pY14HpKAsE4l6/Ciagzoj+0NqfPEDhEzbo8CyArcd7pSxt3XxECAfZe2+xivEPHe +gFO60vSFjFtvlLRMDMOmqX3kYQKBgQCrK1DISyQTnD6/axsgh2/ESOmT7n+JRMx/ +YzA7Lxu3zGzUC8/sRDa1C41t054nf5ZXJueYLDSc4kEAPddzISuCLxFiTD2FQ75P +lHWMgsEzQObDm4GPE9cdKOjoAvtAJwbvZcjDa029CDx7aCaDzbNvdmplZ7EUrznR +55U8Wsm8pwKBgBytxTmzZwfbCgdDJvFKNKzpwuCB9TpL+v6Y6Kr2Clfg+26iAPFU +MiWqUUInGGBuamqm5g6jI5sM28gQWeTsvC4IRXyes1Eq+uCHSQax15J/Y+3SSgNT +9kjUYYkvWMwoRcPobRYWSZze7XkP2L8hFJ7EGvAaZGqAWxzgliS9HtnhAoGAONZ/ +UqMw7Zoac/Ga5mhSwrj7ZvXxP6Gqzjofj+eKqrOlB5yMhIX6LJATfH6iq7cAMxxm +Fu/G4Ll4oB3o5wACtI3wldV/MDtYfJBtoCTjBqPsfNOsZ9hMvBATlsc2qwzKjsAb +tFhzTevoOYpSD75EcSS/G8Ec2iN9bagatBnpl00CgYBVqAOFZelNfP7dj//lpk8y +EUAw7ABOq0S9wkpFWTXIVPoBQUipm3iAUqGNPmvr/9ShdZC9xeu5AwKram4caMWJ +ExRhcDP1hFM6CdmSkIYEgBKvN9N0O4Lx1ba34gk74Hm65KXxokjJHOC0plO7c7ok +LNV/bIgMHOMoxiGrwyjAhg== +-----END PRIVATE KEY----- +`, + }, + }, + }, } func TestConfigOauth2Validation(t *testing.T) { diff --git a/x-pack/filebeat/input/entityanalytics/provider/activedirectory/activedirectory.go b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/activedirectory.go new file mode 100644 index 00000000000..fa8bc7325fa --- /dev/null +++ b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/activedirectory.go @@ -0,0 +1,409 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +// Package activedirectory provides a user identity asset provider for Microsoft +// Active Directory. 
+package activedirectory + +import ( + "context" + "crypto/tls" + "errors" + "fmt" + "net" + "net/url" + "time" + + "github.com/go-ldap/ldap/v3" + + v2 "github.com/elastic/beats/v7/filebeat/input/v2" + "github.com/elastic/beats/v7/libbeat/beat" + "github.com/elastic/beats/v7/x-pack/filebeat/input/entityanalytics/internal/kvstore" + "github.com/elastic/beats/v7/x-pack/filebeat/input/entityanalytics/provider" + "github.com/elastic/beats/v7/x-pack/filebeat/input/entityanalytics/provider/activedirectory/internal/activedirectory" + "github.com/elastic/elastic-agent-libs/config" + "github.com/elastic/elastic-agent-libs/logp" + "github.com/elastic/elastic-agent-libs/mapstr" + "github.com/elastic/elastic-agent-libs/transport/httpcommon" + "github.com/elastic/elastic-agent-libs/transport/tlscommon" + "github.com/elastic/go-concert/ctxtool" +) + +func init() { + err := provider.Register(Name, New) + if err != nil { + panic(err) + } +} + +// Name of this provider. +const Name = "activedirectory" + +// FullName of this provider, including the input name. Prefer using this +// value for full context, especially if the input name isn't present in an +// adjacent log field. +const FullName = "entity-analytics-" + Name + +// adInput implements the provider.Provider interface. +type adInput struct { + *kvstore.Manager + + cfg conf + baseDN *ldap.DN + tlsConfig *tls.Config + + metrics *inputMetrics + logger *logp.Logger +} + +// New creates a new instance of an Active Directory identity provider. +func New(logger *logp.Logger) (provider.Provider, error) { + p := adInput{ + cfg: defaultConfig(), + } + p.Manager = &kvstore.Manager{ + Logger: logger, + Type: FullName, + Configure: p.configure, + } + + return &p, nil +} + +// configure configures this provider using the given configuration. +func (p *adInput) configure(cfg *config.C) (kvstore.Input, error) { + err := cfg.Unpack(&p.cfg) + if err != nil { + return nil, fmt.Errorf("unable to unpack %s input config: %w", Name, err) + } + p.baseDN, err = ldap.ParseDN(p.cfg.BaseDN) + if err != nil { + return nil, err + } + u, err := url.Parse(p.cfg.URL) + if err != nil { + return nil, err + } + if p.cfg.TLS.IsEnabled() && u.Scheme == "ldaps" { + tlsConfig, err := tlscommon.LoadTLSConfig(p.cfg.TLS) + if err != nil { + return nil, err + } + host, _, err := net.SplitHostPort(u.Host) + var addrErr *net.AddrError + switch { + case err == nil: + case errors.As(err, &addrErr): + if addrErr.Err != "missing port in address" { + return nil, err + } + host = u.Host + default: + return nil, err + } + p.tlsConfig = tlsConfig.BuildModuleClientConfig(host) + } + return p, nil +} + +// Name returns the name of this provider. +func (p *adInput) Name() string { + return FullName +} + +func (*adInput) Test(v2.TestContext) error { return nil } + +// Run will start data collection on this provider. 
+func (p *adInput) Run(inputCtx v2.Context, store *kvstore.Store, client beat.Client) error {
+	p.logger = inputCtx.Logger.With("provider", Name, "domain", p.cfg.URL)
+	p.metrics = newMetrics(inputCtx.ID, nil)
+	defer p.metrics.Close()
+
+	lastSyncTime, _ := getLastSync(store)
+	syncWaitTime := time.Until(lastSyncTime.Add(p.cfg.SyncInterval))
+	lastUpdateTime, _ := getLastUpdate(store)
+	updateWaitTime := time.Until(lastUpdateTime.Add(p.cfg.UpdateInterval))
+
+	syncTimer := time.NewTimer(syncWaitTime)
+	updateTimer := time.NewTimer(updateWaitTime)
+
+	for {
+		select {
+		case <-inputCtx.Cancelation.Done():
+			if !errors.Is(inputCtx.Cancelation.Err(), context.Canceled) {
+				return inputCtx.Cancelation.Err()
+			}
+			return nil
+		case <-syncTimer.C:
+			start := time.Now()
+			if err := p.runFullSync(inputCtx, store, client); err != nil {
+				p.logger.Errorw("Error running full sync", "error", err)
+				p.metrics.syncError.Inc()
+			}
+			p.metrics.syncTotal.Inc()
+			p.metrics.syncProcessingTime.Update(time.Since(start).Nanoseconds())
+
+			syncTimer.Reset(p.cfg.SyncInterval)
+			p.logger.Debugf("Next sync expected at: %v", time.Now().Add(p.cfg.SyncInterval))
+
+			// Reset the update timer and wait the configured interval. If the
+			// update timer has already fired, then drain the timer's channel
+			// before resetting.
+			if !updateTimer.Stop() {
+				<-updateTimer.C
+			}
+			updateTimer.Reset(p.cfg.UpdateInterval)
+			p.logger.Debugf("Next update expected at: %v", time.Now().Add(p.cfg.UpdateInterval))
+		case <-updateTimer.C:
+			start := time.Now()
+			if err := p.runIncrementalUpdate(inputCtx, store, client); err != nil {
+				p.logger.Errorw("Error running incremental update", "error", err)
+				p.metrics.updateError.Inc()
+			}
+			p.metrics.updateTotal.Inc()
+			p.metrics.updateProcessingTime.Update(time.Since(start).Nanoseconds())
+			updateTimer.Reset(p.cfg.UpdateInterval)
+			p.logger.Debugf("Next update expected at: %v", time.Now().Add(p.cfg.UpdateInterval))
+		}
+	}
+}
+
+// clientOptions returns constructed client configuration options.
+func clientOptions(keepalive httpcommon.WithKeepaliveSettings) []httpcommon.TransportOption {
+	return []httpcommon.TransportOption{
+		httpcommon.WithAPMHTTPInstrumentation(),
+		keepalive,
+	}
+}
+
+// runFullSync performs a full synchronization. It will fetch user and group
+// identities from Active Directory, enrich users with group memberships, and
+// publish all known users (regardless of whether they have been modified) to
+// the given beat.Client.
+func (p *adInput) runFullSync(inputCtx v2.Context, store *kvstore.Store, client beat.Client) error {
+	p.logger.Debugf("Running full sync...")
+
+	p.logger.Debugf("Opening new transaction...")
+	state, err := newStateStore(store)
+	if err != nil {
+		return fmt.Errorf("unable to begin transaction: %w", err)
+	}
+	p.logger.Debugf("Transaction opened")
+	defer func() { // If commit is successful, call to this close will be no-op.
+ closeErr := state.close(false) + if closeErr != nil { + p.logger.Errorw("Error rolling back full sync transaction", "error", closeErr) + } + }() + + ctx := ctxtool.FromCanceller(inputCtx.Cancelation) + p.logger.Debugf("Starting fetch...") + _, err = p.doFetchUsers(ctx, state, true) + if err != nil { + return err + } + + if len(state.users) != 0 { + tracker := kvstore.NewTxTracker(ctx) + + start := time.Now() + p.publishMarker(start, start, inputCtx.ID, true, client, tracker) + for _, u := range state.users { + p.publishUser(u, state, inputCtx.ID, client, tracker) + } + + end := time.Now() + p.publishMarker(end, end, inputCtx.ID, false, client, tracker) + + tracker.Wait() + } + + if ctx.Err() != nil { + return ctx.Err() + } + + state.lastSync = time.Now() + err = state.close(true) + if err != nil { + return fmt.Errorf("unable to commit state: %w", err) + } + + return nil +} + +// runIncrementalUpdate will run an incremental update. The process is similar +// to full synchronization, except only users which have changed (newly +// discovered, modified, or deleted) will be published. +func (p *adInput) runIncrementalUpdate(inputCtx v2.Context, store *kvstore.Store, client beat.Client) error { + p.logger.Debugf("Running incremental update...") + + state, err := newStateStore(store) + if err != nil { + return fmt.Errorf("unable to begin transaction: %w", err) + } + defer func() { // If commit is successful, call to this close will be no-op. + closeErr := state.close(false) + if closeErr != nil { + p.logger.Errorw("Error rolling back incremental update transaction", "error", closeErr) + } + }() + + ctx := ctxtool.FromCanceller(inputCtx.Cancelation) + updatedUsers, err := p.doFetchUsers(ctx, state, false) + if err != nil { + return err + } + + var tracker *kvstore.TxTracker + if len(updatedUsers) != 0 || state.len() != 0 { + // Active Directory does not have a notion of deleted users + // beyond absence from the directory, so compare found users + // with users already known by the state store and if any + // are in the store but not returned in the previous fetch, + // mark them as deleted and publish the deletion. We do not + // have the time of the deletion, so use now. + if state.len() != 0 { + found := make(map[string]bool) + for _, u := range updatedUsers { + found[u.ID] = true + } + deleted := make(map[string]*User) + now := time.Now() + state.forEach(func(u *User) { + if u.State == Deleted || found[u.ID] { + return + } + // This modifies the state store's copy since u + // is a pointer held by the state store map. + u.State = Deleted + u.WhenChanged = now + deleted[u.ID] = u + }) + for _, u := range deleted { + updatedUsers = append(updatedUsers, u) + } + } + if len(updatedUsers) != 0 { + tracker = kvstore.NewTxTracker(ctx) + for _, u := range updatedUsers { + p.publishUser(u, state, inputCtx.ID, client, tracker) + } + tracker.Wait() + } + } + + if ctx.Err() != nil { + return ctx.Err() + } + + state.lastUpdate = time.Now() + if err = state.close(true); err != nil { + return fmt.Errorf("unable to commit state: %w", err) + } + + return nil +} + +// doFetchUsers handles fetching user identities from Active Directory. If +// fullSync is true, then any existing whenChanged will be ignored, forcing a +// full synchronization from Active Directory. +// Returns a set of modified users by ID. 
+func (p *adInput) doFetchUsers(ctx context.Context, state *stateStore, fullSync bool) ([]*User, error) {
+	var since time.Time
+	if !fullSync {
+		since = state.whenChanged
+	}
+
+	entries, err := activedirectory.GetDetails(p.cfg.URL, p.cfg.User, p.cfg.Password, p.baseDN, since, p.cfg.PagingSize, nil, p.tlsConfig)
+	if err != nil {
+		return nil, err
+	}
+	p.logger.Debugf("received %d users from API", len(entries))
+
+	var (
+		users       []*User
+		whenChanged time.Time
+	)
+	if fullSync {
+		for _, u := range entries {
+			state.storeUser(u)
+			if u.WhenChanged.After(whenChanged) {
+				whenChanged = u.WhenChanged
+			}
+		}
+	} else {
+		users = make([]*User, 0, len(entries))
+		for _, u := range entries {
+			users = append(users, state.storeUser(u))
+			if u.WhenChanged.After(whenChanged) {
+				whenChanged = u.WhenChanged
+			}
+		}
+		p.logger.Debugf("processed %d users from API", len(users))
+	}
+	if whenChanged.After(state.whenChanged) {
+		state.whenChanged = whenChanged
+	}
+
+	return users, nil
+}
+
+// publishMarker will publish a write marker document using the given beat.Client.
+// If start is true, then it will be a start marker, otherwise an end marker.
+func (p *adInput) publishMarker(ts, eventTime time.Time, inputID string, start bool, client beat.Client, tracker *kvstore.TxTracker) {
+	fields := mapstr.M{}
+	_, _ = fields.Put("labels.identity_source", inputID)
+
+	if start {
+		_, _ = fields.Put("event.action", "started")
+		_, _ = fields.Put("event.start", eventTime)
+	} else {
+		_, _ = fields.Put("event.action", "completed")
+		_, _ = fields.Put("event.end", eventTime)
+	}
+
+	event := beat.Event{
+		Timestamp: ts,
+		Fields:    fields,
+		Private:   tracker,
+	}
+	tracker.Add()
+	if start {
+		p.logger.Debug("Publishing start write marker")
+	} else {
+		p.logger.Debug("Publishing end write marker")
+	}
+
+	client.Publish(event)
+}
+
+// publishUser will publish a user document using the given beat.Client.
+func (p *adInput) publishUser(u *User, state *stateStore, inputID string, client beat.Client, tracker *kvstore.TxTracker) {
+	userDoc := mapstr.M{}
+
+	_, _ = userDoc.Put("activedirectory", u.User)
+	_, _ = userDoc.Put("labels.identity_source", inputID)
+	_, _ = userDoc.Put("user.id", u.ID)
+
+	switch u.State {
+	case Deleted:
+		_, _ = userDoc.Put("event.action", "user-deleted")
+	case Discovered:
+		_, _ = userDoc.Put("event.action", "user-discovered")
+	case Modified:
+		_, _ = userDoc.Put("event.action", "user-modified")
+	}
+
+	event := beat.Event{
+		Timestamp: time.Now(),
+		Fields:    userDoc,
+		Private:   tracker,
+	}
+	tracker.Add()
+
+	p.logger.Debugf("Publishing user %q", u.ID)
+
+	client.Publish(event)
+}
diff --git a/x-pack/filebeat/input/entityanalytics/provider/activedirectory/activedirectory_test.go b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/activedirectory_test.go
new file mode 100644
index 00000000000..e9a7573bb00
--- /dev/null
+++ b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/activedirectory_test.go
@@ -0,0 +1,142 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License;
+// you may not use this file except in compliance with the Elastic License.
+ +package activedirectory + +import ( + "context" + "encoding/json" + "flag" + "os" + "sort" + "testing" + "time" + + "github.com/go-ldap/ldap/v3" + + "github.com/elastic/elastic-agent-libs/logp" +) + +var logResponses = flag.Bool("log_response", false, "use to log users/groups returned from the API") + +func TestActiveDirectoryDoFetch(t *testing.T) { + url, ok := os.LookupEnv("AD_URL") + if !ok { + t.Skip("activedirectory tests require ${AD_URL} to be set") + } + baseDN, ok := os.LookupEnv("AD_BASE") + if !ok { + t.Skip("activedirectory tests require ${AD_BASE} to be set") + } + user, ok := os.LookupEnv("AD_USER") + if !ok { + t.Skip("activedirectory tests require ${AD_USER} to be set") + } + pass, ok := os.LookupEnv("AD_PASS") + if !ok { + t.Skip("activedirectory tests require ${AD_PASS} to be set") + } + + base, err := ldap.ParseDN(baseDN) + if err != nil { + t.Fatalf("invalid base distinguished name: %v", err) + } + + const dbFilename = "TestActiveDirectoryDoFetch.db" + store := testSetupStore(t, dbFilename) + t.Cleanup(func() { + testCleanupStore(store, dbFilename) + }) + a := adInput{ + cfg: conf{ + BaseDN: baseDN, + URL: url, + User: user, + Password: pass, + }, + baseDN: base, + logger: logp.L(), + } + + ss, err := newStateStore(store) + if err != nil { + t.Fatalf("unexpected error making state store: %v", err) + } + defer ss.close(false) + + ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) + defer cancel() + + var times []time.Time + t.Run("full", func(t *testing.T) { + ss.whenChanged = time.Time{} // Reach back to the start of time. + + users, err := a.doFetchUsers(ctx, ss, false) // We are lying about fullSync since we are not getting users via the store. + if err != nil { + t.Fatalf("unexpected error from doFetch: %v", err) + } + + if len(users) == 0 { + t.Error("expected non-empty result from query") + } + found := false + var gotUsers []string + for _, e := range users { + gotUsers = append(gotUsers, e.ID) + if e.ID == user { + found = true + } + + times = append(times, e.WhenChanged) + } + if !found { + t.Errorf("expected login user to be found in directory: got:%q", gotUsers) + } + + if !*logResponses { + return + } + b, err := json.MarshalIndent(users, "", "\t") + if err != nil { + t.Errorf("failed to marshal users for logging: %v", err) + } + t.Logf("user: %s", b) + }) + if len(times) == 0 { + t.Fatal("no entries found") + } + + // Find the time of the first changed entry for later. + sort.Slice(times, func(i, j int) bool { return times[i].Before(times[j]) }) + since := times[0].Add(time.Second) // Step past first entry by a small amount within LDAP resolution. + var want int + // ... and count all entries since then. + for _, when := range times[1:] { + if !since.After(when) { + want++ + } + } + + t.Run("update", func(t *testing.T) { + ss.whenChanged = since // Reach back until after the first entry. 
+
+		users, err := a.doFetchUsers(ctx, ss, false)
+		if err != nil {
+			t.Fatalf("unexpected error from doFetchUsers: %v", err)
+		}
+
+		if len(users) != want {
+			t.Errorf("unexpected number of results from query since %v: got:%d want:%d", since, len(users), want)
+		}
+
+		if !*logResponses && !t.Failed() {
+			return
+		}
+		b, err := json.MarshalIndent(users, "", "\t")
+		if err != nil {
+			t.Errorf("failed to marshal users for logging: %v", err)
+		}
+		t.Logf("user: %s", b)
+	})
+}
diff --git a/x-pack/filebeat/input/entityanalytics/provider/activedirectory/conf.go b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/conf.go
new file mode 100644
index 00000000000..7dab7f5e456
--- /dev/null
+++ b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/conf.go
@@ -0,0 +1,89 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License;
+// you may not use this file except in compliance with the Elastic License.
+
+package activedirectory
+
+import (
+	"errors"
+	"net"
+	"net/url"
+	"time"
+
+	"github.com/go-ldap/ldap/v3"
+
+	"github.com/elastic/elastic-agent-libs/transport/tlscommon"
+)
+
+// defaultConfig returns a default configuration.
+func defaultConfig() conf {
+	return conf{
+		SyncInterval:   24 * time.Hour,
+		UpdateInterval: 15 * time.Minute,
+	}
+}
+
+// conf contains parameters needed to configure the input.
+type conf struct {
+	BaseDN string `config:"ad_base_dn" validate:"required"`
+
+	URL      string `config:"ad_url" validate:"required"`
+	User     string `config:"ad_user" validate:"required"`
+	Password string `config:"ad_password" validate:"required"`
+
+	PagingSize uint32 `config:"ad_paging_size"`
+
+	// SyncInterval is the time between full
+	// synchronisation operations.
+	SyncInterval time.Duration `config:"sync_interval"`
+	// UpdateInterval is the time between
+	// incremental updates.
+	UpdateInterval time.Duration `config:"update_interval"`
+
+	// TLS provides ssl/tls setup settings
+	TLS *tlscommon.Config `config:"ssl" yaml:"ssl,omitempty" json:"ssl,omitempty"`
+}
+
+var (
+	errInvalidSyncInterval   = errors.New("zero or negative sync_interval")
+	errInvalidUpdateInterval = errors.New("zero or negative update_interval")
+	errSyncBeforeUpdate      = errors.New("sync_interval not longer than update_interval")
+)
+
+// Validate runs validation against the config.
+func (c *conf) Validate() error {
+	switch {
+	case c.SyncInterval <= 0:
+		return errInvalidSyncInterval
+	case c.UpdateInterval <= 0:
+		return errInvalidUpdateInterval
+	case c.SyncInterval <= c.UpdateInterval:
+		return errSyncBeforeUpdate
+	}
+	_, err := ldap.ParseDN(c.BaseDN)
+	if err != nil {
+		return err
+	}
+	u, err := url.Parse(c.URL)
+	if err != nil {
+		return err
+	}
+	if c.TLS.IsEnabled() && u.Scheme == "ldaps" {
+		_, err := tlscommon.LoadTLSConfig(c.TLS)
+		if err != nil {
+			return err
+		}
+		_, _, err = net.SplitHostPort(u.Host)
+		var addrErr *net.AddrError
+		switch {
+		case err == nil:
+		case errors.As(err, &addrErr):
+			if addrErr.Err != "missing port in address" {
+				return err
+			}
+		default:
+			return err
+		}
+	}
+	return nil
+}
diff --git a/x-pack/filebeat/input/entityanalytics/provider/activedirectory/conf_test.go b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/conf_test.go
new file mode 100644
index 00000000000..c518c122635
--- /dev/null
+++ b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/conf_test.go
@@ -0,0 +1,57 @@
+// Copyright Elasticsearch B.V.
and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package activedirectory + +import ( + "testing" + "time" +) + +var validateTests = []struct { + name string + cfg conf + wantErr error +}{ + { + name: "default", + cfg: defaultConfig(), + wantErr: nil, + }, + { + name: "invalid_sync_interval", + cfg: conf{ + SyncInterval: 0, + UpdateInterval: time.Second * 2, + }, + wantErr: errInvalidSyncInterval, + }, + { + name: "invalid_update_interval", + cfg: conf{ + SyncInterval: time.Second, + UpdateInterval: 0, + }, + wantErr: errInvalidUpdateInterval, + }, + { + name: "invalid_relative_intervals", + cfg: conf{ + SyncInterval: time.Second, + UpdateInterval: time.Second * 2, + }, + wantErr: errSyncBeforeUpdate, + }, +} + +func TestConfValidate(t *testing.T) { + for _, test := range validateTests { + t.Run(test.name, func(t *testing.T) { + err := test.cfg.Validate() + if err != test.wantErr { + t.Errorf("unexpected error: got:%v want:%v", err, test.wantErr) + } + }) + } +} diff --git a/x-pack/filebeat/input/entityanalytics/provider/activedirectory/metrics.go b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/metrics.go new file mode 100644 index 00000000000..070deab2886 --- /dev/null +++ b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/metrics.go @@ -0,0 +1,50 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package activedirectory + +import ( + "github.com/rcrowley/go-metrics" + + "github.com/elastic/beats/v7/libbeat/monitoring/inputmon" + "github.com/elastic/elastic-agent-libs/monitoring" + "github.com/elastic/elastic-agent-libs/monitoring/adapter" +) + +// inputMetrics defines metrics for this provider. +type inputMetrics struct { + unregister func() + + syncTotal *monitoring.Uint // The total number of full synchronizations. + syncError *monitoring.Uint // The number of full synchronizations that failed due to an error. + syncProcessingTime metrics.Sample // Histogram of the elapsed full synchronization times in nanoseconds (time of API contact to items sent to output). + updateTotal *monitoring.Uint // The total number of incremental updates. + updateError *monitoring.Uint // The number of incremental updates that failed due to an error. + updateProcessingTime metrics.Sample // Histogram of the elapsed incremental update times in nanoseconds (time of API contact to items sent to output). +} + +// Close removes metrics from the registry. +func (m *inputMetrics) Close() { + m.unregister() +} + +// newMetrics creates a new instance for gathering metrics. 
+func newMetrics(id string, optionalParent *monitoring.Registry) *inputMetrics { + reg, unreg := inputmon.NewInputRegistry(FullName, id, optionalParent) + + out := inputMetrics{ + unregister: unreg, + syncTotal: monitoring.NewUint(reg, "sync_total"), + syncError: monitoring.NewUint(reg, "sync_error"), + syncProcessingTime: metrics.NewUniformSample(1024), + updateTotal: monitoring.NewUint(reg, "update_total"), + updateError: monitoring.NewUint(reg, "update_error"), + updateProcessingTime: metrics.NewUniformSample(1024), + } + + adapter.NewGoMetrics(reg, "sync_processing_time", adapter.Accept).Register("histogram", metrics.NewHistogram(out.syncProcessingTime)) //nolint:errcheck // A unique namespace is used so name collisions are impossible. + adapter.NewGoMetrics(reg, "update_processing_time", adapter.Accept).Register("histogram", metrics.NewHistogram(out.updateProcessingTime)) //nolint:errcheck // A unique namespace is used so name collisions are impossible. + + return &out +} diff --git a/x-pack/filebeat/input/entityanalytics/provider/activedirectory/state_string.go b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/state_string.go new file mode 100644 index 00000000000..2d0c77582fa --- /dev/null +++ b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/state_string.go @@ -0,0 +1,30 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +// Code generated by "stringer -type State"; DO NOT EDIT. + +package activedirectory + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. + var x [1]struct{} + _ = x[Discovered-1] + _ = x[Modified-2] + _ = x[Deleted-3] +} + +const _State_name = "DiscoveredModifiedDeleted" + +var _State_index = [...]uint8{0, 10, 18, 25} + +func (i State) String() string { + i -= 1 + if i < 0 || i >= State(len(_State_index)-1) { + return "State(" + strconv.FormatInt(int64(i+1), 10) + ")" + } + return _State_name[_State_index[i]:_State_index[i+1]] +} diff --git a/x-pack/filebeat/input/entityanalytics/provider/activedirectory/statestore.go b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/statestore.go new file mode 100644 index 00000000000..74486ebaac6 --- /dev/null +++ b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/statestore.go @@ -0,0 +1,208 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+
+package activedirectory
+
+import (
+	"encoding/json"
+	"errors"
+	"fmt"
+	"time"
+
+	"github.com/elastic/beats/v7/x-pack/filebeat/input/entityanalytics/internal/kvstore"
+	"github.com/elastic/beats/v7/x-pack/filebeat/input/entityanalytics/provider/activedirectory/internal/activedirectory"
+)
+
+var (
+	usersBucket = []byte("users")
+	stateBucket = []byte("state")
+
+	whenChangedKey = []byte("when_changed")
+	lastSyncKey    = []byte("last_sync")
+	lastUpdateKey  = []byte("last_update")
+)
+
+//go:generate stringer -type State
+//go:generate go-licenser -license Elastic
+type State int
+
+const (
+	Discovered State = iota + 1
+	Modified
+	Deleted
+)
+
+type User struct {
+	activedirectory.Entry `json:"activedirectory"`
+	State                 State `json:"state"`
+}
+
+// stateStore wraps a kvstore.Transaction and provides convenience methods for
+// accessing and storing relevant data within the kvstore database.
+type stateStore struct {
+	tx *kvstore.Transaction
+
+	// whenChanged is the last whenChanged time in the set of
+	// users and their associated groups.
+	whenChanged time.Time
+
+	// lastSync and lastUpdate are the times of the last full
+	// sync and incremental update operations on users/groups.
+	lastSync   time.Time
+	lastUpdate time.Time
+	users      map[string]*User
+}
+
+// newStateStore creates a new instance of stateStore. It will open a new write
+// transaction on the kvstore and load values from the database. Since this
+// opens a write transaction, only one instance of stateStore may be created
+// at a time. The close function must be called to release the transaction lock
+// on the kvstore database.
+func newStateStore(store *kvstore.Store) (*stateStore, error) {
+	tx, err := store.BeginTx(true)
+	if err != nil {
+		return nil, fmt.Errorf("unable to open state store transaction: %w", err)
+	}
+
+	s := stateStore{
+		users: make(map[string]*User),
+		tx:    tx,
+	}
+
+	err = s.tx.Get(stateBucket, lastSyncKey, &s.lastSync)
+	if err != nil && !errIsItemNotFound(err) {
+		return nil, fmt.Errorf("unable to get last sync time from state: %w", err)
+	}
+	err = s.tx.Get(stateBucket, lastUpdateKey, &s.lastUpdate)
+	if err != nil && !errIsItemNotFound(err) {
+		return nil, fmt.Errorf("unable to get last update time from state: %w", err)
+	}
+	err = s.tx.Get(stateBucket, whenChangedKey, &s.whenChanged)
+	if err != nil && !errIsItemNotFound(err) {
+		return nil, fmt.Errorf("unable to get last change time from state: %w", err)
+	}
+
+	err = s.tx.ForEach(usersBucket, func(key, value []byte) error {
+		var u User
+		err = json.Unmarshal(value, &u)
+		if err != nil {
+			return fmt.Errorf("unable to unmarshal user from state: %w", err)
+		}
+		s.users[u.ID] = &u
+
+		return nil
+	})
+	if err != nil && !errIsItemNotFound(err) {
+		return nil, fmt.Errorf("unable to get users from state: %w", err)
+	}
+
+	return &s, nil
+}
+
+// storeUser stores a user. If the user does not exist in the store, then the
+// user will be marked as discovered. Otherwise, the user will be marked
+// as modified.
+func (s *stateStore) storeUser(u activedirectory.Entry) *User {
+	su := User{Entry: u}
+	if existing, ok := s.users[u.ID]; ok {
+		su.State = Modified
+		*existing = su
+	} else {
+		su.State = Discovered
+		s.users[u.ID] = &su
+	}
+	return &su
+}
+
+// len returns the number of user entries in the state store.
+func (s *stateStore) len() int {
+	return len(s.users)
+}
+
+// forEach iterates over all users in the state store. Changes to the
+// User's fields will be reflected in the state store.
+func (s *stateStore) forEach(fn func(*User)) {
+	for _, u := range s.users {
+		fn(u)
+	}
+}
+
+// close will close out the stateStore. If commit is true, the staged values on the
+// stateStore will be set in the kvstore database, and the transaction will be
+// committed. Otherwise, all changes will be discarded and the transaction will
+// be rolled back. The stateStore must NOT be used after close is called, rather,
+// a new stateStore should be created.
+func (s *stateStore) close(commit bool) (err error) {
+	if !commit {
+		return s.tx.Rollback()
+	}
+
+	// Fallback in case one of the statements below fails. If everything is
+	// successful and Commit is called, then this call to Rollback will be a no-op.
+	defer func() {
+		if err == nil {
+			return
+		}
+		rollbackErr := s.tx.Rollback()
+		if rollbackErr != nil {
+			err = fmt.Errorf("multiple errors during statestore close: %w", errors.Join(err, rollbackErr))
+		}
+	}()
+
+	if !s.lastSync.IsZero() {
+		err = s.tx.Set(stateBucket, lastSyncKey, &s.lastSync)
+		if err != nil {
+			return fmt.Errorf("unable to save last sync time to state: %w", err)
+		}
+	}
+	if !s.lastUpdate.IsZero() {
+		err = s.tx.Set(stateBucket, lastUpdateKey, &s.lastUpdate)
+		if err != nil {
+			return fmt.Errorf("unable to save last update time to state: %w", err)
+		}
+	}
+	if !s.whenChanged.IsZero() {
+		err = s.tx.Set(stateBucket, whenChangedKey, &s.whenChanged)
+		if err != nil {
+			return fmt.Errorf("unable to save last change time to state: %w", err)
+		}
+	}
+
+	for key, value := range s.users {
+		err = s.tx.Set(usersBucket, []byte(key), value)
+		if err != nil {
+			return fmt.Errorf("unable to save user %q to state: %w", key, err)
+		}
+	}
+
+	return s.tx.Commit()
+}
+
+// getLastSync retrieves the last full synchronization time from the kvstore
+// database. If the value doesn't exist, a zero time.Time is returned.
+func getLastSync(store *kvstore.Store) (time.Time, error) {
+	var t time.Time
+	err := store.RunTransaction(false, func(tx *kvstore.Transaction) error {
+		return tx.Get(stateBucket, lastSyncKey, &t)
+	})
+
+	return t, err
+}
+
+// getLastUpdate retrieves the last incremental update time from the kvstore
+// database. If the value doesn't exist, a zero time.Time is returned.
+func getLastUpdate(store *kvstore.Store) (time.Time, error) {
+	var t time.Time
+	err := store.RunTransaction(false, func(tx *kvstore.Transaction) error {
+		return tx.Get(stateBucket, lastUpdateKey, &t)
+	})
+
+	return t, err
+}
+
+// errIsItemNotFound returns true if the error represents an item not found
+// error (bucket not found or key not found).
+func errIsItemNotFound(err error) bool {
+	return errors.Is(err, kvstore.ErrBucketNotFound) || errors.Is(err, kvstore.ErrKeyNotFound)
+}
diff --git a/x-pack/filebeat/input/entityanalytics/provider/activedirectory/statestore_test.go b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/statestore_test.go
new file mode 100644
index 00000000000..747acacd8e3
--- /dev/null
+++ b/x-pack/filebeat/input/entityanalytics/provider/activedirectory/statestore_test.go
@@ -0,0 +1,246 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License;
+// you may not use this file except in compliance with the Elastic License.
+ +package activedirectory + +import ( + "bytes" + "encoding/json" + "errors" + "os" + "testing" + "time" + + "github.com/google/go-cmp/cmp" + + "github.com/elastic/beats/v7/x-pack/filebeat/input/entityanalytics/internal/kvstore" + "github.com/elastic/beats/v7/x-pack/filebeat/input/entityanalytics/provider/activedirectory/internal/activedirectory" + "github.com/elastic/elastic-agent-libs/logp" +) + +func TestStateStore(t *testing.T) { + lastSync, err := time.Parse(time.RFC3339Nano, "2023-01-12T08:47:23.296794-05:00") + if err != nil { + t.Fatalf("failed to parse lastSync") + } + lastUpdate, err := time.Parse(time.RFC3339Nano, "2023-01-12T08:50:04.546457-05:00") + if err != nil { + t.Fatalf("failed to parse lastUpdate") + } + + t.Run("new", func(t *testing.T) { + dbFilename := "TestStateStore_New.db" + store := testSetupStore(t, dbFilename) + t.Cleanup(func() { + testCleanupStore(store, dbFilename) + }) + + // Inject test values into store. + data := []struct { + key []byte + val any + }{ + {key: lastSyncKey, val: lastSync}, + {key: lastUpdateKey, val: lastUpdate}, + } + for _, kv := range data { + err := store.RunTransaction(true, func(tx *kvstore.Transaction) error { + return tx.Set(stateBucket, kv.key, kv.val) + }) + if err != nil { + t.Fatalf("failed to set %s: %v", kv.key, err) + } + } + + ss, err := newStateStore(store) + if err != nil { + t.Fatalf("failed to make new store: %v", err) + } + defer ss.close(false) + + checks := []struct { + name string + got, want any + }{ + {name: "lastSync", got: ss.lastSync, want: lastSync}, + {name: "lastUpdate", got: ss.lastUpdate, want: lastUpdate}, + } + for _, c := range checks { + if !cmp.Equal(c.got, c.want) { + t.Errorf("unexpected results for %s: got:%#v want:%#v", c.name, c.got, c.want) + } + } + }) + + t.Run("close", func(t *testing.T) { + dbFilename := "TestStateStore_Close.db" + store := testSetupStore(t, dbFilename) + t.Cleanup(func() { + testCleanupStore(store, dbFilename) + }) + + wantUsers := map[string]*User{ + "userid": { + State: Discovered, + Entry: activedirectory.Entry{ + ID: "userid", + }, + }, + } + + ss, err := newStateStore(store) + if err != nil { + t.Fatalf("failed to make new store: %v", err) + } + ss.lastSync = lastSync + ss.lastUpdate = lastUpdate + ss.users = wantUsers + + err = ss.close(true) + if err != nil { + t.Fatalf("unexpected error closing: %v", err) + } + + roundTripChecks := []struct { + name string + key []byte + val any + }{ + {name: "lastSyncKey", key: lastSyncKey, val: &ss.lastSync}, + {name: "lastUpdateKey", key: lastUpdateKey, val: &ss.lastUpdate}, + } + for _, check := range roundTripChecks { + want, err := json.Marshal(check.val) + if err != nil { + t.Errorf("unexpected error marshaling %s: %v", check.name, err) + } + var got []byte + err = store.RunTransaction(false, func(tx *kvstore.Transaction) error { + got, err = tx.GetBytes(stateBucket, check.key) + return err + }) + if err != nil { + t.Errorf("unexpected error from store run transaction %s: %v", check.name, err) + } + if !bytes.Equal(got, want) { + t.Errorf("unexpected result after store round-trip for %s: got:%s want:%s", check.name, got, want) + } + } + + users := map[string]*User{} + err = store.RunTransaction(false, func(tx *kvstore.Transaction) error { + return tx.ForEach(usersBucket, func(key, value []byte) error { + var u User + err = json.Unmarshal(value, &u) + if err != nil { + return err + } + users[u.ID] = &u + return nil + }) + }) + if err != nil { + t.Errorf("unexpected error from store run transaction: %v", err) + } + 
if !cmp.Equal(wantUsers, users) { + t.Errorf("unexpected result:\n- want\n+ got\n%s", cmp.Diff(wantUsers, users)) + } + }) + + t.Run("get_last_sync", func(t *testing.T) { + dbFilename := "TestGetLastSync.db" + store := testSetupStore(t, dbFilename) + t.Cleanup(func() { + testCleanupStore(store, dbFilename) + }) + + err := store.RunTransaction(true, func(tx *kvstore.Transaction) error { + return tx.Set(stateBucket, lastSyncKey, lastSync) + }) + if err != nil { + t.Fatalf("failed to set value: %v", err) + } + + got, err := getLastSync(store) + if err != nil { + t.Errorf("unexpected error from getLastSync: %v", err) + } + if !lastSync.Equal(got) { + t.Errorf("unexpected result from getLastSync: got:%v want:%v", got, lastSync) + } + }) + + t.Run("get_last_update", func(t *testing.T) { + dbFilename := "TestGetLastUpdate.db" + store := testSetupStore(t, dbFilename) + t.Cleanup(func() { + testCleanupStore(store, dbFilename) + }) + + err := store.RunTransaction(true, func(tx *kvstore.Transaction) error { + return tx.Set(stateBucket, lastUpdateKey, lastUpdate) + }) + if err != nil { + t.Fatalf("failed to set value: %v", err) + } + + got, err := getLastUpdate(store) + if err != nil { + t.Errorf("unexpected error from getLastUpdate: %v", err) + } + if !lastUpdate.Equal(got) { + t.Errorf("unexpected result from getLastUpdate: got:%v want:%v", got, lastUpdate) + } + }) +} + +func TestErrIsItemFound(t *testing.T) { + tests := []struct { + name string + err error + want bool + }{ + { + name: "bucket-not-found", + err: kvstore.ErrBucketNotFound, + want: true, + }, + { + name: "key-not-found", + err: kvstore.ErrKeyNotFound, + want: true, + }, + { + name: "invalid error", + err: errors.New("test error"), + want: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + got := errIsItemNotFound(test.err) + if got != test.want { + t.Errorf("unexpected result for %s: got:%t want:%t", test.name, got, test.want) + } + }) + } +} + +func ptr[T any](v T) *T { return &v } + +func testSetupStore(t *testing.T, path string) *kvstore.Store { + t.Helper() + + store, err := kvstore.NewStore(logp.L(), path, 0644) + if err != nil { + t.Fatalf("unexpected error making store: %v", err) + } + return store +} + +func testCleanupStore(store *kvstore.Store, path string) { + _ = store.Close() + _ = os.Remove(path) +} diff --git a/x-pack/filebeat/input/etw/config.go b/x-pack/filebeat/input/etw/config.go index 2f3925884f3..4df10e268d3 100644 --- a/x-pack/filebeat/input/etw/config.go +++ b/x-pack/filebeat/input/etw/config.go @@ -35,7 +35,7 @@ type config struct { SessionName string `config:"session_name"` // TraceLevel filters all provider events with a level value // that is less than or equal to this level. - // Allowed values are critical, error, warning, informational, and verbose. + // Allowed values are critical, error, warning, information, and verbose. TraceLevel string `config:"trace_level"` // MatchAnyKeyword is an 8-byte bitmask that enables the filtering of // events from specific provider subcomponents. The provider will write diff --git a/x-pack/filebeat/input/etw/input.go b/x-pack/filebeat/input/etw/input.go index 021805ebdfa..f030ada04e0 100644 --- a/x-pack/filebeat/input/etw/input.go +++ b/x-pack/filebeat/input/etw/input.go @@ -182,7 +182,7 @@ var ( // buildEvent builds the final beat.Event emitted by this input. 
func buildEvent(data map[string]any, h etw.EventHeader, session *etw.Session, cfg config) beat.Event {
 	winlog := map[string]any{
-		"activity_guid": h.ActivityId.String(),
+		"activity_id":   h.ActivityId.String(),
 		"channel":       strconv.FormatUint(uint64(h.EventDescriptor.Channel), 10),
 		"event_data":    data,
 		"flags":         strconv.FormatUint(uint64(h.Flags), 10),
diff --git a/x-pack/filebeat/input/etw/input_test.go b/x-pack/filebeat/input/etw/input_test.go
index 95c9167a696..a55d22c7b70 100644
--- a/x-pack/filebeat/input/etw/input_test.go
+++ b/x-pack/filebeat/input/etw/input_test.go
@@ -367,8 +367,8 @@ func Test_buildEvent(t *testing.T) {
 
 			expected: mapstr.M{
 				"winlog": map[string]any{
-					"activity_guid": "{12345678-1234-1234-1234-123456789ABC}",
-					"channel":       "10",
+					"activity_id": "{12345678-1234-1234-1234-123456789ABC}",
+					"channel":     "10",
 					"event_data": map[string]any{
 						"key": "value",
 					},
@@ -435,8 +435,8 @@ func Test_buildEvent(t *testing.T) {
 
 			expected: mapstr.M{
 				"winlog": map[string]any{
-					"activity_guid": "{12345678-1234-1234-1234-123456789ABC}",
-					"channel":       "10",
+					"activity_id": "{12345678-1234-1234-1234-123456789ABC}",
+					"channel":     "10",
 					"event_data": map[string]any{
 						"key": "value",
 					},
@@ -461,7 +461,7 @@ func Test_buildEvent(t *testing.T) {
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			evt := buildEvent(tt.data, tt.header, tt.session, tt.cfg)
-			assert.Equal(t, tt.expected["winlog"].(map[string]any)["activity_guid"], evt.Fields["winlog"].(map[string]any)["activity_guid"])
+			assert.Equal(t, tt.expected["winlog"].(map[string]any)["activity_id"], evt.Fields["winlog"].(map[string]any)["activity_id"])
 			assert.Equal(t, tt.expected["winlog"].(map[string]any)["channel"], evt.Fields["winlog"].(map[string]any)["channel"])
 			assert.Equal(t, tt.expected["winlog"].(map[string]any)["event_data"], evt.Fields["winlog"].(map[string]any)["event_data"])
 			assert.Equal(t, tt.expected["winlog"].(map[string]any)["flags"], evt.Fields["winlog"].(map[string]any)["flags"])
diff --git a/x-pack/filebeat/input/gcs/scheduler.go b/x-pack/filebeat/input/gcs/scheduler.go
index c68c3123cf2..ea992b49924 100644
--- a/x-pack/filebeat/input/gcs/scheduler.go
+++ b/x-pack/filebeat/input/gcs/scheduler.go
@@ -233,6 +233,7 @@ func (s *scheduler) addFailedJobs(ctx context.Context, jobs []*job) []*job {
 		obj, err := s.bucket.Object(name).Attrs(ctx)
 		if err != nil {
-			s.log.Errorf("adding failed job %s to job list caused an error: %w", err)
+			s.log.Errorf("adding failed job %s to job list caused an error: %v", name, err)
+			continue
 		}
 
 		objectURI := "gs://" + s.src.BucketName + "/" + obj.Name
diff --git a/x-pack/filebeat/input/http_endpoint/config.go b/x-pack/filebeat/input/http_endpoint/config.go
index 3b0c97741de..1618dc90758 100644
--- a/x-pack/filebeat/input/http_endpoint/config.go
+++ b/x-pack/filebeat/input/http_endpoint/config.go
@@ -37,6 +37,7 @@ type config struct {
 	URL          string `config:"url" validate:"required"`
 	Prefix       string `config:"prefix"`
 	ContentType  string `config:"content_type"`
+	Program      string `config:"program"`
 	SecretHeader string `config:"secret.header"`
 	SecretValue  string `config:"secret.value"`
 	HMACHeader   string `config:"hmac.header"`
diff --git a/x-pack/filebeat/input/http_endpoint/handler.go b/x-pack/filebeat/input/http_endpoint/handler.go
index 0e2620b5b65..3d0948489ac 100644
--- a/x-pack/filebeat/input/http_endpoint/handler.go
+++ b/x-pack/filebeat/input/http_endpoint/handler.go
@@ -12,10 +12,16 @@ import (
 	"io"
 	"net"
 	"net/http"
+	"reflect"
 	"time"
 
+	"github.com/google/cel-go/cel"
+	"github.com/google/cel-go/checker/decls"
+	"github.com/google/cel-go/common/types"
+
"github.com/google/cel-go/common/types/ref" "go.uber.org/zap" "go.uber.org/zap/zapcore" + "google.golang.org/protobuf/types/known/structpb" stateless "github.com/elastic/beats/v7/filebeat/input/v2/input-stateless" "github.com/elastic/beats/v7/libbeat/beat" @@ -24,6 +30,7 @@ import ( "github.com/elastic/beats/v7/x-pack/filebeat/input/internal/httplog" "github.com/elastic/elastic-agent-libs/logp" "github.com/elastic/elastic-agent-libs/mapstr" + "github.com/elastic/mito/lib" ) const headerContentEncoding = "Content-Encoding" @@ -43,6 +50,7 @@ type handler struct { reqLogger *zap.Logger host, scheme string + program *program messageField string responseCode int responseBody string @@ -80,7 +88,7 @@ func (h *handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { r.Body = io.NopCloser(&buf) } - objs, _, status, err := httpReadJSON(body) + objs, _, status, err := httpReadJSON(body, h.program) if err != nil { h.sendAPIErrorResponse(w, r, h.log, status, err) h.metrics.apiErrors.Add(1) @@ -218,22 +226,22 @@ func (h *handler) publishEvent(obj, headers mapstr.M) error { return nil } -func httpReadJSON(body io.Reader) (objs []mapstr.M, rawMessages []json.RawMessage, status int, err error) { +func httpReadJSON(body io.Reader, prg *program) (objs []mapstr.M, rawMessages []json.RawMessage, status int, err error) { if body == http.NoBody { return nil, nil, http.StatusNotAcceptable, errBodyEmpty } - obj, rawMessage, err := decodeJSON(body) + obj, rawMessage, err := decodeJSON(body, prg) if err != nil { return nil, nil, http.StatusBadRequest, err } return obj, rawMessage, http.StatusOK, err } -func decodeJSON(body io.Reader) (objs []mapstr.M, rawMessages []json.RawMessage, err error) { +func decodeJSON(body io.Reader, prg *program) (objs []mapstr.M, rawMessages []json.RawMessage, err error) { decoder := json.NewDecoder(body) for decoder.More() { var raw json.RawMessage - if err := decoder.Decode(&raw); err != nil { + if err = decoder.Decode(&raw); err != nil { if err == io.EOF { //nolint:errorlint // This will never be a wrapped error. break } @@ -241,9 +249,22 @@ func decodeJSON(body io.Reader) (objs []mapstr.M, rawMessages []json.RawMessage, } var obj interface{} - if err := newJSONDecoder(bytes.NewReader(raw)).Decode(&obj); err != nil { + if err = newJSONDecoder(bytes.NewReader(raw)).Decode(&obj); err != nil { return nil, nil, fmt.Errorf("malformed JSON object at stream position %d: %w", decoder.InputOffset(), err) } + + if prg != nil { + obj, err = prg.eval(obj) + if err != nil { + return nil, nil, err + } + // Re-marshal to ensure the raw bytes agree with the constructed object. 
+ raw, err = json.Marshal(obj) + if err != nil { + return nil, nil, fmt.Errorf("failed to remarshal object: %w", err) + } + } + switch v := obj.(type) { case map[string]interface{}: objs = append(objs, v) @@ -265,6 +286,86 @@ func decodeJSON(body io.Reader) (objs []mapstr.M, rawMessages []json.RawMessage, return objs, rawMessages, nil } +type program struct { + prg cel.Program + ast *cel.Ast +} + +func newProgram(src string) (*program, error) { + if src == "" { + return nil, nil + } + + registry, err := types.NewRegistry() + if err != nil { + return nil, fmt.Errorf("failed to create env: %w", err) + } + env, err := cel.NewEnv( + cel.Declarations(decls.NewVar("obj", decls.Dyn)), + cel.OptionalTypes(cel.OptionalTypesVersion(lib.OptionalTypesVersion)), + cel.CustomTypeAdapter(&numberAdapter{registry}), + cel.CustomTypeProvider(registry), + ) + if err != nil { + return nil, fmt.Errorf("failed to create env: %w", err) + } + + ast, iss := env.Compile(src) + if iss.Err() != nil { + return nil, fmt.Errorf("failed compilation: %w", iss.Err()) + } + + prg, err := env.Program(ast) + if err != nil { + return nil, fmt.Errorf("failed program instantiation: %w", err) + } + return &program{prg: prg, ast: ast}, nil +} + +var _ types.Adapter = (*numberAdapter)(nil) + +type numberAdapter struct { + fallback types.Adapter +} + +func (a *numberAdapter) NativeToValue(value any) ref.Val { + if n, ok := value.(json.Number); ok { + var errs []error + i, err := n.Int64() + if err == nil { + return types.Int(i) + } + errs = append(errs, err) + f, err := n.Float64() + if err == nil { + return types.Double(f) + } + errs = append(errs, err) + return types.NewErr("%v", errors.Join(errs...)) + } + return a.fallback.NativeToValue(value) +} + +func (p *program) eval(obj interface{}) (interface{}, error) { + out, _, err := p.prg.Eval(map[string]interface{}{"obj": obj}) + if err != nil { + err = lib.DecoratedError{AST: p.ast, Err: err} + return nil, fmt.Errorf("failed eval: %w", err) + } + + v, err := out.ConvertToNative(reflect.TypeOf((*structpb.Value)(nil))) + if err != nil { + return nil, fmt.Errorf("failed proto conversion: %w", err) + } + switch v := v.(type) { + case *structpb.Value: + return v.AsInterface(), nil + default: + // This should never happen. + return nil, fmt.Errorf("unexpected native conversion type: %T", v) + } +} + func decodeJSONArray(raw *bytes.Reader) (objs []mapstr.M, rawMessages []json.RawMessage, err error) { dec := newJSONDecoder(raw) token, err := dec.Token() diff --git a/x-pack/filebeat/input/http_endpoint/handler_test.go b/x-pack/filebeat/input/http_endpoint/handler_test.go index 6660508b15b..cb911f8ab18 100644 --- a/x-pack/filebeat/input/http_endpoint/handler_test.go +++ b/x-pack/filebeat/input/http_endpoint/handler_test.go @@ -38,6 +38,7 @@ func Test_httpReadJSON(t *testing.T) { tests := []struct { name string body string + program string wantObjs []mapstr.M wantStatus int wantErr bool @@ -135,10 +136,43 @@ func Test_httpReadJSON(t *testing.T) { }, wantStatus: http.StatusOK, }, + { + name: "kinesis", + body: `{ + "requestId": "ed4acda5-034f-9f42-bba1-f29aea6d7d8f", + "timestamp": 1578090901599, + "records": [ + { + "data": "aGVsbG8=" + }, + { + "data": "aGVsbG8gd29ybGQ=" + } + ] +}`, + program: `obj.records.map(r, { + "requestId": obj.requestId, + "timestamp": string(obj.timestamp), // leave timestamp in unix milli for ingest to handle. 
+ "event": r, + })`, + wantRawMessage: []json.RawMessage{ + []byte(`{"event":{"data":"aGVsbG8="},"requestId":"ed4acda5-034f-9f42-bba1-f29aea6d7d8f","timestamp":"1578090901599"}`), + []byte(`{"event":{"data":"aGVsbG8gd29ybGQ="},"requestId":"ed4acda5-034f-9f42-bba1-f29aea6d7d8f","timestamp":"1578090901599"}`), + }, + wantObjs: []mapstr.M{ + {"event": map[string]any{"data": "aGVsbG8="}, "requestId": "ed4acda5-034f-9f42-bba1-f29aea6d7d8f", "timestamp": "1578090901599"}, + {"event": map[string]any{"data": "aGVsbG8gd29ybGQ="}, "requestId": "ed4acda5-034f-9f42-bba1-f29aea6d7d8f", "timestamp": "1578090901599"}, + }, + wantStatus: http.StatusOK, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - gotObjs, rawMessages, gotStatus, err := httpReadJSON(strings.NewReader(tt.body)) + prg, err := newProgram(tt.program) + if err != nil { + t.Fatalf("failed to compile program: %v", err) + } + gotObjs, rawMessages, gotStatus, err := httpReadJSON(strings.NewReader(tt.body), prg) if (err != nil) != tt.wantErr { t.Errorf("httpReadJSON() error = %v, wantErr %v", err, tt.wantErr) return @@ -344,7 +378,7 @@ func Test_apiResponse(t *testing.T) { pub := new(publisher) metrics := newInputMetrics("") defer metrics.Close() - apiHandler := newHandler(ctx, tracerConfig(tc.name, tc.conf, *withTraces), pub, logp.NewLogger("http_endpoint.test"), metrics) + apiHandler := newHandler(ctx, tracerConfig(tc.name, tc.conf, *withTraces), nil, pub, logp.NewLogger("http_endpoint.test"), metrics) // Execute handler. respRec := httptest.NewRecorder() diff --git a/x-pack/filebeat/input/http_endpoint/input.go b/x-pack/filebeat/input/http_endpoint/input.go index ca648b69747..7d5055ebe65 100644 --- a/x-pack/filebeat/input/http_endpoint/input.go +++ b/x-pack/filebeat/input/http_endpoint/input.go @@ -131,6 +131,14 @@ func (p *pool) serve(ctx v2.Context, e *httpEndpoint, pub stateless.Publisher, m metrics.route.Set(u.Path) metrics.isTLS.Set(e.tlsConfig != nil) + var prg *program + if e.config.Program != "" { + prg, err = newProgram(e.config.Program) + if err != nil { + return err + } + } + p.mu.Lock() s, ok := p.servers[e.addr] if ok { @@ -149,7 +157,7 @@ func (p *pool) serve(ctx v2.Context, e *httpEndpoint, pub stateless.Publisher, m return err } log.Infof("Adding %s end point to server on %s", pattern, e.addr) - s.mux.Handle(pattern, newHandler(s.ctx, e.config, pub, log, metrics)) + s.mux.Handle(pattern, newHandler(s.ctx, e.config, prg, pub, log, metrics)) s.idOf[pattern] = ctx.ID p.mu.Unlock() <-s.ctx.Done() @@ -165,7 +173,7 @@ func (p *pool) serve(ctx v2.Context, e *httpEndpoint, pub stateless.Publisher, m srv: srv, } s.ctx, s.cancel = ctxtool.WithFunc(ctx.Cancelation, func() { srv.Close() }) - mux.Handle(pattern, newHandler(s.ctx, e.config, pub, log, metrics)) + mux.Handle(pattern, newHandler(s.ctx, e.config, prg, pub, log, metrics)) p.servers[e.addr] = s p.mu.Unlock() @@ -287,7 +295,7 @@ func (s *server) getErr() error { return s.err } -func newHandler(ctx context.Context, c config, pub stateless.Publisher, log *logp.Logger, metrics *inputMetrics) http.Handler { +func newHandler(ctx context.Context, c config, prg *program, pub stateless.Publisher, log *logp.Logger, metrics *inputMetrics) http.Handler { h := &handler{ log: log, publisher: pub, @@ -305,6 +313,7 @@ func newHandler(ctx context.Context, c config, pub stateless.Publisher, log *log hmacType: c.HMACType, hmacPrefix: c.HMACPrefix, }, + program: prg, messageField: c.Prefix, responseCode: c.ResponseCode, responseBody: c.ResponseBody, diff --git 
a/x-pack/filebeat/input/httpjson/config_auth.go b/x-pack/filebeat/input/httpjson/config_auth.go
index d05592dfa50..b25bab03dd3 100644
--- a/x-pack/filebeat/input/httpjson/config_auth.go
+++ b/x-pack/filebeat/input/httpjson/config_auth.go
@@ -6,7 +6,6 @@ package httpjson
 import (
 	"context"
-	"crypto/x509"
 	"encoding/json"
 	"errors"
 	"fmt"
@@ -309,8 +308,11 @@ func (o *oAuth2Config) validateOktaProvider() error {
 	}
 	// jwk_pem
 	if o.OktaJWKPEM != "" {
-		_, err := x509.ParsePKCS1PrivateKey([]byte(o.OktaJWKPEM))
-		return err
+		_, err := pemPKCS8PrivateKey([]byte(o.OktaJWKPEM))
+		if err != nil {
+			return fmt.Errorf("okta validation error: %w", err)
+		}
+		return nil
 	}
 	// jwk_file
 	if o.OktaJWKFile != "" {
diff --git a/x-pack/filebeat/input/httpjson/config_okta_auth.go b/x-pack/filebeat/input/httpjson/config_okta_auth.go
index c2b4289d9c9..8d2a8415c2e 100644
--- a/x-pack/filebeat/input/httpjson/config_okta_auth.go
+++ b/x-pack/filebeat/input/httpjson/config_okta_auth.go
@@ -12,6 +12,7 @@ import (
 	"encoding/base64"
 	"encoding/json"
 	"encoding/pem"
+	"errors"
 	"fmt"
 	"math/big"
 	"net/http"
@@ -158,17 +159,24 @@ func (i *base64int) UnmarshalJSON(b []byte) error {
 }
 
 func generateOktaJWTPEM(pemdata string, cnf *oauth2.Config) (string, error) {
-	blk, rest := pem.Decode([]byte(pemdata))
-	if rest := bytes.TrimSpace(rest); len(rest) != 0 {
-		return "", fmt.Errorf("PEM text has trailing data: %s", rest)
-	}
-	key, err := x509.ParsePKCS8PrivateKey(blk.Bytes)
+	key, err := pemPKCS8PrivateKey([]byte(pemdata))
 	if err != nil {
 		return "", err
 	}
 	return signJWT(cnf, key)
 }
 
+func pemPKCS8PrivateKey(pemdata []byte) (any, error) {
+	blk, rest := pem.Decode(pemdata)
+	if rest := bytes.TrimSpace(rest); len(rest) != 0 {
+		return nil, fmt.Errorf("PEM text has trailing data: %d bytes", len(rest))
+	}
+	if blk == nil {
+		return nil, errors.New("no PEM data")
+	}
+	return x509.ParsePKCS8PrivateKey(blk.Bytes)
+}
+
 // signJWT creates a JWT token using required claims and signs it with the private key.
 func signJWT(cnf *oauth2.Config, key any) (string, error) {
 	now := time.Now()
@@ -179,7 +187,7 @@ func signJWT(cnf *oauth2.Config, key any) (string, error) {
 		Expiration(now.Add(time.Hour)).
Build() if err != nil { - return "", err + return "", fmt.Errorf("failed to create token: %w", err) } signedToken, err := jwt.Sign(tok, jwt.WithKey(jwa.RS256, key)) if err != nil { diff --git a/x-pack/filebeat/input/httpjson/config_test.go b/x-pack/filebeat/input/httpjson/config_test.go index d88c6ac4a62..910510b6e9c 100644 --- a/x-pack/filebeat/input/httpjson/config_test.go +++ b/x-pack/filebeat/input/httpjson/config_test.go @@ -499,6 +499,47 @@ func TestConfigOauth2Validation(t *testing.T) { }, }, }, + { + name: "okta successful pem oauth2 validation", + input: map[string]interface{}{ + "auth.oauth2": map[string]interface{}{ + "provider": "okta", + "client.id": "a_client_id", + "token_url": "localhost", + "scopes": []string{"foo"}, + "okta.jwk_pem": ` +-----BEGIN PRIVATE KEY----- +MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCOuef3HMRhohVT +5kSoAJgV+atpDjkwTwkOq+ImnbBlv75GaApG90w8VpjXjhqN/1KJmwfyrKiquiMq +OPu+o/672Dys5rUAaWSbT7wRF1GjLDDZrM0GHRdV4DGxM/LKI8I5yE1Mx3EzV+D5 +ZLmcRc5U4oEoMwtGpr0zRZ7uUr6a28UQwcUsVIPItc1/9rERlo1WTv8dcaj4ECC3 +2Sc0y/F+9XqwJvLd4Uv6ckzP0Sv4tbDA+7jpD9MneAIUiZ4LVj2cwbBd+YRY6jXx +MkevcCSmSX60clBY1cIFkw1DYHqtdHEwAQcQHLGMoi72xRP2qrdzIPsaTKVYoHVo +WA9vADdHAgMBAAECggEAIlx7jjCsztyYyeQsL05FTzUWoWo9NnYwtgmHnshkCXsK +MiUmJEOxZO1sSqj5l6oakupyFWigCspZYPbrFNCiqVK7+NxqQzkccY/WtT6p9uDS +ufUyPwCN96zMCd952lSVlBe3FH8Hr9a+YQxw60CbFjCZ67WuR0opTsi6JKJjJSDb +TQQZ4qJR97D05I1TgfmO+VO7G/0/dDaNHnnlYz0AnOgZPSyvrU2G5cYye4842EMB +ng81xjHD+xp55JNui/xYkhmYspYhrB2KlEjkKb08OInUjBeaLEAgA1r9yOHsfV/3 +DQzDPRO9iuqx5BfJhdIqUB1aifrye+sbxt9uMBtUgQKBgQDVdfO3GYT+ZycOQG9P +QtdMn6uiSddchVCGFpk331u6M6yafCKjI/MlJDl29B+8R5sVsttwo8/qnV/xd3cn +pY14HpKAsE4l6/Ciagzoj+0NqfPEDhEzbo8CyArcd7pSxt3XxECAfZe2+xivEPHe +gFO60vSFjFtvlLRMDMOmqX3kYQKBgQCrK1DISyQTnD6/axsgh2/ESOmT7n+JRMx/ +YzA7Lxu3zGzUC8/sRDa1C41t054nf5ZXJueYLDSc4kEAPddzISuCLxFiTD2FQ75P +lHWMgsEzQObDm4GPE9cdKOjoAvtAJwbvZcjDa029CDx7aCaDzbNvdmplZ7EUrznR +55U8Wsm8pwKBgBytxTmzZwfbCgdDJvFKNKzpwuCB9TpL+v6Y6Kr2Clfg+26iAPFU +MiWqUUInGGBuamqm5g6jI5sM28gQWeTsvC4IRXyes1Eq+uCHSQax15J/Y+3SSgNT +9kjUYYkvWMwoRcPobRYWSZze7XkP2L8hFJ7EGvAaZGqAWxzgliS9HtnhAoGAONZ/ +UqMw7Zoac/Ga5mhSwrj7ZvXxP6Gqzjofj+eKqrOlB5yMhIX6LJATfH6iq7cAMxxm +Fu/G4Ll4oB3o5wACtI3wldV/MDtYfJBtoCTjBqPsfNOsZ9hMvBATlsc2qwzKjsAb +tFhzTevoOYpSD75EcSS/G8Ec2iN9bagatBnpl00CgYBVqAOFZelNfP7dj//lpk8y +EUAw7ABOq0S9wkpFWTXIVPoBQUipm3iAUqGNPmvr/9ShdZC9xeu5AwKram4caMWJ +ExRhcDP1hFM6CdmSkIYEgBKvN9N0O4Lx1ba34gk74Hm65KXxokjJHOC0plO7c7ok +LNV/bIgMHOMoxiGrwyjAhg== +-----END PRIVATE KEY----- +`, + }, + }, + }, } for _, c := range cases { diff --git a/x-pack/filebeat/input/httpjson/encoding.go b/x-pack/filebeat/input/httpjson/encoding.go index 5dd62f10535..a7da4f25c0d 100644 --- a/x-pack/filebeat/input/httpjson/encoding.go +++ b/x-pack/filebeat/input/httpjson/encoding.go @@ -16,6 +16,7 @@ import ( "net/http" "unicode" + "github.com/elastic/elastic-agent-libs/mapstr" "github.com/elastic/mito/lib/xml" ) @@ -64,13 +65,14 @@ type decoderFunc func(p []byte, dst *response) error // encodeAsJSON encodes trReq as a JSON message. func encodeAsJSON(trReq transformable) ([]byte, error) { - if len(trReq.body()) == 0 { + body, err := trReq.GetValue("body") + if err == mapstr.ErrKeyNotFound { return nil, nil } header := trReq.header() header.Set("Content-Type", "application/json") trReq.setHeader(header) - return json.Marshal(trReq.body()) + return json.Marshal(body) } // decodeAsJSON decodes the JSON message in p into dst. 
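
Both Okta provider hunks above (cel and httpjson) replace x509.ParsePKCS1PrivateKey with a shared pemPKCS8PrivateKey helper, so okta.jwk_pem must now hold a PKCS#8 ("PRIVATE KEY") block with no trailing data. The following is a minimal standalone sketch of that parse path, not part of the patch; the freshly generated RSA key is a stand-in for a real Okta signing key.

package main

import (
	"bytes"
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"encoding/pem"
	"fmt"
)

func main() {
	// Generate a throwaway RSA key and encode it as PKCS#8 PEM, the shape
	// okta.jwk_pem is now validated against.
	key, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		panic(err)
	}
	der, err := x509.MarshalPKCS8PrivateKey(key)
	if err != nil {
		panic(err)
	}
	pemData := pem.EncodeToMemory(&pem.Block{Type: "PRIVATE KEY", Bytes: der})

	// Mirror the helper's checks in the same order: reject trailing data,
	// reject input with no PEM block, then parse the block as PKCS#8.
	blk, rest := pem.Decode(pemData)
	if rest := bytes.TrimSpace(rest); len(rest) != 0 {
		fmt.Printf("PEM text has trailing data: %d bytes\n", len(rest))
		return
	}
	if blk == nil {
		fmt.Println("no PEM data")
		return
	}
	if _, err := x509.ParsePKCS8PrivateKey(blk.Bytes); err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Println("valid PKCS#8 private key")
}

A PKCS#1 ("RSA PRIVATE KEY") block fails the final parse, which is what surfaces as the wrapped "okta validation error" in the new validateOktaProvider code.
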
diff --git a/x-pack/filebeat/input/httpjson/input_test.go b/x-pack/filebeat/input/httpjson/input_test.go index 498ccc86183..6604c999d44 100644 --- a/x-pack/filebeat/input/httpjson/input_test.go +++ b/x-pack/filebeat/input/httpjson/input_test.go @@ -93,6 +93,17 @@ var testCases = []struct { handler: defaultHandler(http.MethodPost, `{"test":"abc"}`, ""), expected: []string{`{"hello":[{"world":"moon"},{"space":[{"cake":"pumpkin"}]}]}`}, }, + { + name: "POST_request_with_empty_object_body", + setupServer: newTestServer(httptest.NewServer), + baseConfig: map[string]interface{}{ + "interval": 1, + "request.method": http.MethodPost, + "request.body": map[string]interface{}{}, + }, + handler: defaultHandler(http.MethodPost, `{}`, ""), + expected: []string{`{"hello":[{"world":"moon"},{"space":[{"cake":"pumpkin"}]}]}`}, + }, { name: "repeated_POST_requests", setupServer: newTestServer(httptest.NewServer), @@ -1516,7 +1527,7 @@ func defaultHandler(expectedMethod, expectedBody, msg string) http.HandlerFunc { r.Body.Close() if expectedBody != string(body) { w.WriteHeader(http.StatusBadRequest) - msg = fmt.Sprintf(`{"error":"expected body was %q"}`, expectedBody) + msg = fmt.Sprintf(`{"error":"expected body was %q, but got %q"}`, expectedBody, body) } } diff --git a/x-pack/filebeat/input/httpjson/request.go b/x-pack/filebeat/input/httpjson/request.go index 9e60d22ac49..3e63f026716 100644 --- a/x-pack/filebeat/input/httpjson/request.go +++ b/x-pack/filebeat/input/httpjson/request.go @@ -440,7 +440,7 @@ func (rf *requestFactory) newRequest(ctx *transformContext) (transformable, erro req := transformable{} req.setURL(rf.url) - if rf.body != nil && len(*rf.body) > 0 { + if rf.body != nil { req.setBody(rf.body.Clone()) } diff --git a/x-pack/filebeat/module/o365/audit/test/13-dlp-exchange.log-expected.json b/x-pack/filebeat/module/o365/audit/test/13-dlp-exchange.log-expected.json index 6eae8240451..c6d25a2cc57 100644 --- a/x-pack/filebeat/module/o365/audit/test/13-dlp-exchange.log-expected.json +++ b/x-pack/filebeat/module/o365/audit/test/13-dlp-exchange.log-expected.json @@ -792,7 +792,6 @@ "forwarded" ], "url.domain": "example.net", - "url.extension": "com/sharepoint", "url.original": "https://example.net/testsiem2.onmicrosoft.com/sharepoint", "url.path": "/testsiem2.onmicrosoft.com/sharepoint", "url.scheme": "https", @@ -801,4 +800,4 @@ "user.id": "alice@testsiem2.onmicrosoft.com", "user.name": "alice" } -] \ No newline at end of file +] diff --git a/x-pack/metricbeat/metricbeat.reference.yml b/x-pack/metricbeat/metricbeat.reference.yml index 1e6abf11a60..f71e58904fd 100644 --- a/x-pack/metricbeat/metricbeat.reference.yml +++ b/x-pack/metricbeat/metricbeat.reference.yml @@ -1172,6 +1172,18 @@ metricbeat.modules: # By setting raw to true, all raw fields from the status metricset will be added to the event. #raw: false + # Optional SSL/TLS. By default is false. 
+ #ssl.enabled: true + + # List of root certificates for SSL/TLS server verification + #ssl.certificate_authorities: ["/etc/pki/root/ca.crt"] + + # Certificate for SSL/TLS client authentication + #ssl.certificate: "/etc/pki/client/cert.crt" + + # Client certificate key file + #ssl.key: "/etc/pki/client/cert.key" + #--------------------------------- NATS Module --------------------------------- - module: nats metricsets: diff --git a/x-pack/metricbeat/module/azure/mock_service.go b/x-pack/metricbeat/module/azure/mock_service.go index 65f606dde12..9626952fa6d 100644 --- a/x-pack/metricbeat/module/azure/mock_service.go +++ b/x-pack/metricbeat/module/azure/mock_service.go @@ -29,8 +29,8 @@ func (client *MockService) GetResourceDefinitions(id []string, group []string, r return args.Get(0).([]*armresources.GenericResourceExpanded), args.Error(1) } -// GetMetricDefinitions is a mock function for the azure service -func (client *MockService) GetMetricDefinitions(resourceId string, namespace string) (armmonitor.MetricDefinitionCollection, error) { +// GetMetricDefinitionsWithRetry is a mock function for the azure service +func (client *MockService) GetMetricDefinitionsWithRetry(resourceId string, namespace string) (armmonitor.MetricDefinitionCollection, error) { args := client.Called(resourceId, namespace) return args.Get(0).(armmonitor.MetricDefinitionCollection), args.Error(1) } diff --git a/x-pack/metricbeat/module/azure/monitor/client_helper.go b/x-pack/metricbeat/module/azure/monitor/client_helper.go index 9d69f67f687..5fa5b9964e6 100644 --- a/x-pack/metricbeat/module/azure/monitor/client_helper.go +++ b/x-pack/metricbeat/module/azure/monitor/client_helper.go @@ -20,12 +20,24 @@ const missingNamespace = "no metric definitions were found for resource %s and n // mapMetrics should validate and map the metric related configuration to relevant azure monitor api parameters func mapMetrics(client *azure.Client, resources []*armresources.GenericResourceExpanded, resourceConfig azure.ResourceConfig) ([]azure.Metric, error) { var metrics []azure.Metric + for _, resource := range resources { + + // We use this map to avoid calling the metrics definition function for the same namespace and same resource + // multiple times. 
+ namespaceMetrics := make(map[string]armmonitor.MetricDefinitionCollection) + for _, metric := range resourceConfig.Metrics { - // get all metrics supported by the namespace provided - metricDefinitions, err := client.AzureMonitorService.GetMetricDefinitions(*resource.ID, metric.Namespace) - if err != nil { - return nil, fmt.Errorf("no metric definitions were found for resource %s and namespace %s %w", *resource.ID, metric.Namespace, err) + + var err error + + metricDefinitions, exists := namespaceMetrics[metric.Namespace] + if !exists { + metricDefinitions, err = client.AzureMonitorService.GetMetricDefinitionsWithRetry(*resource.ID, metric.Namespace) + if err != nil { + return nil, err + } + namespaceMetrics[metric.Namespace] = metricDefinitions } if len(metricDefinitions.Value) == 0 { diff --git a/x-pack/metricbeat/module/azure/monitor/client_helper_test.go b/x-pack/metricbeat/module/azure/monitor/client_helper_test.go index d5c89bbbd78..782d941166b 100644 --- a/x-pack/metricbeat/module/azure/monitor/client_helper_test.go +++ b/x-pack/metricbeat/module/azure/monitor/client_helper_test.go @@ -88,7 +88,7 @@ func TestMapMetric(t *testing.T) { client := azure.NewMockClient() t.Run("return error when no metric definitions were found", func(t *testing.T) { m := &azure.MockService{} - m.On("GetMetricDefinitions", mock.Anything, mock.Anything).Return(armmonitor.MetricDefinitionCollection{}, fmt.Errorf("invalid resource ID")) + m.On("GetMetricDefinitionsWithRetry", mock.Anything, mock.Anything).Return(armmonitor.MetricDefinitionCollection{}, fmt.Errorf("invalid resource ID")) client.AzureMonitorService = m metric, err := mapMetrics(client, []*armresources.GenericResourceExpanded{resource}, resourceConfig) assert.Error(t, err) @@ -97,7 +97,7 @@ func TestMapMetric(t *testing.T) { }) t.Run("return all metrics when all metric names and aggregations were configured", func(t *testing.T) { m := &azure.MockService{} - m.On("GetMetricDefinitions", mock.Anything, mock.Anything).Return(metricDefinitions, nil) + m.On("GetMetricDefinitionsWithRetry", mock.Anything, mock.Anything).Return(metricDefinitions, nil) client.AzureMonitorService = m metricConfig.Name = []string{"*"} resourceConfig.Metrics = []azure.MetricConfig{metricConfig} @@ -112,7 +112,7 @@ func TestMapMetric(t *testing.T) { }) t.Run("return all metrics when specific metric names and aggregations were configured", func(t *testing.T) { m := &azure.MockService{} - m.On("GetMetricDefinitions", mock.Anything, mock.Anything).Return(metricDefinitions, nil) + m.On("GetMetricDefinitionsWithRetry", mock.Anything, mock.Anything).Return(metricDefinitions, nil) client.AzureMonitorService = m metricConfig.Name = []string{"TotalRequests", "Capacity"} metricConfig.Aggregations = []string{"Average"} diff --git a/x-pack/metricbeat/module/azure/monitor_service.go b/x-pack/metricbeat/module/azure/monitor_service.go index 823a9cdf22a..70d79729920 100644 --- a/x-pack/metricbeat/module/azure/monitor_service.go +++ b/x-pack/metricbeat/module/azure/monitor_service.go @@ -6,8 +6,13 @@ package azure import ( "context" + "errors" "fmt" + "net/http" "strings" + "time" + + "github.com/Azure/azure-sdk-for-go/sdk/azcore" "github.com/elastic/elastic-agent-libs/logp" @@ -195,8 +200,43 @@ func (service *MonitorService) GetMetricNamespaces(resourceId string) (armmonito return metricNamespaceCollection, nil } -// GetMetricDefinitions will return all supported metrics based on the resource id and namespace -func (service *MonitorService) GetMetricDefinitions(resourceId string, 
namespace string) (armmonitor.MetricDefinitionCollection, error) {
+// sleepIfPossible checks for a 429 (Too Many Requests) error in the Azure response and looks for the Retry-After header.
+// If the header is present, metricbeat sleeps for that duration and retries; otherwise an error is returned.
+func (service *MonitorService) sleepIfPossible(err error, resourceId string, namespace string) error {
+	errorMsg := "no metric definitions were found for resource " + resourceId + " and namespace " + namespace
+
+	var respError *azcore.ResponseError
+	ok := errors.As(err, &respError)
+	if !ok {
+		return fmt.Errorf("%s, failed to cast error to azcore.ResponseError", errorMsg)
+	}
+	// Check for a TooManyRequests error and retry if that is the case.
+	if respError.StatusCode != http.StatusTooManyRequests {
+		return fmt.Errorf("%s, %w", errorMsg, err)
+	}
+
+	// Check if the error has the Retry-After header.
+	// If it is present, we should try to make this request again.
+	retryAfter := respError.RawResponse.Header.Get("Retry-After")
+	if retryAfter == "" {
+		return fmt.Errorf("%s, %w, failed to find Retry-After header", errorMsg, err)
+	}
+
+	duration, errD := time.ParseDuration(retryAfter + "s")
+	if errD != nil {
+		return fmt.Errorf("%s, failed to parse duration %s from the Retry-After header", errorMsg, retryAfter)
+	}
+
+	service.log.Infof("%s, metricbeat will try again after %s seconds", errorMsg, retryAfter)
+	time.Sleep(duration)
+	service.log.Infof("%s, metricbeat finished sleeping and will try again now", errorMsg)
+
+	return nil
+}
+
+// GetMetricDefinitionsWithRetry returns all supported metrics based on the resource ID and namespace.
+// It checks for an error when moving the pager to the next page, and retries if possible.
+func (service *MonitorService) GetMetricDefinitionsWithRetry(resourceId string, namespace string) (armmonitor.MetricDefinitionCollection, error) {
 	opts := &armmonitor.MetricDefinitionsClientListOptions{}
 
 	if namespace != "" {
@@ -210,9 +250,14 @@ func (service *MonitorService) GetMetricDefinitions(resourceId string, namespace
 	for pager.More() {
 		nextPage, err := pager.NextPage(service.context)
 		if err != nil {
-			return armmonitor.MetricDefinitionCollection{}, err
+			retryError := service.sleepIfPossible(err, resourceId, namespace)
+			if retryError != nil {
+				return armmonitor.MetricDefinitionCollection{}, err
+			}
+			continue
 		}
-		metricDefinitionCollection.Value = append(metricDefinitionCollection.Value, nextPage.Value...)
+
+		metricDefinitionCollection.Value = append(metricDefinitionCollection.Value, nextPage.Value...)
} diff --git a/x-pack/metricbeat/module/azure/service_interface.go b/x-pack/metricbeat/module/azure/service_interface.go index 39a7da63621..cb524c7f6ea 100644 --- a/x-pack/metricbeat/module/azure/service_interface.go +++ b/x-pack/metricbeat/module/azure/service_interface.go @@ -13,7 +13,7 @@ import ( type Service interface { GetResourceDefinitionById(id string) (armresources.GenericResource, error) GetResourceDefinitions(id []string, group []string, rType string, query string) ([]*armresources.GenericResourceExpanded, error) - GetMetricDefinitions(resourceId string, namespace string) (armmonitor.MetricDefinitionCollection, error) + GetMetricDefinitionsWithRetry(resourceId string, namespace string) (armmonitor.MetricDefinitionCollection, error) GetMetricNamespaces(resourceId string) (armmonitor.MetricNamespaceCollection, error) GetMetricValues(resourceId string, namespace string, timegrain string, timespan string, metricNames []string, aggregations string, filter string) ([]armmonitor.Metric, string, error) } diff --git a/x-pack/metricbeat/module/azure/storage/client_helper.go b/x-pack/metricbeat/module/azure/storage/client_helper.go index 393607be7ae..e60b9472a57 100644 --- a/x-pack/metricbeat/module/azure/storage/client_helper.go +++ b/x-pack/metricbeat/module/azure/storage/client_helper.go @@ -41,13 +41,13 @@ func mapMetrics(client *azure.Client, resources []*armresources.GenericResourceE } // get all metric definitions supported by the namespace provided - metricDefinitions, err := client.AzureMonitorService.GetMetricDefinitions(resourceID, namespace) + metricDefinitions, err := client.AzureMonitorService.GetMetricDefinitionsWithRetry(resourceID, namespace) if err != nil { - return nil, fmt.Errorf("no metric definitions were found for resource %s and namespace %s %w", resourceID, namespace, err) + return nil, err } if len(metricDefinitions.Value) == 0 { - return nil, fmt.Errorf("no metric definitions were found for resource %s and namespace %s %w", resourceID, namespace, err) + return nil, fmt.Errorf("no metric definitions were found for resource %s and namespace %s", resourceID, namespace) } var filteredMetricDefinitions []armmonitor.MetricDefinition diff --git a/x-pack/metricbeat/module/azure/storage/client_helper_test.go b/x-pack/metricbeat/module/azure/storage/client_helper_test.go index ecdf4941ac9..14121c3a0b3 100644 --- a/x-pack/metricbeat/module/azure/storage/client_helper_test.go +++ b/x-pack/metricbeat/module/azure/storage/client_helper_test.go @@ -119,17 +119,17 @@ func TestMapMetric(t *testing.T) { client := azure.NewMockClient() t.Run("return error when no metric definitions were found", func(t *testing.T) { m := &azure.MockService{} - m.On("GetMetricDefinitions", mock.Anything, mock.Anything).Return(emptyMetricDefinitions, nil) + m.On("GetMetricDefinitionsWithRetry", mock.Anything, mock.Anything).Return(emptyMetricDefinitions, nil) client.AzureMonitorService = m metric, err := mapMetrics(client, []*armresources.GenericResourceExpanded{resource}, resourceConfig) assert.Error(t, err) - assert.Equal(t, err.Error(), "no metric definitions were found for resource 123 and namespace Microsoft.Storage/storageAccounts %!w()") + assert.Equal(t, err.Error(), "no metric definitions were found for resource 123 and namespace Microsoft.Storage/storageAccounts") assert.Equal(t, metric, []azure.Metric(nil)) m.AssertExpectations(t) }) t.Run("return mapped metrics correctly", func(t *testing.T) { m := &azure.MockService{} - m.On("GetMetricDefinitions", mock.Anything, 
mock.Anything).Return(metricDefinitions, nil) + m.On("GetMetricDefinitionsWithRetry", mock.Anything, mock.Anything).Return(metricDefinitions, nil) client.AzureMonitorService = m metrics, err := mapMetrics(client, []*armresources.GenericResourceExpanded{resource}, resourceConfig) assert.NoError(t, err) diff --git a/x-pack/packetbeat/magefile.go b/x-pack/packetbeat/magefile.go index 03104ab9157..357e5e23585 100644 --- a/x-pack/packetbeat/magefile.go +++ b/x-pack/packetbeat/magefile.go @@ -172,6 +172,13 @@ func SystemTest(ctx context.Context) error { return devtools.GoTest(ctx, args) } +func getBucketName() string { + if os.Getenv("BUILDKITE") == "true" { + return "ingest-buildkite-ci" + } + return "obs-ci-cache" +} + // getNpcapInstaller gets the installer from the Google Cloud Storage service. // // On Windows platforms, if getNpcapInstaller is invoked with the environment variables @@ -198,7 +205,8 @@ func getNpcapInstaller() error { return err } } + ciBucketName := getBucketName() fmt.Printf("getting %s from private cache\n", installer) - return sh.RunV("gsutil", "cp", "gs://obs-ci-cache/private/"+installer, dstPath) + return sh.RunV("gsutil", "cp", "gs://"+ciBucketName+"/private/"+installer, dstPath) }
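
The magefile change above selects the GCS cache bucket for the Npcap installer from the environment: Buildkite jobs read from ingest-buildkite-ci, while all other environments keep the previous obs-ci-cache bucket as the default. A minimal standalone sketch of the selection logic and the resulting gsutil URL; the installer file name here is a hypothetical placeholder, not taken from the source:

    package main

    import (
    	"fmt"
    	"os"
    )

    // getBucketName mirrors the magefile change: Buildkite builds use the
    // Buildkite CI cache bucket, everything else keeps the previous bucket.
    func getBucketName() string {
    	if os.Getenv("BUILDKITE") == "true" {
    		return "ingest-buildkite-ci"
    	}
    	return "obs-ci-cache"
    }

    func main() {
    	// Hypothetical installer name, for illustration only.
    	installer := "npcap-installer.exe"
    	fmt.Println("gs://" + getBucketName() + "/private/" + installer)
    }

Keying off BUILDKITE=true keeps the old bucket as the fallback, so non-Buildkite callers of getNpcapInstaller are unaffected by the migration.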