diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command index 282fb5a00853..c23f5c3af645 100644 --- a/.buildkite/hooks/pre-command +++ b/.buildkite/hooks/pre-command @@ -3,6 +3,7 @@ set -euo pipefail AWS_SERVICE_ACCOUNT_SECRET_PATH="kv/ci-shared/platform-ingest/aws_account_auth" +PRIVATE_CI_GCS_CREDENTIALS_PATH="kv/ci-shared/platform-ingest/gcp-platform-ingest-ci-service-account" retry() { local retries=$1 @@ -33,7 +34,7 @@ if [[ "$BUILDKITE_PIPELINE_SLUG" == "filebeat" || "$BUILDKITE_PIPELINE_SLUG" == fi fi -if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-libbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-packetbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-winlogbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-libbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-metricbeat" ]]; then +if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-libbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-packetbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-winlogbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-libbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-metricbeat" || "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-packetbeat" ]]; then source .buildkite/scripts/setenv.sh if [[ "${BUILDKITE_COMMAND}" =~ ^buildkite-agent ]]; then echo "Skipped pre-command when running the Upload pipeline" @@ -47,3 +48,10 @@ if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-metricbeat" && "$BUILDKITE_STEP BEATS_AWS_ACCESS_KEY=$(retry 5 vault kv get -field access_key ${AWS_SERVICE_ACCOUNT_SECRET_PATH}) export BEATS_AWS_ACCESS_KEY fi + +if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-xpack-packetbeat" ]]; then + if [[ "$BUILDKITE_STEP_KEY" == "extended-win-10-system-tests" || "$BUILDKITE_STEP_KEY" == "mandatory-win-2022-system-tests" ]]; then + PRIVATE_CI_GCS_CREDENTIALS_SECRET=$(retry 5 vault kv get -field plaintext -format=json ${PRIVATE_CI_GCS_CREDENTIALS_PATH}) + export 
PRIVATE_CI_GCS_CREDENTIALS_SECRET + fi +fi diff --git a/.buildkite/pull-requests.json b/.buildkite/pull-requests.json index 8018411a743a..669b70d6570f 100644 --- a/.buildkite/pull-requests.json +++ b/.buildkite/pull-requests.json @@ -223,6 +223,86 @@ "skip_target_branches": [ ], "skip_ci_on_only_changed": [ ], "always_require_ci_on_changed": ["^x-pack/metricbeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] + }, + { + "enabled": true, + "pipelineSlug": "beats-xpack-auditbeat", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [ ], + "set_commit_status": true, + "build_on_commit": true, + "build_on_comment": true, + "trigger_comment_regex": "^/test x-pack/auditbeat$", + "always_trigger_comment_regex": "^/test x-pack/auditbeat$", + "skip_ci_labels": [ ], + "skip_target_branches": [ ], + "skip_ci_on_only_changed": [ ], + "always_require_ci_on_changed": ["^x-pack/auditbeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] + }, + { + "enabled": true, + "pipelineSlug": "beats-xpack-dockerlogbeat", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [ ], + "set_commit_status": true, + "build_on_commit": true, + "build_on_comment": true, + "trigger_comment_regex": "^/test x-pack/dockerlogbeat$", + "always_trigger_comment_regex": "^/test x-pack/dockerlogbeat$", + "skip_ci_labels": [ ], + "skip_target_branches": [ ], + "skip_ci_on_only_changed": [ ], + "always_require_ci_on_changed": ["^x-pack/dockerlogbeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] + }, + { + "enabled": true, + "pipelineSlug": "beats-xpack-filebeat", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [ ], + "set_commit_status": true, + "build_on_commit": true, + 
"build_on_comment": true, + "trigger_comment_regex": "^/test x-pack/filebeat$", + "always_trigger_comment_regex": "^/test x-pack/filebeat$", + "skip_ci_labels": [ ], + "skip_target_branches": [ ], + "skip_ci_on_only_changed": [ ], + "always_require_ci_on_changed": ["^x-pack/filebeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] + }, + { + "enabled": true, + "pipelineSlug": "beats-xpack-heartbeat", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [ ], + "set_commit_status": true, + "build_on_commit": true, + "build_on_comment": true, + "trigger_comment_regex": "^/test x-pack/heartbeat$", + "always_trigger_comment_regex": "^/test x-pack/heartbeat$", + "skip_ci_labels": [ ], + "skip_target_branches": [ ], + "skip_ci_on_only_changed": [ ], + "always_require_ci_on_changed": ["^x-pack/heartbeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] + }, + { + "enabled": true, + "pipelineSlug": "beats-xpack-osquerybeat", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [ ], + "set_commit_status": true, + "build_on_commit": true, + "build_on_comment": true, + "trigger_comment_regex": "^/test x-pack/osquerybeat$", + "always_trigger_comment_regex": "^/test x-pack/osquerybeat$", + "skip_ci_labels": [ ], + "skip_target_branches": [ ], + "skip_ci_on_only_changed": [ ], + "always_require_ci_on_changed": ["^x-pack/osquerybeat/.*", "^.buildkite/.*", "^go.mod", "^pytest.ini", "^dev-tools/.*", "^libbeat/.*", "^testing/.*", "^x-pack/libbeat/.*"] } ] } diff --git a/.buildkite/scripts/common.sh b/.buildkite/scripts/common.sh index ebb15c937dd2..d6a91a482436 100755 --- a/.buildkite/scripts/common.sh +++ b/.buildkite/scripts/common.sh @@ -22,6 +22,8 @@ XPACK_MODULE_PATTERN="^x-pack\\/[a-z0-9]+beat\\/module\\/([^\\/]+)\\/.*" [ -z "${run_xpack_libbeat_arm_tests+x}" ] 
&& run_xpack_libbeat_arm_tests="$(buildkite-agent meta-data get run_xpack_libbeat_arm_tests --default "false")" [ -z "${run_xpack_metricbeat_aws_tests+x}" ] && run_xpack_metricbeat_aws_tests="$(buildkite-agent meta-data get run_xpack_metricbeat_aws_tests --default "false")" [ -z "${run_xpack_metricbeat_macos_tests+x}" ] && run_xpack_metricbeat_macos_tests="$(buildkite-agent meta-data get run_xpack_metricbeat_macos_tests --default "false")" +[ -z "${run_xpack_packetbeat_arm_tests+x}" ] && run_xpack_packetbeat_arm_tests="$(buildkite-agent meta-data get run_xpack_packetbeat_arm_tests --default "false")" +[ -z "${run_xpack_packetbeat_macos_tests+x}" ] && run_xpack_packetbeat_macos_tests="$(buildkite-agent meta-data get run_xpack_packetbeat_macos_tests --default "false")" metricbeat_changeset=( "^metricbeat/.*" @@ -105,6 +107,9 @@ case "${BUILDKITE_PIPELINE_SLUG}" in "beats-xpack-metricbeat") BEAT_CHANGESET_REFERENCE=${xpack_metricbeat_changeset[@]} ;; + "beats-xpack-packetbeat") + BEAT_CHANGESET_REFERENCE=${xpack_packetbeat_changeset[@]} + ;; *) echo "The changeset for the ${BUILDKITE_PIPELINE_SLUG} pipeline hasn't been defined yet." 
;; diff --git a/.buildkite/scripts/generate_xpack_metricbeat_pipeline.sh b/.buildkite/scripts/generate_xpack_metricbeat_pipeline.sh index c9c65a5e7573..af116b17209d 100755 --- a/.buildkite/scripts/generate_xpack_metricbeat_pipeline.sh +++ b/.buildkite/scripts/generate_xpack_metricbeat_pipeline.sh @@ -31,8 +31,6 @@ steps: provider: "gcp" image: "${DEFAULT_UBUNTU_X86_64_IMAGE}" machineType: "${GCP_DEFAULT_MACHINE_TYPE}" - disk_size: 100 - disk_type: "pd-ssd" artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" - label: ":python: Python Integration Tests" @@ -42,8 +40,6 @@ steps: provider: "gcp" image: "${DEFAULT_UBUNTU_X86_64_IMAGE}" machineType: "${GCP_DEFAULT_MACHINE_TYPE}" - disk_size: 100 - disk_type: "pd-ssd" artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" - label: ":windows: Windows Unit Tests - {{matrix.image}}" @@ -106,8 +102,7 @@ else fi #TODO: replace by commented-out below condition when issues mentioned in the PR https://github.com/elastic/beats/pull/38081 are resolved -if [[ are_conditions_met_aws_tests || are_conditions_met_macos_tests ]]; then -# if [[ are_conditions_met_macos_tests ]]; then +if are_conditions_met_aws_tests || are_conditions_met_macos_tests ; then cat >> $pipelineName <<- YAML - group: "Extended Tests" @@ -140,8 +135,6 @@ if are_conditions_met_aws_tests; then provider: "gcp" image: "${DEFAULT_UBUNTU_X86_64_IMAGE}" machineType: "${GCP_DEFAULT_MACHINE_TYPE}" - disk_size: 100 - disk_type: "pd-ssd" artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" YAML @@ -166,6 +159,8 @@ if are_conditions_met_packaging; then provider: "gcp" image: "${IMAGE_UBUNTU_X86_64}" machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" env: PLATFORMS: "${PACKAGING_PLATFORMS}" diff --git a/.buildkite/scripts/generate_xpack_packetbeat_pipeline.sh b/.buildkite/scripts/generate_xpack_packetbeat_pipeline.sh new file mode 100644 index 000000000000..4eb2a1c3e049 --- /dev/null +++ b/.buildkite/scripts/generate_xpack_packetbeat_pipeline.sh @@ 
-0,0 +1,195 @@ +#!/usr/bin/env bash + +source .buildkite/scripts/common.sh + +set -euo pipefail + +pipelineName="pipeline.xpack-packetbeat-dynamic.yml" + +echo "Add the mandatory and extended tests without additional conditions into the pipeline" +if are_conditions_met_mandatory_tests; then + cat > $pipelineName <<- YAML + +steps: + + - group: "Mandatory Tests" + key: "mandatory-tests" + steps: + - label: ":linux: Ubuntu Unit Tests" + key: "mandatory-linux-unit-test" + command: "cd $BEATS_PROJECT_NAME && mage build unitTest" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + + - label: ":linux: Ubuntu System Tests" + key: "mandatory-linux-system-test" + command: "cd $BEATS_PROJECT_NAME && mage systemTest" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.xml" + + - label: ":rhel: RHEL-9 Unit Tests" + key: "mandatory-rhel9-unit-test" + command: ".buildkite/scripts/unit_tests.sh" + agents: + provider: "gcp" + image: "${IMAGE_RHEL9_X86_64}" + machineType: "${GCP_DEFAULT_MACHINE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + + + - label: ":windows: Windows Unit Tests - {{matrix.image}}" + command: ".buildkite/scripts/win_unit_tests.ps1" + key: "mandatory-win-unit-tests" + agents: + provider: "gcp" + image: "{{matrix.image}}" + machineType: "${GCP_WIN_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + matrix: + setup: + image: + - "${IMAGE_WIN_2016}" + - "${IMAGE_WIN_2022}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + + ## TODO: uncomment when the issue https://github.com/elastic/beats/issues/38142 is solved + # - label: ":windows: Windows 2022 System Tests" + # key: "mandatory-win-2022-system-tests" + # command: ".buildkite/scripts/win_unit_tests.ps1 systemtest" + # agents: + # provider: "gcp" + # image: "${IMAGE_WIN_2022}" + # 
machineType: "${GCP_WIN_MACHINE_TYPE}" + # disk_size: 100 + # disk_type: "pd-ssd" + # artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +## TODO: this condition will be changed in the Phase 3 of the Migration Plan https://docs.google.com/document/d/1IPNprVtcnHlem-uyGZM0zGzhfUuFAh4LeSl9JFHMSZQ/edit#heading=h.sltz78yy249h + + - group: "Extended Windows Tests" + key: "extended-win-tests" + steps: + + - label: ":windows: Windows Unit Tests - {{matrix.image}}" + command: ".buildkite/scripts/win_unit_tests.ps1" + key: "extended-win-unit-tests" + agents: + provider: "gcp" + image: "{{matrix.image}}" + machineType: "${GCP_WIN_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + matrix: + setup: + image: + - "${IMAGE_WIN_10}" + - "${IMAGE_WIN_11}" + - "${IMAGE_WIN_2019}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + + ## TODO: uncomment when the issue https://github.com/elastic/beats/issues/38142 is solved + # - label: ":windows: Windows 10 System Tests" + # key: "extended-win-10-system-tests" + # command: ".buildkite/scripts/win_unit_tests.ps1 systemtest" + # agents: + # provider: "gcp" + # image: "${IMAGE_WIN_10}" + # machineType: "${GCP_WIN_MACHINE_TYPE}" + # disk_size: 100 + # disk_type: "pd-ssd" + # artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +else + echo "The conditions don't match to requirements for generating pipeline steps." 
+ exit 0 +fi + +if are_conditions_met_arm_tests || are_conditions_met_macos_tests ; then + cat >> $pipelineName <<- YAML + + - group: "Extended Tests" + key: "extended-tests" + steps: + +YAML +fi + +if are_conditions_met_macos_tests; then + cat >> $pipelineName <<- YAML + + - label: ":mac: MacOS Unit Tests" + key: "extended-macos-unit-tests" + command: ".buildkite/scripts/unit_tests.sh" + agents: + provider: "orka" + imagePrefix: "${IMAGE_MACOS_X86_64}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +fi + +if are_conditions_met_arm_tests; then + cat >> $pipelineName <<- YAML + - label: ":linux: ARM Ubuntu Unit Tests" + key: "extended-arm64-unit-test" + command: "cd $BEATS_PROJECT_NAME && mage build unitTest" + agents: + provider: "aws" + imagePrefix: "${IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" + artifact_paths: "${BEATS_PROJECT_NAME}/build/*.*" + +YAML +fi + +echo "Check and add the Packaging into the pipeline" +if are_conditions_met_packaging; then + cat >> $pipelineName <<- YAML + + - wait: ~ + depends_on: + - step: "mandatory-tests" + allow_failure: false + + - group: "Packaging" # TODO: check conditions for future the main pipeline migration: https://github.com/elastic/beats/pull/28589 + key: "packaging" + steps: + - label: ":linux: Packaging Linux" + key: "packaging-linux" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_X86_64}" + machineType: "${GCP_HI_PERF_MACHINE_TYPE}" + disk_size: 100 + disk_type: "pd-ssd" + env: + PLATFORMS: "${PACKAGING_PLATFORMS}" + + - label: ":linux: Packaging ARM" + key: "packaging-arm" + command: "cd $BEATS_PROJECT_NAME && mage package" + agents: + provider: "aws" + imagePrefix: "${IMAGE_UBUNTU_ARM_64}" + instanceType: "${AWS_ARM_INSTANCE_TYPE}" + env: + PLATFORMS: "${PACKAGING_ARM_PLATFORMS}" + PACKAGES: "docker" + +YAML +fi + +echo "--- Printing dynamic steps" #TODO: remove if the pipeline is public +cat $pipelineName + +echo "--- 
Loading dynamic steps" +buildkite-agent pipeline upload $pipelineName diff --git a/.buildkite/scripts/setenv.sh b/.buildkite/scripts/setenv.sh index 974886061d44..29a8a05446ec 100755 --- a/.buildkite/scripts/setenv.sh +++ b/.buildkite/scripts/setenv.sh @@ -11,9 +11,9 @@ SETUP_WIN_PYTHON_VERSION="3.11.0" NMAP_WIN_VERSION="7.12" # Earlier versions of NMap provide WinPcap (the winpcap packages don't install nicely because they pop-up a UI) GO_VERSION=$(cat .go-version) ASDF_MAGE_VERSION="1.15.0" -ASDF_TERRAFORM_VERSION="1.0.2" PACKAGING_PLATFORMS="+all linux/amd64 linux/arm64 windows/amd64 darwin/amd64 darwin/arm64" PACKAGING_ARM_PLATFORMS="linux/arm64" +ASDF_TERRAFORM_VERSION="1.0.2" AWS_REGION="eu-central-1" export SETUP_GVM_VERSION diff --git a/.buildkite/scripts/win_unit_tests.ps1 b/.buildkite/scripts/win_unit_tests.ps1 index b3c5c58fac0a..b61e4107c3cb 100644 --- a/.buildkite/scripts/win_unit_tests.ps1 +++ b/.buildkite/scripts/win_unit_tests.ps1 @@ -1,3 +1,7 @@ +param( + [string]$testType = "unittest" +) + $ErrorActionPreference = "Stop" # set -e $WorkFolder = $env:BEATS_PROJECT_NAME $WORKSPACE = Get-Location @@ -120,6 +124,23 @@ function withNmap($version) { } Start-Process -FilePath $nmapDownloadPath -ArgumentList "/S" -Wait } +function google_cloud_auth { + $tempFileName = "google-cloud-credentials.json" + $secretFileLocation = Join-Path $env:TEMP $tempFileName + $null = New-Item -ItemType File -Path $secretFileLocation + Set-Content -Path $secretFileLocation -Value $env:PRIVATE_CI_GCS_CREDENTIALS_SECRET + gcloud auth activate-service-account --key-file $secretFileLocation > $null 2>&1 + $env:GOOGLE_APPLICATION_CREDENTIALS = $secretFileLocation +} + +function google_cloud_auth_cleanup { + if (Test-Path $env:GOOGLE_APPLICATION_CREDENTIALS) { + Remove-Item $env:GOOGLE_APPLICATION_CREDENTIALS -Force + Remove-Item Env:\GOOGLE_APPLICATION_CREDENTIALS + } else { + Write-Host "No GCP credentials were added" + } +} fixCRLF @@ -142,10 +163,23 @@ $env:MAGEFILE_CACHE = 
$magefile New-Item -ItemType Directory -Force -Path "build" -if ($env:BUILDKITE_PIPELINE_SLUG -eq "beats-xpack-libbeat") { - mage -w reader/etw build goUnitTest -} else { - mage build unitTest +if ($testType -eq "unittest") { + if ($env:BUILDKITE_PIPELINE_SLUG -eq "beats-xpack-libbeat") { + mage -w reader/etw build goUnitTest + } else { + mage build unitTest + } +} +elseif ($testType -eq "systemtest") { + try { + google_cloud_auth + mage systemTest + } finally { + google_cloud_auth_cleanup + } +} +else { + Write-Host "Unknown test type. Please specify 'unittest' or 'systemtest'." } $EXITCODE=$LASTEXITCODE diff --git a/.buildkite/x-pack/pipeline.xpack.auditbeat.yml b/.buildkite/x-pack/pipeline.xpack.auditbeat.yml new file mode 100644 index 000000000000..2343eb6a4ddd --- /dev/null +++ b/.buildkite/x-pack/pipeline.xpack.auditbeat.yml @@ -0,0 +1,6 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-auditbeat" + +steps: + - label: "Example test" + command: echo "Hello!" diff --git a/.buildkite/x-pack/pipeline.xpack.dockerlogbeat.yml b/.buildkite/x-pack/pipeline.xpack.dockerlogbeat.yml new file mode 100644 index 000000000000..c4a0805615b1 --- /dev/null +++ b/.buildkite/x-pack/pipeline.xpack.dockerlogbeat.yml @@ -0,0 +1,6 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-dockerlogbeat" + +steps: + - label: "Example test" + command: echo "Hello!" diff --git a/.buildkite/x-pack/pipeline.xpack.filebeat.yml b/.buildkite/x-pack/pipeline.xpack.filebeat.yml new file mode 100644 index 000000000000..6d3d7d9daeee --- /dev/null +++ b/.buildkite/x-pack/pipeline.xpack.filebeat.yml @@ -0,0 +1,6 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-filebeat" + +steps: + - label: "Example test" + command: echo "Hello!" 
diff --git a/.buildkite/x-pack/pipeline.xpack.heartbeat.yml b/.buildkite/x-pack/pipeline.xpack.heartbeat.yml new file mode 100644 index 000000000000..65175d3b029a --- /dev/null +++ b/.buildkite/x-pack/pipeline.xpack.heartbeat.yml @@ -0,0 +1,6 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-heartbeat" + +steps: + - label: "Example test" + command: echo "Hello!" diff --git a/.buildkite/x-pack/pipeline.xpack.osquerybeat.yml b/.buildkite/x-pack/pipeline.xpack.osquerybeat.yml new file mode 100644 index 000000000000..22297e33ab59 --- /dev/null +++ b/.buildkite/x-pack/pipeline.xpack.osquerybeat.yml @@ -0,0 +1,6 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-osquerybeat" + +steps: + - label: "Example test" + command: echo "Hello!" diff --git a/.buildkite/x-pack/pipeline.xpack.packetbeat.yml b/.buildkite/x-pack/pipeline.xpack.packetbeat.yml index 34321b61161b..750b59e716d8 100644 --- a/.buildkite/x-pack/pipeline.xpack.packetbeat.yml +++ b/.buildkite/x-pack/pipeline.xpack.packetbeat.yml @@ -1,5 +1,61 @@ # yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +name: "beats-xpack-packetbeat" + +env: + IMAGE_UBUNTU_X86_64: "family/platform-ingest-beats-ubuntu-2204" + IMAGE_UBUNTU_ARM_64: "platform-ingest-beats-ubuntu-2204-aarch64" + IMAGE_RHEL9_X86_64: "family/platform-ingest-beats-rhel-9" + IMAGE_WIN_10: "family/general-windows-10" + IMAGE_WIN_11: "family/general-windows-11" + IMAGE_WIN_2016: "family/core-windows-2016" + IMAGE_WIN_2019: "family/core-windows-2019" + IMAGE_WIN_2022: "family/core-windows-2022" + IMAGE_MACOS_X86_64: "generic-13-ventura-x64" + GCP_DEFAULT_MACHINE_TYPE: "c2d-highcpu-8" + GCP_HI_PERF_MACHINE_TYPE: "c2d-highcpu-16" + GCP_WIN_MACHINE_TYPE: "n2-standard-8" + AWS_ARM_INSTANCE_TYPE: "t4g.xlarge" + BEATS_PROJECT_NAME: 
"x-pack/packetbeat" steps: - - label: "Example test" - command: echo "Hello!" + + - input: "Input Parameters" + key: "force-run-stages" + fields: + - select: "Packetbeat - run_xpack_packetbeat" + key: "run_xpack_packetbeat" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + - select: "Packetbeat - run_xpack_packetbeat_macos_tests" + key: "run_xpack_packetbeat_macos_tests" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + - select: "Packetbeat - run_xpack_packetbeat_arm_tests" + key: "run_xpack_packetbeat_arm_tests" + options: + - label: "True" + value: "true" + - label: "False" + value: "false" + default: "false" + + if: "build.source == 'ui'" + + - wait: ~ + if: "build.source == 'ui'" + allow_dependency_failure: false + + - label: ":linux: Load dynamic x-pack packetbeat pipeline" + key: "packetbeat-pipeline" + command: ".buildkite/scripts/generate_xpack_packetbeat_pipeline.sh" + notify: + - github_commit_status: + context: "${BEATS_PROJECT_NAME}: Load dynamic pipeline's steps" diff --git a/CHANGELOG.next.asciidoc b/CHANGELOG.next.asciidoc index 280b04c79855..d7515c65bcde 100644 --- a/CHANGELOG.next.asciidoc +++ b/CHANGELOG.next.asciidoc @@ -100,6 +100,7 @@ fields added to events containing the Beats version. {pull}37553[37553] - Prevent HTTPJSON holding response bodies between executions. {issue}35219[35219] {pull}38116[38116] - Fix "failed processing S3 event for object key" error on aws-s3 input when key contains the "+" character {issue}38012[38012] {pull}38125[38125] - Fix duplicated addition of regexp extension in CEL input. {pull}38181[38181] +- Fix the incorrect values generated by the uri_parts processor. {pull}38216[38216] *Heartbeat* @@ -200,6 +201,9 @@ Setting environmental variable ELASTIC_NETINFO:false in Elastic Agent pod will d - Update CEL mito extensions to v1.9.0 to add keys/values helper. 
{pull}37971[37971] - Add logging for cache processor file reads and writes. {pull}38052[38052] - Add parseDateInTZ value template for the HTTPJSON input {pull}37738[37738] +- Improve rate limit handling by HTTPJSON {issue}36207[36207] {pull}38161[38161] {pull}38237[38237] +- Add parseDateInTZ value template for the HTTPJSON input. {pull}37738[37738] +- Add support for complex event objects in the HTTP Endpoint input. {issue}37910[37910] {pull}38193[38193] *Auditbeat* diff --git a/catalog-info.yaml b/catalog-info.yaml index 5e0f94fd2df0..7f99ab9ce83a 100644 --- a/catalog-info.yaml +++ b/catalog-info.yaml @@ -109,7 +109,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -155,7 +155,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -201,7 +201,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -247,7 +247,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: 
$schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -293,7 +293,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -339,7 +339,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -385,7 +385,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -430,7 +430,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json 
apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -476,7 +476,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -497,9 +497,9 @@ spec: name: beats-xpack-winlogbeat description: "Beats x-pack winlogbeat pipeline" spec: - # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + branch_configuration: "main 7.17 8.*" pipeline_file: ".buildkite/x-pack/pipeline.xpack.winlogbeat.yml" - # maximum_timeout_in_minutes: 120 #TODO: uncomment after tests + maximum_timeout_in_minutes: 120 provider_settings: trigger_mode: none # don't trigger jobs from github activity build_pull_request_forks: false @@ -513,8 +513,8 @@ spec: cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" - # env: - # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + env: + ELASTIC_PR_COMMENTS_ENABLED: "true" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -522,7 +522,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -543,9 +543,9 @@ spec: name: beats-xpack-packetbeat description: "Beats x-pack packetbeat pipeline" spec: - # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + branch_configuration: "main 7.17 8.*" pipeline_file: 
".buildkite/x-pack/pipeline.xpack.packetbeat.yml" - # maximum_timeout_in_minutes: 120 #TODO: uncomment after tests + maximum_timeout_in_minutes: 120 provider_settings: trigger_mode: none # don't trigger jobs from github activity build_pull_request_forks: false @@ -559,8 +559,8 @@ spec: cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" - # env: - # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + env: + ELASTIC_PR_COMMENTS_ENABLED: "true" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -568,7 +568,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -589,9 +589,9 @@ spec: name: beats-xpack-libbeat description: "Beats x-pack libbeat pipeline" spec: - # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + branch_configuration: "main 7.17 8.*" pipeline_file: ".buildkite/x-pack/pipeline.xpack.libbeat.yml" - # maximum_timeout_in_minutes: 120 #TODO: uncomment after tests + maximum_timeout_in_minutes: 120 provider_settings: trigger_mode: none # don't trigger jobs from github activity build_pull_request_forks: false @@ -605,8 +605,8 @@ spec: cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" skip_intermediate_builds: true skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" - # env: - # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + env: + ELASTIC_PR_COMMENTS_ENABLED: "true" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -614,7 +614,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: 
$schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -635,9 +635,9 @@ spec: name: beats-xpack-metricbeat description: "Beats x-pack metricbeat pipeline" spec: - # branch_configuration: "7.17" #TODO: uncomment after tests + branch_configuration: "main 7.17 8.*" pipeline_file: ".buildkite/x-pack/pipeline.xpack.metricbeat.yml" - maximum_timeout_in_minutes: 480 + maximum_timeout_in_minutes: 120 provider_settings: trigger_mode: none # don't trigger jobs from github activity build_pull_request_forks: false @@ -648,11 +648,11 @@ spec: build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) repository: elastic/beats cancel_intermediate_builds: true - cancel_intermediate_builds_branch_filter: "!7.17" + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" skip_intermediate_builds: true - skip_intermediate_builds_branch_filter: "!7.17" - # env: - # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + env: + ELASTIC_PR_COMMENTS_ENABLED: "true" teams: ingest-fp: access_level: MANAGE_BUILD_AND_READ @@ -660,7 +660,7 @@ spec: access_level: READ_ONLY --- -# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource metadata: @@ -693,3 +693,268 @@ spec: access_level: BUILD_AND_READ everyone: access_level: READ_ONLY +--- +# yaml-language-server: 
$schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-elastic-agent-binary-dra-7-17 + description: Buildkite pipeline for packaging Elastic Agent core binary and publish it to DRA + links: + - title: Pipeline + url: https://buildkite.com/elastic/buildkite-elastic-agent-binary-dra-7-17 + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: buildkite-elastic-agent-binary-dra-7-17 + description: Buildkite pipeline for packaging Elastic Agent core binary and publish it to DRA + spec: + pipeline_file: ".buildkite/x-pack/elastic-agent/.buildkite/pipeline.xpack.elastic-agent-binary-dra.yml" + provider_settings: + build_branches: true + build_pull_requests: true + cancel_deleted_branch_builds: true + filter_condition: 'build.branch == "7.17" || build.pull_request.base_branch == "7.17"' + filter_enabled: true + repository: elastic/beats + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: BUILD_AND_READ + +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-pipeline-beats-xpack-auditbeat + description: "Beats x-pack auditbeat pipeline" + links: + - title: Pipeline + url: https://buildkite.com/elastic/beats-xpack-auditbeat + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: beats-xpack-auditbeat + description: "Beats x-pack auditbeat pipeline" + spec: + # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + pipeline_file: ".buildkite/x-pack/pipeline.xpack.auditbeat.yml" + 
maximum_timeout_in_minutes: 120 + provider_settings: + trigger_mode: none # don't trigger jobs from github activity + build_pull_request_forks: false + build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot + build_tags: true + filter_enabled: true + filter_condition: >- + build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) + repository: elastic/beats + cancel_intermediate_builds: true + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" + skip_intermediate_builds: true + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + # env: + # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: READ_ONLY + +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-pipeline-beats-xpack-dockerlogbeat + description: "Beats x-pack dockerlogbeat pipeline" + links: + - title: Pipeline + url: https://buildkite.com/elastic/beats-xpack-dockerlogbeat + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: beats-xpack-dockerlogbeat + description: "Beats x-pack dockerlogbeat pipeline" + spec: + # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + pipeline_file: ".buildkite/x-pack/pipeline.xpack.dockerlogbeat.yml" + maximum_timeout_in_minutes: 120 + provider_settings: + trigger_mode: none # don't trigger jobs from github activity + build_pull_request_forks: false + build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot + build_tags: true + filter_enabled: true + filter_condition: >- + 
build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) + repository: elastic/beats + cancel_intermediate_builds: true + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" + skip_intermediate_builds: true + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + # env: + # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: READ_ONLY + +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-pipeline-beats-xpack-filebeat + description: "Beats x-pack filebeat pipeline" + links: + - title: Pipeline + url: https://buildkite.com/elastic/beats-xpack-filebeat + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: beats-xpack-filebeat + description: "Beats x-pack filebeat pipeline" + spec: + # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + pipeline_file: ".buildkite/x-pack/pipeline.xpack.filebeat.yml" + maximum_timeout_in_minutes: 120 + provider_settings: + trigger_mode: none # don't trigger jobs from github activity + build_pull_request_forks: false + build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot + build_tags: true + filter_enabled: true + filter_condition: >- + build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) + repository: elastic/beats + cancel_intermediate_builds: true + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" + skip_intermediate_builds: true + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + # env: + # ELASTIC_PR_COMMENTS_ENABLED: "true" 
#TODO: uncomment after tests + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: READ_ONLY + +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-pipeline-beats-xpack-heartbeat + description: "Beats x-pack heartbeat pipeline" + links: + - title: Pipeline + url: https://buildkite.com/elastic/beats-xpack-heartbeat + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: beats-xpack-heartbeat + description: "Beats x-pack heartbeat pipeline" + spec: + # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + pipeline_file: ".buildkite/x-pack/pipeline.xpack.heartbeat.yml" + maximum_timeout_in_minutes: 120 + provider_settings: + trigger_mode: none # don't trigger jobs from github activity + build_pull_request_forks: false + build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot + build_tags: true + filter_enabled: true + filter_condition: >- + build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) + repository: elastic/beats + cancel_intermediate_builds: true + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" + skip_intermediate_builds: true + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + # env: + # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: READ_ONLY + +--- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: 
buildkite-pipeline-beats-xpack-osquerybeat + description: "Beats x-pack osquerybeat pipeline" + links: + - title: Pipeline + url: https://buildkite.com/elastic/beats-xpack-osquerybeat + +spec: + type: buildkite-pipeline + owner: group:ingest-fp + system: buildkite + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + name: beats-xpack-osquerybeat + description: "Beats x-pack osquerybeat pipeline" + spec: + # branch_configuration: "main 7.17 8.*" #TODO: uncomment after tests + pipeline_file: ".buildkite/x-pack/pipeline.xpack.osquerybeat.yml" + maximum_timeout_in_minutes: 120 + provider_settings: + trigger_mode: none # don't trigger jobs from github activity + build_pull_request_forks: false + build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot + build_tags: true + filter_enabled: true + filter_condition: >- + build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) + repository: elastic/beats + cancel_intermediate_builds: true + cancel_intermediate_builds_branch_filter: "!main !7.17 !8.*" + skip_intermediate_builds: true + skip_intermediate_builds_branch_filter: "!main !7.17 !8.*" + # env: + # ELASTIC_PR_COMMENTS_ENABLED: "true" #TODO: uncomment after tests + teams: + ingest-fp: + access_level: MANAGE_BUILD_AND_READ + everyone: + access_level: READ_ONLY diff --git a/filebeat/module/iis/error/test/iis_error_url.log-expected.json b/filebeat/module/iis/error/test/iis_error_url.log-expected.json index cc7213141754..88509d87dc55 100644 --- a/filebeat/module/iis/error/test/iis_error_url.log-expected.json +++ b/filebeat/module/iis/error/test/iis_error_url.log-expected.json @@ -39,7 +39,6 @@ "source.geo.region_name": "England", "source.ip": "81.2.69.145", "source.port": 12345, - "url.extension": "1", "url.original": "12.2.1", "url.path": "12.2.1" }, @@ -83,7 +82,6 @@ "source.geo.region_name": "England", "source.ip": 
"81.2.69.145", "source.port": 12345, - "url.extension": "/", "url.original": "./././././../../../../../../../../", "url.path": "./././././../../../../../../../../" }, @@ -343,4 +341,4 @@ "url.original": "/fee&fie=foe", "url.path": "/fee&fie=foe" } -] \ No newline at end of file +] diff --git a/libbeat/outputs/fileout/config.go b/libbeat/outputs/fileout/config.go index e72a9f87d6fc..69af40e4289b 100644 --- a/libbeat/outputs/fileout/config.go +++ b/libbeat/outputs/fileout/config.go @@ -26,14 +26,14 @@ import ( ) type fileOutConfig struct { - Path string `config:"path"` - Filename string `config:"filename"` - RotateEveryKb uint `config:"rotate_every_kb" validate:"min=1"` - NumberOfFiles uint `config:"number_of_files"` - Codec codec.Config `config:"codec"` - Permissions uint32 `config:"permissions"` - RotateOnStartup bool `config:"rotate_on_startup"` - Queue config.Namespace `config:"queue"` + Path *PathFormatString `config:"path"` + Filename string `config:"filename"` + RotateEveryKb uint `config:"rotate_every_kb" validate:"min=1"` + NumberOfFiles uint `config:"number_of_files"` + Codec codec.Config `config:"codec"` + Permissions uint32 `config:"permissions"` + RotateOnStartup bool `config:"rotate_on_startup"` + Queue config.Namespace `config:"queue"` } func defaultConfig() fileOutConfig { @@ -45,6 +45,18 @@ func defaultConfig() fileOutConfig { } } +func readConfig(cfg *config.C) (*fileOutConfig, error) { + foConfig := defaultConfig() + if err := cfg.Unpack(&foConfig); err != nil { + return nil, err + } + + // disable bulk support in publisher pipeline + _ = cfg.SetInt("bulk_max_size", -1, -1) + + return &foConfig, nil +} + func (c *fileOutConfig) Validate() error { if c.NumberOfFiles < 2 || c.NumberOfFiles > file.MaxBackupsLimit { return fmt.Errorf("the number_of_files to keep should be between 2 and %v", diff --git a/libbeat/outputs/fileout/config_test.go b/libbeat/outputs/fileout/config_test.go new file mode 100644 index 000000000000..7e149173f6df --- /dev/null +++ 
b/libbeat/outputs/fileout/config_test.go @@ -0,0 +1,100 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package fileout + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/elastic/elastic-agent-libs/config" + "github.com/elastic/elastic-agent-libs/mapstr" +) + +func TestConfig(t *testing.T) { + for name, test := range map[string]struct { + config *config.C + useWindowsPath bool + assertion func(t *testing.T, config *fileOutConfig, err error) + }{ + "default config": { + config: config.MustNewConfigFrom([]byte(`{ }`)), + assertion: func(t *testing.T, actual *fileOutConfig, err error) { + expectedConfig := &fileOutConfig{ + NumberOfFiles: 7, + RotateEveryKb: 10 * 1024, + Permissions: 0600, + RotateOnStartup: true, + } + + assert.Equal(t, expectedConfig, actual) + assert.Nil(t, err) + }, + }, + "config given with posix path": { + config: config.MustNewConfigFrom(mapstr.M{ + "number_of_files": 10, + "rotate_every_kb": 5 * 1024, + "path": "/tmp/packetbeat/%{+yyyy-MM-dd-mm-ss-SSSSSS}", + "filename": "pb", + }), + assertion: func(t *testing.T, actual *fileOutConfig, err error) { + assert.Equal(t, uint(10), actual.NumberOfFiles) + assert.Equal(t, uint(5*1024), actual.RotateEveryKb) + 
assert.Equal(t, true, actual.RotateOnStartup) + assert.Equal(t, uint32(0600), actual.Permissions) + assert.Equal(t, "pb", actual.Filename) + + path, runErr := actual.Path.Run(time.Date(2024, 1, 2, 3, 4, 5, 67890, time.UTC)) + assert.Nil(t, runErr) + + assert.Equal(t, "/tmp/packetbeat/2024-01-02-04-05-000067", path) + assert.Nil(t, err) + }, + }, + "config given with windows path": { + useWindowsPath: true, + config: config.MustNewConfigFrom(mapstr.M{ + "number_of_files": 10, + "rotate_every_kb": 5 * 1024, + "path": "c:\\tmp\\packetbeat\\%{+yyyy-MM-dd-mm-ss-SSSSSS}", + "filename": "pb", + }), + assertion: func(t *testing.T, actual *fileOutConfig, err error) { + assert.Equal(t, uint(10), actual.NumberOfFiles) + assert.Equal(t, uint(5*1024), actual.RotateEveryKb) + assert.Equal(t, true, actual.RotateOnStartup) + assert.Equal(t, uint32(0600), actual.Permissions) + assert.Equal(t, "pb", actual.Filename) + + path, runErr := actual.Path.Run(time.Date(2024, 1, 2, 3, 4, 5, 67890, time.UTC)) + assert.Nil(t, runErr) + + assert.Equal(t, "c:\\tmp\\packetbeat\\2024-01-02-04-05-000067", path) + assert.Nil(t, err) + }, + }, + } { + t.Run(name, func(t *testing.T) { + isWindowsPath = test.useWindowsPath + cfg, err := readConfig(test.config) + test.assertion(t, cfg, err) + }) + } +} diff --git a/libbeat/outputs/fileout/docs/fileout.asciidoc b/libbeat/outputs/fileout/docs/fileout.asciidoc index 54dfdd0772aa..bb2a953ec75e 100644 --- a/libbeat/outputs/fileout/docs/fileout.asciidoc +++ b/libbeat/outputs/fileout/docs/fileout.asciidoc @@ -49,6 +49,14 @@ The default value is `true`. The path to the directory where the generated files will be saved. This option is mandatory. +The path may include the timestamp when the file output is initialized using the `+FORMAT` syntax where `FORMAT` is a +valid https://github.com/elastic/beats/blob/{doc-branch}/libbeat/common/dtfmt/doc.go[time format], +and enclosed with expansion braces: `%{+FORMAT}`. 
For example: + +``` +path: 'fileoutput-%{+yyyy.MM.dd}' +``` + ===== `filename` The name of the generated files. The default is set to the Beat name. For example, the files diff --git a/libbeat/outputs/fileout/file.go b/libbeat/outputs/fileout/file.go index 4ddc5955d6ef..34c57f29791f 100644 --- a/libbeat/outputs/fileout/file.go +++ b/libbeat/outputs/fileout/file.go @@ -52,20 +52,17 @@ func makeFileout( observer outputs.Observer, cfg *c.C, ) (outputs.Group, error) { - foConfig := defaultConfig() - if err := cfg.Unpack(&foConfig); err != nil { + foConfig, err := readConfig(cfg) + if err != nil { return outputs.Fail(err) } - // disable bulk support in publisher pipeline - _ = cfg.SetInt("bulk_max_size", -1, -1) - fo := &fileOutput{ log: logp.NewLogger("file"), beat: beat, observer: observer, } - if err := fo.init(beat, foConfig); err != nil { + if err = fo.init(beat, *foConfig); err != nil { return outputs.Fail(err) } @@ -74,10 +71,14 @@ func makeFileout( func (out *fileOutput) init(beat beat.Info, c fileOutConfig) error { var path string + configPath, runErr := c.Path.Run(time.Now().UTC()) + if runErr != nil { + return runErr + } if c.Filename != "" { - path = filepath.Join(c.Path, c.Filename) + path = filepath.Join(configPath, c.Filename) } else { - path = filepath.Join(c.Path, out.beat.Beat) + path = filepath.Join(configPath, out.beat.Beat) } out.filePath = path diff --git a/libbeat/outputs/fileout/pathformatstring.go b/libbeat/outputs/fileout/pathformatstring.go new file mode 100644 index 000000000000..acd2a7605fe6 --- /dev/null +++ b/libbeat/outputs/fileout/pathformatstring.go @@ -0,0 +1,66 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package fileout + +import ( + "os" + "strings" + "time" + + "github.com/elastic/beats/v7/libbeat/common/fmtstr" + + "github.com/elastic/beats/v7/libbeat/beat" +) + +var isWindowsPath = os.PathSeparator == '\\' + +// PathFormatString is a wrapper around EventFormatString for the +// handling paths with a format expression that has access to the timestamp format. +// It has special handling for paths, specifically for windows path separator +// which would be interpreted as an escape character. This formatter double escapes +// the path separator so it is properly interpreted by the fmtstr processor +type PathFormatString struct { + efs *fmtstr.EventFormatString +} + +// Run executes the format string returning a new expanded string or an error +// if execution or event field expansion fails. +func (fs *PathFormatString) Run(timestamp time.Time) (string, error) { + placeholderEvent := &beat.Event{ + Timestamp: timestamp, + } + return fs.efs.Run(placeholderEvent) +} + +// Unpack tries to initialize the PathFormatString from provided value +// (which must be a string). Unpack method satisfies go-ucfg.Unpacker interface +// required by config.C, in order to use PathFormatString with +// `common.(*Config).Unpack()`. 
+func (fs *PathFormatString) Unpack(v interface{}) error { + path, ok := v.(string) + if !ok { + return nil + } + + if isWindowsPath { + path = strings.ReplaceAll(path, "\\", "\\\\") + } + + fs.efs = &fmtstr.EventFormatString{} + return fs.efs.Unpack(path) +} diff --git a/libbeat/outputs/fileout/pathformatstring_test.go b/libbeat/outputs/fileout/pathformatstring_test.go new file mode 100644 index 000000000000..b8eee4e44eaa --- /dev/null +++ b/libbeat/outputs/fileout/pathformatstring_test.go @@ -0,0 +1,87 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package fileout + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestPathFormatString(t *testing.T) { + tests := []struct { + title string + useWindowsPath bool + format string + timestamp time.Time + expected string + }{ + { + "empty string", + false, + "", + time.Time{}, + "", + }, + { + "no fields configured", + false, + "format string", + time.Time{}, + "format string", + }, + { + "test timestamp formatter", + false, + "timestamp: %{+YYYY.MM.dd}", + time.Date(2015, 5, 1, 20, 12, 34, 0, time.UTC), + "timestamp: 2015.05.01", + }, + { + "test timestamp formatter with posix path", + false, + "/tmp/%{+YYYY.MM.dd}", + time.Date(2015, 5, 1, 20, 12, 34, 0, time.UTC), + "/tmp/2015.05.01", + }, + { + "test timestamp formatter with windows path", + true, + "C:\\tmp\\%{+YYYY.MM.dd}", + time.Date(2015, 5, 1, 20, 12, 34, 0, time.UTC), + "C:\\tmp\\2015.05.01", + }, + } + + for i, test := range tests { + t.Logf("test(%v): %v", i, test.title) + isWindowsPath = test.useWindowsPath + pfs := &PathFormatString{} + err := pfs.Unpack(test.format) + if err != nil { + t.Error(err) + continue + } + + actual, err := pfs.Run(test.timestamp) + + assert.NoError(t, err) + assert.Equal(t, test.expected, actual) + } +} diff --git a/metricbeat/module/logstash/fields.go b/metricbeat/module/logstash/fields.go index 1cdf0ce0e6a6..d1550b9555c7 100644 --- a/metricbeat/module/logstash/fields.go +++ b/metricbeat/module/logstash/fields.go @@ -32,5 +32,5 @@ func init() { // AssetLogstash returns asset data. // This is the base64 encoded zlib format compressed contents of module/logstash. 
func AssetLogstash() string { - return "eJzEWU2v4zQU3b9fcdU1E4kFmy4QC0AMAsFqNghFbnKb+D0nzvijw+PXo7RNGyf+bNwhiy6S+Jxzbef63tMP8Ibve2C8kYrI9gVAUcVwD7vfrrd2LwA1ykrQQVHe7+H7FwCA6TF0vNYMXwAEMiQS99CQFwCJStG+kXv4aycl230Du1apYff3+KzlQpUV74+02cORMDmOP1Jktdyf0T9ATzq86yqlIkqeHwGo92FkEVwP1zvzofPhinYoFemG25NpNGGUyNndgah2D7sfbiN2K7DXU7eCmYuwCZmP77Az7rswXDhzrBbJUGqJdUn78vCuUK5edUVqRjxNcNHzGovzLBevp67osCsiSAw9Hfnn6XKcHJMSPYxLOL7TUcboksotwi/ACTvx4gl7JTdtENpnEHvRUSywJgquVT6OJdhEUmtBxlSRdRGunB7sZc7YtBZa0zqD6tu9Fd5EdEIhKc+x8rd7S8iJaqADMtrjepf2KBXWqwF8236uBp0t4TFO6pKcUJAGnbnFBuwDnxN8+90yO5vYrrwF3jXhsqgGXczVFy6mm5KvJsSv46tNyILotn1sy/n4DqoGTapKFVqSBsue9DznITXGdVZVxPC4vxB/jL44DdyjLD9rrkjZ0UpYAw0HmxBwEUU4qRsBvIpcoYfCn5P0ujugKPmxREaGsYgZUFBeuyKNm4+UORnvFeky5rVrqVrBlWJYOzfsE7UnKVjP+7mUvo//v+bdJeN2KgpeobykpQFFhb2K7BZsUnxoE+NnjRqvBU1Zcb2J0AFmbaAwuYGaSobCqF7SNGJhQ1kxtLaS7QGOG46jHowPHhmRilYSiajaomJaKhS2mXjD9y9crGMb5QUrKEtvPV2L8WaDPV0u9TnIL203dKgErWSRrGWux1FPL+cOFgfFfWWzVQIrJX4186GLTZo2GIcWOxSElRsUHIiq2lLSf91lMON94xz/hYs3FO7SxztY4CBQYq/OPdhTShfHDENwguYg45ubQeydWTpOI8jgDylHvYN1Y3VdTCZ+eMXKVXwZoStaPYZ3W0guU72GcUgx/owIizc62lw6/z0ooZdPPXlsvH7hUsEKdFsLLlGcaIWrbjuD3Ju3+smCbbcjwbuZAny/fvodPvZHnphH3R9I6OMI6BkvW+hg1A3uHOpv36YSzY7gX7hI8X9eKODjj85zze6JPXKc2UxuE7W+l37r4et95JfjkwTxRqxJZD11wGuexykNqYVUUz1S9wo3YI5HwDqdZnOsayYitu1PZ2Tr9oTIVV952BGBResbr499xTvaN9dpgHPLg6Lwqlrb3rll/aFVw1NlHSlTKJytcS5tP19pkrQFTfwIlR5H3hy9Kc+snXoT/Nm1ntUtzxef3SMMY4fwId5djyeMIYUo193kbAk7lkfGncbhAvYpqHm0eu1tEynH0sbZ3iZv1Knm3pdxAcQEAemedkIkEOdJp4UEkWHBRq/aVBWMEx53ljcwPWIGR9DFWbcmUM31gbmrYNt/oybA4j/S+xWTxAPWDyQdVhkgsrgsZ+v5+caG3Se3E0ZlLo87BFETAMtJOPtxmbuGsWHICh7sHyDrujk7gQipEFW7JwJFV7SJuJdFGrRsy2dRBBqFCLRIZy+Q6iBh/Z81F9d8kGt7TYm/jEjREJ0dFr1ZGski0s0fwEJMBGzwpIFHZyIi7Cd+TwsxjzHdm6a+Ka+9dFiX95uCxFrY4o/bWcOrM0sNhDmPcxM1mGUEjh3l88+YI6FMi1wnrtTVWE5u9ee8f+9bcP4LAAD//53osjw=" + return 
"eJzEWk2P6zQU3c+vuOqaF4kFmy4QC0AMAsHqbRCKPM5t4hknzvNHH8OvR2mbNk5sx27cIYtZJPE551479r2n8wne8H0PXNRKE9U8AWimOe5h99vl1u4JoEJFJes1E90evn8CABgfQysqw/EJQCJHonAPNXkCUKg162q1h792SvHdN7BrtO53fw/PGiF1SUV3YPUeDoSrYfyBIa/U/oT+CTrS4k1XqTTR6vQIQL/3A4sUpr/cmQ6dDtesRaVJ21+fjKMJZ0RN7vZEN3vY/XAdsVuAvR7bBcxUhEvIdHyLrXXfh+HDmWI1SPrSKKxK1pUv7xrV4lVfpHbEY4KLTlRYnLJcvB7bosW2iCCx9LTkn4fL8XKMSkw/TOHwTss4Z3Mqv4iwAC/syItH7LTatEBYl0HsWUcxwxophNH5OOZgI0llJBm2iqyTcOEMYM/3jE1zYQyrMqi+3lvgjURHlIqJHDN/vTeHHKl61iNnHS5XaYdKY7UYILatZ9qbbBseF6QqyRElqdG7t7iAQ+BTgm+/m+/ONrZv34LgnAhV0N4UU/WFj+mq5MOEhHV8WEJmRNfl45rO+1cQ7Q2hVBdGkRrLjnQi5yE1xHVSVcTw+L+QcIyhOC3cgyq/GKFJ2TIqnYGuB5sQcBFFOKobAIKKfKGvhT8l6Uz7grIUhxI56YcipkfJROWLNC4fKTkZ7hXpMqa1a6kbKbTmWHkX7AO1JylY5v1USt/G/19598m4nopSUFTnbalHSbHTkd2CS0oIbWT8YtDgpaApqTCbCD1gzgYKkxuosWQorOolTSMWLpQFQ+Mq2e7guOJ46sH44JETpRlVSCRtCsqN0ihdmXjD969CLmMb5K1WUI7eerxm4+0Ge7x86nOQn9tuaFFLRlWRrGWqx1NPz3MHs4PiNrPZKoGFkrCa6dDZIk0bjH2DLUrCyw0KXoimTanYv/4ymIuu9o7/KuQbSn/pExwssZeosNOnHuwhpYsnw7CaoCnI8OZmEHdnlo5TS9KHQ8pR72BVO10Xm0m8vCL1FV9W6JrR+/CuEylUqtcwDCmGPwPC7I2W1efOfw9amvnTwD42XL8IpWEBuq0FVyiPjOKi284g9+qtfnZgu+1ICC6mFb5fP/8Oz91BJO6j/g9k7eNY0TNcrtDBqhv8e2i4fRtLNDdCeOIixf95poDnH73nmtsTu+c4c5ncNmp1K/2Ww5frKCwnJAnijVibyHnqQNA8j1O6phZSTfVI3QvcFXM8AtbrNNtjfZmIWLY/nZCdyxMiZ33hYUcEFq1vuJ47KlrW1Zc0wKnlQVkEVS1t79yy/jC6FqmyDoxrlN7WOJe2ny80SdpWTfwIlQFH3h69aZ9ZOvU2+KNrPadbni8+t0e4jr2GD/HuejxhDClEue42Z0P4oTxw4TUOZ7APQc2jNWhv20g5pjbO9rZ5o041/7qMCyAmCEj3tBMigThPOi0kiAwLNnrVtqrVOOF+Z3kD0z1mcARdnHVrA1XCvHB/Fez6bdQGmP1GertiNvEV6weSDqsMEFlclpP1/Hhjw+2Tuwmjdq6AOwRRCYB5Ek5+XOauYWgYHgZOSU8o0+8fuoOnRxQZ1ZzEdBJJVXobpQ3w/XCKjtl7UAjZc7TaOULWL9bbA0ZIhaiuLREoupdJxD1PVG9UUz6KYqVFjECL9HRXDjlImP9H5eJyEuRaXuORX0YczhB9Lsy68jSSWaSbP4CZmAjY1RoD7s1ERNgP/J5mYu5jurXLXV1eXJR1XcFvChLPUMcvI27W9dmZbA2Eews5G3V1l5HIBfE2LvnOmANh3Mhc5ZAydGgktjqzwX/scOD8FwAA//81Lx/S" } diff --git a/metricbeat/module/logstash/node_stats/_meta/fields.yml b/metricbeat/module/logstash/node_stats/_meta/fields.yml index 8e37a165659c..202907fdc29a 
100644 --- a/metricbeat/module/logstash/node_stats/_meta/fields.yml +++ b/metricbeat/module/logstash/node_stats/_meta/fields.yml @@ -161,6 +161,17 @@ type: long - name: max_queue_size_in_bytes type: long + - name: capacity + type: group + fields: + - name: max_queue_size_in_bytes + type: long + - name: max_unread_events + type: long + - name: page_capacity_in_bytes + type: long + - name: queue_size_in_bytes + type: long - name: events type: group fields: diff --git a/testing/environments/snapshot.yml b/testing/environments/snapshot.yml index fd3c6007409e..7f78746abe08 100644 --- a/testing/environments/snapshot.yml +++ b/testing/environments/snapshot.yml @@ -3,7 +3,7 @@ version: '2.3' services: elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:8.14.0-74a79bf3-SNAPSHOT + image: docker.elastic.co/elasticsearch/elasticsearch:8.14.0-faa2d2d3-SNAPSHOT # When extend is used it merges healthcheck.tests, see: # https://github.com/docker/compose/issues/8962 # healthcheck: @@ -31,7 +31,7 @@ services: - "./docker/elasticsearch/users_roles:/usr/share/elasticsearch/config/users_roles" logstash: - image: docker.elastic.co/logstash/logstash:8.14.0-74a79bf3-SNAPSHOT + image: docker.elastic.co/logstash/logstash:8.14.0-faa2d2d3-SNAPSHOT healthcheck: test: ["CMD", "curl", "-f", "http://localhost:9600/_node/stats"] retries: 600 @@ -44,7 +44,7 @@ services: - 5055:5055 kibana: - image: docker.elastic.co/kibana/kibana:8.14.0-74a79bf3-SNAPSHOT + image: docker.elastic.co/kibana/kibana:8.14.0-faa2d2d3-SNAPSHOT environment: - "ELASTICSEARCH_USERNAME=kibana_system_user" - "ELASTICSEARCH_PASSWORD=testing" diff --git a/x-pack/filebeat/docs/inputs/input-http-endpoint.asciidoc b/x-pack/filebeat/docs/inputs/input-http-endpoint.asciidoc index b7a7ee06f70d..7f3050d1f686 100644 --- a/x-pack/filebeat/docs/inputs/input-http-endpoint.asciidoc +++ b/x-pack/filebeat/docs/inputs/input-http-endpoint.asciidoc @@ -227,6 +227,11 @@ The prefix for the signature. 
Certain webhooks prefix the HMAC signature with a By default the input expects the incoming POST to include a Content-Type of `application/json` to try to enforce the incoming data to be valid JSON. In certain scenarios when the source of the request is not able to do that, it can be overwritten with another value or set to null. +[float] +==== `program` + +The normal operation of the input treats the body either as a single event when the body is an object, or as a set of events when the body is an array. If the body should be handled differently, for example a set of events in an array field of an object to be handled as a set of events, then a https://opensource.google.com/projects/cel[Common Expression Language (CEL)] program can be provided through this configuration field. No CEL extensions are provided beyond the function in the CEL https://github.com/google/cel-spec/blob/master/doc/langdef.md#standard[standard library]. CEL https://pkg.go.dev/github.com/google/cel-go/cel#OptionalTypes[optional types] are supported. 
+ [float] ==== `response_code` diff --git a/x-pack/filebeat/input/http_endpoint/config.go b/x-pack/filebeat/input/http_endpoint/config.go index 3b0c97741dee..1618dc907583 100644 --- a/x-pack/filebeat/input/http_endpoint/config.go +++ b/x-pack/filebeat/input/http_endpoint/config.go @@ -37,6 +37,7 @@ type config struct { URL string `config:"url" validate:"required"` Prefix string `config:"prefix"` ContentType string `config:"content_type"` + Program string `config:"program"` SecretHeader string `config:"secret.header"` SecretValue string `config:"secret.value"` HMACHeader string `config:"hmac.header"` diff --git a/x-pack/filebeat/input/http_endpoint/handler.go b/x-pack/filebeat/input/http_endpoint/handler.go index 0e2620b5b658..3d0948489ac8 100644 --- a/x-pack/filebeat/input/http_endpoint/handler.go +++ b/x-pack/filebeat/input/http_endpoint/handler.go @@ -12,10 +12,16 @@ import ( "io" "net" "net/http" + "reflect" "time" + "github.com/google/cel-go/cel" + "github.com/google/cel-go/checker/decls" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" "go.uber.org/zap" "go.uber.org/zap/zapcore" + "google.golang.org/protobuf/types/known/structpb" stateless "github.com/elastic/beats/v7/filebeat/input/v2/input-stateless" "github.com/elastic/beats/v7/libbeat/beat" @@ -24,6 +30,7 @@ import ( "github.com/elastic/beats/v7/x-pack/filebeat/input/internal/httplog" "github.com/elastic/elastic-agent-libs/logp" "github.com/elastic/elastic-agent-libs/mapstr" + "github.com/elastic/mito/lib" ) const headerContentEncoding = "Content-Encoding" @@ -43,6 +50,7 @@ type handler struct { reqLogger *zap.Logger host, scheme string + program *program messageField string responseCode int responseBody string @@ -80,7 +88,7 @@ func (h *handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { r.Body = io.NopCloser(&buf) } - objs, _, status, err := httpReadJSON(body) + objs, _, status, err := httpReadJSON(body, h.program) if err != nil { h.sendAPIErrorResponse(w, 
r, h.log, status, err) h.metrics.apiErrors.Add(1) @@ -218,22 +226,22 @@ func (h *handler) publishEvent(obj, headers mapstr.M) error { return nil } -func httpReadJSON(body io.Reader) (objs []mapstr.M, rawMessages []json.RawMessage, status int, err error) { +func httpReadJSON(body io.Reader, prg *program) (objs []mapstr.M, rawMessages []json.RawMessage, status int, err error) { if body == http.NoBody { return nil, nil, http.StatusNotAcceptable, errBodyEmpty } - obj, rawMessage, err := decodeJSON(body) + obj, rawMessage, err := decodeJSON(body, prg) if err != nil { return nil, nil, http.StatusBadRequest, err } return obj, rawMessage, http.StatusOK, err } -func decodeJSON(body io.Reader) (objs []mapstr.M, rawMessages []json.RawMessage, err error) { +func decodeJSON(body io.Reader, prg *program) (objs []mapstr.M, rawMessages []json.RawMessage, err error) { decoder := json.NewDecoder(body) for decoder.More() { var raw json.RawMessage - if err := decoder.Decode(&raw); err != nil { + if err = decoder.Decode(&raw); err != nil { if err == io.EOF { //nolint:errorlint // This will never be a wrapped error. break } @@ -241,9 +249,22 @@ func decodeJSON(body io.Reader) (objs []mapstr.M, rawMessages []json.RawMessage, } var obj interface{} - if err := newJSONDecoder(bytes.NewReader(raw)).Decode(&obj); err != nil { + if err = newJSONDecoder(bytes.NewReader(raw)).Decode(&obj); err != nil { return nil, nil, fmt.Errorf("malformed JSON object at stream position %d: %w", decoder.InputOffset(), err) } + + if prg != nil { + obj, err = prg.eval(obj) + if err != nil { + return nil, nil, err + } + // Re-marshal to ensure the raw bytes agree with the constructed object. 
+ raw, err = json.Marshal(obj) + if err != nil { + return nil, fmt.Errorf("failed to remarshal object: %w", err) + } + } + switch v := obj.(type) { case map[string]interface{}: objs = append(objs, v) @@ -265,6 +286,86 @@ func decodeJSON(body io.Reader) (objs []mapstr.M, rawMessages []json.RawMessage, return objs, rawMessages, nil } +type program struct { + prg cel.Program + ast *cel.Ast +} + +func newProgram(src string) (*program, error) { + if src == "" { + return nil, nil + } + + registry, err := types.NewRegistry() + if err != nil { + return nil, fmt.Errorf("failed to create registry: %w", err) + } + env, err := cel.NewEnv( + cel.Declarations(decls.NewVar("obj", decls.Dyn)), + cel.OptionalTypes(cel.OptionalTypesVersion(lib.OptionalTypesVersion)), + cel.CustomTypeAdapter(&numberAdapter{registry}), + cel.CustomTypeProvider(registry), + ) + if err != nil { + return nil, fmt.Errorf("failed to create env: %w", err) + } + + ast, iss := env.Compile(src) + if iss.Err() != nil { + return nil, fmt.Errorf("failed compilation: %w", iss.Err()) + } + + prg, err := env.Program(ast) + if err != nil { + return nil, fmt.Errorf("failed program instantiation: %w", err) + } + return &program{prg: prg, ast: ast}, nil +} + +var _ types.Adapter = (*numberAdapter)(nil) + +type numberAdapter struct { + fallback types.Adapter +} + +func (a *numberAdapter) NativeToValue(value any) ref.Val { + if n, ok := value.(json.Number); ok { + var errs []error + i, err := n.Int64() + if err == nil { + return types.Int(i) + } + errs = append(errs, err) + f, err := n.Float64() + if err == nil { + return types.Double(f) + } + errs = append(errs, err) + return types.NewErr("%v", errors.Join(errs...)) + } + return a.fallback.NativeToValue(value) +} + +func (p *program) eval(obj interface{}) (interface{}, error) { + out, _, err := p.prg.Eval(map[string]interface{}{"obj": obj}) + if err != nil { + err = lib.DecoratedError{AST: p.ast, Err: err} + return nil, fmt.Errorf("failed eval: %w", err) + } + + v, err 
:= out.ConvertToNative(reflect.TypeOf((*structpb.Value)(nil))) + if err != nil { + return nil, fmt.Errorf("failed proto conversion: %w", err) + } + switch v := v.(type) { + case *structpb.Value: + return v.AsInterface(), nil + default: + // This should never happen. + return nil, fmt.Errorf("unexpected native conversion type: %T", v) + } +} + func decodeJSONArray(raw *bytes.Reader) (objs []mapstr.M, rawMessages []json.RawMessage, err error) { dec := newJSONDecoder(raw) token, err := dec.Token() diff --git a/x-pack/filebeat/input/http_endpoint/handler_test.go b/x-pack/filebeat/input/http_endpoint/handler_test.go index 6660508b15b4..cb911f8ab188 100644 --- a/x-pack/filebeat/input/http_endpoint/handler_test.go +++ b/x-pack/filebeat/input/http_endpoint/handler_test.go @@ -38,6 +38,7 @@ func Test_httpReadJSON(t *testing.T) { tests := []struct { name string body string + program string wantObjs []mapstr.M wantStatus int wantErr bool @@ -135,10 +136,43 @@ func Test_httpReadJSON(t *testing.T) { }, wantStatus: http.StatusOK, }, + { + name: "kinesis", + body: `{ + "requestId": "ed4acda5-034f-9f42-bba1-f29aea6d7d8f", + "timestamp": 1578090901599, + "records": [ + { + "data": "aGVsbG8=" + }, + { + "data": "aGVsbG8gd29ybGQ=" + } + ] +}`, + program: `obj.records.map(r, { + "requestId": obj.requestId, + "timestamp": string(obj.timestamp), // leave timestamp in unix milli for ingest to handle. 
+ "event": r, + })`, + wantRawMessage: []json.RawMessage{ + []byte(`{"event":{"data":"aGVsbG8="},"requestId":"ed4acda5-034f-9f42-bba1-f29aea6d7d8f","timestamp":"1578090901599"}`), + []byte(`{"event":{"data":"aGVsbG8gd29ybGQ="},"requestId":"ed4acda5-034f-9f42-bba1-f29aea6d7d8f","timestamp":"1578090901599"}`), + }, + wantObjs: []mapstr.M{ + {"event": map[string]any{"data": "aGVsbG8="}, "requestId": "ed4acda5-034f-9f42-bba1-f29aea6d7d8f", "timestamp": "1578090901599"}, + {"event": map[string]any{"data": "aGVsbG8gd29ybGQ="}, "requestId": "ed4acda5-034f-9f42-bba1-f29aea6d7d8f", "timestamp": "1578090901599"}, + }, + wantStatus: http.StatusOK, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - gotObjs, rawMessages, gotStatus, err := httpReadJSON(strings.NewReader(tt.body)) + prg, err := newProgram(tt.program) + if err != nil { + t.Fatalf("failed to compile program: %v", err) + } + gotObjs, rawMessages, gotStatus, err := httpReadJSON(strings.NewReader(tt.body), prg) if (err != nil) != tt.wantErr { t.Errorf("httpReadJSON() error = %v, wantErr %v", err, tt.wantErr) return @@ -344,7 +378,7 @@ func Test_apiResponse(t *testing.T) { pub := new(publisher) metrics := newInputMetrics("") defer metrics.Close() - apiHandler := newHandler(ctx, tracerConfig(tc.name, tc.conf, *withTraces), pub, logp.NewLogger("http_endpoint.test"), metrics) + apiHandler := newHandler(ctx, tracerConfig(tc.name, tc.conf, *withTraces), nil, pub, logp.NewLogger("http_endpoint.test"), metrics) // Execute handler. 
respRec := httptest.NewRecorder() diff --git a/x-pack/filebeat/input/http_endpoint/input.go b/x-pack/filebeat/input/http_endpoint/input.go index ca648b697470..7d5055ebe653 100644 --- a/x-pack/filebeat/input/http_endpoint/input.go +++ b/x-pack/filebeat/input/http_endpoint/input.go @@ -131,6 +131,14 @@ func (p *pool) serve(ctx v2.Context, e *httpEndpoint, pub stateless.Publisher, m metrics.route.Set(u.Path) metrics.isTLS.Set(e.tlsConfig != nil) + var prg *program + if e.config.Program != "" { + prg, err = newProgram(e.config.Program) + if err != nil { + return err + } + } + p.mu.Lock() s, ok := p.servers[e.addr] if ok { @@ -149,7 +157,7 @@ func (p *pool) serve(ctx v2.Context, e *httpEndpoint, pub stateless.Publisher, m return err } log.Infof("Adding %s end point to server on %s", pattern, e.addr) - s.mux.Handle(pattern, newHandler(s.ctx, e.config, pub, log, metrics)) + s.mux.Handle(pattern, newHandler(s.ctx, e.config, prg, pub, log, metrics)) s.idOf[pattern] = ctx.ID p.mu.Unlock() <-s.ctx.Done() @@ -165,7 +173,7 @@ func (p *pool) serve(ctx v2.Context, e *httpEndpoint, pub stateless.Publisher, m srv: srv, } s.ctx, s.cancel = ctxtool.WithFunc(ctx.Cancelation, func() { srv.Close() }) - mux.Handle(pattern, newHandler(s.ctx, e.config, pub, log, metrics)) + mux.Handle(pattern, newHandler(s.ctx, e.config, prg, pub, log, metrics)) p.servers[e.addr] = s p.mu.Unlock() @@ -287,7 +295,7 @@ func (s *server) getErr() error { return s.err } -func newHandler(ctx context.Context, c config, pub stateless.Publisher, log *logp.Logger, metrics *inputMetrics) http.Handler { +func newHandler(ctx context.Context, c config, prg *program, pub stateless.Publisher, log *logp.Logger, metrics *inputMetrics) http.Handler { h := &handler{ log: log, publisher: pub, @@ -305,6 +313,7 @@ func newHandler(ctx context.Context, c config, pub stateless.Publisher, log *log hmacType: c.HMACType, hmacPrefix: c.HMACPrefix, }, + program: prg, messageField: c.Prefix, responseCode: c.ResponseCode, responseBody: 
c.ResponseBody, diff --git a/x-pack/filebeat/module/o365/audit/test/13-dlp-exchange.log-expected.json b/x-pack/filebeat/module/o365/audit/test/13-dlp-exchange.log-expected.json index 6eae82404516..c6d25a2cc57f 100644 --- a/x-pack/filebeat/module/o365/audit/test/13-dlp-exchange.log-expected.json +++ b/x-pack/filebeat/module/o365/audit/test/13-dlp-exchange.log-expected.json @@ -792,7 +792,6 @@ "forwarded" ], "url.domain": "example.net", - "url.extension": "com/sharepoint", "url.original": "https://example.net/testsiem2.onmicrosoft.com/sharepoint", "url.path": "/testsiem2.onmicrosoft.com/sharepoint", "url.scheme": "https", @@ -801,4 +800,4 @@ "user.id": "alice@testsiem2.onmicrosoft.com", "user.name": "alice" } -] \ No newline at end of file +] diff --git a/x-pack/packetbeat/magefile.go b/x-pack/packetbeat/magefile.go index 03104ab9157e..357e5e235855 100644 --- a/x-pack/packetbeat/magefile.go +++ b/x-pack/packetbeat/magefile.go @@ -172,6 +172,13 @@ func SystemTest(ctx context.Context) error { return devtools.GoTest(ctx, args) } +func getBucketName() string { + if os.Getenv("BUILDKITE") == "true" { + return "ingest-buildkite-ci" + } + return "obs-ci-cache" +} + // getNpcapInstaller gets the installer from the Google Cloud Storage service. // // On Windows platforms, if getNpcapInstaller is invoked with the environment variables @@ -198,7 +205,8 @@ func getNpcapInstaller() error { return err } } + ciBucketName := getBucketName() fmt.Printf("getting %s from private cache\n", installer) - return sh.RunV("gsutil", "cp", "gs://obs-ci-cache/private/"+installer, dstPath) + return sh.RunV("gsutil", "cp", "gs://"+ciBucketName+"/private/"+installer, dstPath) }