Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/main' into fix-system-pid-revert
Browse files Browse the repository at this point in the history
  • Loading branch information
fearful-symmetry committed May 28, 2024
2 parents dac850f + f411b06 commit 3327337
Show file tree
Hide file tree
Showing 11 changed files with 34 additions and 26 deletions.
File renamed without changes.
6 changes: 3 additions & 3 deletions .buildkite/scripts/common.sh
Original file line number Diff line number Diff line change
Expand Up @@ -382,7 +382,7 @@ startCloudTestEnv() {
local dir=$1
withAWS
echo "--- Run docker-compose services for emulated cloud env"
docker-compose -f .ci/jobs/docker-compose.yml up -d #TODO: move all docker-compose files from the .ci to .buildkite folder before switching to BK
docker-compose -f .buildkite/deploy/docker/docker-compose.yml up -d
with_Terraform
terraformInit "$dir"
export TF_VAR_BRANCH=$(echo "${BUILDKITE_BRANCH}" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9-]/-/g')
Expand Down Expand Up @@ -444,10 +444,10 @@ installNodeJsDependencies() {
teardown() {
# Teardown resources after using them
echo "---Terraform Cleanup"
.ci/scripts/terraform-cleanup.sh "${MODULE_DIR}" #TODO: move all docker-compose files from the .ci to .buildkite folder before switching to BK
.buildkite/scripts/terraform-cleanup.sh "${MODULE_DIR}"

echo "---Docker Compose Cleanup"
docker-compose -f .ci/jobs/docker-compose.yml down -v #TODO: move all docker-compose files from the .ci to .buildkite folder before switching to BK
docker-compose -f .buildkite/deploy/docker/docker-compose.yml down -v
}

unset_secrets () {
Expand Down
4 changes: 2 additions & 2 deletions .buildkite/scripts/setup_cloud_env.sh
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ teardown() {
tf_cleanup "${MODULE_DIR}" #TODO: move all docker-compose files from the .ci to .buildkite folder before switching to BK

echo "~~~ Docker Compose Cleanup"
docker-compose -f .ci/jobs/docker-compose.yml down -v #TODO: move all docker-compose files from the .ci to .buildkite folder before switching to BK
docker-compose -f .buildkite/deploy/docker/docker-compose.yml down -v
}

tf_cleanup() {
Expand All @@ -38,7 +38,7 @@ export TEST_TAGS="${TEST_TAGS:+$TEST_TAGS,}aws"
set -o xtrace

echo "~~~ Run docker-compose services for emulated cloud env"
docker-compose -f .ci/jobs/docker-compose.yml up -d #TODO: move all docker-compose files from the .ci to .buildkite folder before switching to BK
docker-compose -f .buildkite/deploy/docker/docker-compose.yml up -d
echo "~~~ Initialize TF cloud resources"
cd "$MODULE_DIR"
export TF_VAR_BRANCH=$(echo "${BUILDKITE_BRANCH}" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9-]/-/g')
Expand Down
File renamed without changes.
4 changes: 4 additions & 0 deletions CHANGELOG.next.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@ https://github.com/elastic/beats/compare/v8.8.1\...main[Check the HEAD diff]

*Heartbeat*

- Fix monitor state loader to not wait extra seconds for the last attempt {pull}39621[39621]

*Metricbeat*

- Setting period for counter cache for Prometheus remote_write at least to 60sec {pull}38553[38553]
Expand Down Expand Up @@ -91,6 +93,7 @@ https://github.com/elastic/beats/compare/v8.8.1\...main[Check the HEAD diff]
- Fix cache processor expiries heap cleanup on partial file writes. {pull}38561[38561]
- Fix cache processor expiries infinite growth when a large TTL is used and recurring keys are cached. {pull}38561[38561]
- Fix parsing of RFC 3164 process IDs in syslog processor. {issue}38947[38947] {pull}38982[38982]
- Rename the field "apache2.module.error" to "apache.module.error" in Apache error visualization. {issue}39480[39480] {pull}39481[39481]

*Auditbeat*
- Set field types to correctly match ECS in sessionmd processor {issue}38955[38955] {pull}38994[38994]
Expand Down Expand Up @@ -168,6 +171,7 @@ https://github.com/elastic/beats/compare/v8.8.1\...main[Check the HEAD diff]
- Fix timeout caused by the retrieval of which indices are hidden {pull}39165[39165]
- Fix Azure Monitor support for multiple aggregation types {issue}39192[39192] {pull}39204[39204]
- Fix for MySQL/Performance - Query failure for MySQL versions below v8.0.1, for performance metric `quantile_95`. {pull}38710[38710]
- Normalize AWS RDS CPU Utilization values before making the metadata API call. {pull}39664[39664]

*Osquerybeat*

Expand Down
5 changes: 2 additions & 3 deletions dev-tools/packaging/templates/docker/Dockerfile.tmpl
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,9 @@ RUN microdnf -y update && \
microdnf -y install findutils shadow-utils && \
microdnf clean all
{{- else }}
# Install nghttp2 to keep it up to date, it is present in the base image but at an older version.
RUN for iter in {1..10}; do \
apt-get update -y && \
DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends --yes ca-certificates curl gawk libcap2-bin xz-utils nghttp2 && \
DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends --yes ca-certificates curl gawk libcap2-bin xz-utils && \
apt-get clean all && \
exit_code=0 && break || exit_code=$? && echo "apt-get error: retry $iter in 10s" && sleep 10; \
done; \
Expand Down Expand Up @@ -154,7 +153,7 @@ RUN chown -R {{ .user }} $NODE_PATH
USER {{ .user }}
# If this fails dump the NPM logs
RUN npm i -g --loglevel verbose --engine-strict @elastic/synthetics@stack_release || sh -c 'tail -n +1 /root/.npm/_logs/* && exit 1'
RUN chmod ug+rwX -R $NODE_PATH
RUN chmod ug+rwX -R $NODE_PATH
USER root

# Install the deps as needed by the exact version of playwright elastic synthetics uses
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,7 @@
"columns": [
"source.address",
"log.level",
"apache2.error.module",
"apache.error.module",
"message"
],
"enhancements": {},
Expand Down Expand Up @@ -215,4 +215,4 @@
"type": "dashboard",
"updated_at": "2021-08-04T16:33:55.372Z",
"version": "WzQzNDUsMV0="
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
"columns": [
"source.address",
"log.level",
"apache2.error.module",
"apache.error.module",
"message"
],
"description": "",
Expand Down Expand Up @@ -55,4 +55,4 @@
"type": "search",
"updated_at": "2021-08-04T16:33:55.372Z",
"version": "WzQzNDMsMV0="
}
}
16 changes: 9 additions & 7 deletions heartbeat/monitors/wrappers/monitorstate/tracker.go
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ func (t *Tracker) GetCurrentState(sf stdfields.StdMonitorFields, rc RetryConfig)
var loadedState *State
var err error
var i int
for i = 0; i < attempts; i++ {
for i = 1; i <= attempts; i++ {
loadedState, err = t.stateLoader(sf)
if err == nil {
if loadedState != nil {
Expand All @@ -111,7 +111,13 @@ func (t *Tracker) GetCurrentState(sf stdfields.StdMonitorFields, rc RetryConfig)
}
var loaderError LoaderError
if errors.As(err, &loaderError) && !loaderError.Retry {
logp.L().Warnf("could not load last externally recorded state: %v", loaderError)
logp.L().Warnf("failed to load previous monitor state: %v", loaderError)
break
}

// last attempt, exit and log error without sleeping
if i == attempts {
logp.L().Warnf("failed to load previous monitor state: %s after %d attempts: %v", sf.ID, i, err)
break
}

Expand All @@ -120,17 +126,13 @@ func (t *Tracker) GetCurrentState(sf stdfields.StdMonitorFields, rc RetryConfig)
if rc.waitFn != nil {
sleepFor = rc.waitFn()
}
logp.L().Warnf("could not load last externally recorded state, will retry again in %d milliseconds: %v", sleepFor.Milliseconds(), err)
logp.L().Warnf("could not load previous monitor state, retrying in %d milliseconds: %v", sleepFor.Milliseconds(), err)
time.Sleep(sleepFor)
}
if err != nil {
logp.L().Warnf("could not load prior state from elasticsearch after %d attempts, will create new state for monitor: %s", i+1, sf.ID)
}

if loadedState != nil {
t.states[sf.ID] = loadedState
}

// Return what we found, even if nil
return loadedState
}
Expand Down
2 changes: 1 addition & 1 deletion x-pack/filebeat/input/cel/input_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -1570,7 +1570,7 @@ func TestInput(t *testing.T) {
t.Fatalf("unexpected error running test: %v", err)
}

ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()

v2Ctx := v2.Context{
Expand Down
15 changes: 9 additions & 6 deletions x-pack/metricbeat/module/aws/cloudwatch/metadata/rds/rds.go
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,9 @@ func AddMetadata(regionName string, awsConfig awssdk.Config, fips_enabled bool,
}
})

// Get DBInstance IDs per region
dbDetailsMap, err := getDBInstancesPerRegion(svc)
if err != nil {
return events, fmt.Errorf("aws.rds.db_instance fields are not available, skipping region %s: %w", regionName, err)
}

// Normalize CPU Utilization values before making the API call,
// because the API call can fail, and we need to ensure the
// CPU values are correctly scaled regardless of the API call outcome.
for _, event := range events {
cpuValue, err := event.RootFields.GetValue("aws.rds.metrics.CPUUtilization.avg")
if err == nil {
Expand All @@ -42,6 +39,12 @@ func AddMetadata(regionName string, awsConfig awssdk.Config, fips_enabled bool,
}
}

// Get DBInstance IDs per region
dbDetailsMap, err := getDBInstancesPerRegion(svc)
if err != nil {
return events, fmt.Errorf("aws.rds.db_instance fields are not available, skipping region %s: %w", regionName, err)
}

for dbInstanceIdentifier, output := range dbDetailsMap {
for eventIdentifier := range events {
eventIdentifierComponents := strings.Split(eventIdentifier, "-")
Expand Down

0 comments on commit 3327337

Please sign in to comment.