Skip to content

Commit

Permalink
move collectd-spark to golden
Browse files Browse the repository at this point in the history
  • Loading branch information
atoulme committed Jan 13, 2025
1 parent ab498e0 commit 9e00c61
Show file tree
Hide file tree
Showing 7 changed files with 780 additions and 95 deletions.
12 changes: 11 additions & 1 deletion tests/receivers/smartagent/collectd-spark/collectd_spark_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ import (
"path"
"testing"

"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest/pmetrictest"
"github.com/stretchr/testify/require"

"github.com/signalfx/splunk-otel-collector/tests/testutils"
Expand Down Expand Up @@ -72,7 +73,16 @@ func TestCollectdSparkReceiverProvidesAllMetrics(t *testing.T) {
{"worker metrics", "all_worker.yaml", "all_worker_metrics_config.yaml"},
} {
t.Run(args.name, func(tt *testing.T) {
	// Use the subtest's own *testing.T (tt), not the parent t:
	// passing the parent attributes failures/logs to the wrong test
	// and breaks subtest isolation (skip, fail, parallel semantics).
	testutils.RunMetricsCollectionTest(tt, args.collectorConfigFilename, args.resourceMetricsFilename,
		testutils.WithCompareMetricsOptions(
			// Golden-file comparison: timestamps, ordering, and point
			// values vary run-to-run, so only metric names/attributes
			// and structure are asserted.
			pmetrictest.IgnoreTimestamp(),
			pmetrictest.IgnoreStartTimestamp(),
			pmetrictest.IgnoreResourceMetricsOrder(),
			pmetrictest.IgnoreScopeMetricsOrder(),
			pmetrictest.IgnoreMetricsOrder(),
			pmetrictest.IgnoreMetricValues(),
		),
	)
})
}
}
334 changes: 334 additions & 0 deletions tests/receivers/smartagent/collectd-spark/testdata/all_master.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,334 @@
resourceMetrics:
- resource:
attributes:
- key: system.type
value:
stringValue: spark
scopeMetrics:
- metrics:
- gauge:
dataPoints:
- asInt: "71093280"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.jvm.total.used
- gauge:
dataPoints:
- asInt: "345702400"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.jvm.total.committed
- gauge:
dataPoints:
- asInt: "24778008"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.jvm.heap.used
- gauge:
dataPoints:
- asInt: "296222720"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.jvm.heap.committed
- gauge:
dataPoints:
- asInt: "46313496"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.jvm.non-heap.used
- gauge:
dataPoints:
- asInt: "49479680"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.jvm.non-heap.committed
- gauge:
dataPoints:
- asInt: "1"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.master.aliveWorkers
- gauge:
dataPoints:
- asInt: "1"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.master.apps
- gauge:
dataPoints:
- asInt: "0"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.master.waitingApps
- gauge:
dataPoints:
- asInt: "1"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.master.workers
- gauge:
dataPoints:
- asInt: "6520128"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.jvm.pools.Code-Cache.used
- gauge:
dataPoints:
- asInt: "7274496"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.jvm.pools.Code-Cache.committed
- gauge:
dataPoints:
- asInt: "4846272"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.jvm.pools.Compressed-Class-Space.used
- gauge:
dataPoints:
- asInt: "5111808"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.jvm.pools.Compressed-Class-Space.committed
- gauge:
dataPoints:
- asInt: "34947384"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.jvm.pools.Metaspace.used
- gauge:
dataPoints:
- asInt: "37093376"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
name: gauge.jvm.pools.Metaspace.committed
- name: counter.HiveExternalCatalog.fileCacheHits
sum:
aggregationTemporality: 2
dataPoints:
- asInt: "0"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
isMonotonic: true
- name: counter.HiveExternalCatalog.filesDiscovered
sum:
aggregationTemporality: 2
dataPoints:
- asInt: "0"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
isMonotonic: true
- name: counter.HiveExternalCatalog.hiveClientCalls
sum:
aggregationTemporality: 2
dataPoints:
- asInt: "0"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
isMonotonic: true
- name: counter.HiveExternalCatalog.parallelListingJobCount
sum:
aggregationTemporality: 2
dataPoints:
- asInt: "0"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
isMonotonic: true
- name: counter.HiveExternalCatalog.partitionsFetched
sum:
aggregationTemporality: 2
dataPoints:
- asInt: "0"
attributes:
- key: dsname
value:
stringValue: value
- key: plugin
value:
stringValue: apache_spark
- key: spark_process
value:
stringValue: master
timeUnixNano: "1000000"
isMonotonic: true
scope: {}
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,15 @@ receivers:
enhancedMetrics: true
extraMetrics: ["*"]
intervalSeconds: 1

processors:
batch:
batch/2:
groupbyattrs:
keys:
- system.type
groupbyattrs/2:
keys:
- system.type
exporters:
otlp:
endpoint: "${OTLP_ENDPOINT}"
Expand All @@ -21,4 +29,5 @@ service:
metrics:
receivers:
- smartagent/collectd_spark_master
processors: [ groupbyattrs, batch, groupbyattrs/2, batch/2 ]
exporters: [otlp]
Loading

0 comments on commit 9e00c61

Please sign in to comment.