Fixes broken metrics unit tests
Unit tests were quite often failing with "*** a different view
with the same name is already registered" messages in many places.
At the beginning of every metrics test, the test tries to register the
respective metrics views (the metrics data it wants to present), publish
metrics, and unregister the views. Unregistering the views is important
because the next test must not hold on to the old metrics data.
The unregister() views calls were made using `defer` inside the
table-test for loop, like this:
for _, td := range tests {
	defer unregister()
	.......
}
Deferred calls inside a loop only run when the enclosing test function
returns, not at the end of each iteration, so unregister() never ran
between test cases, which ultimately made the tests fail.
It is better to unregister the views before each test starts executing.
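
For illustration, here is a minimal, self-contained sketch of the problem;
the register()/unregister() pair is a hypothetical stand-in for the real
view registry, not the actual test code:

package main

import "fmt"

// registered stands in for the global view registry that the real
// metricstest helpers manage.
var registered bool

func register() error {
	if registered {
		return fmt.Errorf("a different view with the same name is already registered")
	}
	registered = true
	return nil
}

func unregister() { registered = false }

func main() {
	for i := 0; i < 2; i++ {
		// Deferred calls pile up and only run when main returns, not at
		// the end of each iteration, so iteration 1 still sees the view
		// registered by iteration 0.
		defer unregister()
		if err := register(); err != nil {
			fmt.Printf("iteration %d: %v\n", i, err)
			continue
		}
		fmt.Printf("iteration %d: registered\n", i)
	}
}

Running it prints "registered" for the first iteration and the "already
registered" error for the second, which mirrors the failures seen in the
table tests.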

This patch fixes the behaviour by calling the unregisterMetrics() function
at the start of each test case instead of deferring it inside the loop.
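
Condensed from the diff below, the fixed shape of each table test looks
roughly like this (a sketch, not the literal patch):

for _, test := range testData {
	t.Run(test.name, func(t *testing.T) {
		// Clear any views left registered by a previous test case
		// before this case registers its own.
		unregisterMetrics()

		metrics, err := NewRecorder()
		assertErrIsNil(err, "Recorder initialization failed", t)
		// ... record and check the metrics for this test case ...
	})
}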

Fixes #1659
hrishin authored and tekton-robot committed Dec 11, 2019
1 parent c28df26 commit 21e0683
Showing 2 changed files with 12 additions and 7 deletions.
pkg/reconciler/pipelinerun/metrics_test.go (7 changes: 5 additions & 2 deletions)
@@ -93,7 +93,7 @@ func TestRecordPipelineRunDurationCount(t *testing.T) {
 
 	for _, test := range testData {
 		t.Run(test.name, func(t *testing.T) {
-			defer unregisterMetrics()
+			unregisterMetrics()
 
 			metrics, err := NewRecorder()
 			assertErrIsNil(err, "Recorder initialization failed", t)
@@ -102,12 +102,13 @@ func TestRecordPipelineRunDurationCount(t *testing.T) {
 			assertErrIsNil(err, "DurationAndCount recording recording got an error", t)
 			metricstest.CheckDistributionData(t, "pipelinerun_duration_seconds", test.expectedTags, 1, test.expectedDuration, test.expectedDuration)
 			metricstest.CheckCountData(t, "pipelinerun_count", test.expectedTags, test.expectedCount)
+
 		})
 	}
 }
 
 func TestRecordRunningPipelineRunsCount(t *testing.T) {
-	defer unregisterMetrics()
+	unregisterMetrics()
 
 	ctx, _ := rtesting.SetupFakeContext(t)
 	informer := fakepipelineruninformer.Get(ctx)
@@ -121,6 +122,7 @@ func TestRecordRunningPipelineRunsCount(t *testing.T) {
 	err = metrics.RunningPipelineRuns(informer.Lister())
 	assertErrIsNil(err, "RunningPrsCount recording expected to return nil but got error", t)
 	metricstest.CheckLastValueData(t, "running_pipelineruns_count", map[string]string{}, 1)
+
 }
 
 func addPipelineRun(informer alpha1.PipelineRunInformer, run, pipeline, ns string, status corev1.ConditionStatus, t *testing.T) {
@@ -156,4 +158,5 @@ func assertErrIsNil(err error, message string, t *testing.T) {
 
 func unregisterMetrics() {
 	metricstest.Unregister("pipelinerun_duration_seconds", "pipelinerun_count", "running_pipelineruns_count")
+
 }
pkg/reconciler/taskrun/metrics_test.go (12 changes: 7 additions & 5 deletions)
@@ -101,7 +101,7 @@ func TestRecordTaskrunDurationCount(t *testing.T) {
 
 	for _, test := range testData {
 		t.Run(test.name, func(t *testing.T) {
-			defer unregisterMetrics()
+			unregisterMetrics()
 
 			metrics, err := NewRecorder()
 			assertErrIsNil(err, "Recorder initialization failed", t)
@@ -110,6 +110,7 @@ func TestRecordTaskrunDurationCount(t *testing.T) {
 			assertErrIsNil(err, "DurationAndCount recording got an error", t)
 			metricstest.CheckDistributionData(t, "taskrun_duration_seconds", test.expectedTags, 1, test.expectedDuration, test.expectedDuration)
 			metricstest.CheckCountData(t, "taskrun_count", test.expectedTags, test.expectedCount)
+
 		})
 	}
 }
@@ -179,21 +180,21 @@ func TestRecordPipelinerunTaskrunDurationCount(t *testing.T) {
 
 	for _, test := range testData {
 		t.Run(test.name, func(t *testing.T) {
-			defer unregisterMetrics()
-
+			unregisterMetrics()
 			metrics, err := NewRecorder()
 			assertErrIsNil(err, "Recorder initialization failed", t)
 
 			err = metrics.DurationAndCount(test.taskRun)
 			assertErrIsNil(err, "DurationAndCount recording got an error", t)
 			metricstest.CheckDistributionData(t, "pipelinerun_taskrun_duration_seconds", test.expectedTags, 1, test.expectedDuration, test.expectedDuration)
 			metricstest.CheckCountData(t, "taskrun_count", test.expectedTags, test.expectedCount)
+
 		})
 	}
 }
 
 func TestRecordRunningTaskrunsCount(t *testing.T) {
-	defer unregisterMetrics()
+	unregisterMetrics()
 
 	ctx, _ := rtesting.SetupFakeContext(t)
 	informer := faketaskruninformer.Get(ctx)
@@ -255,7 +256,7 @@ func TestRecordPodLatency(t *testing.T) {
 
 	for _, td := range testData {
 		t.Run(td.name, func(t *testing.T) {
-			defer unregisterMetrics()
+			unregisterMetrics()
 
 			metrics, err := NewRecorder()
 			assertErrIsNil(err, "Recorder initialization failed", t)
@@ -267,6 +268,7 @@ func TestRecordPodLatency(t *testing.T) {
 			}
 			assertErrIsNil(err, "RecordPodLatency recording expected to return nil but got error", t)
 			metricstest.CheckLastValueData(t, "taskruns_pod_latency", td.expectedTags, td.expectedValue)
+
 		})
 	}
 
