
Commit aad2c52

exclude zero bucket from aggregation_data (#1183)
* exclude zero bucket from aggregation_data
* fix error string in test file.
* add one more testcase.
1 parent 3b5a343 commit aad2c52

5 files changed: +150 -10 lines

go.mod (-1)

@@ -8,7 +8,6 @@ require (
 	golang.org/x/net v0.0.0-20190620200207-3b0461eec859
 	golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd // indirect
 	golang.org/x/text v0.3.2 // indirect
-	google.golang.org/appengine v1.4.0 // indirect
 	google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb // indirect
 	google.golang.org/grpc v1.20.1
 )

stats/view/aggregation.go (+5 -4)

@@ -99,13 +99,14 @@ func Sum() *Aggregation {
 // If len(bounds) is 1 then there is no finite buckets, and that single
 // element is the common boundary of the overflow and underflow buckets.
 func Distribution(bounds ...float64) *Aggregation {
-	return &Aggregation{
+	agg := &Aggregation{
 		Type:    AggTypeDistribution,
 		Buckets: bounds,
-		newData: func() AggregationData {
-			return newDistributionData(bounds)
-		},
 	}
+	agg.newData = func() AggregationData {
+		return newDistributionData(agg)
+	}
+	return agg
 }
 
 // LastValue only reports the last value recorded using this
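The point of building the Aggregation first and wiring newData afterwards is that the closure now captures the *Aggregation instead of the original bounds slice, so distribution data created later reflects whatever agg.Buckets holds at that time (for instance, after a zero bound has been dropped during bound normalization). A minimal, self-contained sketch of that pattern, using hypothetical names (aggregation, distribution, newData) rather than the real opencensus types:

package main

import "fmt"

// aggregation mirrors the idea of view.Aggregation: the bounds plus a
// factory for per-view data, kept as a closure over the struct itself.
type aggregation struct {
	buckets []float64
	newData func() []int64
}

// distribution builds the aggregation first, then wires newData so that it
// reads buckets through the pointer instead of capturing the argument slice.
func distribution(bounds ...float64) *aggregation {
	agg := &aggregation{buckets: bounds}
	agg.newData = func() []int64 {
		return make([]int64, len(agg.buckets)+1) // one counter per bound, plus overflow
	}
	return agg
}

func main() {
	agg := distribution(0, 5, 10)
	fmt.Println(len(agg.newData())) // 4: the zero bound is still present

	agg.buckets = agg.buckets[1:] // suppose the zero bound is stripped elsewhere
	fmt.Println(len(agg.newData())) // 3: the closure sees the updated bounds
}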

stats/view/aggregation_data.go (+3 -3)

@@ -128,12 +128,12 @@ type DistributionData struct {
 	bounds          []float64 // histogram distribution of the values
 }
 
-func newDistributionData(bounds []float64) *DistributionData {
-	bucketCount := len(bounds) + 1
+func newDistributionData(agg *Aggregation) *DistributionData {
+	bucketCount := len(agg.Buckets) + 1
 	return &DistributionData{
 		CountPerBucket:     make([]int64, bucketCount),
 		ExemplarsPerBucket: make([]*metricdata.Exemplar, bucketCount),
-		bounds:             bounds,
+		bounds:             agg.Buckets,
 		Min:                math.MaxFloat64,
 		Max:                math.SmallestNonzeroFloat64,
 	}
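newDistributionData now sizes its slices from agg.Buckets: N bounds give N+1 counters, the last one being the overflow bucket. A small illustrative sketch of how a recorded value maps onto those counters (bucketIndex is a hypothetical helper written for this note, not the repository's code, assuming the usual half-open bucket convention):

package main

import "fmt"

// bucketIndex returns the counter a value falls into: the first bucket whose
// upper bound is greater than the value, or the trailing overflow bucket.
func bucketIndex(v float64, bounds []float64) int {
	for i, b := range bounds {
		if v < b {
			return i
		}
	}
	return len(bounds) // overflow bucket
}

func main() {
	bounds := []float64{2, 10} // the bounds the new test below expects for Distribution(10, 0, 2)
	counts := make([]int64, len(bounds)+1)
	for _, v := range []float64{5, 1, 3} { // the values the test records
		counts[bucketIndex(v, bounds)]++
	}
	fmt.Println(counts) // [1 2 0]
}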

stats/view/aggregation_data_test.go (+8 -2)

@@ -26,7 +26,10 @@ import (
 )
 
 func TestDataClone(t *testing.T) {
-	dist := newDistributionData([]float64{1, 2, 3, 4})
+	agg := &Aggregation{
+		Buckets: []float64{1, 2, 3, 4},
+	}
+	dist := newDistributionData(agg)
 	dist.Count = 7
 	dist.Max = 11
 	dist.Min = 1
@@ -66,7 +69,10 @@ func TestDataClone(t *testing.T) {
 }
 
 func TestDistributionData_addSample(t *testing.T) {
-	dd := newDistributionData([]float64{1, 2})
+	agg := &Aggregation{
+		Buckets: []float64{1, 2},
+	}
+	dd := newDistributionData(agg)
 	attachments1 := map[string]interface{}{"key1": "value1"}
 	t1 := time.Now()
 	dd.addSample(0.5, attachments1, t1)

stats/view/view_to_metric_test.go (+134)

@@ -24,6 +24,7 @@ import (
 
 	"github.com/google/go-cmp/cmp"
 	"go.opencensus.io/metric/metricdata"
+	"go.opencensus.io/metric/metricexport"
 	"go.opencensus.io/stats"
 	"go.opencensus.io/tag"
 )
@@ -516,6 +517,139 @@ func TestUnitConversionForAggCount(t *testing.T) {
 	}
 }
 
+type mockExp struct {
+	metrics []*metricdata.Metric
+}
+
+func (me *mockExp) ExportMetrics(ctx context.Context, metrics []*metricdata.Metric) error {
+	me.metrics = append(me.metrics, metrics...)
+	return nil
+}
+
+var _ metricexport.Exporter = (*mockExp)(nil)
+
+func TestViewToMetric_OutOfOrderWithZeroBuckets(t *testing.T) {
+	m := stats.Int64("OutOfOrderWithZeroBuckets", "", "")
+	now := time.Now()
+	tts := []struct {
+		v *View
+		m *metricdata.Metric
+	}{
+		{
+			v: &View{
+				Name:        m.Name() + "_order1",
+				Measure:     m,
+				Aggregation: Distribution(10, 0, 2),
+			},
+			m: &metricdata.Metric{
+				Descriptor: metricdata.Descriptor{
+					Name:      "OutOfOrderWithZeroBuckets_order1",
+					Unit:      metricdata.UnitDimensionless,
+					Type:      metricdata.TypeCumulativeDistribution,
+					LabelKeys: []metricdata.LabelKey{},
+				},
+				TimeSeries: []*metricdata.TimeSeries{
+					{Points: []metricdata.Point{
+						{Value: &metricdata.Distribution{
+							Count:                 3,
+							Sum:                   9.0,
+							SumOfSquaredDeviation: 8,
+							BucketOptions: &metricdata.BucketOptions{
+								Bounds: []float64{2, 10},
+							},
+							Buckets: []metricdata.Bucket{
+								{Count: 1, Exemplar: nil},
+								{Count: 2, Exemplar: nil},
+								{Count: 0, Exemplar: nil},
+							},
+						},
+							Time: now,
+						},
+					},
+						StartTime:   now,
+						LabelValues: []metricdata.LabelValue{},
+					},
+				},
+			},
+		},
+		{
+			v: &View{
+				Name:        m.Name() + "_order2",
+				Measure:     m,
+				Aggregation: Distribution(0, 5, 10),
+			},
+			m: &metricdata.Metric{
+				Descriptor: metricdata.Descriptor{
+					Name:      "OutOfOrderWithZeroBuckets_order2",
+					Unit:      metricdata.UnitDimensionless,
+					Type:      metricdata.TypeCumulativeDistribution,
+					LabelKeys: []metricdata.LabelKey{},
+				},
+				TimeSeries: []*metricdata.TimeSeries{
+					{Points: []metricdata.Point{
+						{Value: &metricdata.Distribution{
+							Count:                 3,
+							Sum:                   9.0,
+							SumOfSquaredDeviation: 8,
+							BucketOptions: &metricdata.BucketOptions{
+								Bounds: []float64{5, 10},
+							},
+							Buckets: []metricdata.Bucket{
+								{Count: 2, Exemplar: nil},
+								{Count: 1, Exemplar: nil},
+								{Count: 0, Exemplar: nil},
+							},
+						},
+							Time: now,
+						},
+					},
+						StartTime:   now,
+						LabelValues: []metricdata.LabelValue{},
+					},
+				},
+			},
+		},
+	}
+	for _, tt := range tts {
+		err := Register(tt.v)
+		if err != nil {
+			t.Fatalf("error registering view %v, err: %v", tt.v, err)
+		}
+
+	}
+
+	stats.Record(context.Background(), m.M(5), m.M(1), m.M(3))
+	time.Sleep(1 * time.Second)
+
+	me := &mockExp{}
+	reader := metricexport.NewReader()
+	reader.ReadAndExport(me)
+
+	var got *metricdata.Metric
+	lookup := func(vname string, metrics []*metricdata.Metric) *metricdata.Metric {
+		for _, m := range metrics {
+			if m.Descriptor.Name == vname {
+				return m
+			}
+		}
+		return nil
+	}
+
+	for _, tt := range tts {
+		got = lookup(tt.v.Name, me.metrics)
+		if got == nil {
+			t.Fatalf("metric %s not found in %v\n", tt.v.Name, me.metrics)
+		}
+		got.TimeSeries[0].Points[0].Time = now
+		got.TimeSeries[0].StartTime = now
+
+		want := tt.m
+		if diff := cmp.Diff(got, want); diff != "" {
+			t.Errorf("buckets differ -got +want: %s \n Serialized got %v\n, Serialized want %v\n", diff, serializeAsJSON(got), serializeAsJSON(want))
+		}
+	}
+}
+
 func serializeAsJSON(v interface{}) string {
 	blob, _ := json.MarshalIndent(v, "", " ")
 	return string(blob)
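Reading the expectations off the new test: for Distribution(10, 0, 2) the exported BucketOptions.Bounds are {2, 10}, so of the recorded values 5, 1 and 3, the 1 falls below 2 while 3 and 5 fall in [2, 10), giving bucket counts {1, 2, 0}; for Distribution(0, 5, 10) the exported bounds are {5, 10}, with 1 and 3 below 5 and 5 in [5, 10), giving {2, 1, 0}. In both cases the total Count is 3, the Sum is 9.0 (5 + 1 + 3), and the zero bound no longer appears in the exported bounds, which is the behaviour this commit locks in.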
