Skip to content
This repository was archived by the owner on Sep 15, 2025. It is now read-only.

Commit 7c614b1

Browse files
authored
Error on non-monotonic bounds (#744)
* Error on non-monotonic bounds
* Add changelog entry
* Add issue number
* Address review feedback
1 parent 5d167b0 commit 7c614b1

File tree

5 files changed

+138
-11
lines changed

5 files changed

+138
-11
lines changed
Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
2+
change_type: bug_fix
3+
4+
# The name of the component (e.g. pkg/quantile)
5+
component: pkg/quantile
6+
7+
# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
8+
note: Return an error on non-monotonic bounds for histogram metrics. Previously, this would cause a panic.
9+
10+
# The PR related to this change
11+
issues: [744]
12+
13+
# (Optional) One or more lines of additional information to render under the primary note.
14+
# These lines will be padded with 2 spaces and then inserted directly into the document.
15+
# Use pipe (|) for multiline entries.
16+
subtext:

pkg/otlp/metrics/histograms_test.go

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,12 +6,16 @@
66
package metrics
77

88
import (
9+
"context"
10+
"os"
911
"testing"
1012

1113
"github.com/DataDog/opentelemetry-mapping-go/pkg/otlp/attributes"
14+
"github.com/DataDog/opentelemetry-mapping-go/pkg/quantile"
1215
"github.com/stretchr/testify/assert"
1316
"github.com/stretchr/testify/require"
1417
"go.opentelemetry.io/collector/component/componenttest"
18+
"go.opentelemetry.io/collector/pdata/pmetric"
1519
"go.uber.org/zap"
1620
"go.uber.org/zap/zapcore"
1721
"go.uber.org/zap/zaptest/observer"
@@ -146,6 +150,28 @@ func TestDeltaHistogramTranslatorOptions(t *testing.T) {
146150
}
147151
}
148152

153+
func TestNonMonotonicCount(t *testing.T) {
154+
// Unmarshal OTLP data.
155+
otlpbytes, err := os.ReadFile("testdata/otlpdata/histogram/simple-delta-non-monotonic-bound.json")
156+
require.NoError(t, err, "failed to read OTLP file %q", "testdata/otlpdata/histogram/simple-delta-non-monotonic-bound.json")
157+
158+
var unmarshaler pmetric.JSONUnmarshaler
159+
otlpdata, err := unmarshaler.UnmarshalMetrics(otlpbytes)
160+
require.NoError(t, err, "failed to unmarshal OTLP data from file %q", "testdata/otlpdata/histogram/simple-delta-non-monotonic-bound.json")
161+
// Map metrics using translator.
162+
consumer := NewTestConsumer()
163+
options := []TranslatorOption{WithOriginProduct(OriginProductDatadogAgent)}
164+
set := componenttest.NewNopTelemetrySettings()
165+
attributesTranslator, err := attributes.NewTranslator(set)
166+
assert.NoError(t, err)
167+
168+
translator, err := NewTranslator(set, attributesTranslator, options...)
169+
assert.NoError(t, err)
170+
171+
_, err = translator.MapMetrics(context.Background(), otlpdata, &consumer, nil)
172+
assert.EqualError(t, err, quantile.ErrNonMonotonicBoundaries)
173+
}
174+
149175
func TestCumulativeHistogramTranslatorOptions(t *testing.T) {
150176
tests := []struct {
151177
name string

pkg/otlp/metrics/metrics_translator.go

Lines changed: 31 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -298,7 +298,7 @@ func (t *Translator) getSketchBuckets(
298298
p pmetric.HistogramDataPoint,
299299
histInfo histogramInfo,
300300
delta bool,
301-
) {
301+
) error {
302302
startTs := uint64(p.StartTimestamp())
303303
ts := uint64(p.Timestamp())
304304
as := &quantile.Agent{}
@@ -360,10 +360,17 @@ func (t *Translator) getSketchBuckets(
360360
var nonZeroBucket bool
361361
if delta {
362362
nonZeroBucket = count > 0
363-
as.InsertInterpolate(lowerBound, upperBound, uint(count))
363+
err := as.InsertInterpolate(lowerBound, upperBound, uint(count))
364+
if err != nil {
365+
return err
366+
}
364367
} else if dx, ok := t.prevPts.Diff(bucketDims, startTs, ts, float64(count)); ok {
365368
nonZeroBucket = dx > 0
366-
as.InsertInterpolate(lowerBound, upperBound, uint(dx))
369+
err := as.InsertInterpolate(lowerBound, upperBound, uint(dx))
370+
if err != nil {
371+
return err
372+
}
373+
367374
}
368375

369376
if nonZeroBucket {
@@ -413,6 +420,7 @@ func (t *Translator) getSketchBuckets(
413420

414421
consumer.ConsumeSketch(ctx, pointDims, ts, 0, sketch)
415422
}
423+
return nil
416424
}
417425

418426
func (t *Translator) getLegacyBuckets(
@@ -462,7 +470,7 @@ func (t *Translator) mapHistogramMetrics(
462470
dims *Dimensions,
463471
slice pmetric.HistogramDataPointSlice,
464472
delta bool,
465-
) {
473+
) error {
466474
for i := 0; i < slice.Len(); i++ {
467475
p := slice.At(i)
468476
if p.Flags().NoRecordedValue() {
@@ -532,9 +540,13 @@ func (t *Translator) mapHistogramMetrics(
532540
case HistogramModeCounters:
533541
t.getLegacyBuckets(ctx, consumer, pointDims, p, delta)
534542
case HistogramModeDistributions:
535-
t.getSketchBuckets(ctx, consumer, pointDims, p, histInfo, delta)
543+
err := t.getSketchBuckets(ctx, consumer, pointDims, p, histInfo, delta)
544+
if err != nil {
545+
return err
546+
}
536547
}
537548
}
549+
return nil
538550
}
539551

540552
// formatFloat formats a float number as close as possible to what
@@ -825,12 +837,18 @@ func (t *Translator) MapMetrics(ctx context.Context, md pmetric.Metrics, consume
825837
renameMetrics(md)
826838
}
827839

828-
t.mapToDDFormat(ctx, md, consumer, additionalTags, host, scopeName, rattrs)
840+
err := t.mapToDDFormat(ctx, md, consumer, additionalTags, host, scopeName, rattrs)
841+
if err != nil {
842+
return metadata, err
843+
}
829844
}
830845

831846
for k := 0; k < newMetrics.Len(); k++ {
832847
md := newMetrics.At(k)
833-
t.mapToDDFormat(ctx, md, consumer, additionalTags, host, scopeName, rattrs)
848+
err := t.mapToDDFormat(ctx, md, consumer, additionalTags, host, scopeName, rattrs)
849+
if err != nil {
850+
return metadata, err
851+
}
834852
}
835853
}
836854

@@ -851,7 +869,7 @@ func (t *Translator) MapMetrics(ctx context.Context, md pmetric.Metrics, consume
851869
return metadata, nil
852870
}
853871

854-
func (t *Translator) mapToDDFormat(ctx context.Context, md pmetric.Metric, consumer Consumer, additionalTags []string, host string, scopeName string, rattrs pcommon.Map) {
872+
func (t *Translator) mapToDDFormat(ctx context.Context, md pmetric.Metric, consumer Consumer, additionalTags []string, host string, scopeName string, rattrs pcommon.Map) error {
855873
baseDims := &Dimensions{
856874
name: md.Name(),
857875
tags: additionalTags,
@@ -889,7 +907,10 @@ func (t *Translator) mapToDDFormat(ctx context.Context, md pmetric.Metric, consu
889907
switch md.Histogram().AggregationTemporality() {
890908
case pmetric.AggregationTemporalityCumulative, pmetric.AggregationTemporalityDelta:
891909
delta := md.Histogram().AggregationTemporality() == pmetric.AggregationTemporalityDelta
892-
t.mapHistogramMetrics(ctx, consumer, baseDims, md.Histogram().DataPoints(), delta)
910+
err := t.mapHistogramMetrics(ctx, consumer, baseDims, md.Histogram().DataPoints(), delta)
911+
if err != nil {
912+
return err
913+
}
893914
default: // pmetric.AggregationTemporalityUnspecified or any other not supported type
894915
t.logger.Debug("Unknown or unsupported aggregation temporality",
895916
zap.String("metric name", md.Name()),
@@ -912,4 +933,5 @@ func (t *Translator) mapToDDFormat(ctx context.Context, md pmetric.Metric, consu
912933
default: // pmetric.MetricDataTypeNone or any other not supported type
913934
t.logger.Debug("Unknown or unsupported metric type", zap.String(metricName, md.Name()), zap.Any("data type", md.Type()))
914935
}
936+
return nil
915937
}
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
{
2+
"resourceMetrics": [
3+
{
4+
"resource": {
5+
"attributes": [
6+
{
7+
"key": "host.name",
8+
"value": {
9+
"stringValue": "hostname"
10+
}
11+
}
12+
]
13+
},
14+
"scopeMetrics": [
15+
{
16+
"scope": {},
17+
"metrics": [
18+
{
19+
"name": "doubleHist.test",
20+
"histogram": {
21+
"dataPoints": [
22+
{
23+
"attributes": [
24+
{
25+
"key": "attribute_tag",
26+
"value": {
27+
"stringValue": "attribute_value"
28+
}
29+
}
30+
],
31+
"timeUnixNano": "1667560641226420924",
32+
"count": "20",
33+
"sum": 3.141592653589793,
34+
"bucketCounts": [
35+
"2",
36+
"18"
37+
],
38+
"explicitBounds": [
39+
1,
40+
0.1
41+
],
42+
"min": -100,
43+
"max": 100
44+
}
45+
],
46+
"aggregationTemporality": 1
47+
}
48+
}
49+
]
50+
}
51+
]
52+
}
53+
]
54+
}

pkg/quantile/agent.go

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,11 +5,16 @@
55

66
package quantile
77

8+
import "errors"
9+
810
const (
911
agentBufCap = 512
1012
)
1113

12-
var agentConfig = Default()
14+
var (
15+
agentConfig = Default()
16+
ErrNonMonotonicBoundaries = "explicit bucket histogram: non-monotonic boundaries"
17+
)
1318

1419
// An Agent sketch is an insert optimized version of the sketch for use in the
1520
// datadog-agent.
@@ -83,14 +88,17 @@ func (a *Agent) Insert(v float64, sampleRate float64) {
8388
}
8489

8590
// InsertInterpolate linearly interpolates a count from the given lower to upper bounds
86-
func (a *Agent) InsertInterpolate(lower float64, upper float64, count uint) {
91+
func (a *Agent) InsertInterpolate(lower float64, upper float64, count uint) error {
8792
keys := make([]Key, 0)
8893
for k := agentConfig.key(lower); k <= agentConfig.key(upper); k++ {
8994
keys = append(keys, k)
9095
}
9196
whatsLeft := int(count)
9297
distance := upper - lower
9398
startIdx := 0
99+
if len(keys) == 0 {
100+
return errors.New(ErrNonMonotonicBoundaries)
101+
}
94102
lowerB := agentConfig.binLow(keys[startIdx])
95103
endIdx := 1
96104
var remainder float64
@@ -126,4 +134,5 @@ func (a *Agent) InsertInterpolate(lower float64, upper float64, count uint) {
126134
a.CountBuf = append(a.CountBuf, KeyCount{k: keys[startIdx], n: uint(whatsLeft)})
127135
}
128136
a.flush()
137+
return nil
129138
}

0 commit comments

Comments
 (0)