CHANGELOG.md (2 changes: 2 additions & 0 deletions)
@@ -4,6 +4,8 @@

### General

- Support fixed-bucket histogram aggregation and export histograms with the OTLP and Prometheus exporters.

#### Breaking Changes

- Methods and classes deprecated in 0.14.x have been removed.
MetricAdapter.java (Prometheus exporter)
@@ -9,6 +9,7 @@

import io.opentelemetry.api.common.Labels;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleHistogramPointData;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.DoubleSumData;
import io.opentelemetry.sdk.metrics.data.DoubleSummaryPointData;
@@ -46,7 +47,9 @@ final class MetricAdapter {

static final String SAMPLE_SUFFIX_COUNT = "_count";
static final String SAMPLE_SUFFIX_SUM = "_sum";
static final String SAMPLE_SUFFIX_BUCKET = "_bucket";
static final String LABEL_NAME_QUANTILE = "quantile";
static final String LABEL_NAME_LE = "le";

// Converts a MetricData to a Prometheus MetricFamilySamples.
static MetricFamilySamples toMetricFamilySamples(MetricData metricData) {
@@ -85,6 +88,8 @@ static Collector.Type toMetricFamilyType(MetricData metricData) {
return Collector.Type.GAUGE;
case SUMMARY:
return Collector.Type.SUMMARY;
case HISTOGRAM:
return Collector.Type.HISTOGRAM;
}
return Collector.Type.UNTYPED;
}
@@ -122,6 +127,10 @@ static List<Sample> toSamples(
addSummarySamples(
(DoubleSummaryPointData) pointData, name, labelNames, labelValues, samples);
break;
case HISTOGRAM:
addHistogramSamples(
(DoubleHistogramPointData) pointData, name, labelNames, labelValues, samples);
break;
}
}
return samples;
@@ -169,6 +178,46 @@ private static void addSummarySamples(
}
}

private static void addHistogramSamples(
DoubleHistogramPointData doubleHistogramPointData,
String name,
List<String> labelNames,
List<String> labelValues,
List<Sample> samples) {
samples.add(
new Sample(
name + SAMPLE_SUFFIX_COUNT,
labelNames,
labelValues,
doubleHistogramPointData.getCount()));
samples.add(
new Sample(
name + SAMPLE_SUFFIX_SUM, labelNames, labelValues, doubleHistogramPointData.getSum()));

List<String> labelNamesWithLe = new ArrayList<>(labelNames.size() + 1);
labelNamesWithLe.addAll(labelNames);
labelNamesWithLe.add(LABEL_NAME_LE);
long[] cumulativeCount = new long[] {0};
doubleHistogramPointData.forEach(
(upperBound, count) -> {
List<String> labelValuesWithLe = new ArrayList<>(labelValues.size() + 1);
labelValuesWithLe.addAll(labelValues);
labelValuesWithLe.add(doubleToGoString(upperBound));
// According to
// https://github.com/open-telemetry/opentelemetry-proto/blob/v0.7.0/opentelemetry/proto/metrics/v1/metrics.proto#L505
// the upper bounds are exclusive, while Prometheus requires them to be inclusive.
// There is not much we can do here until the proto adds a field to support inclusive
// upper bounds.
cumulativeCount[0] += count;
samples.add(
new Sample(
name + SAMPLE_SUFFIX_BUCKET,
labelNamesWithLe,
labelValuesWithLe,
cumulativeCount[0]));
});
}

private static int estimateNumSamples(int numPoints, MetricDataType type) {
if (type == MetricDataType.SUMMARY) {
// count + sum + estimated 2 percentiles (default MinMaxSumCount aggregator).
@@ -189,6 +238,8 @@ private static Collection<? extends PointData> getPoints(MetricData metricData)
return metricData.getLongSumData().getPoints();
case SUMMARY:
return metricData.getDoubleSummaryData().getPoints();
case HISTOGRAM:
return metricData.getDoubleHistogramData().getPoints();
}
return Collections.emptyList();
}
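The cumulative-bucket conversion in addHistogramSamples can be illustrated with a small standalone sketch (the class and method names below are hypothetical, not part of the exporter): each per-bucket count is added to a running total, every total is emitted under an "le" label, and the bucket after the last explicit bound becomes "+Inf". Because the OTLP bounds are exclusive while Prometheus treats "le" as inclusive, the boundaries are only approximately equivalent, as noted in the comment above.

import java.util.ArrayList;
import java.util.List;

final class HistogramBucketSketch {
  // Converts per-bucket counts plus explicit bounds into Prometheus-style
  // cumulative "le" samples; the bucket after the last bound is "+Inf".
  static List<String> toCumulativeLeSamples(double[] bounds, long[] counts) {
    List<String> samples = new ArrayList<>(counts.length);
    long cumulative = 0;
    for (int i = 0; i < counts.length; i++) {
      cumulative += counts[i];
      String le = (i < bounds.length) ? Double.toString(bounds[i]) : "+Inf";
      samples.add("full_name_bucket{le=\"" + le + "\"} " + cumulative);
    }
    return samples;
  }

  public static void main(String[] args) {
    // Same data as the histogram test below: bounds [1.0], counts [4, 9].
    toCumulativeLeSamples(new double[] {1.0}, new long[] {4L, 9L})
        .forEach(System.out::println);
    // Prints:
    // full_name_bucket{le="1.0"} 4
    // full_name_bucket{le="+Inf"} 13
  }
}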
MetricAdapterTest.java (Prometheus exporter)
@@ -14,6 +14,8 @@
import io.opentelemetry.sdk.common.InstrumentationLibraryInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleGaugeData;
import io.opentelemetry.sdk.metrics.data.DoubleHistogramData;
import io.opentelemetry.sdk.metrics.data.DoubleHistogramPointData;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.DoubleSumData;
import io.opentelemetry.sdk.metrics.data.DoubleSummaryData;
@@ -157,6 +159,24 @@ class MetricAdapterTest {
Collections.singletonList(
DoubleSummaryPointData.create(
123, 456, Labels.of("kp", "vp"), 5, 7, Collections.emptyList()))));
private static final MetricData HISTOGRAM =
MetricData.createDoubleHistogram(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationLibraryInfo.create("full", "version"),
"instrument.name",
"description",
"1",
DoubleHistogramData.create(
AggregationTemporality.DELTA,
Collections.singletonList(
DoubleHistogramPointData.create(
123,
456,
Labels.of("kp", "vp"),
1.0,
2L,
Collections.emptyList(),
Collections.singletonList(2L)))));

@Test
void toProtoMetricDescriptorType() {
@@ -204,6 +224,10 @@ void toProtoMetricDescriptorType() {
metricFamilySamples = MetricAdapter.toMetricFamilySamples(LONG_GAUGE);
assertThat(metricFamilySamples.type).isEqualTo(Collector.Type.GAUGE);
assertThat(metricFamilySamples.samples).hasSize(1);

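// A histogram point with no explicit boundaries produces count + sum + a single "+Inf" bucket, i.e. 3 samples.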
metricFamilySamples = MetricAdapter.toMetricFamilySamples(HISTOGRAM);
assertThat(metricFamilySamples.type).isEqualTo(Collector.Type.HISTOGRAM);
assertThat(metricFamilySamples.samples).hasSize(3);
}

@Test
@@ -323,6 +347,37 @@ void toSamples_SummaryPoints() {
12.3));
}

@Test
void toSamples_HistogramPoints() {
assertThat(
MetricAdapter.toSamples("full_name", MetricDataType.HISTOGRAM, Collections.emptyList()))
.isEmpty();

assertThat(
MetricAdapter.toSamples(
"full_name",
MetricDataType.HISTOGRAM,
ImmutableList.of(
DoubleHistogramPointData.create(
321,
654,
Labels.of("kp", "vp"),
18.3,
9,
ImmutableList.of(1.0),
ImmutableList.of(4L, 9L)))))
.containsExactly(
new Sample("full_name_count", ImmutableList.of("kp"), ImmutableList.of("vp"), 9),
new Sample("full_name_sum", ImmutableList.of("kp"), ImmutableList.of("vp"), 18.3),
new Sample(
"full_name_bucket", ImmutableList.of("kp", "le"), ImmutableList.of("vp", "1.0"), 4),
new Sample(
"full_name_bucket",
ImmutableList.of("kp", "le"),
ImmutableList.of("vp", "+Inf"),
13));
}

@Test
void toMetricFamilySamples() {
MetricData metricData = MONOTONIC_CUMULATIVE_DOUBLE_SUM;
MetricAdapter.java (OTLP exporter)
@@ -25,6 +25,8 @@
import io.opentelemetry.proto.metrics.v1.ResourceMetrics;
import io.opentelemetry.sdk.common.InstrumentationLibraryInfo;
import io.opentelemetry.sdk.metrics.data.DoubleGaugeData;
import io.opentelemetry.sdk.metrics.data.DoubleHistogramData;
import io.opentelemetry.sdk.metrics.data.DoubleHistogramPointData;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.DoubleSumData;
import io.opentelemetry.sdk.metrics.data.DoubleSummaryData;
@@ -149,6 +151,15 @@ static Metric toProtoMetric(MetricData metricData) {
.addAllDataPoints(toDoubleDataPoints(doubleGaugeData.getPoints()))
.build());
break;
case HISTOGRAM:
DoubleHistogramData doubleHistogramData = metricData.getDoubleHistogramData();
builder.setDoubleHistogram(
DoubleHistogram.newBuilder()
.setAggregationTemporality(
mapToTemporality(doubleHistogramData.getAggregationTemporality()))
.addAllDataPoints(toDoubleHistogramDataPoints(doubleHistogramData.getPoints()))
.build());
break;
}
return builder.build();
}
@@ -198,6 +209,27 @@ static Collection<DoubleDataPoint> toDoubleDataPoints(Collection<DoublePointData
return result;
}

static Collection<DoubleHistogramDataPoint> toDoubleHistogramDataPoints(
Collection<DoubleHistogramPointData> points) {
List<DoubleHistogramDataPoint> result = new ArrayList<>(points.size());
for (DoubleHistogramPointData doubleHistogramPoint : points) {
DoubleHistogramDataPoint.Builder builder =
DoubleHistogramDataPoint.newBuilder()
.setStartTimeUnixNano(doubleHistogramPoint.getStartEpochNanos())
.setTimeUnixNano(doubleHistogramPoint.getEpochNanos())
.setCount(doubleHistogramPoint.getCount())
.setSum(doubleHistogramPoint.getSum())
.addAllBucketCounts(doubleHistogramPoint.getCounts())
.addAllExplicitBounds(doubleHistogramPoint.getBoundaries());
Collection<StringKeyValue> labels = toProtoLabels(doubleHistogramPoint.getLabels());
if (!labels.isEmpty()) {
builder.addAllLabels(labels);
}
result.add(builder.build());
}
return result;
}
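For context on the data-point shape handled above, here is an illustrative sketch of the bucket layout (an informal restatement of the metrics.proto comment cited earlier in this diff, not code from this PR): N explicit bounds always pair with N + 1 bucket counts, and with exclusive upper bounds a value falls into the first bucket whose bound exceeds it.

final class BucketIndexSketch {
  // Illustrative only: locates the bucket for a value assuming exclusive upper
  // bounds (OTLP v0.7.0). explicitBounds.length bounds imply
  // explicitBounds.length + 1 bucket counts; the last bucket is [lastBound, +Inf).
  static int bucketIndexFor(double value, double[] explicitBounds) {
    for (int i = 0; i < explicitBounds.length; i++) {
      if (value < explicitBounds[i]) { // upper bound is exclusive
        return i;
      }
    }
    return explicitBounds.length; // overflow bucket
  }

  public static void main(String[] args) {
    double[] bounds = {1.0};
    System.out.println(bucketIndexFor(0.5, bounds)); // 0
    System.out.println(bucketIndexFor(1.0, bounds)); // 1: 1.0 is excluded from the first bucket
  }
}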

static List<DoubleHistogramDataPoint> toSummaryDataPoints(
Collection<DoubleSummaryPointData> points) {
List<DoubleHistogramDataPoint> result = new ArrayList<>(points.size());
MetricAdapterTest.java (OTLP exporter)
@@ -33,6 +33,8 @@
import io.opentelemetry.sdk.common.InstrumentationLibraryInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleGaugeData;
import io.opentelemetry.sdk.metrics.data.DoubleHistogramData;
import io.opentelemetry.sdk.metrics.data.DoubleHistogramPointData;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.DoubleSumData;
import io.opentelemetry.sdk.metrics.data.DoubleSummaryData;
@@ -43,6 +45,7 @@
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.ValueAtPercentile;
import io.opentelemetry.sdk.resources.Resource;
import java.util.Arrays;
import java.util.Collections;
import org.junit.jupiter.api.Test;

@@ -206,6 +209,48 @@ void toSummaryDataPoints() {
.build());
}

@Test
void toHistogramDataPoints() {
assertThat(
MetricAdapter.toDoubleHistogramDataPoints(
ImmutableList.of(
DoubleHistogramPointData.create(
123,
456,
Labels.of("k", "v"),
14.2,
5,
Collections.singletonList(1.0),
Arrays.asList(1L, 5L)),
DoubleHistogramPointData.create(
123,
456,
Labels.empty(),
15.3,
7,
Collections.emptyList(),
Collections.singletonList(7L)))))
.containsExactly(
DoubleHistogramDataPoint.newBuilder()
.setStartTimeUnixNano(123)
.setTimeUnixNano(456)
.addAllLabels(
singletonList(StringKeyValue.newBuilder().setKey("k").setValue("v").build()))
.setCount(5)
.setSum(14.2)
.addBucketCounts(1)
.addBucketCounts(5)
.addExplicitBounds(1.0)
.build(),
DoubleHistogramDataPoint.newBuilder()
.setStartTimeUnixNano(123)
.setTimeUnixNano(456)
.setCount(7)
.setSum(15.3)
.addBucketCounts(7)
.build());
}

@Test
void toProtoMetric_monotonic() {
assertThat(
@@ -462,6 +507,53 @@ void toProtoMetric_summary() {
.build());
}

@Test
void toProtoMetric_histogram() {
assertThat(
MetricAdapter.toProtoMetric(
MetricData.createDoubleHistogram(
Resource.getEmpty(),
InstrumentationLibraryInfo.getEmpty(),
"name",
"description",
"1",
DoubleHistogramData.create(
AggregationTemporality.DELTA,
singletonList(
DoubleHistogramPointData.create(
123,
456,
Labels.of("k", "v"),
4.0,
33L,
emptyList(),
Collections.singletonList(33L)))))))
.isEqualTo(
Metric.newBuilder()
.setName("name")
.setDescription("description")
.setUnit("1")
.setDoubleHistogram(
DoubleHistogram.newBuilder()
.setAggregationTemporality(AGGREGATION_TEMPORALITY_DELTA)
.addDataPoints(
DoubleHistogramDataPoint.newBuilder()
.setStartTimeUnixNano(123)
.setTimeUnixNano(456)
.addAllLabels(
singletonList(
StringKeyValue.newBuilder()
.setKey("k")
.setValue("v")
.build()))
.setCount(33)
.setSum(4.0)
.addBucketCounts(33)
.build())
.build())
.build());
}

@Test
void toProtoResourceMetrics() {
Resource resource = Resource.create(Attributes.of(stringKey("ka"), "va"));