From b9055801f75c009465550d2f4d79e9084457d604 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Sat, 6 Nov 2021 11:34:08 -0400 Subject: [PATCH 01/12] Improve tests for OpenCensus metric conversions. - Move metric conversion into its own utility (outside exporter) - Add a series of tests for all open-census types. - Update conversion code to work for ALL features of OTel metrics and OpenCensus metrics. Notes: - OC exemplars have wonky trace-attachments. Using crazy "no binary" workaround. - OC has gauge histogram. Encoding as DELTA with same start/stop timestamp as closest OTEL equivalent - OC has summaries, so we'll need to keep that data type around for compatibility, unless we can tackle the raw measurements and push them into Histograms. --- .../OpenTelemetryMetricsExporter.java | 190 +-------- .../opencensusshim/metrics/MetricAdapter.java | 358 +++++++++++++++++ .../metrics/MetricAdapterTest.java | 368 ++++++++++++++++++ 3 files changed, 730 insertions(+), 186 deletions(-) create mode 100644 opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MetricAdapter.java create mode 100644 opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/MetricAdapterTest.java diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricsExporter.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricsExporter.java index 75684ebd902..5e9aba571fa 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricsExporter.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricsExporter.java @@ -13,46 +13,26 @@ import io.opencensus.metrics.Metrics; import io.opencensus.metrics.export.Metric; import io.opencensus.metrics.export.MetricDescriptor; -import io.opencensus.metrics.export.Point; -import io.opencensus.metrics.export.Summary; -import io.opencensus.metrics.export.Summary.Snapshot; -import io.opencensus.metrics.export.TimeSeries; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.sdk.common.InstrumentationLibraryInfo; -import io.opentelemetry.sdk.metrics.data.AggregationTemporality; -import io.opentelemetry.sdk.metrics.data.DoubleGaugeData; -import io.opentelemetry.sdk.metrics.data.DoublePointData; -import io.opentelemetry.sdk.metrics.data.DoubleSumData; -import io.opentelemetry.sdk.metrics.data.DoubleSummaryData; -import io.opentelemetry.sdk.metrics.data.DoubleSummaryPointData; -import io.opentelemetry.sdk.metrics.data.LongGaugeData; -import io.opentelemetry.sdk.metrics.data.LongPointData; -import io.opentelemetry.sdk.metrics.data.LongSumData; +import io.opentelemetry.opencensusshim.metrics.MetricAdapter; import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.metrics.data.PointData; -import io.opentelemetry.sdk.metrics.data.ValueAtPercentile; import io.opentelemetry.sdk.resources.Resource; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; -import java.util.concurrent.TimeUnit; import java.util.logging.Logger; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; public final class OpenTelemetryMetricsExporter extends MetricExporter { private static final Logger LOGGER = Logger.getLogger(OpenTelemetryMetricsExporter.class.getName()); private static final String EXPORTER_NAME = "OpenTelemetryMetricExporter"; - private static final 
InstrumentationLibraryInfo INSTRUMENTATION_LIBRARY_INFO = - InstrumentationLibraryInfo.create("io.opentelemetry.opencensusshim", null); private final IntervalMetricReader intervalMetricReader; private final io.opentelemetry.sdk.metrics.export.MetricExporter otelExporter; + // TODO - find this from OTel SDK. + private final Resource resource = Resource.getDefault(); public static OpenTelemetryMetricsExporter createAndRegister( io.opentelemetry.sdk.metrics.export.MetricExporter otelExporter) { @@ -84,28 +64,7 @@ public void export(Collection metrics) { List metricData = new ArrayList<>(); Set unsupportedTypes = new HashSet<>(); for (Metric metric : metrics) { - for (TimeSeries timeSeries : metric.getTimeSeriesList()) { - AttributesBuilder attributesBuilder = Attributes.builder(); - for (int i = 0; i < metric.getMetricDescriptor().getLabelKeys().size(); i++) { - if (timeSeries.getLabelValues().get(i).getValue() != null) { - attributesBuilder.put( - metric.getMetricDescriptor().getLabelKeys().get(i).getKey(), - timeSeries.getLabelValues().get(i).getValue()); - } - } - Attributes attributes = attributesBuilder.build(); - List points = new ArrayList<>(); - MetricDescriptor.Type type = null; - for (Point point : timeSeries.getPoints()) { - type = mapAndAddPoint(unsupportedTypes, metric, attributes, points, point); - } - if (type != null) { - MetricData md = toMetricData(type, metric.getMetricDescriptor(), points); - if (md != null) { - metricData.add(md); - } - } - } + metricData.add(MetricAdapter.convert(resource, metric)); } if (!unsupportedTypes.isEmpty()) { LOGGER.warning( @@ -117,148 +76,7 @@ public void export(Collection metrics) { } } - private static MetricDescriptor.Type mapAndAddPoint( - Set unsupportedTypes, - Metric metric, - Attributes attributes, - List points, - Point point) { - long timestampNanos = - TimeUnit.SECONDS.toNanos(point.getTimestamp().getSeconds()) - + point.getTimestamp().getNanos(); - MetricDescriptor.Type type = metric.getMetricDescriptor().getType(); - switch (type) { - case GAUGE_INT64: - case CUMULATIVE_INT64: - points.add(mapLongPoint(attributes, point, timestampNanos)); - break; - case GAUGE_DOUBLE: - case CUMULATIVE_DOUBLE: - points.add(mapDoublePoint(attributes, point, timestampNanos)); - break; - case SUMMARY: - points.add(mapSummaryPoint(attributes, point, timestampNanos)); - break; - default: - unsupportedTypes.add(type); - break; - } - return type; - } - public void stop() { intervalMetricReader.stop(); } - - @Nonnull - private static DoubleSummaryPointData mapSummaryPoint( - Attributes attributes, Point point, long timestampNanos) { - return DoubleSummaryPointData.create( - timestampNanos, - timestampNanos, - attributes, - point - .getValue() - .match(arg -> null, arg -> null, arg -> null, Summary::getCount, arg -> null), - point.getValue().match(arg -> null, arg -> null, arg -> null, Summary::getSum, arg -> null), - point - .getValue() - .match( - arg -> null, - arg -> null, - arg -> null, - OpenTelemetryMetricsExporter::mapPercentiles, - arg -> null)); - } - - private static List mapPercentiles(Summary arg) { - List percentiles = new ArrayList<>(); - for (Snapshot.ValueAtPercentile percentile : arg.getSnapshot().getValueAtPercentiles()) { - percentiles.add(ValueAtPercentile.create(percentile.getPercentile(), percentile.getValue())); - } - return percentiles; - } - - @Nonnull - private static DoublePointData mapDoublePoint( - Attributes attributes, Point point, long timestampNanos) { - return DoublePointData.create( - timestampNanos, - timestampNanos, - 
attributes, - point - .getValue() - .match(arg -> arg, Long::doubleValue, arg -> null, arg -> null, arg -> null)); - } - - @Nonnull - private static LongPointData mapLongPoint( - Attributes attributes, Point point, long timestampNanos) { - return LongPointData.create( - timestampNanos, - timestampNanos, - attributes, - point - .getValue() - .match(Double::longValue, arg -> arg, arg -> null, arg -> null, arg -> null)); - } - - @Nullable - @SuppressWarnings("unchecked") - private static MetricData toMetricData( - MetricDescriptor.Type type, - MetricDescriptor metricDescriptor, - List points) { - if (metricDescriptor.getType() == null) { - return null; - } - switch (type) { - case GAUGE_INT64: - return MetricData.createLongGauge( - Resource.getDefault(), - INSTRUMENTATION_LIBRARY_INFO, - metricDescriptor.getName(), - metricDescriptor.getDescription(), - metricDescriptor.getUnit(), - LongGaugeData.create((List) points)); - - case GAUGE_DOUBLE: - return MetricData.createDoubleGauge( - Resource.getDefault(), - INSTRUMENTATION_LIBRARY_INFO, - metricDescriptor.getName(), - metricDescriptor.getDescription(), - metricDescriptor.getUnit(), - DoubleGaugeData.create((List) points)); - - case CUMULATIVE_INT64: - return MetricData.createLongSum( - Resource.getDefault(), - INSTRUMENTATION_LIBRARY_INFO, - metricDescriptor.getName(), - metricDescriptor.getDescription(), - metricDescriptor.getUnit(), - LongSumData.create( - true, AggregationTemporality.CUMULATIVE, (List) points)); - case CUMULATIVE_DOUBLE: - return MetricData.createDoubleSum( - Resource.getDefault(), - INSTRUMENTATION_LIBRARY_INFO, - metricDescriptor.getName(), - metricDescriptor.getDescription(), - metricDescriptor.getUnit(), - DoubleSumData.create( - true, AggregationTemporality.CUMULATIVE, (List) points)); - case SUMMARY: - return MetricData.createDoubleSummary( - Resource.getDefault(), - INSTRUMENTATION_LIBRARY_INFO, - metricDescriptor.getName(), - metricDescriptor.getDescription(), - metricDescriptor.getUnit(), - DoubleSummaryData.create((List) points)); - default: - return null; - } - } } diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MetricAdapter.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MetricAdapter.java new file mode 100644 index 00000000000..c8f8bf11291 --- /dev/null +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MetricAdapter.java @@ -0,0 +1,358 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.opencensusshim.metrics; + +import io.opencensus.common.Timestamp; +import io.opencensus.metrics.LabelKey; +import io.opencensus.metrics.LabelValue; +import io.opencensus.metrics.data.Exemplar; +import io.opencensus.metrics.export.Distribution; +import io.opencensus.metrics.export.Metric; +import io.opencensus.metrics.export.MetricDescriptor; +import io.opencensus.metrics.export.Point; +import io.opencensus.metrics.export.Summary; +import io.opencensus.metrics.export.TimeSeries; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.sdk.common.InstrumentationLibraryInfo; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; +import io.opentelemetry.sdk.metrics.data.DoubleGaugeData; +import io.opentelemetry.sdk.metrics.data.DoubleHistogramData; +import io.opentelemetry.sdk.metrics.data.DoubleHistogramPointData; +import 
io.opentelemetry.sdk.metrics.data.DoublePointData; +import io.opentelemetry.sdk.metrics.data.DoubleSumData; +import io.opentelemetry.sdk.metrics.data.DoubleSummaryData; +import io.opentelemetry.sdk.metrics.data.DoubleSummaryPointData; +import io.opentelemetry.sdk.metrics.data.ExemplarData; +import io.opentelemetry.sdk.metrics.data.LongGaugeData; +import io.opentelemetry.sdk.metrics.data.LongPointData; +import io.opentelemetry.sdk.metrics.data.LongSumData; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.data.ValueAtPercentile; +import io.opentelemetry.sdk.resources.Resource; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.regex.MatchResult; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** Adapts an OpenCensus metric into the OpenTelemetry metric data API. */ +public final class MetricAdapter { + private MetricAdapter() {} + // All OpenCensus metrics come from this shim. + // VisibleForTesting. + static final InstrumentationLibraryInfo INSTRUMENTATION_LIBRARY_INFO = + InstrumentationLibraryInfo.create("io.opentelemetry.opencensusshim", null); + + /** + * Converts an open-census metric into the OTLP format. + * + * @param otelResource The resource associated with the opentelemetry SDK. + * @param censusMetric The OpenCensus metric to convert. + */ + public static MetricData convert(Resource otelResource, Metric censusMetric) { + // Note: we can't just adapt interfaces, we need to do full copy because OTel data API uses + // auto-value vs. pure interfaces. + switch (censusMetric.getMetricDescriptor().getType()) { + case GAUGE_INT64: + return MetricData.createLongGauge( + otelResource, + INSTRUMENTATION_LIBRARY_INFO, + censusMetric.getMetricDescriptor().getName(), + censusMetric.getMetricDescriptor().getDescription(), + censusMetric.getMetricDescriptor().getUnit(), + convertLongGauge(censusMetric)); + case GAUGE_DOUBLE: + return MetricData.createDoubleGauge( + otelResource, + INSTRUMENTATION_LIBRARY_INFO, + censusMetric.getMetricDescriptor().getName(), + censusMetric.getMetricDescriptor().getDescription(), + censusMetric.getMetricDescriptor().getUnit(), + convertDoubleGauge(censusMetric)); + case CUMULATIVE_INT64: + return MetricData.createLongSum( + otelResource, + INSTRUMENTATION_LIBRARY_INFO, + censusMetric.getMetricDescriptor().getName(), + censusMetric.getMetricDescriptor().getDescription(), + censusMetric.getMetricDescriptor().getUnit(), + convertLongSum(censusMetric)); + case CUMULATIVE_DOUBLE: + return MetricData.createDoubleSum( + otelResource, + INSTRUMENTATION_LIBRARY_INFO, + censusMetric.getMetricDescriptor().getName(), + censusMetric.getMetricDescriptor().getDescription(), + censusMetric.getMetricDescriptor().getUnit(), + convertDoubleSum(censusMetric)); + case CUMULATIVE_DISTRIBUTION: + return MetricData.createDoubleHistogram( + otelResource, + INSTRUMENTATION_LIBRARY_INFO, + censusMetric.getMetricDescriptor().getName(), + censusMetric.getMetricDescriptor().getDescription(), + censusMetric.getMetricDescriptor().getUnit(), + convertHistogram(censusMetric)); + case SUMMARY: + return MetricData.createDoubleSummary( + otelResource, + INSTRUMENTATION_LIBRARY_INFO, + censusMetric.getMetricDescriptor().getName(), + censusMetric.getMetricDescriptor().getDescription(), + censusMetric.getMetricDescriptor().getUnit(), + convertSummary(censusMetric)); + case GAUGE_DISTRIBUTION: + return 
MetricData.createDoubleHistogram( + otelResource, + INSTRUMENTATION_LIBRARY_INFO, + censusMetric.getMetricDescriptor().getName(), + censusMetric.getMetricDescriptor().getDescription(), + censusMetric.getMetricDescriptor().getUnit(), + convertGaugeHistogram(censusMetric)); + } + // Should be unreachable.... + throw new IllegalArgumentException( + "Unknown OpenCensus metric type: " + censusMetric.getMetricDescriptor().getType()); + } + + static LongGaugeData convertLongGauge(Metric censusMetric) { + return LongGaugeData.create(convertLongPoints(censusMetric)); + } + + static DoubleGaugeData convertDoubleGauge(Metric censusMetric) { + return DoubleGaugeData.create(convertDoublePoints(censusMetric)); + } + + static LongSumData convertLongSum(Metric censusMetric) { + return LongSumData.create( + true, AggregationTemporality.CUMULATIVE, convertLongPoints(censusMetric)); + } + + static DoubleSumData convertDoubleSum(Metric censusMetric) { + return DoubleSumData.create( + true, AggregationTemporality.CUMULATIVE, convertDoublePoints(censusMetric)); + } + + static DoubleHistogramData convertHistogram(Metric censusMetric) { + return DoubleHistogramData.create( + AggregationTemporality.CUMULATIVE, convertHistogramPoints(censusMetric)); + } + + static DoubleHistogramData convertGaugeHistogram(Metric censusMetric) { + return DoubleHistogramData.create( + AggregationTemporality.DELTA, convertHistogramPoints(censusMetric)); + } + + static DoubleSummaryData convertSummary(Metric censusMetric) { + return DoubleSummaryData.create(convertSummaryPoints(censusMetric)); + } + + static Collection convertLongPoints(Metric censusMetric) { + // TODO - preallocate array to correct size. + List result = new ArrayList<>(); + for (TimeSeries ts : censusMetric.getTimeSeriesList()) { + long startTimestamp = mapTimestamp(ts.getStartTimestamp()); + Attributes attributes = + mapAttributes(censusMetric.getMetricDescriptor().getLabelKeys(), ts.getLabelValues()); + for (Point point : ts.getPoints()) { + result.add( + LongPointData.create( + startTimestamp, mapTimestamp(point.getTimestamp()), attributes, longValue(point))); + } + } + return result; + } + + static Collection convertDoublePoints(Metric censusMetric) { + // TODO - preallocate array to correct size. + List result = new ArrayList<>(); + for (TimeSeries ts : censusMetric.getTimeSeriesList()) { + long startTimestamp = mapTimestamp(ts.getStartTimestamp()); + Attributes attributes = + mapAttributes(censusMetric.getMetricDescriptor().getLabelKeys(), ts.getLabelValues()); + for (Point point : ts.getPoints()) { + result.add( + DoublePointData.create( + startTimestamp, + mapTimestamp(point.getTimestamp()), + attributes, + doubleValue(point))); + } + } + return result; + } + + static Collection convertHistogramPoints(Metric censusMetric) { + boolean isGauge = + censusMetric.getMetricDescriptor().getType() == MetricDescriptor.Type.GAUGE_DISTRIBUTION; + // TODO - preallocate array to correct size. 
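+    // Note: OpenCensus explicit bucket options carry N boundaries, which correspond to N+1
+    // bucket counts; mapBoundaries/mapCounts below preserve that shape when building the
+    // OpenTelemetry histogram point.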
+ List result = new ArrayList<>(); + for (TimeSeries ts : censusMetric.getTimeSeriesList()) { + long startTimestamp = mapTimestamp(ts.getStartTimestamp()); + Attributes attributes = + mapAttributes(censusMetric.getMetricDescriptor().getLabelKeys(), ts.getLabelValues()); + for (Point point : ts.getPoints()) { + long endTimestamp = mapTimestamp(point.getTimestamp()); + DoubleHistogramPointData otelPoint = + point + .getValue() + .match( + doubleValue -> null, + longValue -> null, + distribution -> + DoubleHistogramPointData.create( + // Report Gauge histograms as DELTA with "instantaneous" time window. + isGauge ? endTimestamp : startTimestamp, + endTimestamp, + attributes, + distribution.getSum(), + mapBoundaries(distribution.getBucketOptions()), + mapCounts(distribution.getBuckets()), + mapExemplars(distribution.getBuckets())), + sumamry -> null, + defaultValue -> null); + if (otelPoint != null) { + result.add(otelPoint); + } + } + } + return result; + } + + static Collection convertSummaryPoints(Metric censusMetric) { + List result = new ArrayList<>(); + for (TimeSeries ts : censusMetric.getTimeSeriesList()) { + long startTimestamp = mapTimestamp(ts.getStartTimestamp()); + Attributes attributes = + mapAttributes(censusMetric.getMetricDescriptor().getLabelKeys(), ts.getLabelValues()); + for (Point point : ts.getPoints()) { + DoubleSummaryPointData otelPoint = + point + .getValue() + .match( + dv -> null, + lv -> null, + distribution -> null, + summary -> + DoubleSummaryPointData.create( + startTimestamp, + mapTimestamp(point.getTimestamp()), + attributes, + summary.getCount(), + summary.getSum(), + mapValueAtPercentiles(summary.getSnapshot().getValueAtPercentiles())), + defaultValue -> null); + if (otelPoint != null) { + result.add(otelPoint); + } + } + } + return result; + } + + static Attributes mapAttributes(List labels, List values) { + AttributesBuilder result = Attributes.builder(); + for (int i = 0; i < labels.size(); i++) { + result.put(labels.get(i).getKey(), values.get(i).getValue()); + } + return result.build(); + } + + static long longValue(Point point) { + return point + .getValue() + .match( + Double::longValue, + lv -> lv, + // Ignore these cases (logic error) + distribution -> 0, + summary -> 0, + defaultValue -> 0) + .longValue(); + } + + static double doubleValue(Point point) { + return point + .getValue() + .match( + d -> d, + Long::doubleValue, + // Ignore these cases (logic error) + distribution -> 0, + summary -> 0, + defaultValue -> 0) + .doubleValue(); + } + + static List mapBoundaries(Distribution.BucketOptions censusBuckets) { + return censusBuckets.match( + explicit -> explicit.getBucketBoundaries(), defaultOption -> Collections.emptyList()); + } + + static List mapCounts(List buckets) { + List result = new ArrayList<>(buckets.size()); + for (Distribution.Bucket bucket : buckets) { + result.add(bucket.getCount()); + } + return result; + } + + static List mapExemplars(List buckets) { + List result = new ArrayList<>(); + for (Distribution.Bucket bucket : buckets) { + Exemplar exemplar = bucket.getExemplar(); + if (exemplar != null) { + result.add(mapExemplar(exemplar)); + } + } + return result; + } + + private static ExemplarData mapExemplar(Exemplar exemplar) { + // Look for trace/span id. 
+ String spanId = null; + String traceId = null; + if (exemplar.getAttachments().containsKey("SpanContext")) { + // We need to use `io.opencensus.contrib.exemplar.util.AttachmentValueSpanContext` + // The `toString` will be the following: + // SpanContext(traceId={traceId}, spanId={spanId}, traceOptions={traceOptions}) + // We *attempt* parse it rather than pull in yet another dependency. + String spanContextToString = exemplar.getAttachments().get("SpanContext").getValue(); + Matcher m = + Pattern.compile("SpanContext\\(traceId=([0-9A-Ga-g]+), spanId=([0-9A-Ga-g]+),.*\\)") + .matcher(spanContextToString); + if (m.matches()) { + MatchResult mr = m.toMatchResult(); + traceId = mr.group(1); + spanId = mr.group(2); + } + } + return DoubleExemplarData.create( + Attributes.empty(), + mapTimestamp(exemplar.getTimestamp()), + spanId, + traceId, + exemplar.getValue()); + } + + static long mapTimestamp(Timestamp time) { + return TimeUnit.SECONDS.toNanos(time.getSeconds()) + time.getNanos(); + } + + private static List mapValueAtPercentiles( + List valueAtPercentiles) { + List result = new ArrayList<>(valueAtPercentiles.size()); + for (Summary.Snapshot.ValueAtPercentile censusValue : valueAtPercentiles) { + result.add(ValueAtPercentile.create(censusValue.getPercentile(), censusValue.getValue())); + } + return result; + } +} diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/MetricAdapterTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/MetricAdapterTest.java new file mode 100644 index 00000000000..2918a22f815 --- /dev/null +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/MetricAdapterTest.java @@ -0,0 +1,368 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.opencensusshim.metrics; + +import static io.opentelemetry.sdk.testing.assertj.metrics.MetricAssertions.assertThat; +import static org.assertj.core.api.Assertions.assertThat; + +import io.opencensus.common.Timestamp; +import io.opencensus.metrics.LabelKey; +import io.opencensus.metrics.LabelValue; +import io.opencensus.metrics.data.AttachmentValue; +import io.opencensus.metrics.data.Exemplar; +import io.opencensus.metrics.export.Distribution; +import io.opencensus.metrics.export.Metric; +import io.opencensus.metrics.export.MetricDescriptor; +import io.opencensus.metrics.export.Point; +import io.opencensus.metrics.export.Summary; +import io.opencensus.metrics.export.TimeSeries; +import io.opencensus.metrics.export.Value; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; +import io.opentelemetry.sdk.metrics.data.ValueAtPercentile; +import io.opentelemetry.sdk.resources.Resource; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.junit.jupiter.api.Test; + +class MetricAdapterTest { + + private static final Resource RESOURCE = + Resource.create(Attributes.of(AttributeKey.stringKey("test"), "resource")); + + @Test + void convertsTimeStamps() { + assertThat(MetricAdapter.mapTimestamp(Timestamp.create(1, 2))).isEqualTo(1000000002L); + } + + @Test + void convertsLongValue() { + assertThat(MetricAdapter.longValue(Point.create(Value.longValue(5), Timestamp.fromMillis(2)))) + .isEqualTo(5); + } + + @Test + void convertsDoubleValue() { + assertThat( + MetricAdapter.doubleValue(Point.create(Value.doubleValue(5), 
Timestamp.fromMillis(2)))) + .isEqualTo(5); + } + + @Test + void convertsLongGauge() { + Metric censusMetric = + Metric.createWithOneTimeSeries( + MetricDescriptor.create( + "name", + "description", + "unit", + MetricDescriptor.Type.GAUGE_INT64, + Arrays.asList(LabelKey.create("key1", "desc1"))), + TimeSeries.create( + Arrays.asList(LabelValue.create("value1")), + Arrays.asList(Point.create(Value.longValue(4), Timestamp.fromMillis(2000))), + Timestamp.fromMillis(1000))); + + assertThat(MetricAdapter.convert(RESOURCE, censusMetric)) + .hasResource(RESOURCE) + .hasInstrumentationLibrary(MetricAdapter.INSTRUMENTATION_LIBRARY_INFO) + .hasName("name") + .hasDescription("description") + .hasUnit("unit") + .hasLongGauge() + .points() + .satisfiesExactly( + point -> + assertThat(point) + .hasStartEpochNanos(1000000000) + .hasEpochNanos(2000000000) + .hasAttributes(Attributes.of(AttributeKey.stringKey("key1"), "value1")) + .hasValue(4)); + } + + @Test + void convertsDoubleGauge() { + Metric censusMetric = + Metric.createWithOneTimeSeries( + MetricDescriptor.create( + "name", + "description", + "unit", + MetricDescriptor.Type.GAUGE_DOUBLE, + Arrays.asList(LabelKey.create("key1", "desc1"))), + TimeSeries.create( + Arrays.asList(LabelValue.create("value1")), + Arrays.asList(Point.create(Value.doubleValue(4), Timestamp.fromMillis(2000))), + Timestamp.fromMillis(1000))); + + assertThat(MetricAdapter.convert(RESOURCE, censusMetric)) + .hasResource(RESOURCE) + .hasInstrumentationLibrary(MetricAdapter.INSTRUMENTATION_LIBRARY_INFO) + .hasName("name") + .hasDescription("description") + .hasUnit("unit") + .hasDoubleGauge() + .points() + .satisfiesExactly( + point -> + assertThat(point) + .hasStartEpochNanos(1000000000) + .hasEpochNanos(2000000000) + .hasAttributes(Attributes.of(AttributeKey.stringKey("key1"), "value1")) + .hasValue(4)); + } + + @Test + void convertsLongSum() { + Metric censusMetric = + Metric.createWithOneTimeSeries( + MetricDescriptor.create( + "name", + "description", + "unit", + MetricDescriptor.Type.CUMULATIVE_INT64, + Arrays.asList(LabelKey.create("key1", "desc1"))), + TimeSeries.create( + Arrays.asList(LabelValue.create("value1")), + Arrays.asList(Point.create(Value.longValue(4), Timestamp.fromMillis(2000))), + Timestamp.fromMillis(1000))); + + assertThat(MetricAdapter.convert(RESOURCE, censusMetric)) + .hasResource(RESOURCE) + .hasInstrumentationLibrary(MetricAdapter.INSTRUMENTATION_LIBRARY_INFO) + .hasName("name") + .hasDescription("description") + .hasUnit("unit") + .hasLongSum() + .isCumulative() + .isMonotonic() + .points() + .satisfiesExactly( + point -> + assertThat(point) + .hasStartEpochNanos(1000000000) + .hasEpochNanos(2000000000) + .hasAttributes(Attributes.of(AttributeKey.stringKey("key1"), "value1")) + .hasValue(4)); + } + + @Test + void convertsDoubleSum() { + Metric censusMetric = + Metric.createWithOneTimeSeries( + MetricDescriptor.create( + "name", + "description", + "unit", + MetricDescriptor.Type.CUMULATIVE_DOUBLE, + Arrays.asList(LabelKey.create("key1", "desc1"))), + TimeSeries.create( + Arrays.asList(LabelValue.create("value1")), + Arrays.asList(Point.create(Value.doubleValue(4), Timestamp.fromMillis(2000))), + Timestamp.fromMillis(1000))); + + assertThat(MetricAdapter.convert(RESOURCE, censusMetric)) + .hasResource(RESOURCE) + .hasInstrumentationLibrary(MetricAdapter.INSTRUMENTATION_LIBRARY_INFO) + .hasName("name") + .hasDescription("description") + .hasUnit("unit") + .hasDoubleSum() + .isCumulative() + .isMonotonic() + .points() + .satisfiesExactly( + point -> + 
assertThat(point) + .hasStartEpochNanos(1000000000) + .hasEpochNanos(2000000000) + .hasAttributes(Attributes.of(AttributeKey.stringKey("key1"), "value1")) + .hasValue(4)); + } + + @Test + void convertHistogram() { + Map exemplarAttachements = new HashMap<>(); + // TODO - Import opencensus util for a code-dependent test on common exemplar-trace usage. + exemplarAttachements.put( + "SpanContext", + AttachmentValue.AttachmentValueString.create( + "SpanContext(traceId=1234, spanId=5678, others=stuff)")); + Metric censusMetric = + Metric.createWithOneTimeSeries( + MetricDescriptor.create( + "name", + "description", + "unit", + MetricDescriptor.Type.CUMULATIVE_DISTRIBUTION, + Arrays.asList(LabelKey.create("key1", "desc1"))), + TimeSeries.create( + Arrays.asList(LabelValue.create("value1")), + Arrays.asList( + Point.create( + Value.distributionValue( + Distribution.create( + 10, + 5, + 2, // Sum of squared deviations, ignored + Distribution.BucketOptions.explicitOptions(Arrays.asList(2.0, 5.0)), + Arrays.asList( + Distribution.Bucket.create( + 2, + Exemplar.create( + 1.0, Timestamp.fromMillis(2), Collections.emptyMap())), + Distribution.Bucket.create( + 6, + Exemplar.create( + 4.0, Timestamp.fromMillis(1), exemplarAttachements)), + Distribution.Bucket.create(2)))), + Timestamp.fromMillis(2000))), + Timestamp.fromMillis(1000))); + + assertThat(MetricAdapter.convert(RESOURCE, censusMetric)) + .hasResource(RESOURCE) + .hasInstrumentationLibrary(MetricAdapter.INSTRUMENTATION_LIBRARY_INFO) + .hasName("name") + .hasDescription("description") + .hasUnit("unit") + .hasDoubleHistogram() + .isCumulative() + .points() + .satisfiesExactly( + point -> + assertThat(point) + .hasStartEpochNanos(1000000000) + .hasEpochNanos(2000000000) + .hasSum(5) + .hasCount(10) + .hasBucketBoundaries(2.0, 5.0) + .hasBucketCounts(2, 6, 2) + .hasExemplars( + DoubleExemplarData.create( + Attributes.empty(), + 2000000, + /* spanId= */ null, + /* traceId= */ null, + 1.0), + DoubleExemplarData.create( + Attributes.empty(), 1000000, "5678", "1234", 4.0))); + } + + @Test + void convertSummary() { + Metric censusMetric = + Metric.createWithOneTimeSeries( + MetricDescriptor.create( + "name", + "description", + "unit", + MetricDescriptor.Type.SUMMARY, + Arrays.asList(LabelKey.create("key1", "desc1"))), + TimeSeries.create( + Arrays.asList(LabelValue.create("value1")), + Arrays.asList( + Point.create( + Value.summaryValue( + Summary.create( + 10L, + 5d, + Summary.Snapshot.create( + 10L, + 5d, + Arrays.asList( + Summary.Snapshot.ValueAtPercentile.create(1.0, 200))))), + Timestamp.fromMillis(2000))), + Timestamp.fromMillis(1000))); + + assertThat(MetricAdapter.convert(RESOURCE, censusMetric)) + .hasResource(RESOURCE) + .hasInstrumentationLibrary(MetricAdapter.INSTRUMENTATION_LIBRARY_INFO) + .hasName("name") + .hasDescription("description") + .hasUnit("unit") + .hasDoubleSummary() + .points() + .satisfiesExactly( + point -> + assertThat(point) + .hasStartEpochNanos(1000000000) + .hasEpochNanos(2000000000) + .hasAttributes(Attributes.of(AttributeKey.stringKey("key1"), "value1")) + .hasCount(10) + .hasSum(5) + .hasPercentileValues(ValueAtPercentile.create(1.0, 200))); + } + + @Test + void convertGaugeHistogram() { + Map exemplarAttachements = new HashMap<>(); + // TODO - Import opencensus util for a code-dependent test on common exemplar-trace usage. 
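+    // The attachment below hand-writes the AttachmentValueSpanContext#toString() format that
+    // MetricAdapter parses to recover exemplar trace/span ids.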
+ exemplarAttachements.put( + "SpanContext", + AttachmentValue.AttachmentValueString.create( + "SpanContext(traceId=1234, spanId=5678, others=stuff)")); + Metric censusMetric = + Metric.createWithOneTimeSeries( + MetricDescriptor.create( + "name", + "description", + "unit", + MetricDescriptor.Type.GAUGE_DISTRIBUTION, + Arrays.asList(LabelKey.create("key1", "desc1"))), + TimeSeries.create( + Arrays.asList(LabelValue.create("value1")), + Arrays.asList( + Point.create( + Value.distributionValue( + Distribution.create( + 10, + 5, + 2, // Sum of squared deviations, ignored + Distribution.BucketOptions.explicitOptions(Arrays.asList(2.0, 5.0)), + Arrays.asList( + Distribution.Bucket.create( + 2, + Exemplar.create( + 1.0, Timestamp.fromMillis(2), Collections.emptyMap())), + Distribution.Bucket.create( + 6, + Exemplar.create( + 4.0, Timestamp.fromMillis(1), exemplarAttachements)), + Distribution.Bucket.create(2)))), + Timestamp.fromMillis(2000))), + Timestamp.fromMillis(1000))); + assertThat(MetricAdapter.convert(RESOURCE, censusMetric)) + .hasResource(RESOURCE) + .hasInstrumentationLibrary(MetricAdapter.INSTRUMENTATION_LIBRARY_INFO) + .hasName("name") + .hasDescription("description") + .hasUnit("unit") + .hasDoubleHistogram() + .isDelta() + .points() + .satisfiesExactly( + point -> + assertThat(point) + .hasStartEpochNanos(2000000000) + .hasEpochNanos(2000000000) + .hasSum(5) + .hasCount(10) + .hasBucketBoundaries(2.0, 5.0) + .hasBucketCounts(2, 6, 2) + .hasExemplars( + DoubleExemplarData.create( + Attributes.empty(), + 2000000, + /* spanId= */ null, + /* traceId= */ null, + 1.0), + DoubleExemplarData.create( + Attributes.empty(), 1000000, "5678", "1234", 4.0))); + } +} From 858b7347c0e6a9b563cb367e2f6a6298782f53b4 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Sat, 6 Nov 2021 14:05:44 -0400 Subject: [PATCH 02/12] Create new mechanism to attach OpenCensus metrics to OpenTelemetry that's more amenable to autoconfiguring. --- opencensus-shim/README.md | 27 ++++---- .../OpenTelemetryMetricsExporter.java | 1 + .../metrics/MultiMetricProducer.java | 30 ++++++++ ...penCensusAttachingMetricReaderFactory.java | 29 ++++++++ .../metrics/OpenCensusMetricProducer.java | 54 +++++++++++++++ .../metrics/OpenCensusMetrics.java | 23 +++++++ .../metrics/OpenCensusMetricProducerTest.java | 68 +++++++++++++++++++ .../metrics/OpenCensusMetricsTest.java | 52 ++++++++++++++ 8 files changed, 271 insertions(+), 13 deletions(-) create mode 100644 opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MultiMetricProducer.java create mode 100644 opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusAttachingMetricReaderFactory.java create mode 100644 opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducer.java create mode 100644 opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetrics.java create mode 100644 opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java create mode 100644 opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java diff --git a/opencensus-shim/README.md b/opencensus-shim/README.md index 511de247633..02639b9a55e 100644 --- a/opencensus-shim/README.md +++ b/opencensus-shim/README.md @@ -22,11 +22,14 @@ Applications only need to set up OpenTelemetry exporters, not OpenCensus. To allow the shim to work for metrics, add the shim as a dependency. 
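A minimal Gradle sketch of that dependency (coordinates shown for illustration; align the version with the rest of your OpenTelemetry SDK):

```
dependencies {
  // Illustrative coordinates; use the version matching your OpenTelemetry SDK.
  implementation("io.opentelemetry:opentelemetry-opencensus-shim:{otelVersion}")
}
```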
-Applications also need to pass the configured metric exporter to the shim: +Applications also need to attach OpenCensus metrics to their metric readers on registration. ``` -OpenTelemetryMetricsExporter exporter = - OpenTelemetryMetricsExporter.createAndRegister(metricExporter); +SdkMeterProvider.builder() + .registerMetricReader( + OpenCensusMetrics.attachTo(readerFactory) + ) + .buildAndRegisterGlobal( ``` For example, if a logging exporter were configured, the following would be @@ -34,16 +37,14 @@ added: ``` LoggingMetricExporter metricExporter = new LoggingMetricExporter(); -OpenTelemetryMetricsExporter exporter = - OpenTelemetryMetricsExporter.createAndRegister(metricExporter); -``` - -The export interval can also be set: - -``` -OpenTelemetryMetricsExporter exporter = - OpenTelemetryMetricsExporter.createAndRegister(metricExporter, - Duration.create(0, 500)); +SdkMeterProvider.builder() + .registerMetricReader( + OpenCensusMetrics.attachTo( + PeriodicMetricReader.builder(metricExporter) + .build() + ) + ) + .buildAndRegisterGlobal( ``` ## Known Problems diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricsExporter.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricsExporter.java index 5e9aba571fa..555aab90845 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricsExporter.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricsExporter.java @@ -23,6 +23,7 @@ import java.util.Set; import java.util.logging.Logger; +@Deprecated public final class OpenTelemetryMetricsExporter extends MetricExporter { private static final Logger LOGGER = Logger.getLogger(OpenTelemetryMetricsExporter.class.getName()); diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MultiMetricProducer.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MultiMetricProducer.java new file mode 100644 index 00000000000..b525383e3f1 --- /dev/null +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MultiMetricProducer.java @@ -0,0 +1,30 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.opencensusshim.metrics; + +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.export.MetricProducer; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +/** Class that wraps multiple metric producers into one. 
*/ +final class MultiMetricProducer implements MetricProducer { + private final Collection producers; + + public MultiMetricProducer(Collection producers) { + this.producers = producers; + } + + @Override + public Collection collectAllMetrics() { + List result = new ArrayList<>(); + for (MetricProducer p : producers) { + result.addAll(p.collectAllMetrics()); + } + return result; + } +} diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusAttachingMetricReaderFactory.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusAttachingMetricReaderFactory.java new file mode 100644 index 00000000000..69d965ff94e --- /dev/null +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusAttachingMetricReaderFactory.java @@ -0,0 +1,29 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.opencensusshim.metrics; + +import io.opentelemetry.sdk.metrics.export.MetricProducer; +import io.opentelemetry.sdk.metrics.export.MetricReader; +import io.opentelemetry.sdk.metrics.export.MetricReaderFactory; +import io.opentelemetry.sdk.resources.Resource; +import java.util.Arrays; + +/** MetricReaderFactory that appends OpenCensus metrics to anything read. */ +final class OpenCensusAttachingMetricReaderFactory implements MetricReaderFactory { + private final MetricReaderFactory adapted; + + OpenCensusAttachingMetricReaderFactory(MetricReaderFactory adapted) { + this.adapted = adapted; + } + + @Override + public MetricReader apply(MetricProducer producer) { + // TODO: Find a way to pull the resource off of the SDK. + return adapted.apply( + new MultiMetricProducer( + Arrays.asList(producer, OpenCensusMetricProducer.create(Resource.getDefault())))); + } +} diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducer.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducer.java new file mode 100644 index 00000000000..b0954d551c4 --- /dev/null +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducer.java @@ -0,0 +1,54 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.opencensusshim.metrics; + +import io.opencensus.metrics.Metrics; +import io.opencensus.metrics.export.MetricProducerManager; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.export.MetricProducer; +import io.opentelemetry.sdk.resources.Resource; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +/** + * A producer instance of OpenCensus metrics. + * + *

The idea here is we can register this reader with the OpenTelemetry SDK, allowing us to also + * pull metrics from OpenCensus backends on demand. + */ +final class OpenCensusMetricProducer implements MetricProducer { + private final Resource resource; + private final MetricProducerManager openCensusMetricStorage; + + OpenCensusMetricProducer(Resource resource, MetricProducerManager openCensusMetricStorage) { + this.resource = resource; + this.openCensusMetricStorage = openCensusMetricStorage; + } + + /** + * Constructs a new {@link OpenCensusMetricProducer} that reports against the given {@link + * Resource}. + */ + static MetricProducer create(Resource resource) { + return new OpenCensusMetricProducer( + resource, Metrics.getExportComponent().getMetricProducerManager()); + } + + @Override + public Collection collectAllMetrics() { + List result = new ArrayList<>(); + openCensusMetricStorage + .getAllMetricProducer() + .forEach( + producer -> { + producer + .getMetrics() + .forEach(metric -> result.add(MetricAdapter.convert(resource, metric))); + }); + return result; + } +} diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetrics.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetrics.java new file mode 100644 index 00000000000..5a7f5894d41 --- /dev/null +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetrics.java @@ -0,0 +1,23 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.opencensusshim.metrics; + +import io.opentelemetry.sdk.metrics.export.MetricReaderFactory; + +/** Convenience methods for adapting OpenCensus metrics into OpenTelemetry. */ +public final class OpenCensusMetrics { + private OpenCensusMetrics() {} + + /** + * Attaches OpenCensus metrics to metrics read by the given input. + * + * @param input A {@link MetricReaderFactory} that will receive OpenCensus metrics. + * @return The adapted MetricReaderFactory. 
+ */ + public static MetricReaderFactory attachTo(MetricReaderFactory input) { + return new OpenCensusAttachingMetricReaderFactory(input); + } +} diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java new file mode 100644 index 00000000000..fb1342c7343 --- /dev/null +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java @@ -0,0 +1,68 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.opencensusshim.metrics; + +import static io.opentelemetry.sdk.testing.assertj.metrics.MetricAssertions.assertThat; +import static org.assertj.core.api.Assertions.assertThat; + +import io.opencensus.stats.Aggregation; +import io.opencensus.stats.BucketBoundaries; +import io.opencensus.stats.Measure; +import io.opencensus.stats.Stats; +import io.opencensus.stats.StatsRecorder; +import io.opencensus.stats.View; +import io.opentelemetry.sdk.metrics.export.MetricProducer; +import io.opentelemetry.sdk.resources.Resource; +import java.util.Arrays; +import java.util.Collections; +import org.junit.jupiter.api.Test; + +class OpenCensusMetricProducerTest { + private final MetricProducer openCensusMetrics = + OpenCensusMetricProducer.create(Resource.empty()); + + private static final Measure.MeasureLong LATENCY_MS = + Measure.MeasureLong.create("task_latency", "The task latency in milliseconds", "ms"); + // Latency in buckets: + // [>=0ms, >=100ms, >=200ms, >=400ms, >=1s, >=2s, >=4s] + private static final BucketBoundaries LATENCY_BOUNDARIES = + BucketBoundaries.create(Arrays.asList(0d, 100d, 200d, 400d, 1000d, 2000d, 4000d)); + private static final StatsRecorder STATS_RECORDER = Stats.getStatsRecorder(); + + @Test + void extractHistogram() throws InterruptedException { + View view = + View.create( + View.Name.create("task_latency_distribution"), + "The distribution of the task latencies.", + LATENCY_MS, + Aggregation.Distribution.create(LATENCY_BOUNDARIES), + Collections.emptyList()); + Stats.getViewManager().registerView(view); + STATS_RECORDER.newMeasureMap().put(LATENCY_MS, 50).record(); + // Wait for measurement to hit the aggregator. 
+ Thread.sleep(1000); + + assertThat(openCensusMetrics.collectAllMetrics()) + .satisfiesExactly( + metric -> + assertThat(metric) + .hasName("task_latency_distribution") + .hasDescription("The distribution of the task latencies.") + .hasUnit("ms") + .hasDoubleHistogram() + .isCumulative() + .points() + .satisfiesExactly( + point -> + assertThat(point) + .hasSum(50) + .hasCount(1) + .hasBucketCounts(1, 0, 0, 0, 0, 0, 0) + .hasBucketBoundaries(100d, 200d, 400d, 1000d, 2000d, 4000d) + .hasExemplars())); + } +} diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java new file mode 100644 index 00000000000..42e9d444762 --- /dev/null +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java @@ -0,0 +1,52 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.opencensusshim.metrics; + +import static io.opentelemetry.sdk.testing.assertj.metrics.MetricAssertions.assertThat; + +import io.opencensus.stats.Aggregation; +import io.opencensus.stats.Measure; +import io.opencensus.stats.Stats; +import io.opencensus.stats.StatsRecorder; +import io.opencensus.stats.View; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.testing.InMemoryMetricReader; +import java.util.Collections; +import org.junit.jupiter.api.Test; + +class OpenCensusMetricsTest { + private static final StatsRecorder STATS_RECORDER = Stats.getStatsRecorder(); + + @Test + void capturesOpenCensusAndOtelMetrics() throws InterruptedException { + InMemoryMetricReader reader = InMemoryMetricReader.create(); + SdkMeterProvider otelMetrics = + SdkMeterProvider.builder() + .registerMetricReader(OpenCensusMetrics.attachTo(reader)) + .buildAndRegisterGlobal(); + // Record an otel metric. + otelMetrics.meterBuilder("otel").build().counterBuilder("otel.sum").build().add(1); + // Record an OpenCensus metric. + Measure.MeasureLong MEASURE = Measure.MeasureLong.create("oc.measure", "oc.desc", "oc.unit"); + Stats.getViewManager() + .registerView( + View.create( + View.Name.create("oc.sum"), + "oc.desc", + MEASURE, + Aggregation.Count.create(), + Collections.emptyList())); + STATS_RECORDER.newMeasureMap().put(MEASURE, 1).record(); + + // Wait for OpenCensus propagation. + Thread.sleep(1000); + + assertThat(reader.collectAllMetrics()) + .satisfiesExactly( + metric -> assertThat(metric).hasName("otel.sum").hasLongSum(), + metric -> assertThat(metric).hasName("oc.sum").hasLongSum()); + } +} From 0fa695716d699bfb455410e92e3d96294f393a90 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Sat, 6 Nov 2021 14:27:19 -0400 Subject: [PATCH 03/12] Fix interoperability test failures. 
--- opencensus-shim/README.md | 6 +- .../opencensusshim/metrics/MetricAdapter.java | 7 ++- .../opencensusshim/InteroperabilityTest.java | 62 +++++-------------- 3 files changed, 23 insertions(+), 52 deletions(-) diff --git a/opencensus-shim/README.md b/opencensus-shim/README.md index 02639b9a55e..9e54ba4adac 100644 --- a/opencensus-shim/README.md +++ b/opencensus-shim/README.md @@ -29,7 +29,7 @@ SdkMeterProvider.builder() .registerMetricReader( OpenCensusMetrics.attachTo(readerFactory) ) - .buildAndRegisterGlobal( + .buildAndRegisterGlobal(); ``` For example, if a logging exporter were configured, the following would be @@ -41,10 +41,10 @@ SdkMeterProvider.builder() .registerMetricReader( OpenCensusMetrics.attachTo( PeriodicMetricReader.builder(metricExporter) - .build() + .newMetricReaderFactory() ) ) - .buildAndRegisterGlobal( + .buildAndRegisterGlobal(); ``` ## Known Problems diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MetricAdapter.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MetricAdapter.java index c8f8bf11291..e84ebaf6f7a 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MetricAdapter.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MetricAdapter.java @@ -42,6 +42,7 @@ import java.util.regex.MatchResult; import java.util.regex.Matcher; import java.util.regex.Pattern; +import javax.annotation.Nullable; /** Adapts an OpenCensus metric into the OpenTelemetry metric data API. */ public final class MetricAdapter { @@ -343,7 +344,11 @@ private static ExemplarData mapExemplar(Exemplar exemplar) { exemplar.getValue()); } - static long mapTimestamp(Timestamp time) { + static long mapTimestamp(@Nullable Timestamp time) { + // Treat all empty timestamps as "0" (proto3) + if (time == null) { + return 0; + } return TimeUnit.SECONDS.toNanos(time.getSeconds()) + time.getNanos(); } diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java index 05aab36720e..5517f6150df 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java @@ -18,9 +18,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import io.opencensus.common.Duration; import io.opencensus.stats.Aggregation; -import io.opencensus.stats.BucketBoundaries; import io.opencensus.stats.Measure; import io.opencensus.stats.Stats; import io.opencensus.stats.StatsRecorder; @@ -50,9 +48,12 @@ import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.context.Context; import io.opentelemetry.context.Scope; +import io.opentelemetry.opencensusshim.metrics.OpenCensusMetrics; import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.SpanProcessor; import io.opentelemetry.sdk.trace.data.SpanData; @@ -357,7 +358,7 @@ public void testNoRecordDoesNotExport() { @Test @SuppressWarnings({"deprecation", "unchecked"}) // Summary is deprecated in census - void testSupportedMetricsExportedCorrectly() { + void 
testSupportedMetricsExportedCorrectly() throws InterruptedException { Tagger tagger = Tags.getTagger(); Measure.MeasureLong latency = Measure.MeasureLong.create("task_latency", "The task latency in milliseconds", "ms"); @@ -399,9 +400,15 @@ void testSupportedMetricsExportedCorrectly() { viewManager.registerView(longGaugeView); viewManager.registerView(doubleSumView); viewManager.registerView(doubleGaugeView); + // Create Otel SDK that also reads from OpenCensus. FakeMetricExporter metricExporter = new FakeMetricExporter(); - OpenTelemetryMetricsExporter.createAndRegister(metricExporter, Duration.create(0, 5000)); - + SdkMeterProvider.builder() + .registerMetricReader( + OpenCensusMetrics.attachTo( + PeriodicMetricReader.builder(metricExporter) + .setInterval(java.time.Duration.ofNanos(5000)) + .newMetricReaderFactory())) + .buildAndRegisterGlobal(); TagContext tagContext = tagger .emptyBuilder() @@ -411,6 +418,8 @@ void testSupportedMetricsExportedCorrectly() { statsRecorder.newMeasureMap().put(latency, 50).record(); statsRecorder.newMeasureMap().put(latency2, 60).record(); } + // Slow down for OpenCensus to catch up. + Thread.sleep(500); List> exported = metricExporter.waitForNumberOfExports(3); List metricData = exported.get(2).stream() @@ -464,49 +473,6 @@ void testSupportedMetricsExportedCorrectly() { .containsEntry(tagKey.getName(), tagValue.asString())); } - @Test - void testUnsupportedMetricsDoesNotGetExported() throws InterruptedException { - Tagger tagger = Tags.getTagger(); - Measure.MeasureLong latency = - Measure.MeasureLong.create( - "task_latency_distribution", "The task latency in milliseconds", "ms"); - StatsRecorder statsRecorder = Stats.getStatsRecorder(); - TagKey tagKey = TagKey.create("tagKey"); - TagValue tagValue = TagValue.create("tagValue"); - View view = - View.create( - View.Name.create("task_latency_distribution"), - "The distribution of the task latencies.", - latency, - Aggregation.Distribution.create( - BucketBoundaries.create(ImmutableList.of(100.0, 150.0, 200.0))), - ImmutableList.of(tagKey)); - ViewManager viewManager = Stats.getViewManager(); - viewManager.registerView(view); - FakeMetricExporter metricExporter = new FakeMetricExporter(); - OpenTelemetryMetricsExporter.createAndRegister(metricExporter, Duration.create(0, 500)); - - TagContext tagContext = - tagger - .emptyBuilder() - .put(tagKey, tagValue, TagMetadata.create(TagMetadata.TagTtl.UNLIMITED_PROPAGATION)) - .build(); - try (io.opencensus.common.Scope ss = tagger.withTagContext(tagContext)) { - statsRecorder.newMeasureMap().put(latency, 50).record(); - } - // Sleep so that there is time for export() to be called. - Thread.sleep(2); - // This is 0 in case this test gets run first, or by itself. - // If other views have already been registered in other tests, they will produce metric data, so - // we are testing for the absence of this particular view's metric data. - List> allExports = metricExporter.waitForNumberOfExports(0); - if (!allExports.isEmpty()) { - for (MetricData metricData : allExports.get(allExports.size() - 1)) { - assertThat(metricData.getName()).isNotEqualTo("task_latency_distribution"); - } - } - } - private static void createOpenCensusScopedSpanWithChildSpan( boolean withInnerOpenTelemetrySpan, boolean withInnerOpenCensusSpan) { io.opencensus.trace.Tracer tracer = Tracing.getTracer(); From ec6233972a3a25aebb58991b52ac20e38e655217 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Sat, 6 Nov 2021 14:33:07 -0400 Subject: [PATCH 04/12] Fix style issue. 
--- .../opencensusshim/metrics/OpenCensusMetricsTest.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java index 42e9d444762..fd57e38d81a 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java @@ -30,16 +30,16 @@ void capturesOpenCensusAndOtelMetrics() throws InterruptedException { // Record an otel metric. otelMetrics.meterBuilder("otel").build().counterBuilder("otel.sum").build().add(1); // Record an OpenCensus metric. - Measure.MeasureLong MEASURE = Measure.MeasureLong.create("oc.measure", "oc.desc", "oc.unit"); + Measure.MeasureLong measure = Measure.MeasureLong.create("oc.measure", "oc.desc", "oc.unit"); Stats.getViewManager() .registerView( View.create( View.Name.create("oc.sum"), "oc.desc", - MEASURE, + measure, Aggregation.Count.create(), Collections.emptyList())); - STATS_RECORDER.newMeasureMap().put(MEASURE, 1).record(); + STATS_RECORDER.newMeasureMap().put(measure, 1).record(); // Wait for OpenCensus propagation. Thread.sleep(1000); assertThat(reader.collectAllMetrics()) .satisfiesExactly( metric -> assertThat(metric).hasName("otel.sum").hasLongSum(), metric -> assertThat(metric).hasName("oc.sum").hasLongSum()); } }
From dbf681b251f3b0f35a45dbe9b91997afc152a2e4 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Sat, 6 Nov 2021 14:58:52 -0400 Subject: [PATCH 05/12] Force tests to run in separate JVM instances to account for OpenCensus global pollution. --- opencensus-shim/build.gradle.kts | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/opencensus-shim/build.gradle.kts b/opencensus-shim/build.gradle.kts index 4522825899b..fc164c485e0 100644 --- a/opencensus-shim/build.gradle.kts +++ b/opencensus-shim/build.gradle.kts @@ -22,3 +22,10 @@ dependencies { testImplementation("org.slf4j:slf4j-simple") testImplementation("io.opencensus:opencensus-impl") } + +tasks.named("test") { + // We must force a fork per-test class because OpenCensus pollutes globals with no restorative + // methods available. + setForkEvery(1) + maxParallelForks = 3 +}
From 911b4dcf065142dd2cc9c92e851d4960af9cf113 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Sat, 6 Nov 2021 15:30:30 -0400 Subject: [PATCH 06/12] Bump timeout for disruptor queue in tests, for now. --- .../io/opentelemetry/opencensusshim/InteroperabilityTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java index 5517f6150df..bc831c4082c 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java @@ -419,7 +419,7 @@ void testSupportedMetricsExportedCorrectly() throws InterruptedException { statsRecorder.newMeasureMap().put(latency2, 60).record(); } // Slow down for OpenCensus to catch up. - Thread.sleep(500); + Thread.sleep(1000); List> exported = metricExporter.waitForNumberOfExports(3); List metricData = exported.get(2).stream()
From 4f93ecb74564a9e90cb59ca4aef47d0ef87a15c4 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Sun, 7 Nov 2021 10:16:51 -0500 Subject: [PATCH 07/12] Fix up old interoperability tests and move them to their own file.
--- .../opencensusshim/FakeMetricExporter.java | 82 ------- .../opencensusshim/InteroperabilityTest.java | 137 ----------- .../OpenTelemetryMetricExporterTest.java | 222 ++++++++++++++++++ 3 files changed, 222 insertions(+), 219 deletions(-) delete mode 100644 opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/FakeMetricExporter.java create mode 100644 opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/FakeMetricExporter.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/FakeMetricExporter.java deleted file mode 100644 index e6ea235699c..00000000000 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/FakeMetricExporter.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -// Includes work from: -/* - * Copyright 2018, OpenCensus Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.opentelemetry.opencensusshim; - -import com.google.errorprone.annotations.concurrent.GuardedBy; -import io.opentelemetry.sdk.common.CompletableResultCode; -import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.metrics.export.MetricExporter; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import javax.annotation.Nullable; - -class FakeMetricExporter implements MetricExporter { - - private final Object monitor = new Object(); - - @GuardedBy("monitor") - private List> exportedMetrics = new ArrayList<>(); - - /** - * Waits until export is called for numberOfExports times. Returns the list of exported lists of - * metrics - */ - @Nullable - List> waitForNumberOfExports(int numberOfExports) { - List> ret; - synchronized (monitor) { - while (exportedMetrics.size() < numberOfExports) { - try { - monitor.wait(); - } catch (InterruptedException e) { - // Preserve the interruption status as per guidance. 
- Thread.currentThread().interrupt(); - return null; - } - } - ret = exportedMetrics; - exportedMetrics = new ArrayList<>(); - } - return ret; - } - - @Override - public CompletableResultCode export(Collection metrics) { - synchronized (monitor) { - this.exportedMetrics.add(new ArrayList<>(metrics)); - monitor.notifyAll(); - } - return CompletableResultCode.ofSuccess(); - } - - @Override - public CompletableResultCode flush() { - return CompletableResultCode.ofSuccess(); - } - - @Override - public CompletableResultCode shutdown() { - return CompletableResultCode.ofSuccess(); - } -} diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java index bc831c4082c..174ff2814d6 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java @@ -5,7 +5,6 @@ package io.opentelemetry.opencensusshim; -import static io.opentelemetry.sdk.testing.assertj.metrics.MetricAssertions.assertThat; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.anyCollection; import static org.mockito.ArgumentMatchers.anyList; @@ -18,18 +17,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import io.opencensus.stats.Aggregation; -import io.opencensus.stats.Measure; -import io.opencensus.stats.Stats; -import io.opencensus.stats.StatsRecorder; -import io.opencensus.stats.View; -import io.opencensus.stats.ViewManager; -import io.opencensus.tags.TagContext; -import io.opencensus.tags.TagKey; -import io.opencensus.tags.TagMetadata; -import io.opencensus.tags.TagValue; -import io.opencensus.tags.Tagger; -import io.opencensus.tags.Tags; import io.opencensus.trace.Annotation; import io.opencensus.trace.AttributeValue; import io.opencensus.trace.Link; @@ -48,12 +35,8 @@ import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.context.Context; import io.opentelemetry.context.Scope; -import io.opentelemetry.opencensusshim.metrics.OpenCensusMetrics; import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.common.CompletableResultCode; -import io.opentelemetry.sdk.metrics.SdkMeterProvider; -import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.SpanProcessor; import io.opentelemetry.sdk.trace.data.SpanData; @@ -61,10 +44,7 @@ import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; import io.opentelemetry.sdk.trace.export.SpanExporter; import java.util.Collection; -import java.util.Comparator; -import java.util.List; import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -356,123 +336,6 @@ public void testNoRecordDoesNotExport() { verify(spanExporter, never()).export(anyCollection()); } - @Test - @SuppressWarnings({"deprecation", "unchecked"}) // Summary is deprecated in census - void testSupportedMetricsExportedCorrectly() throws InterruptedException { - Tagger tagger = Tags.getTagger(); - Measure.MeasureLong latency = - Measure.MeasureLong.create("task_latency", "The task latency in milliseconds", "ms"); - Measure.MeasureDouble latency2 = - 
Measure.MeasureDouble.create("task_latency_2", "The task latency in milliseconds 2", "ms"); - StatsRecorder statsRecorder = Stats.getStatsRecorder(); - TagKey tagKey = TagKey.create("tagKey"); - TagValue tagValue = TagValue.create("tagValue"); - View longSumView = - View.create( - View.Name.create("long_sum"), - "long sum", - latency, - Aggregation.Sum.create(), - ImmutableList.of(tagKey)); - View longGaugeView = - View.create( - View.Name.create("long_gauge"), - "long gauge", - latency, - Aggregation.LastValue.create(), - ImmutableList.of(tagKey)); - View doubleSumView = - View.create( - View.Name.create("double_sum"), - "double sum", - latency2, - Aggregation.Sum.create(), - ImmutableList.of()); - View doubleGaugeView = - View.create( - View.Name.create("double_gauge"), - "double gauge", - latency2, - Aggregation.LastValue.create(), - ImmutableList.of()); - ViewManager viewManager = Stats.getViewManager(); - viewManager.registerView(longSumView); - viewManager.registerView(longGaugeView); - viewManager.registerView(doubleSumView); - viewManager.registerView(doubleGaugeView); - // Create Otel SDK that also reads from OpenCensus. - FakeMetricExporter metricExporter = new FakeMetricExporter(); - SdkMeterProvider.builder() - .registerMetricReader( - OpenCensusMetrics.attachTo( - PeriodicMetricReader.builder(metricExporter) - .setInterval(java.time.Duration.ofNanos(5000)) - .newMetricReaderFactory())) - .buildAndRegisterGlobal(); - TagContext tagContext = - tagger - .emptyBuilder() - .put(tagKey, tagValue, TagMetadata.create(TagMetadata.TagTtl.UNLIMITED_PROPAGATION)) - .build(); - try (io.opencensus.common.Scope ss = tagger.withTagContext(tagContext)) { - statsRecorder.newMeasureMap().put(latency, 50).record(); - statsRecorder.newMeasureMap().put(latency2, 60).record(); - } - // Slow down for OpenCensus to catch up. 
- Thread.sleep(1000); - List> exported = metricExporter.waitForNumberOfExports(3); - List metricData = - exported.get(2).stream() - .sorted(Comparator.comparing(MetricData::getName)) - .collect(Collectors.toList()); - assertThat(metricData.size()).isEqualTo(4); - - MetricData metric = metricData.get(0); - assertThat(metric) - .hasName("double_gauge") - .hasDescription("double gauge") - .hasUnit("ms") - .hasDoubleGauge() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(60).attributes().hasSize(0)); - metric = metricData.get(1); - assertThat(metric) - .hasName("double_sum") - .hasDescription("double sum") - .hasUnit("ms") - .hasDoubleSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(60).attributes().hasSize(0)); - metric = metricData.get(2); - assertThat(metric) - .hasName("long_gauge") - .hasDescription("long gauge") - .hasUnit("ms") - .hasLongGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(50) - .attributes() - .hasSize(1) - .containsEntry(tagKey.getName(), tagValue.asString())); - metric = metricData.get(3); - assertThat(metric) - .hasName("long_sum") - .hasDescription("long sum") - .hasUnit("ms") - .hasLongSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(50) - .attributes() - .hasSize(1) - .containsEntry(tagKey.getName(), tagValue.asString())); - } - private static void createOpenCensusScopedSpanWithChildSpan( boolean withInnerOpenTelemetrySpan, boolean withInnerOpenCensusSpan) { io.opencensus.trace.Tracer tracer = Tracing.getTracer(); diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java new file mode 100644 index 00000000000..56a86d313fb --- /dev/null +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java @@ -0,0 +1,222 @@ +package io.opentelemetry.opencensusshim; + +import static io.opentelemetry.sdk.testing.assertj.metrics.MetricAssertions.assertThat; +import static org.assertj.core.api.Assertions.assertThat; + + +import com.google.common.collect.ImmutableList; +import io.opencensus.common.Duration; +import io.opencensus.stats.Aggregation; +import io.opencensus.stats.Measure; +import io.opencensus.stats.Stats; +import io.opencensus.stats.StatsRecorder; +import io.opencensus.stats.View; +import io.opencensus.stats.ViewManager; +import io.opencensus.tags.TagContext; +import io.opencensus.tags.TagKey; +import io.opencensus.tags.TagMetadata; +import io.opencensus.tags.TagValue; +import io.opencensus.tags.Tagger; +import io.opencensus.tags.Tags; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.export.MetricExporter; +import io.opentelemetry.sdk.testing.assertj.metrics.MetricAssertions; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.EnumSet; +import java.util.List; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.Collectors; +import org.junit.jupiter.api.Test; +import javax.annotation.Nullable; + +class OpenTelemetryMetricExporterTest { + + @Test + 
@SuppressWarnings({"deprecation", "unchecked"}) // Summary is deprecated in census + void testSupportedMetricsExportedCorrectly() throws Exception { + Tagger tagger = Tags.getTagger(); + Measure.MeasureLong latency = + Measure.MeasureLong.create("task_latency", "The task latency in milliseconds", "ms"); + Measure.MeasureDouble latency2 = + Measure.MeasureDouble.create("task_latency_2", "The task latency in milliseconds 2", "ms"); + StatsRecorder statsRecorder = Stats.getStatsRecorder(); + TagKey tagKey = TagKey.create("tagKey"); + TagValue tagValue = TagValue.create("tagValue"); + View longSumView = + View.create( + View.Name.create("long_sum"), + "long sum", + latency, + Aggregation.Sum.create(), + ImmutableList.of(tagKey)); + View longGaugeView = + View.create( + View.Name.create("long_gauge"), + "long gauge", + latency, + Aggregation.LastValue.create(), + ImmutableList.of(tagKey)); + View doubleSumView = + View.create( + View.Name.create("double_sum"), + "double sum", + latency2, + Aggregation.Sum.create(), + ImmutableList.of()); + View doubleGaugeView = + View.create( + View.Name.create("double_gauge"), + "double gauge", + latency2, + Aggregation.LastValue.create(), + ImmutableList.of()); + ViewManager viewManager = Stats.getViewManager(); + viewManager.registerView(longSumView); + viewManager.registerView(longGaugeView); + viewManager.registerView(doubleSumView); + viewManager.registerView(doubleGaugeView); + // Create OpenCensus -> OpenTelemetry Exporter bridge + WaitingMetricExporter exporter = new WaitingMetricExporter(); + OpenTelemetryMetricsExporter otelExporter = + OpenTelemetryMetricsExporter.createAndRegister(exporter, Duration.create(0, 5000)); + try { + TagContext tagContext = + tagger + .emptyBuilder() + .put(tagKey, tagValue, TagMetadata.create(TagMetadata.TagTtl.UNLIMITED_PROPAGATION)) + .build(); + try (io.opencensus.common.Scope ss = tagger.withTagContext(tagContext)) { + statsRecorder.newMeasureMap().put(latency, 50).record(); + statsRecorder.newMeasureMap().put(latency2, 60).record(); + } + // Slow down for OpenCensus to catch up. + List> result = exporter.waitForNumberOfExports(3); + // Just look at last export. 
+ List metricData = result.get(2).stream() + .sorted(Comparator.comparing(MetricData::getName)) + .collect(Collectors.toList()); + assertThat(metricData.size()).isEqualTo(4); + + MetricData metric = metricData.get(0); + MetricAssertions.assertThat(metric) + .hasName("double_gauge") + .hasDescription("double gauge") + .hasUnit("ms") + .hasDoubleGauge() + .points() + .satisfiesExactly( + point -> MetricAssertions.assertThat(point).hasValue(60).attributes().hasSize(0)); + metric = metricData.get(1); + MetricAssertions.assertThat(metric) + .hasName("double_sum") + .hasDescription("double sum") + .hasUnit("ms") + .hasDoubleSum() + .points() + .satisfiesExactly( + point -> MetricAssertions.assertThat(point).hasValue(60).attributes().hasSize(0)); + metric = metricData.get(2); + MetricAssertions.assertThat(metric) + .hasName("long_gauge") + .hasDescription("long gauge") + .hasUnit("ms") + .hasLongGauge() + .points() + .satisfiesExactly( + point -> + MetricAssertions.assertThat(point) + .hasValue(50) + .attributes() + .hasSize(1) + .containsEntry(tagKey.getName(), tagValue.asString())); + metric = metricData.get(3); + MetricAssertions.assertThat(metric) + .hasName("long_sum") + .hasDescription("long sum") + .hasUnit("ms") + .hasLongSum() + .points() + .satisfiesExactly( + point -> + MetricAssertions.assertThat(point) + .hasValue(50) + .attributes() + .hasSize(1) + .containsEntry(tagKey.getName(), tagValue.asString())); + } finally { + otelExporter.stop(); + } + } + + // Straight copy-paste from PeriodicMetricReaderTest. Should likely move into metrics-testing. + private static class WaitingMetricExporter implements MetricExporter { + + private final AtomicBoolean hasShutdown = new AtomicBoolean(false); + private final boolean shouldThrow; + private final BlockingQueue> queue = new LinkedBlockingQueue<>(); + private final List exportTimes = Collections.synchronizedList(new ArrayList<>()); + + private WaitingMetricExporter() { + this(false); + } + + private WaitingMetricExporter(boolean shouldThrow) { + this.shouldThrow = shouldThrow; + } + + @Override + public EnumSet getSupportedTemporality() { + return EnumSet.allOf(AggregationTemporality.class); + } + + @Override + public AggregationTemporality getPreferredTemporality() { + return null; + } + + @Override + public CompletableResultCode export(Collection metricList) { + exportTimes.add(System.currentTimeMillis()); + queue.offer(new ArrayList<>(metricList)); + + if (shouldThrow) { + throw new RuntimeException("Export Failed!"); + } + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + hasShutdown.set(true); + return CompletableResultCode.ofSuccess(); + } + + /** + * Waits until export is called for numberOfExports times. Returns the list of exported lists of + * metrics. 
+ */ + @Nullable + List> waitForNumberOfExports(int numberOfExports) throws Exception { + List> result = new ArrayList<>(); + while (result.size() < numberOfExports) { + List export = queue.poll(5, TimeUnit.SECONDS); + assertThat(export).isNotNull(); + result.add(export); + } + return result; + } + } +} From 3b47e4806cfa945934a98c87437bbee74a8058a0 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Sun, 7 Nov 2021 10:54:12 -0500 Subject: [PATCH 08/12] Fix import wonkyness --- .../OpenTelemetryMetricExporterTest.java | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java index 56a86d313fb..8088e8b209c 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java @@ -1,9 +1,13 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.opencensusshim; import static io.opentelemetry.sdk.testing.assertj.metrics.MetricAssertions.assertThat; import static org.assertj.core.api.Assertions.assertThat; - import com.google.common.collect.ImmutableList; import io.opencensus.common.Duration; import io.opencensus.stats.Aggregation; @@ -34,8 +38,8 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; import javax.annotation.Nullable; +import org.junit.jupiter.api.Test; class OpenTelemetryMetricExporterTest { @@ -86,7 +90,7 @@ void testSupportedMetricsExportedCorrectly() throws Exception { // Create OpenCensus -> OpenTelemetry Exporter bridge WaitingMetricExporter exporter = new WaitingMetricExporter(); OpenTelemetryMetricsExporter otelExporter = - OpenTelemetryMetricsExporter.createAndRegister(exporter, Duration.create(0, 5000)); + OpenTelemetryMetricsExporter.createAndRegister(exporter, Duration.create(0, 5000)); try { TagContext tagContext = tagger @@ -100,9 +104,10 @@ void testSupportedMetricsExportedCorrectly() throws Exception { // Slow down for OpenCensus to catch up. List> result = exporter.waitForNumberOfExports(3); // Just look at last export. 
- List metricData = result.get(2).stream() - .sorted(Comparator.comparing(MetricData::getName)) - .collect(Collectors.toList()); + List metricData = + result.get(2).stream() + .sorted(Comparator.comparing(MetricData::getName)) + .collect(Collectors.toList()); assertThat(metricData.size()).isEqualTo(4); MetricData metric = metricData.get(0); From c4c5b1c1a792aec17813ed912f875acbfa19c37e Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Mon, 8 Nov 2021 08:04:41 -0500 Subject: [PATCH 09/12] Move to Awaitility --- .../OpenTelemetryMetricExporterTest.java | 205 ++++++------------ .../metrics/OpenCensusMetricProducerTest.java | 42 ++-- .../metrics/OpenCensusMetricsTest.java | 16 +- 3 files changed, 105 insertions(+), 158 deletions(-) diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java index 8088e8b209c..bde21651cb0 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java @@ -22,23 +22,13 @@ import io.opencensus.tags.TagValue; import io.opencensus.tags.Tagger; import io.opencensus.tags.Tags; -import io.opentelemetry.sdk.common.CompletableResultCode; -import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.metrics.export.MetricExporter; -import io.opentelemetry.sdk.testing.assertj.metrics.MetricAssertions; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; +import io.opentelemetry.sdk.metrics.testing.InMemoryMetricExporter; import java.util.Comparator; -import java.util.EnumSet; -import java.util.List; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; +import java.util.HashSet; +import java.util.Set; import java.util.stream.Collectors; -import javax.annotation.Nullable; +import org.awaitility.Awaitility; import org.junit.jupiter.api.Test; class OpenTelemetryMetricExporterTest { @@ -88,9 +78,9 @@ void testSupportedMetricsExportedCorrectly() throws Exception { viewManager.registerView(doubleSumView); viewManager.registerView(doubleGaugeView); // Create OpenCensus -> OpenTelemetry Exporter bridge - WaitingMetricExporter exporter = new WaitingMetricExporter(); + InMemoryMetricExporter exporter = InMemoryMetricExporter.create(); OpenTelemetryMetricsExporter otelExporter = - OpenTelemetryMetricsExporter.createAndRegister(exporter, Duration.create(0, 5000)); + OpenTelemetryMetricsExporter.createAndRegister(exporter, Duration.create(1, 0)); try { TagContext tagContext = tagger @@ -101,127 +91,74 @@ void testSupportedMetricsExportedCorrectly() throws Exception { statsRecorder.newMeasureMap().put(latency, 50).record(); statsRecorder.newMeasureMap().put(latency2, 60).record(); } + Set allowedMetrics = new HashSet<>(); + allowedMetrics.add("double_gauge"); + allowedMetrics.add("double_sum"); + allowedMetrics.add("long_gauge"); + allowedMetrics.add("long_sum"); // Slow down for OpenCensus to catch up. - List> result = exporter.waitForNumberOfExports(3); - // Just look at last export. 
- List metricData = - result.get(2).stream() - .sorted(Comparator.comparing(MetricData::getName)) - .collect(Collectors.toList()); - assertThat(metricData.size()).isEqualTo(4); - - MetricData metric = metricData.get(0); - MetricAssertions.assertThat(metric) - .hasName("double_gauge") - .hasDescription("double gauge") - .hasUnit("ms") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> MetricAssertions.assertThat(point).hasValue(60).attributes().hasSize(0)); - metric = metricData.get(1); - MetricAssertions.assertThat(metric) - .hasName("double_sum") - .hasDescription("double sum") - .hasUnit("ms") - .hasDoubleSum() - .points() - .satisfiesExactly( - point -> MetricAssertions.assertThat(point).hasValue(60).attributes().hasSize(0)); - metric = metricData.get(2); - MetricAssertions.assertThat(metric) - .hasName("long_gauge") - .hasDescription("long gauge") - .hasUnit("ms") - .hasLongGauge() - .points() - .satisfiesExactly( - point -> - MetricAssertions.assertThat(point) - .hasValue(50) - .attributes() - .hasSize(1) - .containsEntry(tagKey.getName(), tagValue.asString())); - metric = metricData.get(3); - MetricAssertions.assertThat(metric) - .hasName("long_sum") - .hasDescription("long sum") - .hasUnit("ms") - .hasLongSum() - .points() - .satisfiesExactly( - point -> - MetricAssertions.assertThat(point) - .hasValue(50) - .attributes() - .hasSize(1) - .containsEntry(tagKey.getName(), tagValue.asString())); + Awaitility.await() + .atMost(java.time.Duration.ofSeconds(10)) + .untilAsserted( + () -> + assertThat( + exporter.getFinishedMetricItems().stream() + .filter(metric -> allowedMetrics.contains(metric.getName())) + .sorted(Comparator.comparing(MetricData::getName)) + .collect(Collectors.toList())) + .satisfiesExactly( + metric -> + assertThat(metric) + .hasName("double_gauge") + .hasDescription("double gauge") + .hasUnit("ms") + .hasDoubleGauge() + .points() + .satisfiesExactly( + point -> + assertThat(point).hasValue(60).attributes().hasSize(0)), + metric -> + assertThat(metric) + .hasName("double_sum") + .hasDescription("double sum") + .hasUnit("ms") + .hasDoubleSum() + .points() + .satisfiesExactly( + point -> + assertThat(point).hasValue(60).attributes().hasSize(0)), + metric -> + assertThat(metric) + .hasName("long_gauge") + .hasDescription("long gauge") + .hasUnit("ms") + .hasLongGauge() + .points() + .satisfiesExactly( + point -> + assertThat(point) + .hasValue(50) + .attributes() + .hasSize(1) + .containsEntry( + tagKey.getName(), tagValue.asString())), + metric -> + assertThat(metric) + .hasName("long_sum") + .hasDescription("long sum") + .hasUnit("ms") + .hasLongSum() + .points() + .satisfiesExactly( + point -> + assertThat(point) + .hasValue(50) + .attributes() + .hasSize(1) + .containsEntry( + tagKey.getName(), tagValue.asString())))); } finally { otelExporter.stop(); } } - - // Straight copy-paste from PeriodicMetricReaderTest. Should likely move into metrics-testing. 
- private static class WaitingMetricExporter implements MetricExporter { - - private final AtomicBoolean hasShutdown = new AtomicBoolean(false); - private final boolean shouldThrow; - private final BlockingQueue> queue = new LinkedBlockingQueue<>(); - private final List exportTimes = Collections.synchronizedList(new ArrayList<>()); - - private WaitingMetricExporter() { - this(false); - } - - private WaitingMetricExporter(boolean shouldThrow) { - this.shouldThrow = shouldThrow; - } - - @Override - public EnumSet getSupportedTemporality() { - return EnumSet.allOf(AggregationTemporality.class); - } - - @Override - public AggregationTemporality getPreferredTemporality() { - return null; - } - - @Override - public CompletableResultCode export(Collection metricList) { - exportTimes.add(System.currentTimeMillis()); - queue.offer(new ArrayList<>(metricList)); - - if (shouldThrow) { - throw new RuntimeException("Export Failed!"); - } - return CompletableResultCode.ofSuccess(); - } - - @Override - public CompletableResultCode flush() { - return CompletableResultCode.ofSuccess(); - } - - @Override - public CompletableResultCode shutdown() { - hasShutdown.set(true); - return CompletableResultCode.ofSuccess(); - } - - /** - * Waits until export is called for numberOfExports times. Returns the list of exported lists of - * metrics. - */ - @Nullable - List> waitForNumberOfExports(int numberOfExports) throws Exception { - List> result = new ArrayList<>(); - while (result.size() < numberOfExports) { - List export = queue.poll(5, TimeUnit.SECONDS); - assertThat(export).isNotNull(); - result.add(export); - } - return result; - } - } } diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java index fb1342c7343..a991c603a3e 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java @@ -16,8 +16,10 @@ import io.opencensus.stats.View; import io.opentelemetry.sdk.metrics.export.MetricProducer; import io.opentelemetry.sdk.resources.Resource; +import java.time.Duration; import java.util.Arrays; import java.util.Collections; +import org.awaitility.Awaitility; import org.junit.jupiter.api.Test; class OpenCensusMetricProducerTest { @@ -44,25 +46,29 @@ void extractHistogram() throws InterruptedException { Stats.getViewManager().registerView(view); STATS_RECORDER.newMeasureMap().put(LATENCY_MS, 50).record(); // Wait for measurement to hit the aggregator. 
- Thread.sleep(1000); - assertThat(openCensusMetrics.collectAllMetrics()) - .satisfiesExactly( - metric -> - assertThat(metric) - .hasName("task_latency_distribution") - .hasDescription("The distribution of the task latencies.") - .hasUnit("ms") - .hasDoubleHistogram() - .isCumulative() - .points() + Awaitility.await() + .atMost(Duration.ofSeconds(10)) + .untilAsserted( + () -> + assertThat(openCensusMetrics.collectAllMetrics()) .satisfiesExactly( - point -> - assertThat(point) - .hasSum(50) - .hasCount(1) - .hasBucketCounts(1, 0, 0, 0, 0, 0, 0) - .hasBucketBoundaries(100d, 200d, 400d, 1000d, 2000d, 4000d) - .hasExemplars())); + metric -> + assertThat(metric) + .hasName("task_latency_distribution") + .hasDescription("The distribution of the task latencies.") + .hasUnit("ms") + .hasDoubleHistogram() + .isCumulative() + .points() + .satisfiesExactly( + point -> + assertThat(point) + .hasSum(50) + .hasCount(1) + .hasBucketCounts(1, 0, 0, 0, 0, 0, 0) + .hasBucketBoundaries( + 100d, 200d, 400d, 1000d, 2000d, 4000d) + .hasExemplars()))); } } diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java index fd57e38d81a..7c8b0e7f9c0 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java @@ -14,7 +14,9 @@ import io.opencensus.stats.View; import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.metrics.testing.InMemoryMetricReader; +import java.time.Duration; import java.util.Collections; +import org.awaitility.Awaitility; import org.junit.jupiter.api.Test; class OpenCensusMetricsTest { @@ -42,11 +44,13 @@ void capturesOpenCensusAndOtelMetrics() throws InterruptedException { STATS_RECORDER.newMeasureMap().put(measure, 1).record(); // Wait for OpenCensus propagation. - Thread.sleep(1000); - - assertThat(reader.collectAllMetrics()) - .satisfiesExactly( - metric -> assertThat(metric).hasName("otel.sum").hasLongSum(), - metric -> assertThat(metric).hasName("oc.sum").hasLongSum()); + Awaitility.await() + .atMost(Duration.ofSeconds(5)) + .untilAsserted( + () -> + assertThat(reader.collectAllMetrics()) + .satisfiesExactly( + metric -> assertThat(metric).hasName("otel.sum").hasLongSum(), + metric -> assertThat(metric).hasName("oc.sum").hasLongSum())); } } From 27b94ed52dcdb438e8fd3799bccf4a2bcbb3629f Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Mon, 8 Nov 2021 08:12:27 -0500 Subject: [PATCH 10/12] Fixes from review. 
--- .../opencensusshim/metrics/MetricAdapter.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MetricAdapter.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MetricAdapter.java index e84ebaf6f7a..bfb3c82df19 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MetricAdapter.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MetricAdapter.java @@ -52,6 +52,9 @@ private MetricAdapter() {} static final InstrumentationLibraryInfo INSTRUMENTATION_LIBRARY_INFO = InstrumentationLibraryInfo.create("io.opentelemetry.opencensusshim", null); + // Parser for string value of `io.opencensus.contrib.exemplar.util.AttachmentValueSpanContext` + private static final Pattern OPENCENSUS_TRACE_ATTACHMENT_PATTERN = + Pattern.compile("SpanContext\\(traceId=([0-9A-Ga-g]+), spanId=([0-9A-Ga-g]+),.*\\)"); /** * Converts an open-census metric into the OTLP format. * @@ -218,7 +221,7 @@ static Collection convertHistogramPoints(Metric census mapBoundaries(distribution.getBucketOptions()), mapCounts(distribution.getBuckets()), mapExemplars(distribution.getBuckets())), - sumamry -> null, + summary -> null, defaultValue -> null); if (otelPoint != null) { result.add(otelPoint); @@ -327,9 +330,7 @@ private static ExemplarData mapExemplar(Exemplar exemplar) { // SpanContext(traceId={traceId}, spanId={spanId}, traceOptions={traceOptions}) // We *attempt* parse it rather than pull in yet another dependency. String spanContextToString = exemplar.getAttachments().get("SpanContext").getValue(); - Matcher m = - Pattern.compile("SpanContext\\(traceId=([0-9A-Ga-g]+), spanId=([0-9A-Ga-g]+),.*\\)") - .matcher(spanContextToString); + Matcher m = OPENCENSUS_TRACE_ATTACHMENT_PATTERN.matcher(spanContextToString); if (m.matches()) { MatchResult mr = m.toMatchResult(); traceId = mr.group(1); From e5f5cf0dc970a86df93e4d5c57a005f20734f5c9 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Mon, 8 Nov 2021 08:15:55 -0500 Subject: [PATCH 11/12] Move MetricAdapter to internal package. 
--- .../opencensusshim/OpenTelemetryMetricsExporter.java | 2 +- .../{ => internal}/metrics/MetricAdapter.java | 9 +++++++-- .../opencensusshim/metrics/OpenCensusMetricProducer.java | 1 + .../{ => internal}/metrics/MetricAdapterTest.java | 2 +- 4 files changed, 10 insertions(+), 4 deletions(-) rename opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/{ => internal}/metrics/MetricAdapter.java (98%) rename opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/{ => internal}/metrics/MetricAdapterTest.java (99%) diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricsExporter.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricsExporter.java index 555aab90845..32783892204 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricsExporter.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricsExporter.java @@ -13,7 +13,7 @@ import io.opencensus.metrics.Metrics; import io.opencensus.metrics.export.Metric; import io.opencensus.metrics.export.MetricDescriptor; -import io.opentelemetry.opencensusshim.metrics.MetricAdapter; +import io.opentelemetry.opencensusshim.internal.metrics.MetricAdapter; import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.resources.Resource; import java.util.ArrayList; diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MetricAdapter.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapter.java similarity index 98% rename from opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MetricAdapter.java rename to opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapter.java index bfb3c82df19..1f72072bac0 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MetricAdapter.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapter.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.opencensusshim.metrics; +package io.opentelemetry.opencensusshim.internal.metrics; import io.opencensus.common.Timestamp; import io.opencensus.metrics.LabelKey; @@ -44,7 +44,12 @@ import java.util.regex.Pattern; import javax.annotation.Nullable; -/** Adapts an OpenCensus metric into the OpenTelemetry metric data API. */ +/** + * Adapts an OpenCensus metric into the OpenTelemetry metric data API. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ public final class MetricAdapter { private MetricAdapter() {} // All OpenCensus metrics come from this shim. diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducer.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducer.java index b0954d551c4..8fced4f8cf1 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducer.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducer.java @@ -7,6 +7,7 @@ import io.opencensus.metrics.Metrics; import io.opencensus.metrics.export.MetricProducerManager; +import io.opentelemetry.opencensusshim.internal.metrics.MetricAdapter; import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.export.MetricProducer; import io.opentelemetry.sdk.resources.Resource; diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/MetricAdapterTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapterTest.java similarity index 99% rename from opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/MetricAdapterTest.java rename to opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapterTest.java index 2918a22f815..1368cb99fd9 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/MetricAdapterTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapterTest.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.opencensusshim.metrics; +package io.opentelemetry.opencensusshim.internal.metrics; import static io.opentelemetry.sdk.testing.assertj.metrics.MetricAssertions.assertThat; import static org.assertj.core.api.Assertions.assertThat; From 674db2ba176895e7b994214adbea1ea03cc70ca9 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Mon, 8 Nov 2021 08:42:54 -0500 Subject: [PATCH 12/12] Fix test against finding span-context in exemplars. 
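For illustration, a minimal sketch (assuming java.util.regex.Pattern and java.util.regex.Matcher imports) of how the updated pattern pulls the trace and span ids out of the attachment string format used in the tests below:

    // Sketch only: parse the toString form of the OpenCensus exemplar "SpanContext" attachment.
    Pattern pattern =
        Pattern.compile(
            "SpanContext\\{traceId=TraceId\\{traceId=([0-9A-Ga-g]+)\\}, spanId=SpanId\\{spanId=([0-9A-Ga-g]+)\\},.*\\}");
    Matcher matcher =
        pattern.matcher(
            "SpanContext{traceId=TraceId{traceId=1234}, spanId=SpanId{spanId=5678}, others=stuff}");
    if (matcher.matches()) {
      String traceId = matcher.group(1); // "1234"
      String spanId = matcher.group(2); // "5678"
    }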
--- dependencyManagement/build.gradle.kts | 3 +- opencensus-shim/build.gradle.kts | 1 + .../internal/metrics/MetricAdapter.java | 6 ++-- .../internal/metrics/MetricAdapterTest.java | 4 +-- .../metrics/OpenCensusMetricProducerTest.java | 30 +++++++++++++++++-- 5 files changed, 37 insertions(+), 7 deletions(-) diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts index 57691b5245d..390624dfff8 100644 --- a/dependencyManagement/build.gradle.kts +++ b/dependencyManagement/build.gradle.kts @@ -41,7 +41,8 @@ val DEPENDENCY_SETS = listOf( "opencensus-api", "opencensus-impl-core", "opencensus-impl", - "opencensus-exporter-metrics-util" + "opencensus-exporter-metrics-util", + "opencensus-contrib-exemplar-util" ) ), DependencySet( diff --git a/opencensus-shim/build.gradle.kts b/opencensus-shim/build.gradle.kts index fc164c485e0..5e0d50cfedf 100644 --- a/opencensus-shim/build.gradle.kts +++ b/opencensus-shim/build.gradle.kts @@ -21,6 +21,7 @@ dependencies { testImplementation("org.slf4j:slf4j-simple") testImplementation("io.opencensus:opencensus-impl") + testImplementation("io.opencensus:opencensus-contrib-exemplar-util") } tasks.named("test") { diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapter.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapter.java index 1f72072bac0..5d458f069c9 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapter.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapter.java @@ -58,8 +58,10 @@ private MetricAdapter() {} InstrumentationLibraryInfo.create("io.opentelemetry.opencensusshim", null); // Parser for string value of `io.opencensus.contrib.exemplar.util.AttachmentValueSpanContext` + // // SpanContext{traceId=TraceId{traceId=(id))}, spanId=SpanId{spanId=(id), ...} private static final Pattern OPENCENSUS_TRACE_ATTACHMENT_PATTERN = - Pattern.compile("SpanContext\\(traceId=([0-9A-Ga-g]+), spanId=([0-9A-Ga-g]+),.*\\)"); + Pattern.compile( + "SpanContext\\{traceId=TraceId\\{traceId=([0-9A-Ga-g]+)\\}, spanId=SpanId\\{spanId=([0-9A-Ga-g]+)\\},.*\\}"); /** * Converts an open-census metric into the OTLP format. * @@ -332,7 +334,7 @@ private static ExemplarData mapExemplar(Exemplar exemplar) { if (exemplar.getAttachments().containsKey("SpanContext")) { // We need to use `io.opencensus.contrib.exemplar.util.AttachmentValueSpanContext` // The `toString` will be the following: - // SpanContext(traceId={traceId}, spanId={spanId}, traceOptions={traceOptions}) + // SpanContext{traceId=TraceId{traceId=(id))}, spanId=SpanId{spanId=(id), ...} // We *attempt* parse it rather than pull in yet another dependency. 
String spanContextToString = exemplar.getAttachments().get("SpanContext").getValue(); Matcher m = OPENCENSUS_TRACE_ATTACHMENT_PATTERN.matcher(spanContextToString); diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapterTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapterTest.java index 1368cb99fd9..5659428da2b 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapterTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapterTest.java @@ -193,7 +193,7 @@ void convertHistogram() { exemplarAttachements.put( "SpanContext", AttachmentValue.AttachmentValueString.create( - "SpanContext(traceId=1234, spanId=5678, others=stuff)")); + "SpanContext{traceId=TraceId{traceId=1234}, spanId=SpanId{spanId=5678}, others=stuff}")); Metric censusMetric = Metric.createWithOneTimeSeries( MetricDescriptor.create( @@ -306,7 +306,7 @@ void convertGaugeHistogram() { exemplarAttachements.put( "SpanContext", AttachmentValue.AttachmentValueString.create( - "SpanContext(traceId=1234, spanId=5678, others=stuff)")); + "SpanContext{traceId=TraceId{traceId=1234}, spanId=SpanId{spanId=5678}, others=stuff}")); Metric censusMetric = Metric.createWithOneTimeSeries( MetricDescriptor.create( diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java index a991c603a3e..51fa36ec4a4 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java @@ -8,17 +8,26 @@ import static io.opentelemetry.sdk.testing.assertj.metrics.MetricAssertions.assertThat; import static org.assertj.core.api.Assertions.assertThat; +import io.opencensus.contrib.exemplar.util.ExemplarUtils; import io.opencensus.stats.Aggregation; import io.opencensus.stats.BucketBoundaries; import io.opencensus.stats.Measure; +import io.opencensus.stats.MeasureMap; import io.opencensus.stats.Stats; import io.opencensus.stats.StatsRecorder; import io.opencensus.stats.View; +import io.opencensus.trace.SpanContext; +import io.opencensus.trace.SpanId; +import io.opencensus.trace.TraceId; +import io.opencensus.trace.TraceOptions; +import io.opencensus.trace.Tracestate; +import io.opentelemetry.api.common.Attributes; import io.opentelemetry.sdk.metrics.export.MetricProducer; import io.opentelemetry.sdk.resources.Resource; import java.time.Duration; import java.util.Arrays; import java.util.Collections; +import java.util.Random; import org.awaitility.Awaitility; import org.junit.jupiter.api.Test; @@ -34,6 +43,13 @@ class OpenCensusMetricProducerTest { BucketBoundaries.create(Arrays.asList(0d, 100d, 200d, 400d, 1000d, 2000d, 4000d)); private static final StatsRecorder STATS_RECORDER = Stats.getStatsRecorder(); + // For Exemplar + private static final Random RANDOM = new Random(1234); + private static final TraceId TRACE_ID = TraceId.generateRandomId(RANDOM); + private static final SpanId SPAN_ID = SpanId.generateRandomId(RANDOM); + private static final SpanContext SPAN_CONTEXT = + SpanContext.create(TRACE_ID, SPAN_ID, TraceOptions.DEFAULT, Tracestate.builder().build()); + @Test void extractHistogram() throws InterruptedException { View view = @@ -44,7 +60,10 @@ void 
extractHistogram() throws InterruptedException { Aggregation.Distribution.create(LATENCY_BOUNDARIES), Collections.emptyList()); Stats.getViewManager().registerView(view); - STATS_RECORDER.newMeasureMap().put(LATENCY_MS, 50).record(); + + MeasureMap recorder = STATS_RECORDER.newMeasureMap(); + ExemplarUtils.putSpanContextAttachments(recorder, SPAN_CONTEXT); + recorder.put(LATENCY_MS, 50).record(); // Wait for measurement to hit the aggregator. Awaitility.await() @@ -69,6 +88,13 @@ void extractHistogram() throws InterruptedException { .hasBucketCounts(1, 0, 0, 0, 0, 0, 0) .hasBucketBoundaries( 100d, 200d, 400d, 1000d, 2000d, 4000d) - .hasExemplars()))); + .exemplars() + .satisfiesExactly( + exemplar -> + assertThat(exemplar) + .hasFilteredAttributes(Attributes.empty()) + .hasValue(50) + .hasTraceId(TRACE_ID.toLowerBase16()) + .hasSpanId(SPAN_ID.toLowerBase16()))))); } }