Use of io.opentelemetry.api.common.Attributes in the opentelemetry-java project by open-telemetry.
From the class BeanstalkResourceTest, method testCreateAttributes:
@Test
void testCreateAttributes(@TempDir File tempFolder) throws IOException {
  File file = new File(tempFolder, "beanstalk.config");
  String content =
      "{\"noise\": \"noise\", \"deployment_id\":4,"
          + "\"version_label\":\"2\",\"environment_name\":\"HttpSubscriber-env\"}";
  Files.write(content.getBytes(Charsets.UTF_8), file);
  Resource resource = BeanstalkResource.buildResource(file.getPath());
  Attributes attributes = resource.getAttributes();
  assertThat(attributes)
      .containsOnly(
          entry(ResourceAttributes.CLOUD_PROVIDER, "aws"),
          entry(ResourceAttributes.CLOUD_PLATFORM, "aws_elastic_beanstalk"),
          entry(ResourceAttributes.SERVICE_INSTANCE_ID, "4"),
          entry(ResourceAttributes.SERVICE_VERSION, "2"),
          entry(ResourceAttributes.SERVICE_NAMESPACE, "HttpSubscriber-env"));
  assertThat(resource.getSchemaUrl()).isEqualTo(ResourceAttributes.SCHEMA_URL);
}
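For reference, a minimal sketch of building and reading Attributes through the public API follows. It is not code from the project; the keys and values are illustrative only.

import static io.opentelemetry.api.common.AttributeKey.stringKey;

import io.opentelemetry.api.common.Attributes;

class AttributesSketch {
  public static void main(String[] args) {
    // Build an immutable Attributes instance; keys can be plain strings or typed AttributeKeys.
    Attributes attributes = Attributes.builder()
        .put("service.namespace", "HttpSubscriber-env")
        .put(stringKey("service.version"), "2")
        .build();
    // Typed lookup by key.
    String version = attributes.get(stringKey("service.version"));
    // Iterate all entries.
    attributes.forEach((key, value) -> System.out.println(key.getKey() + "=" + value));
    System.out.println("size=" + attributes.size() + ", version=" + version);
  }
}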
Use of io.opentelemetry.api.common.Attributes in the opentelemetry-java project by open-telemetry.
From the class SdkDoubleCounterTest, method collectMetrics_WithMultipleCollects:
@Test
void collectMetrics_WithMultipleCollects() {
  long startTime = testClock.now();
  DoubleCounter doubleCounter = sdkMeter.counterBuilder("testCounter").ofDoubles().build();
  BoundDoubleCounter bound =
      ((SdkDoubleCounter) doubleCounter).bind(Attributes.builder().put("K", "V").build());
  try {
    // Do some records using bounds and direct calls and bindings.
    doubleCounter.add(12.1d, Attributes.empty());
    bound.add(123.3d);
    doubleCounter.add(21.4d, Attributes.empty());
    // Advancing time here should not matter.
    testClock.advance(Duration.ofNanos(SECOND_NANOS));
    bound.add(321.5d);
    doubleCounter.add(111.1d, Attributes.builder().put("K", "V").build());
    assertThat(sdkMeterReader.collectAllMetrics())
        .satisfiesExactly(metric -> assertThat(metric)
            .hasResource(RESOURCE)
            .hasInstrumentationLibrary(INSTRUMENTATION_LIBRARY_INFO)
            .hasName("testCounter")
            .hasDescription("")
            .hasUnit("1")
            .hasDoubleSum()
            .isMonotonic()
            .isCumulative()
            .points()
            .allSatisfy(point -> assertThat(point)
                .hasStartEpochNanos(startTime)
                .hasEpochNanos(testClock.now()))
            .satisfiesExactlyInAnyOrder(
                point -> assertThat(point).hasAttributes(Attributes.empty()).hasValue(33.5),
                point -> assertThat(point).hasValue(555.9).attributes().hasSize(1).containsEntry("K", "V")));
    // Repeat to prove we keep previous values.
    testClock.advance(Duration.ofNanos(SECOND_NANOS));
    bound.add(222d);
    doubleCounter.add(11d, Attributes.empty());
    assertThat(sdkMeterReader.collectAllMetrics())
        .satisfiesExactly(metric -> assertThat(metric)
            .hasDoubleSum()
            .isCumulative()
            .points()
            .allSatisfy(point -> assertThat(point)
                .hasStartEpochNanos(startTime)
                .hasEpochNanos(testClock.now()))
            .satisfiesExactlyInAnyOrder(
                point -> assertThat(point).hasAttributes(Attributes.empty()).hasValue(44.5),
                point -> assertThat(point).hasAttributes(Attributes.of(stringKey("K"), "V")).hasValue(777.9)));
  } finally {
    bound.unbind();
  }
}
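For comparison, the same kind of counter can be exercised without the SDK-internal bound instrument, using only public API calls plus an in-memory reader. This is a hedged, self-contained sketch rather than project code: the instrument name is made up, and the InMemoryMetricReader import path is the one used by the SDK's metrics testing artifact at the time, which may differ between versions.

import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.DoubleCounter;
import io.opentelemetry.api.metrics.Meter;
import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.opentelemetry.sdk.metrics.testing.InMemoryMetricReader;

class DoubleCounterSketch {
  public static void main(String[] args) {
    InMemoryMetricReader reader = InMemoryMetricReader.create();
    SdkMeterProvider meterProvider =
        SdkMeterProvider.builder().registerMetricReader(reader).build();
    Meter meter = meterProvider.get("counter-sketch");
    DoubleCounter counter = meter.counterBuilder("requests").ofDoubles().build();
    Attributes labeled = Attributes.builder().put("K", "V").build();
    counter.add(12.1d, Attributes.empty());
    counter.add(123.3d, labeled);
    // A cumulative collection reports one point per distinct attribute set with its running total.
    System.out.println(reader.collectAllMetrics());
    meterProvider.shutdown();
  }
}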
Use of io.opentelemetry.api.common.Attributes in the opentelemetry-java project by open-telemetry.
From the class SdkObservableDoubleCounterTest, method collectMetrics_DeltaSumAggregator:
@Test
void collectMetrics_DeltaSumAggregator() {
  InMemoryMetricReader sdkMeterReader = InMemoryMetricReader.createDelta();
  SdkMeterProvider sdkMeterProvider =
      sdkMeterProviderBuilder
          .registerMetricReader(sdkMeterReader)
          .registerView(
              InstrumentSelector.builder().setType(InstrumentType.OBSERVABLE_COUNTER).build(),
              View.builder().setAggregation(Aggregation.sum()).build())
          .build();
  sdkMeterProvider
      .get(getClass().getName())
      .counterBuilder("testObserver")
      .ofDoubles()
      .setDescription("My own DoubleSumObserver")
      .setUnit("ms")
      .buildWithCallback(result -> result.record(12.1d, Attributes.builder().put("k", "v").build()));
  testClock.advance(Duration.ofNanos(SECOND_NANOS));
  assertThat(sdkMeterReader.collectAllMetrics())
      .satisfiesExactly(metric -> assertThat(metric)
          .hasResource(RESOURCE)
          .hasInstrumentationLibrary(INSTRUMENTATION_LIBRARY_INFO)
          .hasName("testObserver")
          .hasDescription("My own DoubleSumObserver")
          .hasUnit("ms")
          .hasDoubleSum()
          .isDelta()
          .isMonotonic()
          .points()
          .satisfiesExactlyInAnyOrder(point -> assertThat(point)
              .hasStartEpochNanos(testClock.now() - SECOND_NANOS)
              .hasEpochNanos(testClock.now())
              .hasValue(12.1)
              .attributes().hasSize(1).containsEntry("k", "v")));
  testClock.advance(Duration.ofNanos(SECOND_NANOS));
  assertThat(sdkMeterReader.collectAllMetrics())
      .satisfiesExactly(metric -> assertThat(metric)
          .hasResource(RESOURCE)
          .hasInstrumentationLibrary(INSTRUMENTATION_LIBRARY_INFO)
          .hasName("testObserver")
          .hasDescription("My own DoubleSumObserver")
          .hasUnit("ms")
          .hasDoubleSum()
          .isDelta()
          .isMonotonic()
          .points()
          .satisfiesExactlyInAnyOrder(point -> assertThat(point)
              .hasStartEpochNanos(testClock.now() - SECOND_NANOS)
              .hasEpochNanos(testClock.now())
              .hasValue(0)
              .attributes().hasSize(1).containsEntry("k", "v")));
}
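One small note on the callback above: because it runs on every collection, it is common to build the Attributes once and reuse the instance rather than re-allocating it inside the lambda. A minimal sketch of that pattern follows; the class, field, and helper names are illustrative, not taken from the project.

import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.Meter;

class ObservableCounterSketch {
  // Built once; Attributes are immutable and safe to share across callback invocations.
  private static final Attributes OBSERVED_ATTRS = Attributes.builder().put("k", "v").build();

  static void register(Meter meter) {
    meter.counterBuilder("testObserver")
        .ofDoubles()
        .setUnit("ms")
        .buildWithCallback(result -> result.record(readCurrentValue(), OBSERVED_ATTRS));
  }

  // Stand-in for whatever monotonically increasing value the application actually observes.
  private static double readCurrentValue() {
    return 12.1d;
  }
}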
Use of io.opentelemetry.api.common.Attributes in the opentelemetry-java project by open-telemetry.
From the class DefaultSynchronousMetricStorage, method collectAndReset:
@Override
public MetricData collectAndReset(
    CollectionInfo collectionInfo,
    Resource resource,
    InstrumentationLibraryInfo instrumentationLibraryInfo,
    long startEpochNanos,
    long epochNanos,
    boolean suppressSynchronousCollection) {
  AggregationTemporality temporality =
      TemporalityUtils.resolveTemporality(collectionInfo.getPreferredAggregation());
  // Pull the accumulations recorded for this collector, keyed by attribute set...
  Map<Attributes, T> result =
      deltaMetricStorage.collectFor(
          collectionInfo.getCollector(),
          collectionInfo.getAllCollectors(),
          suppressSynchronousCollection);
  // ...then merge them into a MetricData with the resolved temporality.
  return temporalMetricStorage.buildMetricFor(
      collectionInfo.getCollector(),
      resource,
      instrumentationLibraryInfo,
      getMetricDescriptor(),
      temporality,
      result,
      startEpochNanos,
      epochNanos);
}
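The temporality resolved here follows what the registered reader prefers. A small sketch, under the same InMemoryMetricReader import assumption as the earlier counter sketch, of how a delta-preferring reader changes what a synchronous counter reports across two collections:

import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.DoubleCounter;
import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.opentelemetry.sdk.metrics.testing.InMemoryMetricReader;

class TemporalitySketch {
  public static void main(String[] args) {
    // createDelta() prefers DELTA temporality; create() prefers CUMULATIVE.
    InMemoryMetricReader deltaReader = InMemoryMetricReader.createDelta();
    SdkMeterProvider provider =
        SdkMeterProvider.builder().registerMetricReader(deltaReader).build();
    DoubleCounter counter =
        provider.get("temporality-sketch").counterBuilder("work").ofDoubles().build();

    counter.add(5, Attributes.empty());
    System.out.println(deltaReader.collectAllMetrics()); // reports 5 for this interval
    counter.add(2, Attributes.empty());
    System.out.println(deltaReader.collectAllMetrics()); // reports only the 2 recorded since the last collect
    provider.shutdown();
  }
}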
Use of io.opentelemetry.api.common.Attributes in the opentelemetry-java project by open-telemetry.
From the class MetricAdapter, method toSamples:
// Converts a list of points from MetricData to a list of Prometheus Samples.
static List<Sample> toSamples(
    String name, MetricDataType type, Collection<? extends PointData> points) {
  List<Sample> samples = new ArrayList<>(estimateNumSamples(points.size(), type));
  for (PointData pointData : points) {
    Attributes attributes = pointData.getAttributes();
    List<String> labelNames = new ArrayList<>(attributes.size());
    List<String> labelValues = new ArrayList<>(attributes.size());
    attributes.forEach(
        (key, value) -> {
          String sanitizedLabelName = sanitizer.apply(key.getKey());
          labelNames.add(sanitizedLabelName);
          // TODO: We want to create an error-log if there is overlap in toString of attribute
          // values for the same key name.
          labelValues.add(value == null ? "" : value.toString());
        });
    switch (type) {
      case DOUBLE_SUM:
      case DOUBLE_GAUGE:
        DoublePointData doublePoint = (DoublePointData) pointData;
        // Prometheus doesn't support exemplars on SUM/GAUGE, so pass null.
        samples.add(
            createSample(
                name, labelNames, labelValues, doublePoint.getValue(), null, doublePoint.getEpochNanos()));
        break;
      case LONG_SUM:
      case LONG_GAUGE:
        LongPointData longPoint = (LongPointData) pointData;
        // Prometheus doesn't support exemplars on SUM/GAUGE, so pass null.
        samples.add(
            createSample(
                name, labelNames, labelValues, longPoint.getValue(), null, longPoint.getEpochNanos()));
        break;
      case SUMMARY:
        addSummarySamples((SummaryPointData) pointData, name, labelNames, labelValues, samples);
        break;
      case HISTOGRAM:
        addHistogramSamples((HistogramPointData) pointData, name, labelNames, labelValues, samples);
        break;
      case EXPONENTIAL_HISTOGRAM:
        // todo
        break;
    }
  }
  return samples;
}
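To make the attribute-to-label flattening concrete, here is a small self-contained sketch of the same loop with a stand-in sanitizer (a simple regex replacement assumed here only for illustration; the exporter's real sanitizer may behave differently):

import static io.opentelemetry.api.common.AttributeKey.stringKey;

import io.opentelemetry.api.common.Attributes;
import java.util.ArrayList;
import java.util.List;

class LabelFlatteningSketch {
  public static void main(String[] args) {
    Attributes attributes =
        Attributes.of(stringKey("http.method"), "GET", stringKey("http.status_code"), "200");
    List<String> labelNames = new ArrayList<>(attributes.size());
    List<String> labelValues = new ArrayList<>(attributes.size());
    attributes.forEach((key, value) -> {
      // Stand-in sanitizer: Prometheus label names allow only [a-zA-Z0-9_], so map other characters to '_'.
      labelNames.add(key.getKey().replaceAll("[^a-zA-Z0-9_]", "_"));
      labelValues.add(value == null ? "" : value.toString());
    });
    // Prints [http_method, http_status_code] and [GET, 200].
    System.out.println(labelNames);
    System.out.println(labelValues);
  }
}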