Use of io.opentelemetry.sdk.metrics.data.LongPointData in project opentelemetry-plugin by jenkinsci.
In the class JenkinsOtelPluginIntegrationTest, the method testMetricsWithDiskUsagePlugin.
@Ignore("Lifecycle problem, the InMemoryMetricExporter gets reset too much and the disk usage is not captured")
@Test
@WithPlugin("cloudbees-disk-usage-simple")
public void testMetricsWithDiskUsagePlugin() throws Exception {
LOGGER.log(Level.INFO, "testMetricsWithDiskUsagePlugin...");
// WORKAROUND because we don't know how to force the IntervalMetricReader to collect metrics
// FIXME
Thread.sleep(100);
LOGGER.log(Level.INFO, "slept");
openTelemetrySdkProvider.getOpenTelemetrySdk().getSdkMeterProvider().forceFlush();
LOGGER.log(Level.INFO, "InMemoryMetricExporterProvider.LAST_CREATED_INSTANCE: " + InMemoryMetricExporterProvider.LAST_CREATED_INSTANCE);
Map<String, MetricData> exportedMetrics = InMemoryMetricExporterUtils.getLastExportedMetricByMetricName(InMemoryMetricExporterProvider.LAST_CREATED_INSTANCE.getFinishedMetricItems());
dumpMetrics(exportedMetrics);
MetricData diskUsageData = exportedMetrics.get(JenkinsSemanticMetrics.JENKINS_DISK_USAGE_BYTES);
MatcherAssert.assertThat(diskUsageData, CoreMatchers.notNullValue());
// TODO TEST METRICS WITH PROPER RESET BETWEEN TESTS
MatcherAssert.assertThat(diskUsageData.getType(), CoreMatchers.is(MetricDataType.LONG_GAUGE));
Collection<LongPointData> metricPoints = diskUsageData.getLongGaugeData().getPoints();
MatcherAssert.assertThat(Iterables.getLast(metricPoints).getValue(), CoreMatchers.notNullValue());
}
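The test above leans on InMemoryMetricExporterUtils.getLastExportedMetricByMetricName, a test utility whose body is not part of this excerpt. A minimal sketch of what such a helper could look like, assuming it simply indexes the exported items by metric name and keeps the most recently exported one (the method name and argument come from the snippet above; the body is an assumption):

    static Map<String, MetricData> getLastExportedMetricByMetricName(Collection<MetricData> exportedMetrics) {
        Map<String, MetricData> lastByName = new HashMap<>();
        for (MetricData metricData : exportedMetrics) {
            // later exports overwrite earlier ones, so the last exported value wins
            lastByName.put(metricData.getName(), metricData);
        }
        return lastByName;
    }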
Use of io.opentelemetry.sdk.metrics.data.LongPointData in project opentelemetry-plugin by jenkinsci.
In the class JenkinsOtelPluginIntegrationTest, the method testSimplePipeline.
@Test
public void testSimplePipeline() throws Exception {
    assumeFalse(SystemUtils.IS_OS_WINDOWS);
    // BEFORE
    String pipelineScript = "def xsh(cmd) {if (isUnix()) {sh cmd} else {bat cmd}};\n" +
        "node() {\n" +
        " stage('ze-stage1') {\n" +
        " xsh (label: 'shell-1', script: 'echo ze-echo-1') \n" +
        " }\n" +
        " stage('ze-stage2') {\n" +
        " xsh (label: 'shell-2', script: 'echo ze-echo-2') \n" +
        " }\n" +
        "}";
    final Node agent = jenkinsRule.createOnlineSlave();
    final String jobName = "test-simple-pipeline-" + jobNameSuffix.incrementAndGet();
    WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, jobName);
    pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
    WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));
    String rootSpanName = JenkinsOtelSemanticAttributes.CI_PIPELINE_RUN_ROOT_SPAN_NAME_PREFIX + jobName;
    final Tree<SpanDataWrapper> spans = getGeneratedSpans();
    checkChainOfSpans(spans, "Phase: Start", rootSpanName);
    checkChainOfSpans(spans, JenkinsOtelSemanticAttributes.AGENT_ALLOCATION_UI, JenkinsOtelSemanticAttributes.AGENT_UI, "Phase: Run", rootSpanName);
    checkChainOfSpans(spans, "shell-1", "Stage: ze-stage1", JenkinsOtelSemanticAttributes.AGENT_UI, "Phase: Run", rootSpanName);
    checkChainOfSpans(spans, "shell-2", "Stage: ze-stage2", JenkinsOtelSemanticAttributes.AGENT_UI, "Phase: Run", rootSpanName);
    checkChainOfSpans(spans, "Phase: Finalise", rootSpanName);
    MatcherAssert.assertThat(spans.cardinality(), CoreMatchers.is(10L));
    // WORKAROUND because we don't know how to force the IntervalMetricReader to collect metrics
    openTelemetrySdkProvider.getOpenTelemetrySdk().getSdkMeterProvider().forceFlush();
    Map<String, MetricData> exportedMetrics = InMemoryMetricExporterUtils.getLastExportedMetricByMetricName(InMemoryMetricExporterProvider.LAST_CREATED_INSTANCE.getFinishedMetricItems());
    dumpMetrics(exportedMetrics);
    MetricData runStartedCounterData = exportedMetrics.get(JenkinsSemanticMetrics.CI_PIPELINE_RUN_STARTED);
    MatcherAssert.assertThat(runStartedCounterData, CoreMatchers.notNullValue());
    // TODO TEST METRICS WITH PROPER RESET BETWEEN TESTS
    MatcherAssert.assertThat(runStartedCounterData.getType(), CoreMatchers.is(MetricDataType.LONG_SUM));
    Collection<LongPointData> metricPoints = runStartedCounterData.getLongSumData().getPoints();
    // MatcherAssert.assertThat(Iterables.getLast(metricPoints).getValue(), CoreMatchers.is(1L));
    // we don't test the CI_PIPELINE_RUN_COMPLETED metric because it is flaky
}
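The dumpMetrics helper used in both Jenkins tests is not shown either. A plausible sketch, assuming it only logs the exported metrics to make failing assertions easier to diagnose (the method name comes from the snippets; the body is an assumption):

    void dumpMetrics(Map<String, MetricData> exportedMetrics) {
        LOGGER.log(Level.INFO, "Exported metrics: " + exportedMetrics.size());
        // one log line per metric: name and data type
        exportedMetrics.forEach((name, metricData) -> LOGGER.log(Level.INFO, name + " -> " + metricData.getType()));
    }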
Use of io.opentelemetry.sdk.metrics.data.LongPointData in project opentelemetry-java by open-telemetry.
In the class SdkMeterProviderTest, the method removeAsyncInstrument.
@Test
void removeAsyncInstrument() {
    InMemoryMetricReader reader = InMemoryMetricReader.create();
    Meter meter = sdkMeterProviderBuilder.registerMetricReader(reader).build().get(getClass().getName());
    ObservableLongCounter observableCounter1 = meter.counterBuilder("foo")
        .buildWithCallback(measurement -> measurement.record(10, Attributes.builder().put("callback", "one").build()));
    ObservableLongCounter observableCounter2 = meter.counterBuilder("foo")
        .buildWithCallback(measurement -> measurement.record(10, Attributes.builder().put("callback", "two").build()));
    // both callbacks are registered, so the single "foo" metric carries two points
    assertThat(reader.collectAllMetrics())
        .hasSize(1)
        .satisfiesExactly(metricData -> assertThat(metricData)
            .hasLongSum()
            .points()
            .hasSize(2)
            .satisfiesExactlyInAnyOrder(
                pointData -> assertThat(pointData).hasAttributes(Attributes.builder().put("callback", "one").build()),
                (Consumer<LongPointData>) longPointData -> assertThat(longPointData).hasAttributes(Attributes.builder().put("callback", "two").build())));
    observableCounter1.close();
    // closing the first counter removes its callback; only the "two" point remains
    assertThat(reader.collectAllMetrics())
        .hasSize(1)
        .satisfiesExactly(metricData -> assertThat(metricData)
            .hasLongSum()
            .points()
            .hasSize(1)
            .satisfiesExactlyInAnyOrder(
                (Consumer<LongPointData>) longPointData -> assertThat(longPointData).hasAttributes(Attributes.builder().put("callback", "two").build())));
    observableCounter2.close();
    // with both callbacks closed, nothing is left to collect
    assertThat(reader.collectAllMetrics()).hasSize(0);
}
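In the test above, sdkMeterProviderBuilder is a fixture created elsewhere in SdkMeterProviderTest. Building an equivalent provider directly with the public SDK API would look roughly like this (the scope name "example-scope" is arbitrary):

    InMemoryMetricReader reader = InMemoryMetricReader.create();
    SdkMeterProvider meterProvider = SdkMeterProvider.builder()
        .registerMetricReader(reader)
        .build();
    // instruments obtained from this meter are returned by reader.collectAllMetrics()
    Meter meter = meterProvider.get("example-scope");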
Use of io.opentelemetry.sdk.metrics.data.LongPointData in project opentelemetry-java by open-telemetry.
In the class MetricAdapter, the method convertLongPoints.
static Collection<LongPointData> convertLongPoints(Metric censusMetric) {
    // TODO - preallocate array to correct size.
    List<LongPointData> result = new ArrayList<>();
    for (TimeSeries ts : censusMetric.getTimeSeriesList()) {
        long startTimestamp = mapTimestamp(ts.getStartTimestamp());
        Attributes attributes = mapAttributes(censusMetric.getMetricDescriptor().getLabelKeys(), ts.getLabelValues());
        for (Point point : ts.getPoints()) {
            result.add(ImmutableLongPointData.create(startTimestamp, mapTimestamp(point.getTimestamp()), attributes, longValue(point)));
        }
    }
    return result;
}
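The mapTimestamp helper called above is not included in the excerpt. Assuming it converts an OpenCensus Timestamp (seconds plus nanos) into the epoch-nanosecond timestamps used by OpenTelemetry point data, a sketch could be:

    static long mapTimestamp(io.opencensus.common.Timestamp timestamp) {
        // OpenTelemetry point data uses epoch nanoseconds; OpenCensus splits seconds and nanos
        return timestamp.getSeconds() * 1_000_000_000L + timestamp.getNanos();
    }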