Use of org.apache.gobblin.metrics.MetricContext in project incubator-gobblin by Apache.
Example: class InfluxDBReporterTest, method testWithoutTags.
@Test
public void testWithoutTags() throws IOException {
  // try-with-resources guarantees the context and reporter are closed even if an assertion fails.
  try (MetricContext metricContext =
          MetricContext.builder(this.getClass().getCanonicalName() + ".testInfluxDBReporter").build();
      InfluxDBReporter influxDBReporter = InfluxDBReporter.Factory.newBuilder()
          .withInfluxDBPusher(influxDBPusher)
          .withMetricContextName(CONTEXT_NAME)
          .build(new Properties())) {
    // Gauge with a constant value so the reported reading is deterministic.
    ContextAwareGauge<Long> contextAwareGauge =
        metricContext.newContextAwareGauge("com.linkedin.example.gauge", new Gauge<Long>() {
          @Override
          public Long getValue() {
            return 1000L; // uppercase L suffix: lowercase 'l' is easily misread as the digit 1
          }
        });
    metricContext.register(MetricRegistry.name(METRIC_PREFIX, GAUGE), contextAwareGauge);
    Counter counter = metricContext.counter(MetricRegistry.name(METRIC_PREFIX, COUNTER));
    Meter meter = metricContext.meter(MetricRegistry.name(METRIC_PREFIX, METER));
    Histogram histogram = metricContext.histogram(MetricRegistry.name(METRIC_PREFIX, HISTOGRAM));
    Timer timer = metricContext.timer(MetricRegistry.name(METRIC_PREFIX, TIMER));

    // Seed each metric with known values so the expected report contents are predictable.
    counter.inc(3L);
    meter.mark(1L);
    meter.mark(2L);
    meter.mark(3L);
    histogram.update(1);
    histogram.update(1);
    histogram.update(2);
    timer.update(1, TimeUnit.SECONDS);
    timer.update(2, TimeUnit.SECONDS);
    timer.update(3, TimeUnit.SECONDS);

    influxDBReporter.report(metricContext.getGauges(), metricContext.getCounters(),
        metricContext.getHistograms(), metricContext.getMeters(), metricContext.getTimers(),
        metricContext.getTagMap());

    // InfluxDB converts all values to float64 internally
    Assert.assertEquals(getMetricValue(COUNTER, Measurements.COUNT), Float.toString(3f));
    Assert.assertEquals(getMetricValue(GAUGE, null), Float.toString(1000L));
    Assert.assertTrue(getMetricTimestamp(GAUGE, null) <= System.currentTimeMillis());
    Assert.assertEquals(getMetricValue(HISTOGRAM, Measurements.PERCENTILE_75TH), Float.toString(2f));
    Assert.assertEquals(getMetricValue(HISTOGRAM, Measurements.PERCENTILE_98TH), Float.toString(2f));
    Assert.assertEquals(getMetricValue(HISTOGRAM, Measurements.PERCENTILE_99TH), Float.toString(2f));
    Assert.assertEquals(getMetricValue(HISTOGRAM, Measurements.PERCENTILE_999TH), Float.toString(2f));
    Assert.assertEquals(getMetricValue(HISTOGRAM, Measurements.COUNT), Float.toString(3f));
    Assert.assertEquals(getMetricValue(HISTOGRAM, Measurements.MIN), Float.toString(1f));
    Assert.assertEquals(getMetricValue(HISTOGRAM, Measurements.MAX), Float.toString(2f));
    Assert.assertEquals(getMetricValue(HISTOGRAM, Measurements.MEDIAN), Float.toString(1f));
    // Double.parseDouble avoids the boxing that Double.valueOf incurs; the primitive is all we need.
    Assert.assertTrue(Double.parseDouble(getMetricValue(HISTOGRAM, Measurements.MEAN)) > 1f);
    Assert.assertTrue(Double.parseDouble(getMetricValue(HISTOGRAM, Measurements.STDDEV)) < 0.5f);
    // 1-min/5-min rates are 0 because no tick interval has elapsed in this short-lived test.
    Assert.assertEquals(getMetricValue(METER, Measurements.RATE_1MIN), Float.toString(0f));
    Assert.assertEquals(getMetricValue(METER, Measurements.RATE_5MIN), Float.toString(0f));
    Assert.assertEquals(getMetricValue(METER, Measurements.COUNT), Float.toString(6f));
    Assert.assertTrue(Double.parseDouble(getMetricValue(METER, Measurements.MEAN_RATE)) > 0f);
    Assert.assertEquals(getMetricValue(TIMER, Measurements.RATE_1MIN), Float.toString(0f));
    Assert.assertEquals(getMetricValue(TIMER, Measurements.RATE_5MIN), Float.toString(0f));
    // Timer values are reported in milliseconds: 1-3 seconds -> 1000-3000.
    Assert.assertEquals(getMetricValue(TIMER, Measurements.PERCENTILE_75TH), Float.toString(3000f));
    Assert.assertEquals(getMetricValue(TIMER, Measurements.PERCENTILE_98TH), Float.toString(3000f));
    Assert.assertEquals(getMetricValue(TIMER, Measurements.PERCENTILE_99TH), Float.toString(3000f));
    Assert.assertEquals(getMetricValue(TIMER, Measurements.PERCENTILE_999TH), Float.toString(3000f));
    Assert.assertEquals(getMetricValue(TIMER, Measurements.COUNT), Float.toString(3f));
    Assert.assertEquals(getMetricValue(TIMER, Measurements.MIN), Float.toString(1000f));
    Assert.assertEquals(getMetricValue(TIMER, Measurements.MAX), Float.toString(3000f));
    Assert.assertEquals(getMetricValue(TIMER, Measurements.MEAN), Float.toString(2000f));
    Assert.assertEquals(getMetricValue(TIMER, Measurements.MEDIAN), Float.toString(2000f));
    Assert.assertTrue(Double.parseDouble(getMetricValue(TIMER, Measurements.MEAN_RATE)) > 0f);
    Assert.assertTrue(Double.parseDouble(getMetricValue(TIMER, Measurements.STDDEV)) > 0f);
  }
}
Use of org.apache.gobblin.metrics.MetricContext in project incubator-gobblin by Apache.
Example: class GraphiteEventReporterTest, method testMultiPartEvent.
@Test
public void testMultiPartEvent() throws IOException {
  // Close the context and reporter automatically, even when an assertion throws.
  try (MetricContext metricContext =
          MetricContext.builder(this.getClass().getCanonicalName() + ".testGraphiteReporter2").build();
      GraphiteEventReporter graphiteEventReporter =
          getBuilder(metricContext).withEmitValueAsKey(true).build()) {
    // Populate every field of a multi-part job-state event.
    Map<String, String> eventMetadata = Maps.newHashMap();
    eventMetadata.put(JobEvent.METADATA_JOB_ID, "job2");
    eventMetadata.put(TaskEvent.METADATA_TASK_ID, "task2");
    eventMetadata.put(EventSubmitter.EVENT_TYPE, "JobStateEvent");
    eventMetadata.put(JobEvent.METADATA_JOB_START_TIME, "1457736710521");
    eventMetadata.put(JobEvent.METADATA_JOB_END_TIME, "1457736710734");
    eventMetadata.put(JobEvent.METADATA_JOB_LAUNCHED_TASKS, "3");
    eventMetadata.put(JobEvent.METADATA_JOB_COMPLETED_TASKS, "2");
    eventMetadata.put(JobEvent.METADATA_JOB_STATE, "FAILED");

    GobblinTrackingEvent trackingEvent = GobblinTrackingEvent.newBuilder()
        .setName(MultiPartEvent.JOBSTATE_EVENT.getEventName())
        .setNamespace(NAMESPACE)
        .setMetadata(eventMetadata)
        .build();
    metricContext.submitEvent(trackingEvent);

    // Event submission is asynchronous; give it a moment before and after reporting.
    try {
      Thread.sleep(100);
    } catch (InterruptedException ex) {
      Thread.currentThread().interrupt();
    }
    graphiteEventReporter.report();
    try {
      Thread.sleep(100);
    } catch (InterruptedException ex) {
      Thread.currentThread().interrupt();
    }

    // Each metadata field of the multi-part event should surface as its own Graphite metric.
    String prefix = "gobblin.metrics.job2.task2.events.JobStateEvent";
    Assert.assertEquals(graphiteSender.getMetric(prefix + ".jobBeginTime").getValue(), "1457736710521");
    Assert.assertEquals(graphiteSender.getMetric(prefix + ".jobEndTime").getValue(), "1457736710734");
    Assert.assertEquals(graphiteSender.getMetric(prefix + ".jobLaunchedTasks").getValue(), "3");
    Assert.assertEquals(graphiteSender.getMetric(prefix + ".jobCompletedTasks").getValue(), "2");
    // With emitValueAsKey enabled the state value becomes part of the metric key.
    Assert.assertNotNull(graphiteSender.getMetric(prefix + ".jobState.FAILED"));
  }
}
Use of org.apache.gobblin.metrics.MetricContext in project incubator-gobblin by Apache.
Example: class GraphiteEventReporterTest, method testSimpleEvent.
@Test
public void testSimpleEvent() throws IOException {
  // try-with-resources closes the context and reporter even if an assertion fails.
  try (MetricContext metricContext =
          MetricContext.builder(this.getClass().getCanonicalName() + ".testGraphiteReporter1").build();
      GraphiteEventReporter graphiteEventReporter =
          getBuilder(metricContext).withEmitValueAsKey(false).build()) {
    Map<String, String> metadata = Maps.newHashMap();
    metadata.put(JobEvent.METADATA_JOB_ID, "job1");
    metadata.put(TaskEvent.METADATA_TASK_ID, "task1");
    metricContext.submitEvent(GobblinTrackingEvent.newBuilder()
        .setName(JobEvent.TASKS_SUBMITTED)
        .setNamespace(NAMESPACE)
        .setMetadata(metadata)
        .build());
    // Event submission is asynchronous; pause briefly before and after reporting.
    try {
      Thread.sleep(100);
    } catch (InterruptedException ex) {
      Thread.currentThread().interrupt();
    }
    graphiteEventReporter.report();
    try {
      Thread.sleep(100);
    } catch (InterruptedException ex) {
      Thread.currentThread().interrupt();
    }
    TimestampedValue retrievedEvent =
        graphiteSender.getMetric("gobblin.metrics.job1.task1.events.TasksSubmitted");
    Assert.assertEquals(retrievedEvent.getValue(), "0");
    // Graphite timestamps are in epoch seconds; use uppercase L suffix for the long literal
    // (lowercase 'l' is easily misread as the digit 1).
    Assert.assertTrue(retrievedEvent.getTimestamp() <= (System.currentTimeMillis() / 1000L));
  }
}
Use of org.apache.gobblin.metrics.MetricContext in project incubator-gobblin by Apache.
Example: class KafkaEventReporterTest, method testKafkaEventReporter.
@Test
public void testKafkaEventReporter() throws IOException {
  MetricContext context = MetricContext.builder("context").build();
  MockKafkaPusher pusher = new MockKafkaPusher();
  KafkaEventReporter kafkaReporter = getBuilder(context, pusher).build("localhost:0000", "topic");

  // Build a tracking event; the null metadata value exercises the reporter's null handling.
  String namespace = "gobblin.metrics.test";
  String eventName = "testEvent";
  Map<String, String> metadata = Maps.newHashMap();
  metadata.put("m1", "v1");
  metadata.put("m2", null);
  GobblinTrackingEvent event = new GobblinTrackingEvent();
  event.setName(eventName);
  event.setNamespace(namespace);
  event.setMetadata(metadata);
  context.submitEvent(event);

  // Submission is asynchronous; pause before and after reporting so the event is flushed.
  try {
    Thread.sleep(100);
  } catch (InterruptedException ex) {
    Thread.currentThread().interrupt();
  }
  kafkaReporter.report();
  try {
    Thread.sleep(100);
  } catch (InterruptedException ex) {
    Thread.currentThread().interrupt();
  }

  GobblinTrackingEvent received = nextEvent(pusher.messageIterator());
  Assert.assertEquals(received.getNamespace(), namespace);
  Assert.assertEquals(received.getName(), eventName);
  // The reporter augments the two submitted entries with additional metadata, yielding 4.
  Assert.assertEquals(received.getMetadata().size(), 4);
}
Use of org.apache.gobblin.metrics.MetricContext in project incubator-gobblin by Apache.
Example: class KafkaReporterTest, method testKafkaReporter.
@Test
public void testKafkaReporter() throws IOException {
  MetricContext metricContext =
      MetricContext.builder(this.getClass().getCanonicalName() + ".testKafkaReporter").build();
  Counter counter = metricContext.counter("com.linkedin.example.counter");
  Meter meter = metricContext.meter("com.linkedin.example.meter");
  Histogram histogram = metricContext.histogram("com.linkedin.example.histogram");
  MockKafkaPusher pusher = new MockKafkaPusher();
  KafkaReporter kafkaReporter = getBuilder(pusher).build("localhost:0000", "topic", new Properties());

  // Seed each metric with known values so the first report's contents are predictable.
  counter.inc();
  meter.mark(2);
  histogram.update(1);
  histogram.update(1);
  histogram.update(2);

  // First report: check the exact counts that were recorded above.
  kafkaReporter.report(metricContext);
  pauseForKafkaFlush();
  Map<String, Double> expectedValues = new HashMap<>();
  expectedValues.put("com.linkedin.example.counter." + Measurements.COUNT, 1.0);
  expectedValues.put("com.linkedin.example.meter." + Measurements.COUNT, 2.0);
  expectedValues.put("com.linkedin.example.histogram." + Measurements.COUNT, 3.0);
  MetricReport firstReport = nextReport(pusher.messageIterator());
  expectMetricsWithValues(firstReport, expectedValues);

  // Second report: check that the full set of derived metric keys is emitted.
  kafkaReporter.report(metricContext);
  pauseForKafkaFlush();
  Set<String> expectedKeys = new HashSet<>();
  expectedKeys.add("com.linkedin.example.counter." + Measurements.COUNT);
  expectedKeys.add("com.linkedin.example.meter." + Measurements.COUNT);
  expectedKeys.add("com.linkedin.example.meter." + Measurements.MEAN_RATE);
  expectedKeys.add("com.linkedin.example.meter." + Measurements.RATE_1MIN);
  expectedKeys.add("com.linkedin.example.meter." + Measurements.RATE_5MIN);
  expectedKeys.add("com.linkedin.example.meter." + Measurements.RATE_15MIN);
  expectedKeys.add("com.linkedin.example.histogram." + Measurements.MEAN);
  expectedKeys.add("com.linkedin.example.histogram." + Measurements.MIN);
  expectedKeys.add("com.linkedin.example.histogram." + Measurements.MAX);
  expectedKeys.add("com.linkedin.example.histogram." + Measurements.MEDIAN);
  expectedKeys.add("com.linkedin.example.histogram." + Measurements.PERCENTILE_75TH);
  expectedKeys.add("com.linkedin.example.histogram." + Measurements.PERCENTILE_95TH);
  expectedKeys.add("com.linkedin.example.histogram." + Measurements.PERCENTILE_99TH);
  expectedKeys.add("com.linkedin.example.histogram." + Measurements.PERCENTILE_999TH);
  expectedKeys.add("com.linkedin.example.histogram." + Measurements.COUNT);
  MetricReport secondReport = nextReport(pusher.messageIterator());
  expectMetrics(secondReport, expectedKeys, true);

  kafkaReporter.close();
}

/** Waits for the asynchronous Kafka reporter to flush; restores the interrupt flag if interrupted. */
private static void pauseForKafkaFlush() {
  try {
    Thread.sleep(1000);
  } catch (InterruptedException ex) {
    Thread.currentThread().interrupt();
  }
}
Aggregations