Use of org.apache.gobblin.metrics.reporter.util.NoopSchemaVersionWriter in project incubator-gobblin by apache.
Example from the class GobblinMetricsPinotFlattenerConverterTest, method test:
@Test
public void test() throws Exception {
  MetricReport metricReport = new MetricReport();
  metricReport.setTags(ImmutableMap.of("tag", "value", "tag2", "value2"));
  metricReport.setTimestamp(10L);
  metricReport.setMetrics(Lists.newArrayList(new Metric("metric", 1.0), new Metric("metric2", 2.0)));

  // Serialize the report with a no-op version writer (no version header is written),
  // then deserialize it back into a GenericRecord using the MetricReport schema.
  AvroSerializer<MetricReport> serializer =
      new AvroBinarySerializer<>(MetricReport.SCHEMA$, new NoopSchemaVersionWriter());
  serializer.serializeRecord(metricReport);
  Schema metricReportUtf8 =
      new Schema.Parser().parse(this.getClass().getClassLoader().getResourceAsStream("MetricReport.avsc"));
  GenericRecord genericRecordMetric =
      AvroUtils.slowDeserializeGenericRecord(serializer.serializeRecord(metricReport), metricReportUtf8);

  // Flatten the report: one output record per metric, each carrying the shared tags and timestamp.
  GobblinMetricsPinotFlattenerConverter converter = new GobblinMetricsPinotFlattenerConverter();
  Schema outputSchema = converter.convertSchema(MetricReport.SCHEMA$, new WorkUnitState());
  Iterable<GenericRecord> converted = converter.convertRecord(outputSchema, genericRecordMetric, new WorkUnitState());
  List<GenericRecord> convertedList = Lists.newArrayList(converted);

  Assert.assertEquals(convertedList.size(), 2);
  Assert.assertEquals(Sets.newHashSet((List<Utf8>) convertedList.get(0).get("tags")), Sets.newHashSet("tag:value", "tag2:value2"));
  Assert.assertEquals(convertedList.get(0).get("timestamp"), 10L);
  Assert.assertEquals(convertedList.get(0).get("metricName").toString(), "metric");
  Assert.assertEquals(convertedList.get(0).get("metricValue"), 1.0);
  Assert.assertEquals(Sets.newHashSet((List<Utf8>) convertedList.get(1).get("tags")), Sets.newHashSet("tag:value", "tag2:value2"));
  Assert.assertEquals(convertedList.get(1).get("timestamp"), 10L);
  Assert.assertEquals(convertedList.get(1).get("metricName").toString(), "metric2");
  Assert.assertEquals(convertedList.get(1).get("metricValue"), 2.0);
}
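For context, NoopSchemaVersionWriter emits no versioning bytes, so the payload produced by AvroBinarySerializer above is a plain Avro binary record that can be deserialized directly. A minimal standalone sketch of the same round trip using only the Avro APIs (the schema and field below are hypothetical stand-ins for MetricReport):

import java.io.ByteArrayOutputStream;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;

public class PlainAvroRoundTrip {
  public static void main(String[] args) throws Exception {
    // Hypothetical schema standing in for MetricReport.SCHEMA$.
    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Sample\",\"fields\":[{\"name\":\"timestamp\",\"type\":\"long\"}]}");
    GenericRecord record = new GenericData.Record(schema);
    record.put("timestamp", 10L);

    // Serialize: only the Avro binary body is written, no version header.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    new GenericDatumWriter<GenericRecord>(schema).write(record, encoder);
    encoder.flush();

    // Deserialize the raw bytes directly into a GenericRecord.
    Decoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
    GenericRecord copy = new GenericDatumReader<GenericRecord>(schema).read(null, decoder);
    System.out.println(copy.get("timestamp")); // 10
  }
}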
Use of org.apache.gobblin.metrics.reporter.util.NoopSchemaVersionWriter in project incubator-gobblin by apache.
Example from the class KafkaAvroJobMonitorTest, method testWrongSchema:
@Test
public void testWrongSchema() throws Exception {
  TestKafkaAvroJobMonitor monitor =
      new TestKafkaAvroJobMonitor(GobblinTrackingEvent.SCHEMA$, new NoopSchemaVersionWriter());
  monitor.buildMetricsContextAndMetrics();

  // The monitor expects GobblinTrackingEvent records, but the message is a serialized MetricReport.
  AvroSerializer<MetricReport> serializer =
      new AvroBinarySerializer<>(MetricReport.SCHEMA$, new NoopSchemaVersionWriter());
  MetricReport event = new MetricReport(Maps.<String, String>newHashMap(), 0L, Lists.<Metric>newArrayList());
  Collection<Either<JobSpec, URI>> results = monitor.parseJobSpec(serializer.serializeRecord(event));

  // The message cannot be parsed: no specs, no events, one parse failure counted.
  Assert.assertEquals(results.size(), 0);
  Assert.assertEquals(monitor.events.size(), 0);
  Assert.assertEquals(monitor.getMessageParseFailures().getCount(), 1);
  monitor.shutdownMetrics();
}
Use of org.apache.gobblin.metrics.reporter.util.NoopSchemaVersionWriter in project incubator-gobblin by apache.
Example from the class KafkaAvroJobMonitorTest, method testWrongSchemaVersionWriter:
@Test
public void testWrongSchemaVersionWriter() throws Exception {
  TestKafkaAvroJobMonitor monitor =
      new TestKafkaAvroJobMonitor(GobblinTrackingEvent.SCHEMA$, new NoopSchemaVersionWriter());
  monitor.buildMetricsContextAndMetrics();

  // The serializer prefixes the payload with a fixed schema version, but the monitor was built
  // with a NoopSchemaVersionWriter and expects the record to start immediately.
  AvroSerializer<GobblinTrackingEvent> serializer =
      new AvroBinarySerializer<>(GobblinTrackingEvent.SCHEMA$, new FixedSchemaVersionWriter());
  GobblinTrackingEvent event = new GobblinTrackingEvent(0L, "namespace", "event", Maps.<String, String>newHashMap());
  Collection<Either<JobSpec, URI>> results = monitor.parseJobSpec(serializer.serializeRecord(event));

  Assert.assertEquals(results.size(), 0);
  Assert.assertEquals(monitor.events.size(), 0);
  Assert.assertEquals(monitor.getMessageParseFailures().getCount(), 1);
  monitor.shutdownMetrics();
}
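The failure above comes from the extra version bytes: FixedSchemaVersionWriter writes a schema version ahead of the record, while the monitor, built with a NoopSchemaVersionWriter, starts decoding at byte zero. A minimal sketch of that mismatch using plain Avro (the 4-byte integer prefix is an assumption standing in for whatever FixedSchemaVersionWriter actually writes):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;

public class VersionPrefixMismatch {
  public static void main(String[] args) throws Exception {
    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Sample\",\"fields\":[{\"name\":\"timestamp\",\"type\":\"long\"}]}");
    GenericRecord record = new GenericData.Record(schema);
    record.put("timestamp", 10L);

    // Writer side: an assumed fixed version number goes in front of the Avro body.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DataOutputStream dataOut = new DataOutputStream(out);
    dataOut.writeInt(1); // stand-in for the fixed schema version prefix
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    new GenericDatumWriter<GenericRecord>(schema).write(record, encoder);
    encoder.flush();

    // Reader side: decoding from byte zero misreads the version bytes as record data.
    GenericRecord copy = new GenericDatumReader<GenericRecord>(schema)
        .read(null, DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
    System.out.println(copy.get("timestamp")); // 0, not 10: the prefix corrupted the read
    // With the real GobblinTrackingEvent schema the shifted bytes typically fail to decode
    // at all, which is what parseJobSpec counts as a message parse failure above.
  }
}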
Use of org.apache.gobblin.metrics.reporter.util.NoopSchemaVersionWriter in project incubator-gobblin by apache.
Example from the class SLAEventKafkaJobMonitorTest, method testParseJobSpec:
@Test
public void testParseJobSpec() throws Exception {
  SLAEventKafkaJobMonitor monitor = new SLAEventKafkaJobMonitor("topic", null, new URI("/base/URI"),
      HighLevelConsumerTest.getSimpleConfig(Optional.of(KafkaJobMonitor.KAFKA_JOB_MONITOR_PREFIX)),
      new NoopSchemaVersionWriter(), Optional.<Pattern>absent(), Optional.<Pattern>absent(),
      this.templateURI, ImmutableMap.of("metadataKey1", "key1"));
  monitor.buildMetricsContextAndMetrics();

  GobblinTrackingEvent event =
      createSLAEvent("DatasetPublish", new URI("/data/myDataset"), ImmutableMap.of("metadataKey1", "value1", "key1", "value2"));
  Collection<Either<JobSpec, URI>> jobSpecs = monitor.parseJobSpec(event);

  Assert.assertEquals(jobSpecs.size(), 1);
  JobSpec jobSpec = (JobSpec) jobSpecs.iterator().next().get();
  Assert.assertEquals(jobSpec.getUri(), new URI("/base/URI/data/myDataset"));
  Assert.assertEquals(jobSpec.getTemplateURI().get(), templateURI);
  // Configuration should be inserted from the event metadata: "metadataKey1" maps to config key "key1".
  Assert.assertEquals(jobSpec.getConfig().getString("key1"), "value1");
  monitor.shutdownMetrics();
}
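The assertion on "key1" is easier to read with the key mapping spelled out: the monitor is constructed with ImmutableMap.of("metadataKey1", "key1"), so the value of the metadata entry metadataKey1 lands in the job config under key1. A small sketch of that mapping as exercised by the assertions, not the monitor's actual implementation:

import java.util.HashMap;
import java.util.Map;
import com.google.common.collect.ImmutableMap;

public class MetadataToConfigSketch {
  public static void main(String[] args) {
    // Event metadata as built in the test above.
    Map<String, String> metadata = ImmutableMap.of("metadataKey1", "value1", "key1", "value2");
    // Mapping passed to the monitor: metadata key -> job config key.
    Map<String, String> keyMapping = ImmutableMap.of("metadataKey1", "key1");

    Map<String, String> jobConfig = new HashMap<>();
    for (Map.Entry<String, String> entry : keyMapping.entrySet()) {
      if (metadata.containsKey(entry.getKey())) {
        jobConfig.put(entry.getValue(), metadata.get(entry.getKey()));
      }
    }
    // Matches the assertion: config "key1" comes from metadata "metadataKey1" ("value1"),
    // not from the literal "key1" metadata entry ("value2").
    System.out.println(jobConfig.get("key1")); // value1
  }
}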
Use of org.apache.gobblin.metrics.reporter.util.NoopSchemaVersionWriter in project incubator-gobblin by apache.
Example from the class SLAEventKafkaJobMonitorTest, method testFilterByName:
@Test
public void testFilterByName() throws Exception {
  // Only events whose name matches "^accept.*" should produce a JobSpec; others are counted as rejected.
  SLAEventKafkaJobMonitor monitor = new SLAEventKafkaJobMonitor("topic", null, new URI("/base/URI"),
      HighLevelConsumerTest.getSimpleConfig(Optional.of(KafkaJobMonitor.KAFKA_JOB_MONITOR_PREFIX)),
      new NoopSchemaVersionWriter(), Optional.<Pattern>absent(), Optional.of(Pattern.compile("^accept.*")),
      this.templateURI, ImmutableMap.<String, String>of());
  monitor.buildMetricsContextAndMetrics();

  GobblinTrackingEvent event;
  Collection<Either<JobSpec, URI>> jobSpecs;

  event = createSLAEvent("acceptthis", new URI("/data/myDataset"), Maps.<String, String>newHashMap());
  jobSpecs = monitor.parseJobSpec(event);
  Assert.assertEquals(jobSpecs.size(), 1);
  Assert.assertEquals(monitor.getRejectedEvents().getCount(), 0);

  event = createSLAEvent("donotacceptthis", new URI("/data/myDataset"), Maps.<String, String>newHashMap());
  jobSpecs = monitor.parseJobSpec(event);
  Assert.assertEquals(jobSpecs.size(), 0);
  Assert.assertEquals(monitor.getRejectedEvents().getCount(), 1);
  monitor.shutdownMetrics();
}
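The name filter in this test is an ordinary java.util.regex.Pattern; a minimal illustration of why "acceptthis" passes and "donotacceptthis" is rejected (the matching mode shown is an assumption, not the monitor's exact code):

import java.util.regex.Pattern;

public class NameFilterSketch {
  public static void main(String[] args) {
    Pattern filter = Pattern.compile("^accept.*");
    // "^" anchors the match at the start of the event name.
    System.out.println(filter.matcher("acceptthis").find()); // true
    System.out.println(filter.matcher("donotacceptthis").find()); // false
  }
}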