Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.
Class KafkaIndexTaskTest, method setUp.
@Before
public void setUp() throws Exception {
  emitter = new ServiceEmitter(
      "service",
      "host",
      new LoggingEmitter(log, LoggingEmitter.Level.ERROR, new DefaultObjectMapper())
  );
  emitter.start();
  EmittingLogger.registerEmitter(emitter);
  makeToolboxFactory();
  zkServer = new TestingCluster(1);
  zkServer.start();
  kafkaServer = new TestBroker(
      zkServer.getConnectString(),
      tempFolder.newFolder(),
      1,
      ImmutableMap.of("num.partitions", "2")
  );
  kafkaServer.start();
  taskExec = MoreExecutors.listeningDecorator(
      Executors.newCachedThreadPool(Execs.makeThreadFactory("kafka-task-test-%d"))
  );
  handoffConditionTimeout = 0;
  reportParseExceptions = false;
  doHandoff = true;
}
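A setUp like this is normally paired with an @After method that releases the same fixtures in reverse order. A minimal sketch, assuming the fields initialized above and that TestBroker and TestingCluster expose the usual close/stop methods; the actual tearDown in KafkaIndexTaskTest may differ:

@After
public void tearDown() throws Exception {
  // Sketch only: release fixtures in roughly the reverse order of setUp.
  taskExec.shutdownNow();
  kafkaServer.close();
  zkServer.stop();
  emitter.close();
}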
Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.
Class NamespacedExtractorModuleTest, method testListNamespaces.
@Test
public void testListNamespaces() throws Exception {
  final File tmpFile = temporaryFolder.newFile();
  try (OutputStreamWriter out = new FileWriter(tmpFile)) {
    out.write(mapper.writeValueAsString(ImmutableMap.<String, String>of("foo", "bar")));
  }
  final URIExtractionNamespace namespace = new URIExtractionNamespace(
      tmpFile.toURI(),
      null,
      null,
      new URIExtractionNamespace.ObjectMapperFlatDataParser(
          URIExtractionNamespaceTest.registerTypes(new DefaultObjectMapper())
      ),
      new Period(0),
      null
  );
  try (CacheScheduler.Entry entry = scheduler.scheduleAndWait(namespace, 1_000)) {
    Assert.assertNotNull(entry);
    entry.awaitTotalUpdates(1);
    Assert.assertEquals(1, scheduler.getActiveEntries());
  }
}
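For reference, the file this test writes is a single flat JSON object, which is what ObjectMapperFlatDataParser (the "simpleJson" format) expects: each key/value pair of the object becomes one mapping in the extraction namespace. A standalone sketch of the serialization step, using a plain Jackson ObjectMapper since DefaultObjectMapper extends it (the class name here is illustrative):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;

public class FlatDataFileExample {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // Prints {"foo":"bar"} — one flat JSON object whose entries become
    // the key/value mappings of the extraction namespace.
    System.out.println(mapper.writeValueAsString(ImmutableMap.of("foo", "bar")));
  }
}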
Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.
Class NamespacedExtractorModuleTest, method testDeleteNamespaces.
@Test(timeout = 10_000)
public void testDeleteNamespaces() throws Exception {
  final File tmpFile = temporaryFolder.newFile();
  try (OutputStreamWriter out = new FileWriter(tmpFile)) {
    out.write(mapper.writeValueAsString(ImmutableMap.<String, String>of("foo", "bar")));
  }
  final URIExtractionNamespace namespace = new URIExtractionNamespace(
      tmpFile.toURI(),
      null,
      null,
      new URIExtractionNamespace.ObjectMapperFlatDataParser(
          URIExtractionNamespaceTest.registerTypes(new DefaultObjectMapper())
      ),
      new Period(0),
      null
  );
  try (CacheScheduler.Entry entry = scheduler.scheduleAndWait(namespace, 1_000)) {
    Assert.assertNotNull(entry);
  }
}
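The deletion here is implicit: closing the entry via try-with-resources deschedules the namespace. A hedged follow-up assertion one could add after the try block, mirroring the active-count check in testListNamespaces above (this line is not part of the original test; it assumes closing an entry drops the scheduler's active count):

// Assumed behavior: once the entry is closed, no entries remain active.
Assert.assertEquals(0, scheduler.getActiveEntries());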
Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.
Class URIExtractionNamespaceTest, method testExplicitJson.
@Test
public void testExplicitJson() throws IOException {
  final ObjectMapper mapper = registerTypes(new DefaultObjectMapper());
  URIExtractionNamespace namespace = mapper.readValue(
      "{\"type\":\"uri\", \"uri\":\"file:/foo\", \"namespaceParseSpec\":{\"format\":\"simpleJson\"}, \"pollPeriod\":\"PT5M\"}",
      URIExtractionNamespace.class
  );
  Assert.assertEquals(
      URIExtractionNamespace.ObjectMapperFlatDataParser.class.getCanonicalName(),
      namespace.getNamespaceParseSpec().getClass().getCanonicalName()
  );
  Assert.assertEquals("file:/foo", namespace.getUri().toString());
  Assert.assertEquals(5L * 60_000L, namespace.getPollMs());
}
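The final assertion rests on ISO-8601 period arithmetic: "PT5M" is five minutes, i.e. 5 * 60_000 = 300,000 milliseconds. A standalone sketch of that conversion with Joda-Time, which Druid uses for Period:

import org.joda.time.Period;

public class PollPeriodExample {
  public static void main(String[] args) {
    // "PT5M" parses to five minutes; getMillis() yields 300000,
    // matching the getPollMs() assertion above.
    System.out.println(Period.parse("PT5M").toStandardDuration().getMillis());
  }
}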
Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.
Class DatasourceRecordReaderTest, method testSanity.
@Test
public void testSanity() throws Exception {
  DataSegment segment = new DefaultObjectMapper()
      .readValue(this.getClass().getClassLoader().getResource("test-segment/descriptor.json"), DataSegment.class)
      .withLoadSpec(
          ImmutableMap.<String, Object>of(
              "type", "local",
              "path", this.getClass().getClassLoader().getResource("test-segment/index.zip").getPath()
          )
      );
  InputSplit split = new DatasourceInputSplit(Lists.newArrayList(WindowedDataSegment.of(segment)), null);
  Configuration config = new Configuration();
  config.set(
      DatasourceInputFormat.CONF_DRUID_SCHEMA,
      HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsString(
          new DatasourceIngestionSpec(
              segment.getDataSource(), segment.getInterval(), null, null, null, null,
              segment.getDimensions(), segment.getMetrics(), false
          )
      )
  );
  TaskAttemptContext context = EasyMock.createNiceMock(TaskAttemptContext.class);
  EasyMock.expect(context.getConfiguration()).andReturn(config).anyTimes();
  EasyMock.replay(context);
  DatasourceRecordReader rr = new DatasourceRecordReader();
  rr.initialize(split, context);
  Assert.assertEquals(0, rr.getProgress(), 0.0001);
  List<InputRow> rows = Lists.newArrayList();
  while (rr.nextKeyValue()) {
    rows.add(rr.getCurrentValue());
  }
  verifyRows(rows);
  Assert.assertEquals(1, rr.getProgress(), 0.0001);
  rr.close();
}
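The nextKeyValue()/getCurrentValue() loop in the middle of the test is the standard Hadoop RecordReader drain pattern. A generic helper sketch of the same loop (the class and method names are illustrative, not Druid or Hadoop APIs):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.mapreduce.RecordReader;

public final class RecordReaders {
  // Drains a RecordReader into a list of its values, as testSanity does
  // inline above; a sketch only, not a Druid utility.
  public static <K, V> List<V> drain(RecordReader<K, V> rr) throws IOException, InterruptedException {
    final List<V> values = new ArrayList<>();
    while (rr.nextKeyValue()) {
      values.add(rr.getCurrentValue());
    }
    return values;
  }
}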