Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.
From the class SketchEstimateWithErrorBoundsTest, method testSerde.
@Test
public void testSerde() throws IOException {
  ObjectMapper mapper = new DefaultObjectMapper();
  SketchEstimateWithErrorBounds est = new SketchEstimateWithErrorBounds(100.0, 101.5, 98.5, 2);
  Assert.assertEquals(est, mapper.readValue(mapper.writeValueAsString(est), SketchEstimateWithErrorBounds.class));
}
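The pattern above (write the value to JSON, read it back, assert equality) is the standard serde round-trip check used throughout these tests. Below is a minimal, self-contained sketch of the same idea with a plain ObjectMapper and a hypothetical Bounds bean; the field names mirror the constructor arguments 100.0, 101.5, 98.5 and 2 and are illustrative, not taken from the Druid class.

import com.fasterxml.jackson.databind.ObjectMapper;

public class RoundTripSketch {
  // Hypothetical stand-in for SketchEstimateWithErrorBounds: a plain bean with public fields.
  public static class Bounds {
    public double estimate;
    public double highBound;
    public double lowBound;
    public int numStdDev;
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();

    Bounds original = new Bounds();
    original.estimate = 100.0;
    original.highBound = 101.5;
    original.lowBound = 98.5;
    original.numStdDev = 2;

    // Serialize, deserialize, and check that nothing was lost in the round trip.
    Bounds restored = mapper.readValue(mapper.writeValueAsString(original), Bounds.class);
    if (restored.estimate != original.estimate || restored.numStdDev != original.numStdDev) {
      throw new AssertionError("round trip changed the value");
    }
  }
}

The real test uses DefaultObjectMapper instead of a plain ObjectMapper because, roughly speaking, it layers Druid's Jackson modules and stricter defaults on top of the stock mapper.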
Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.
From the class AuthenticationKerberosConfigTest, method testserde.
@Test
public void testserde() {
  Injector injector = Guice.createInjector(new Module() {
    @Override
    public void configure(Binder binder) {
      binder.install(new PropertiesModule(Arrays.asList("test.runtime.properties")));
      binder.install(new ConfigModule());
      binder.install(new DruidGuiceExtensions());
      JsonConfigProvider.bind(binder, "druid.hadoop.security.kerberos", AuthenticationKerberosConfig.class);
    }

    @Provides
    @LazySingleton
    public ObjectMapper jsonMapper() {
      return new DefaultObjectMapper();
    }
  });

  Properties props = injector.getInstance(Properties.class);
  AuthenticationKerberosConfig config = injector.getInstance(AuthenticationKerberosConfig.class);

  Assert.assertEquals(props.getProperty("druid.hadoop.security.kerberos.principal"), config.getPrincipal());
  Assert.assertEquals(props.getProperty("druid.hadoop.security.kerberos.keytab"), config.getKeytab());
}
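JsonConfigProvider.bind ties the property prefix druid.hadoop.security.kerberos to the config class: roughly, the properties under that prefix are collected and deserialized into AuthenticationKerberosConfig by the injected ObjectMapper, which is why supplying a DefaultObjectMapper is part of the wiring. The sketch below shows what the bound class and the backing properties file might look like; the field names and property values are assumptions, and only the two getters the test calls (getPrincipal, getKeytab) are taken from the snippet.

// Assumed contents of test.runtime.properties:
//   druid.hadoop.security.kerberos.principal=druid@EXAMPLE.COM
//   druid.hadoop.security.kerberos.keytab=/etc/security/keytabs/druid.keytab

import com.fasterxml.jackson.annotation.JsonProperty;

public class KerberosConfigSketch {
  // JsonConfigProvider collects the properties under the bound prefix and has the injected
  // ObjectMapper deserialize them into this bean, so plain Jackson annotations are enough.
  @JsonProperty
  private String principal;

  @JsonProperty
  private String keytab;

  public String getPrincipal() {
    return principal;
  }

  public String getKeytab() {
    return keytab;
  }
}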
Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.
From the class HdfsDataSegmentPusherTest, method testUsingScheme.
private void testUsingScheme(final String scheme) throws Exception {
  Configuration conf = new Configuration(true);

  // Create a mock segment on disk
  File segmentDir = tempFolder.newFolder();
  File tmp = new File(segmentDir, "version.bin");
  final byte[] data = new byte[]{0x0, 0x0, 0x0, 0x1};
  Files.write(data, tmp);
  final long size = data.length;

  HdfsDataSegmentPusherConfig config = new HdfsDataSegmentPusherConfig();
  final File storageDirectory = tempFolder.newFolder();
  config.setStorageDirectory(
      scheme != null
      ? String.format("%s://%s", scheme, storageDirectory.getAbsolutePath())
      : storageDirectory.getAbsolutePath()
  );
  HdfsDataSegmentPusher pusher = new HdfsDataSegmentPusher(config, conf, new DefaultObjectMapper());

  DataSegment segmentToPush = new DataSegment(
      "foo",
      new Interval("2015/2016"),
      "0",
      Maps.<String, Object>newHashMap(),
      Lists.<String>newArrayList(),
      Lists.<String>newArrayList(),
      NoneShardSpec.instance(),
      0,
      size
  );

  DataSegment segment = pusher.push(segmentDir, segmentToPush);

  String indexUri = String.format(
      "%s/%s/%d_index.zip",
      FileSystem.newInstance(conf).makeQualified(new Path(config.getStorageDirectory())).toUri().toString(),
      DataSegmentPusherUtil.getHdfsStorageDir(segmentToPush),
      segmentToPush.getShardSpec().getPartitionNum()
  );

  Assert.assertEquals(segmentToPush.getSize(), segment.getSize());
  Assert.assertEquals(segmentToPush, segment);
  Assert.assertEquals(ImmutableMap.of("type", "hdfs", "path", indexUri), segment.getLoadSpec());

  // rename directory after push
  final String segmentPath = DataSegmentPusherUtil.getHdfsStorageDir(segment);

  File indexFile = new File(String.format("%s/%s/%d_index.zip", storageDirectory, segmentPath, segment.getShardSpec().getPartitionNum()));
  Assert.assertTrue(indexFile.exists());
  File descriptorFile = new File(String.format("%s/%s/%d_descriptor.json", storageDirectory, segmentPath, segment.getShardSpec().getPartitionNum()));
  Assert.assertTrue(descriptorFile.exists());

  // a second push of the same segment must not throw, even though outDir is now read-only;
  // its temp dir is cleaned up
  File outDir = new File(String.format("%s/%s", config.getStorageDirectory(), segmentPath));
  outDir.setReadOnly();
  try {
    pusher.push(segmentDir, segmentToPush);
  } catch (IOException e) {
    Assert.fail("should not throw exception");
  }
}
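The assertions above all revolve around one path layout: storage directory, then the per-segment subpath from DataSegmentPusherUtil.getHdfsStorageDir, then the partition number followed by _index.zip or _descriptor.json. The helper below simply restates those String.format calls for illustration; it is not Druid API, and the segment path used in main is made up.

public class HdfsSegmentPathsSketch {
  // Mirrors the index path assertion in the test; not Druid API.
  public static String indexPath(String storageDir, String segmentPath, int partitionNum) {
    return String.format("%s/%s/%d_index.zip", storageDir, segmentPath, partitionNum);
  }

  // Mirrors the descriptor path assertion in the test; not Druid API.
  public static String descriptorPath(String storageDir, String segmentPath, int partitionNum) {
    return String.format("%s/%s/%d_descriptor.json", storageDir, segmentPath, partitionNum);
  }

  public static void main(String[] args) {
    // "foo/2015_2016/0" is a made-up segment path; the real one comes from
    // DataSegmentPusherUtil.getHdfsStorageDir(segment).
    System.out.println(indexPath("/tmp/storage", "foo/2015_2016/0", 0));
    // -> /tmp/storage/foo/2015_2016/0/0_index.zip
  }
}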
Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.
From the class InlineSchemaAvroBytesDecoderTest, method testSerde.
@Test
public void testSerde() throws Exception {
  String jsonStr = "{\n" + " \"type\": \"schema_inline\",\n" + " \"schema\": {\n" + " \"namespace\": \"io.druid.data.input\",\n" + " \"name\": \"SomeData\",\n" + " \"type\": \"record\",\n" + " \"fields\" : [\n" + " {\"name\":\"timestamp\",\"type\":\"long\"},\n" + " {\"name\":\"eventType\",\"type\":\"string\"},\n" + " {\"name\":\"id\",\"type\":\"long\"}\n" + " ]\n" + " }\n" + "}";

  final ObjectMapper mapper = new DefaultObjectMapper();
  mapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, mapper));

  InlineSchemaAvroBytesDecoder actual = (InlineSchemaAvroBytesDecoder) mapper.readValue(
      mapper.writeValueAsString(mapper.readValue(jsonStr, AvroBytesDecoder.class)),
      AvroBytesDecoder.class
  );
  Assert.assertEquals(actual.getSchema().get("name"), "SomeData");
}
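The setInjectableValues call is what lets a deserialized class receive the ObjectMapper itself via @JacksonInject instead of reading it from the JSON. Whether InlineSchemaAvroBytesDecoder uses exactly that mechanism is not visible in the snippet, so the class below is a hypothetical stand-in that only demonstrates the injection pattern the test sets up.

import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;

public class InjectedMapperSketch {
  // Hypothetical class: the ObjectMapper arrives via @JacksonInject, not from the JSON itself.
  public static class NeedsMapper {
    final String name;
    final ObjectMapper mapper;

    @JsonCreator
    public NeedsMapper(@JsonProperty("name") String name, @JacksonInject ObjectMapper mapper) {
      this.name = name;
      this.mapper = mapper;
    }
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // Register the mapper itself as an injectable value, exactly as the test does.
    mapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, mapper));

    NeedsMapper value = mapper.readValue("{\"name\":\"SomeData\"}", NeedsMapper.class);
    System.out.println(value.name + ", injected same mapper: " + (value.mapper == mapper));
  }
}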
Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.
From the class InlineSchemasAvroBytesDecoderTest, method testSerde.
@Test
public void testSerde() throws Exception {
  String jsonStr = "{\n" + " \"type\": \"multiple_schemas_inline\",\n" + " \"schemas\": {\n" + " \"5\": {\n" + " \"namespace\": \"io.druid.data.input\",\n" + " \"name\": \"name5\",\n" + " \"type\": \"record\",\n" + " \"fields\" : [\n" + " {\"name\":\"eventType\",\"type\":\"string\"},\n" + " {\"name\":\"id\",\"type\":\"long\"}\n" + " ]\n" + " },\n" + " \"8\": {\n" + " \"namespace\": \"io.druid.data.input\",\n" + " \"name\": \"name8\",\n" + " \"type\": \"record\",\n" + " \"fields\" : [\n" + " {\"name\":\"eventType\",\"type\":\"string\"},\n" + " {\"name\":\"id\",\"type\":\"long\"}\n" + " ]\n" + " }\n" + " }\n" + "}\n";

  final ObjectMapper mapper = new DefaultObjectMapper();
  mapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, mapper));

  InlineSchemasAvroBytesDecoder actual = (InlineSchemasAvroBytesDecoder) mapper.readValue(
      mapper.writeValueAsString(mapper.readValue(jsonStr, AvroBytesDecoder.class)),
      AvroBytesDecoder.class
  );
  Assert.assertEquals(actual.getSchemas().get("5").get("name"), "name5");
  Assert.assertEquals(actual.getSchemas().get("8").get("name"), "name8");
}
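Here multiple_schemas_inline carries a map from a numeric id to a full inline Avro schema, so each record can be decoded with the schema it names. The sketch below illustrates that id-to-schema idea using plain Avro; the map and the names are illustrative, and only the record layout is copied from the JSON above.

import java.util.HashMap;
import java.util.Map;
import org.apache.avro.Schema;

public class MultipleInlineSchemasSketch {
  // The record layout is copied from the test JSON; the single-line form is just for brevity.
  private static final String NAME5_SCHEMA =
      "{\"namespace\":\"io.druid.data.input\",\"name\":\"name5\",\"type\":\"record\","
      + "\"fields\":[{\"name\":\"eventType\",\"type\":\"string\"},{\"name\":\"id\",\"type\":\"long\"}]}";

  public static void main(String[] args) {
    // An id-to-schema map in the spirit of "multiple_schemas_inline": each numeric key
    // selects one inline Avro schema.
    Map<Integer, Schema> schemas = new HashMap<>();
    schemas.put(5, new Schema.Parser().parse(NAME5_SCHEMA));
    System.out.println(schemas.get(5).getName()); // prints "name5"
  }
}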