
Example 1 with DefaultObjectMapper

Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.

From the class SketchEstimateWithErrorBoundsTest, method testSerde.

@Test
public void testSerde() throws IOException {
    ObjectMapper mapper = new DefaultObjectMapper();
    SketchEstimateWithErrorBounds est = new SketchEstimateWithErrorBounds(100.0, 101.5, 98.5, 2);
    Assert.assertEquals(est, mapper.readValue(mapper.writeValueAsString(est), SketchEstimateWithErrorBounds.class));
}
Also used : DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Test(org.junit.Test)
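
The test above shows the round-trip pattern that recurs throughout these examples: serialize a value with DefaultObjectMapper, read it back, and assert equality with the original. A minimal sketch of that pattern as a reusable helper (hypothetical; no such helper exists in the Druid source, and the class name is invented here):

import java.io.IOException;

import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.jackson.DefaultObjectMapper;

public class SerdeRoundTrip {

    // Serializes the value with a DefaultObjectMapper, reads it back as the
    // declared class, and returns the re-read instance so the caller can
    // assert equality against the original.
    public static <T> T roundTrip(T value, Class<T> clazz) throws IOException {
        ObjectMapper mapper = new DefaultObjectMapper();
        return mapper.readValue(mapper.writeValueAsString(value), clazz);
    }
}

With such a helper, the assertion in testSerde reduces to Assert.assertEquals(est, SerdeRoundTrip.roundTrip(est, SketchEstimateWithErrorBounds.class)).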

Example 2 with DefaultObjectMapper

Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.

From the class AuthenticationKerberosConfigTest, method testserde.

@Test
public void testserde() {
    Injector injector = Guice.createInjector(new Module() {

        @Override
        public void configure(Binder binder) {
            binder.install(new PropertiesModule(Arrays.asList("test.runtime.properties")));
            binder.install(new ConfigModule());
            binder.install(new DruidGuiceExtensions());
            JsonConfigProvider.bind(binder, "druid.hadoop.security.kerberos", AuthenticationKerberosConfig.class);
        }

        @Provides
        @LazySingleton
        public ObjectMapper jsonMapper() {
            return new DefaultObjectMapper();
        }
    });
    Properties props = injector.getInstance(Properties.class);
    AuthenticationKerberosConfig config = injector.getInstance(AuthenticationKerberosConfig.class);
    Assert.assertEquals(props.getProperty("druid.hadoop.security.kerberos.principal"), config.getPrincipal());
    Assert.assertEquals(props.getProperty("druid.hadoop.security.kerberos.keytab"), config.getKeytab());
}
Also used : DruidGuiceExtensions(io.druid.guice.DruidGuiceExtensions) ConfigModule(io.druid.guice.ConfigModule) Provides(com.google.inject.Provides) Properties(java.util.Properties) Binder(com.google.inject.Binder) LazySingleton(io.druid.guice.LazySingleton) Injector(com.google.inject.Injector) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) Module(com.google.inject.Module) PropertiesModule(io.druid.guice.PropertiesModule) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Test(org.junit.Test)
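
The binding above reads a test.runtime.properties file from the classpath; its contents are not shown in this listing. An illustrative version, with placeholder values invented here purely to match the two keys the assertions read, might look like:

# test.runtime.properties (illustrative placeholder values)
druid.hadoop.security.kerberos.principal=druid@EXAMPLE.COM
druid.hadoop.security.kerberos.keytab=/etc/security/keytabs/druid.keytab

JsonConfigProvider.bind maps the properties under the druid.hadoop.security.kerberos prefix onto AuthenticationKerberosConfig through the injected DefaultObjectMapper, which is why the test can compare the raw property values directly against the deserialized config object.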

Example 3 with DefaultObjectMapper

Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.

From the class HdfsDataSegmentPusherTest, method testUsingScheme.

private void testUsingScheme(final String scheme) throws Exception {
    Configuration conf = new Configuration(true);
    // Create a mock segment on disk
    File segmentDir = tempFolder.newFolder();
    File tmp = new File(segmentDir, "version.bin");
    final byte[] data = new byte[] { 0x0, 0x0, 0x0, 0x1 };
    Files.write(data, tmp);
    final long size = data.length;
    HdfsDataSegmentPusherConfig config = new HdfsDataSegmentPusherConfig();
    final File storageDirectory = tempFolder.newFolder();
    config.setStorageDirectory(scheme != null ? String.format("%s://%s", scheme, storageDirectory.getAbsolutePath()) : storageDirectory.getAbsolutePath());
    HdfsDataSegmentPusher pusher = new HdfsDataSegmentPusher(config, conf, new DefaultObjectMapper());
    DataSegment segmentToPush = new DataSegment("foo", new Interval("2015/2016"), "0", Maps.<String, Object>newHashMap(), Lists.<String>newArrayList(), Lists.<String>newArrayList(), NoneShardSpec.instance(), 0, size);
    DataSegment segment = pusher.push(segmentDir, segmentToPush);
    String indexUri = String.format("%s/%s/%d_index.zip", FileSystem.newInstance(conf).makeQualified(new Path(config.getStorageDirectory())).toUri().toString(), DataSegmentPusherUtil.getHdfsStorageDir(segmentToPush), segmentToPush.getShardSpec().getPartitionNum());
    Assert.assertEquals(segmentToPush.getSize(), segment.getSize());
    Assert.assertEquals(segmentToPush, segment);
    Assert.assertEquals(ImmutableMap.of("type", "hdfs", "path", indexUri), segment.getLoadSpec());
    // after the push, the pusher has renamed its output into place under the storage directory;
    // verify the index and descriptor files exist there
    final String segmentPath = DataSegmentPusherUtil.getHdfsStorageDir(segment);
    File indexFile = new File(String.format("%s/%s/%d_index.zip", storageDirectory, segmentPath, segment.getShardSpec().getPartitionNum()));
    Assert.assertTrue(indexFile.exists());
    File descriptorFile = new File(String.format("%s/%s/%d_descriptor.json", storageDirectory, segmentPath, segment.getShardSpec().getPartitionNum()));
    Assert.assertTrue(descriptorFile.exists());
    // a second push of the same segment should succeed (not throw), even with the output directory read-only, and its temp dir is cleaned up
    File outDir = new File(String.format("%s/%s", config.getStorageDirectory(), segmentPath));
    outDir.setReadOnly();
    try {
        pusher.push(segmentDir, segmentToPush);
    } catch (IOException e) {
        Assert.fail("should not throw exception");
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) IOException(java.io.IOException) File(java.io.File) DataSegment(io.druid.timeline.DataSegment) Interval(org.joda.time.Interval)
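
Beyond asserting that descriptor.json exists, one could also read it back with the same mapper, assuming the descriptor file holds the Jackson-serialized DataSegment, which is what the HDFS pusher writes. A hypothetical extra check, not part of the original test, might look like this (it needs com.fasterxml.jackson.databind.ObjectMapper in addition to the imports listed above):

    // Hypothetical check: read the pushed descriptor.json back into a
    // DataSegment and compare it with the segment returned by push().
    private static void assertDescriptorMatches(File descriptorFile, DataSegment expected) throws IOException {
        ObjectMapper mapper = new DefaultObjectMapper();
        DataSegment fromDescriptor = mapper.readValue(descriptorFile, DataSegment.class);
        Assert.assertEquals(expected, fromDescriptor);
    }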

Example 4 with DefaultObjectMapper

Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.

From the class InlineSchemaAvroBytesDecoderTest, method testSerde.

@Test
public void testSerde() throws Exception {
    String jsonStr = "{\n" + "  \"type\": \"schema_inline\",\n" + "  \"schema\": {\n" + "    \"namespace\": \"io.druid.data.input\",\n" + "   \"name\": \"SomeData\",\n" + "    \"type\": \"record\",\n" + "    \"fields\" : [\n" + "      {\"name\":\"timestamp\",\"type\":\"long\"},\n" + "      {\"name\":\"eventType\",\"type\":\"string\"},\n" + "      {\"name\":\"id\",\"type\":\"long\"}\n" + "    ]\n" + "  }\n" + "}";
    final ObjectMapper mapper = new DefaultObjectMapper();
    mapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, mapper));
    InlineSchemaAvroBytesDecoder actual = (InlineSchemaAvroBytesDecoder) mapper.readValue(mapper.writeValueAsString(mapper.readValue(jsonStr, AvroBytesDecoder.class)), AvroBytesDecoder.class);
    Assert.assertEquals(actual.getSchema().get("name"), "SomeData");
}
Also used : DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Test(org.junit.Test) AvroStreamInputRowParserTest(io.druid.data.input.AvroStreamInputRowParserTest)
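
The setInjectableValues call in this test registers the mapper itself as an injectable value so that deserialized classes can receive it via @JacksonInject. A minimal sketch of that Jackson mechanism, using a hypothetical class rather than a Druid one:

import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;

public class InjectableMapperExample {

    // Hypothetical target class: Jackson fills the @JacksonInject field from
    // the InjectableValues registered on the mapper, not from the JSON input.
    public static class NeedsMapper {
        @JacksonInject
        public ObjectMapper mapper;
        public String name;
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        mapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, mapper));
        NeedsMapper parsed = mapper.readValue("{\"name\":\"example\"}", NeedsMapper.class);
        // prints true: the injected field is the mapper registered above
        System.out.println(parsed.mapper == mapper);
    }
}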

Example 5 with DefaultObjectMapper

Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.

From the class InlineSchemasAvroBytesDecoderTest, method testSerde.

@Test
public void testSerde() throws Exception {
    String jsonStr = "{\n" + "  \"type\": \"multiple_schemas_inline\",\n" + "  \"schemas\": {\n" + "    \"5\": {\n" + "      \"namespace\": \"io.druid.data.input\",\n" + "      \"name\": \"name5\",\n" + "      \"type\": \"record\",\n" + "      \"fields\" : [\n" + "        {\"name\":\"eventType\",\"type\":\"string\"},\n" + "        {\"name\":\"id\",\"type\":\"long\"}\n" + "      ]\n" + "    },\n" + "    \"8\": {\n" + "      \"namespace\": \"io.druid.data.input\",\n" + "      \"name\": \"name8\",\n" + "      \"type\": \"record\",\n" + "      \"fields\" : [\n" + "       {\"name\":\"eventType\",\"type\":\"string\"},\n" + "       {\"name\":\"id\",\"type\":\"long\"}\n" + "      ]\n" + "    }\n" + "  }\n" + "}\n";
    final ObjectMapper mapper = new DefaultObjectMapper();
    mapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, mapper));
    InlineSchemasAvroBytesDecoder actual = (InlineSchemasAvroBytesDecoder) mapper.readValue(mapper.writeValueAsString(mapper.readValue(jsonStr, AvroBytesDecoder.class)), AvroBytesDecoder.class);
    Assert.assertEquals(actual.getSchemas().get("5").get("name"), "name5");
    Assert.assertEquals(actual.getSchemas().get("8").get("name"), "name8");
}
Also used : DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Test(org.junit.Test) AvroStreamInputRowParserTest(io.druid.data.input.AvroStreamInputRowParserTest)

Aggregations

DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper): 164 usages
Test (org.junit.Test): 133 usages
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 112 usages
Interval (org.joda.time.Interval): 24 usages
DateTime (org.joda.time.DateTime): 17 usages
DataSegment (io.druid.timeline.DataSegment): 16 usages
File (java.io.File): 16 usages
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 14 usages
Before (org.junit.Before): 13 usages
Map (java.util.Map): 11 usages
Period (org.joda.time.Period): 11 usages
Query (io.druid.query.Query): 9 usages
Result (io.druid.query.Result): 9 usages
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 9 usages
IOException (java.io.IOException): 9 usages
DataSchema (io.druid.segment.indexing.DataSchema): 8 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 7 usages
Sequence (io.druid.java.util.common.guava.Sequence): 7 usages
UniformGranularitySpec (io.druid.segment.indexing.granularity.UniformGranularitySpec): 7 usages
List (java.util.List): 7 usages