
Example 76 with DefaultObjectMapper

Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.

From the class DatasourcePathSpecTest, the method testAddInputPaths:

@Test
public void testAddInputPaths() throws Exception {
    HadoopDruidIndexerConfig hadoopIndexerConfig = makeHadoopDruidIndexerConfig();
    ObjectMapper mapper = new DefaultObjectMapper();
    DatasourcePathSpec pathSpec = new DatasourcePathSpec(mapper, segments, ingestionSpec, null);
    Configuration config = new Configuration();
    Job job = EasyMock.createNiceMock(Job.class);
    EasyMock.expect(job.getConfiguration()).andReturn(config).anyTimes();
    EasyMock.replay(job);
    pathSpec.addInputPaths(hadoopIndexerConfig, job);
    List<WindowedDataSegment> actualSegments = mapper.readValue(config.get(DatasourceInputFormat.CONF_INPUT_SEGMENTS), new TypeReference<List<WindowedDataSegment>>() {
    });
    Assert.assertEquals(segments, actualSegments);
    DatasourceIngestionSpec actualIngestionSpec = mapper.readValue(config.get(DatasourceInputFormat.CONF_DRUID_SCHEMA), DatasourceIngestionSpec.class);
    Assert.assertEquals(ingestionSpec.withDimensions(ImmutableList.of("product")).withMetrics(ImmutableList.of("visited_sum")), actualIngestionSpec);
}
Also used : DatasourceIngestionSpec(io.druid.indexer.hadoop.DatasourceIngestionSpec) WindowedDataSegment(io.druid.indexer.hadoop.WindowedDataSegment) Configuration(org.apache.hadoop.conf.Configuration) ImmutableList(com.google.common.collect.ImmutableList) List(java.util.List) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) HadoopDruidIndexerConfig(io.druid.indexer.HadoopDruidIndexerConfig) Job(org.apache.hadoop.mapreduce.Job) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Test(org.junit.Test)
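
The detail worth calling out in this test is the round trip: DatasourcePathSpec serializes generic collections into plain string entries of the Hadoop Configuration, and the test reads them back with an anonymous TypeReference so the element type survives erasure. A minimal standalone sketch of that round trip follows, assuming only DefaultObjectMapper and Hadoop's Configuration on the classpath; the ConfigRoundTripSketch class and the example.input.segments key are hypothetical and not part of the test above.

import com.fasterxml.jackson.core.type.TypeReference;
import io.druid.jackson.DefaultObjectMapper;
import org.apache.hadoop.conf.Configuration;

import java.util.Arrays;
import java.util.List;

public class ConfigRoundTripSketch {
    public static void main(String[] args) throws Exception {
        DefaultObjectMapper mapper = new DefaultObjectMapper();
        Configuration config = new Configuration();

        // write a generic list into the Configuration as a JSON string
        // ("example.input.segments" is a made-up key for this sketch)
        List<String> segmentNames = Arrays.asList("segment-1", "segment-2");
        config.set("example.input.segments", mapper.writeValueAsString(segmentNames));

        // read it back; the anonymous TypeReference preserves List<String> through type erasure
        List<String> roundTripped = mapper.readValue(
                config.get("example.input.segments"),
                new TypeReference<List<String>>() {
                });
        System.out.println(roundTripped);
    }
}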

Example 77 with DefaultObjectMapper

Use of io.druid.jackson.DefaultObjectMapper in project hive by apache.

From the class TestDruidSerDe, the method deserializeQueryResults:

private static void deserializeQueryResults(DruidSerDe serDe, String queryType, String jsonQuery, String resultString, Object[][] records) throws SerDeException, JsonParseException, JsonMappingException, IOException, NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException, InterruptedException, NoSuchMethodException, InvocationTargetException {
    // Initialize
    Query<?> query = null;
    DruidQueryRecordReader<?, ?> reader = null;
    List<?> resultsList = null;
    ObjectMapper mapper = new DefaultObjectMapper();
    switch(queryType) {
        case Query.TIMESERIES:
            query = mapper.readValue(jsonQuery, TimeseriesQuery.class);
            reader = new DruidTimeseriesQueryRecordReader();
            resultsList = mapper.readValue(resultString, new TypeReference<List<Result<TimeseriesResultValue>>>() {
            });
            break;
        case Query.TOPN:
            query = mapper.readValue(jsonQuery, TopNQuery.class);
            reader = new DruidTopNQueryRecordReader();
            resultsList = mapper.readValue(resultString, new TypeReference<List<Result<TopNResultValue>>>() {
            });
            break;
        case Query.GROUP_BY:
            query = mapper.readValue(jsonQuery, GroupByQuery.class);
            reader = new DruidGroupByQueryRecordReader();
            resultsList = mapper.readValue(resultString, new TypeReference<List<Row>>() {
            });
            break;
        case Query.SELECT:
            query = mapper.readValue(jsonQuery, SelectQuery.class);
            reader = new DruidSelectQueryRecordReader();
            resultsList = mapper.readValue(resultString, new TypeReference<List<Result<SelectResultValue>>>() {
            });
            break;
    }
    // Set query and fields access
    Field field1 = DruidQueryRecordReader.class.getDeclaredField("query");
    field1.setAccessible(true);
    field1.set(reader, query);
    if (reader instanceof DruidGroupByQueryRecordReader) {
        Method method1 = DruidGroupByQueryRecordReader.class.getDeclaredMethod("initExtractors");
        method1.setAccessible(true);
        method1.invoke(reader);
    }
    Field field2 = DruidQueryRecordReader.class.getDeclaredField("results");
    field2.setAccessible(true);
    // Get the row structure
    StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
    List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
    // Check mapred
    Iterator<?> results = resultsList.iterator();
    field2.set(reader, results);
    DruidWritable writable = new DruidWritable();
    int pos = 0;
    while (reader.next(NullWritable.get(), writable)) {
        Object row = serDe.deserialize(writable);
        Object[] expectedFieldsData = records[pos];
        assertEquals(expectedFieldsData.length, fieldRefs.size());
        for (int i = 0; i < fieldRefs.size(); i++) {
            Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
            assertEquals("Field " + i, expectedFieldsData[i], fieldData);
        }
        pos++;
    }
    assertEquals(pos, records.length);
    // Check mapreduce
    results = resultsList.iterator();
    field2.set(reader, results);
    pos = 0;
    while (reader.nextKeyValue()) {
        Object row = serDe.deserialize(reader.getCurrentValue());
        Object[] expectedFieldsData = records[pos];
        assertEquals(expectedFieldsData.length, fieldRefs.size());
        for (int i = 0; i < fieldRefs.size(); i++) {
            Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
            assertEquals("Field " + i, expectedFieldsData[i], fieldData);
        }
        pos++;
    }
    assertEquals(pos, records.length);
}
Also used : Result(io.druid.query.Result) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) Field(java.lang.reflect.Field) GroupByQuery(io.druid.query.groupby.GroupByQuery) TopNQuery(io.druid.query.topn.TopNQuery) TypeReference(com.fasterxml.jackson.core.type.TypeReference) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) DruidTopNQueryRecordReader(org.apache.hadoop.hive.druid.serde.DruidTopNQueryRecordReader) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) DruidSelectQueryRecordReader(org.apache.hadoop.hive.druid.serde.DruidSelectQueryRecordReader) Method(java.lang.reflect.Method) SelectQuery(io.druid.query.select.SelectQuery) DruidWritable(org.apache.hadoop.hive.druid.serde.DruidWritable) DruidTimeseriesQueryRecordReader(org.apache.hadoop.hive.druid.serde.DruidTimeseriesQueryRecordReader) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) DruidGroupByQueryRecordReader(org.apache.hadoop.hive.druid.serde.DruidGroupByQueryRecordReader) Row(io.druid.data.input.Row) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
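
Most of the ceremony in this test is reflection: the deserialized query and the result iterator are pushed into private fields of DruidQueryRecordReader before the reader is exercised. The sketch below isolates that pattern with a hypothetical FakeReader class, assuming nothing beyond the JDK; it illustrates the Field.setAccessible technique only, not Hive's actual reader.

import java.lang.reflect.Field;

public class ReflectionInjectionSketch {

    // stand-in for DruidQueryRecordReader: a private field the test needs to populate
    static class FakeReader {
        private String query;
    }

    public static void main(String[] args) throws Exception {
        FakeReader reader = new FakeReader();

        Field queryField = FakeReader.class.getDeclaredField("query");
        queryField.setAccessible(true);               // bypass private access for the test
        queryField.set(reader, "deserialized query"); // inject the prepared value

        System.out.println(queryField.get(reader));   // prints: deserialized query
    }
}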

Example 78 with DefaultObjectMapper

Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.

From the class SpnegoFilterConfigTest, the method testserde:

@Test
public void testserde() {
    Injector injector = Guice.createInjector(new Module() {

        @Override
        public void configure(Binder binder) {
            binder.install(new PropertiesModule(Arrays.asList("test.runtime.properties")));
            binder.install(new ConfigModule());
            binder.install(new DruidGuiceExtensions());
            JsonConfigProvider.bind(binder, "druid.hadoop.security.spnego", SpnegoFilterConfig.class);
        }

        @Provides
        @LazySingleton
        public ObjectMapper jsonMapper() {
            return new DefaultObjectMapper();
        }
    });
    Properties props = injector.getInstance(Properties.class);
    SpnegoFilterConfig config = injector.getInstance(SpnegoFilterConfig.class);
    Assert.assertEquals(props.getProperty("druid.hadoop.security.spnego.principal"), config.getPrincipal());
    Assert.assertEquals(props.getProperty("druid.hadoop.security.spnego.keytab"), config.getKeytab());
    Assert.assertEquals(props.getProperty("druid.hadoop.security.spnego.authToLocal"), config.getAuthToLocal());
}
Also used : DruidGuiceExtensions(io.druid.guice.DruidGuiceExtensions) ConfigModule(io.druid.guice.ConfigModule) Provides(com.google.inject.Provides) Properties(java.util.Properties) Binder(com.google.inject.Binder) LazySingleton(io.druid.guice.LazySingleton) Injector(com.google.inject.Injector) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) Module(com.google.inject.Module) PropertiesModule(io.druid.guice.PropertiesModule) ConfigModule(io.druid.guice.ConfigModule) PropertiesModule(io.druid.guice.PropertiesModule) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) Test(org.junit.Test)
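
The essence of this test is that a single DefaultObjectMapper is provided to Guice and reused wherever the injector needs to deserialize configuration. A minimal sketch of the same binding idea follows, using plain Guice with the standard @Singleton scope in place of Druid's @LazySingleton and leaving out the PropertiesModule/JsonConfigProvider wiring; treat it as an illustration of the provider pattern, not Druid's actual module setup.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Provides;
import com.google.inject.Singleton;
import io.druid.jackson.DefaultObjectMapper;

public class JsonMapperModuleSketch extends AbstractModule {

    @Override
    protected void configure() {
        // property and config bindings would go here; omitted in this sketch
    }

    @Provides
    @Singleton
    public ObjectMapper jsonMapper() {
        // same idea as the @LazySingleton provider above: one shared mapper instance
        return new DefaultObjectMapper();
    }

    public static void main(String[] args) {
        Injector injector = Guice.createInjector(new JsonMapperModuleSketch());
        ObjectMapper first = injector.getInstance(ObjectMapper.class);
        ObjectMapper second = injector.getInstance(ObjectMapper.class);
        System.out.println(first == second);  // true: the provider is invoked only once
    }
}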

Example 79 with DefaultObjectMapper

Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.

From the class HdfsDataSegmentPusherTest, the method testUsingSchemeForMultipleSegments:

private void testUsingSchemeForMultipleSegments(final String scheme, final int numberOfSegments) throws Exception {
    Configuration conf = new Configuration(true);
    DataSegment[] segments = new DataSegment[numberOfSegments];
    // Create a mock segment on disk
    File segmentDir = tempFolder.newFolder();
    File tmp = new File(segmentDir, "version.bin");
    final byte[] data = new byte[] { 0x0, 0x0, 0x0, 0x1 };
    Files.write(data, tmp);
    final long size = data.length;
    HdfsDataSegmentPusherConfig config = new HdfsDataSegmentPusherConfig();
    final File storageDirectory = tempFolder.newFolder();
    config.setStorageDirectory(scheme != null ? String.format("%s://%s", scheme, storageDirectory.getAbsolutePath()) : storageDirectory.getAbsolutePath());
    HdfsDataSegmentPusher pusher = new HdfsDataSegmentPusher(config, conf, new DefaultObjectMapper());
    for (int i = 0; i < numberOfSegments; i++) {
        segments[i] = new DataSegment("foo", new Interval("2015/2016"), "0", Maps.<String, Object>newHashMap(), Lists.<String>newArrayList(), Lists.<String>newArrayList(), new NumberedShardSpec(i, i), 0, size);
    }
    for (int i = 0; i < numberOfSegments; i++) {
        final DataSegment pushedSegment = pusher.push(segmentDir, segments[i]);
        String indexUri = String.format("%s/%s/%d_index.zip", FileSystem.newInstance(conf).makeQualified(new Path(config.getStorageDirectory())).toUri().toString(), DataSegmentPusherUtil.getHdfsStorageDir(segments[i]), segments[i].getShardSpec().getPartitionNum());
        Assert.assertEquals(segments[i].getSize(), pushedSegment.getSize());
        Assert.assertEquals(segments[i], pushedSegment);
        Assert.assertEquals(ImmutableMap.of("type", "hdfs", "path", indexUri), pushedSegment.getLoadSpec());
        // rename directory after push
        String segmentPath = DataSegmentPusherUtil.getHdfsStorageDir(pushedSegment);
        File indexFile = new File(String.format("%s/%s/%d_index.zip", storageDirectory, segmentPath, pushedSegment.getShardSpec().getPartitionNum()));
        Assert.assertTrue(indexFile.exists());
        File descriptorFile = new File(String.format("%s/%s/%d_descriptor.json", storageDirectory, segmentPath, pushedSegment.getShardSpec().getPartitionNum()));
        Assert.assertTrue(descriptorFile.exists());
        //read actual data from descriptor file.
        DataSegment fromDescriptorFileDataSegment = objectMapper.readValue(descriptorFile, DataSegment.class);
        Assert.assertEquals(segments[i].getSize(), pushedSegment.getSize());
        Assert.assertEquals(segments[i], pushedSegment);
        Assert.assertEquals(ImmutableMap.of("type", "hdfs", "path", indexUri), fromDescriptorFileDataSegment.getLoadSpec());
        // rename directory after push
        segmentPath = DataSegmentPusherUtil.getHdfsStorageDir(fromDescriptorFileDataSegment);
        indexFile = new File(String.format("%s/%s/%d_index.zip", storageDirectory, segmentPath, fromDescriptorFileDataSegment.getShardSpec().getPartitionNum()));
        Assert.assertTrue(indexFile.exists());
        // pushing the same segment again, even into a read-only target directory, must not throw; the pusher cleans up its temp dir
        File outDir = new File(String.format("%s/%s", config.getStorageDirectory(), segmentPath));
        outDir.setReadOnly();
        try {
            pusher.push(segmentDir, segments[i]);
        } catch (IOException e) {
            Assert.fail("should not throw exception");
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) IOException(java.io.IOException) DataSegment(io.druid.timeline.DataSegment) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) File(java.io.File) NumberedShardSpec(io.druid.timeline.partition.NumberedShardSpec) Interval(org.joda.time.Interval)
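
A central assertion in this test is that the descriptor JSON written next to each pushed segment can be read back with a DefaultObjectMapper and still carries the hdfs load spec. The sketch below shows just that write/read cycle, substituting a plain Map for Druid's DataSegment and a made-up HDFS path; it is a simplified illustration, not the pusher's real descriptor format.

import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.collect.ImmutableMap;
import io.druid.jackson.DefaultObjectMapper;

import java.io.File;
import java.util.Map;

public class DescriptorRoundTripSketch {
    public static void main(String[] args) throws Exception {
        DefaultObjectMapper mapper = new DefaultObjectMapper();

        // stand-in for the load spec the pusher stores next to the index zip
        // (the hdfs path below is invented for this sketch)
        Map<String, Object> loadSpec = ImmutableMap.<String, Object>of(
                "type", "hdfs",
                "path", "hdfs://namenode/druid/segments/0_index.zip");

        File descriptorFile = File.createTempFile("descriptor", ".json");
        mapper.writeValue(descriptorFile, loadSpec);

        // read the descriptor back, as the test does with DataSegment.class
        Map<String, Object> fromFile = mapper.readValue(
                descriptorFile,
                new TypeReference<Map<String, Object>>() {
                });
        System.out.println(fromFile);
        descriptorFile.delete();
    }
}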

Example 80 with DefaultObjectMapper

Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.

From the class CachingQueryRunnerTest, the method testCloseAndPopulate:

private void testCloseAndPopulate(List<Result> expectedRes, List<Result> expectedCacheRes, Query query, QueryToolChest toolchest) throws Exception {
    final AssertingClosable closable = new AssertingClosable();
    final Sequence resultSeq = Sequences.wrap(Sequences.simple(expectedRes), new SequenceWrapper() {

        @Override
        public void before() {
            Assert.assertFalse(closable.isClosed());
        }

        @Override
        public void after(boolean isDone, Throwable thrown) throws Exception {
            closable.close();
        }
    });
    final CountDownLatch cacheMustBePutOnce = new CountDownLatch(1);
    Cache cache = new Cache() {

        private final Map<NamedKey, byte[]> baseMap = new ConcurrentHashMap<>();

        @Override
        public byte[] get(NamedKey key) {
            return baseMap.get(key);
        }

        @Override
        public void put(NamedKey key, byte[] value) {
            baseMap.put(key, value);
            cacheMustBePutOnce.countDown();
        }

        @Override
        public Map<NamedKey, byte[]> getBulk(Iterable<NamedKey> keys) {
            return null;
        }

        @Override
        public void close(String namespace) {
        }

        @Override
        public CacheStats getStats() {
            return null;
        }

        @Override
        public boolean isLocal() {
            return true;
        }

        @Override
        public void doMonitor(ServiceEmitter emitter) {
        }
    };
    String segmentIdentifier = "segment";
    SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0);
    DefaultObjectMapper objectMapper = new DefaultObjectMapper();
    CachingQueryRunner runner = new CachingQueryRunner(segmentIdentifier, segmentDescriptor, objectMapper, cache, toolchest, new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
            return resultSeq;
        }
    }, backgroundExecutorService, new CacheConfig() {

        @Override
        public boolean isPopulateCache() {
            return true;
        }

        @Override
        public boolean isUseCache() {
            return true;
        }
    });
    CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
    Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(segmentIdentifier, segmentDescriptor, cacheStrategy.computeCacheKey(query));
    HashMap<String, Object> context = new HashMap<String, Object>();
    Sequence res = runner.run(query, context);
    // base sequence is not closed yet
    Assert.assertFalse("sequence must not be closed", closable.isClosed());
    Assert.assertNull("cache must be empty", cache.get(cacheKey));
    ArrayList results = Sequences.toList(res, new ArrayList());
    Assert.assertTrue(closable.isClosed());
    Assert.assertEquals(expectedRes.toString(), results.toString());
    // wait for background caching finish
    // wait at most 10 seconds to fail the test to avoid block overall tests
    Assert.assertTrue("cache must be populated", cacheMustBePutOnce.await(10, TimeUnit.SECONDS));
    byte[] cacheValue = cache.get(cacheKey);
    Assert.assertNotNull(cacheValue);
    Function<Object, Result> fn = cacheStrategy.pullFromCache();
    List<Result> cacheResults = Lists.newArrayList(Iterators.transform(objectMapper.readValues(objectMapper.getFactory().createParser(cacheValue), cacheStrategy.getCacheObjectClazz()), fn));
    Assert.assertEquals(expectedCacheRes.toString(), cacheResults.toString());
}
Also used : ServiceEmitter(com.metamx.emitter.service.ServiceEmitter) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) Query(io.druid.query.Query) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) Result(io.druid.query.Result) SegmentDescriptor(io.druid.query.SegmentDescriptor) CacheConfig(io.druid.client.cache.CacheConfig) SequenceWrapper(io.druid.java.util.common.guava.SequenceWrapper) Sequence(io.druid.java.util.common.guava.Sequence) CountDownLatch(java.util.concurrent.CountDownLatch) IOException(java.io.IOException) QueryRunner(io.druid.query.QueryRunner) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) CacheStrategy(io.druid.query.CacheStrategy) Cache(io.druid.client.cache.Cache) MapCache(io.druid.client.cache.MapCache) Interval(org.joda.time.Interval)
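
The read-back at the end of the test is the part that depends most on DefaultObjectMapper: the cached byte[] holds a sequence of JSON values, and readValues() over a parser exposes them as a MappingIterator that the cache strategy's pull function then maps to Results. The standalone sketch below reproduces that mechanic with a made-up payload of integer lists instead of Druid Result objects; it illustrates the readValues pattern only.

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.MappingIterator;
import io.druid.jackson.DefaultObjectMapper;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

public class CacheValueReadSketch {
    public static void main(String[] args) throws Exception {
        DefaultObjectMapper mapper = new DefaultObjectMapper();

        // stand-in for the byte[] pulled from the segment cache: a stream of root-level JSON values
        byte[] cacheValue = "[1,2] [3,4]".getBytes(StandardCharsets.UTF_8);

        // readValues() walks successive values from a single parser, as the test does
        MappingIterator<List<Integer>> iterator = mapper.readValues(
                mapper.getFactory().createParser(cacheValue),
                new TypeReference<List<Integer>>() {
                });

        List<List<Integer>> rows = new ArrayList<>();
        while (iterator.hasNext()) {
            rows.add(iterator.next());
        }
        System.out.println(rows);  // [[1, 2], [3, 4]]
    }
}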

Aggregations

DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper): 164 usages
Test (org.junit.Test): 133 usages
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 112 usages
Interval (org.joda.time.Interval): 24 usages
DateTime (org.joda.time.DateTime): 17 usages
DataSegment (io.druid.timeline.DataSegment): 16 usages
File (java.io.File): 16 usages
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 14 usages
Before (org.junit.Before): 13 usages
Map (java.util.Map): 11 usages
Period (org.joda.time.Period): 11 usages
Query (io.druid.query.Query): 9 usages
Result (io.druid.query.Result): 9 usages
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 9 usages
IOException (java.io.IOException): 9 usages
DataSchema (io.druid.segment.indexing.DataSchema): 8 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 7 usages
Sequence (io.druid.java.util.common.guava.Sequence): 7 usages
UniformGranularitySpec (io.druid.segment.indexing.granularity.UniformGranularitySpec): 7 usages
List (java.util.List): 7 usages