
Example 1 with DataSchema

Use of io.druid.segment.indexing.DataSchema in project hive by apache.

In class TestDruidRecordWriter, method testWrite:

// This test needs the patch https://github.com/druid-io/druid/pull/3483
@Ignore
@Test
public void testWrite() throws IOException, SegmentLoadingException {
    final String dataSourceName = "testDataSource";
    final File segmentOutputDir = temporaryFolder.newFolder();
    final File workingDir = temporaryFolder.newFolder();
    Configuration config = new Configuration();
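    // The parser reads a millisecond timestamp from the default Druid time column and a single string dimension, "host".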
    final InputRowParser inputRowParser = new MapInputRowParser(new TimeAndDimsParseSpec(
            new TimestampSpec(DruidTable.DEFAULT_TIMESTAMP_COLUMN, "auto", null),
            new DimensionsSpec(ImmutableList.<DimensionSchema>of(new StringDimensionSchema("host")), null, null)));
    final Map<String, Object> parserMap = objectMapper.convertValue(inputRowParser, Map.class);
    DataSchema dataSchema = new DataSchema(
            dataSourceName,
            parserMap,
            new AggregatorFactory[] {
                    new LongSumAggregatorFactory("visited_sum", "visited_sum"),
                    new HyperUniquesAggregatorFactory("unique_hosts", "unique_hosts")
            },
            new UniformGranularitySpec(Granularity.DAY, QueryGranularities.NONE, ImmutableList.of(INTERVAL_FULL)),
            objectMapper);
    RealtimeTuningConfig tuningConfig = RealtimeTuningConfig.makeDefaultTuningConfig(temporaryFolder.newFolder());
    LocalFileSystem localFileSystem = FileSystem.getLocal(config);
    DataSegmentPusher dataSegmentPusher = new LocalDataSegmentPusher(new LocalDataSegmentPusherConfig() {

        @Override
        public File getStorageDirectory() {
            return segmentOutputDir;
        }
    }, objectMapper);
    Path segmentDescriptorPath = new Path(workingDir.getAbsolutePath(), DruidStorageHandler.SEGMENTS_DESCRIPTOR_DIR_NAME);
    druidRecordWriter = new DruidRecordWriter(dataSchema, tuningConfig, dataSegmentPusher, 20, segmentDescriptorPath, localFileSystem);
    List<DruidWritable> druidWritables = Lists.transform(expectedRows, new Function<ImmutableMap<String, Object>, DruidWritable>() {

        @Nullable
        @Override
        public DruidWritable apply(@Nullable ImmutableMap<String, Object> input) {
            return new DruidWritable(ImmutableMap.<String, Object>builder()
                    .putAll(input)
                    .put(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME,
                            Granularity.DAY.truncate(new DateTime((long) input.get(DruidTable.DEFAULT_TIMESTAMP_COLUMN))).getMillis())
                    .build());
        }
    });
    for (DruidWritable druidWritable : druidWritables) {
        druidRecordWriter.write(druidWritable);
    }
    druidRecordWriter.close(false);
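    // Closing the writer publishes the segment; each published segment leaves a descriptor file under segmentDescriptorPath.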
    List<DataSegment> dataSegmentList = DruidStorageHandlerUtils.getPublishedSegments(segmentDescriptorPath, config);
    Assert.assertEquals(1, dataSegmentList.size());
    File tmpUnzippedSegmentDir = temporaryFolder.newFolder();
    new LocalDataSegmentPuller().getSegmentFiles(dataSegmentList.get(0), tmpUnzippedSegmentDir);
    final QueryableIndex queryableIndex = DruidStorageHandlerUtils.INDEX_IO.loadIndex(tmpUnzippedSegmentDir);
    QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(queryableIndex);
    Firehose firehose = new IngestSegmentFirehose(
            ImmutableList.of(new WindowedStorageAdapter(adapter, adapter.getInterval())),
            ImmutableList.of("host"),
            ImmutableList.of("visited_sum", "unique_hosts"),
            null,
            QueryGranularities.NONE);
    List<InputRow> rows = Lists.newArrayList();
    while (firehose.hasMore()) {
        rows.add(firehose.nextRow());
    }
    verifyRows(expectedRows, rows);
}
Also used: com.google.common.collect.ImmutableMap, io.druid.data.input.Firehose, io.druid.data.input.InputRow, io.druid.data.input.impl.DimensionsSpec, io.druid.data.input.impl.InputRowParser, io.druid.data.input.impl.MapInputRowParser, io.druid.data.input.impl.StringDimensionSchema, io.druid.data.input.impl.TimeAndDimsParseSpec, io.druid.data.input.impl.TimestampSpec, io.druid.query.aggregation.LongSumAggregatorFactory, io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory, io.druid.segment.QueryableIndex, io.druid.segment.QueryableIndexStorageAdapter, io.druid.segment.indexing.DataSchema, io.druid.segment.indexing.RealtimeTuningConfig, io.druid.segment.indexing.granularity.UniformGranularitySpec, io.druid.segment.loading.DataSegmentPusher, io.druid.segment.loading.LocalDataSegmentPuller, io.druid.segment.loading.LocalDataSegmentPusher, io.druid.segment.loading.LocalDataSegmentPusherConfig, io.druid.segment.realtime.firehose.IngestSegmentFirehose, io.druid.segment.realtime.firehose.WindowedStorageAdapter, io.druid.timeline.DataSegment, java.io.File, javax.annotation.Nullable, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.LocalFileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.hive.druid.io.DruidRecordWriter, org.apache.hadoop.hive.druid.serde.DruidWritable, org.joda.time.DateTime, org.junit.Ignore, org.junit.Test.
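Worth calling out in this example: DataSchema does not hold the InputRowParser directly; it stores the parser serialized to a Map (hence objectMapper.convertValue above) and re-creates it on demand via getParser(). A minimal sketch of that round trip, assuming an ObjectMapper with Druid's Jackson modules registered; the literal "__time" column name stands in for DruidTable.DEFAULT_TIMESTAMP_COLUMN:

    // Sketch only: serialize a parser to the Map form DataSchema expects.
    InputRowParser inputRowParser = new MapInputRowParser(new TimeAndDimsParseSpec(
            new TimestampSpec("__time", "auto", null),
            new DimensionsSpec(ImmutableList.<DimensionSchema>of(new StringDimensionSchema("host")), null, null)));
    Map<String, Object> parserMap = objectMapper.convertValue(inputRowParser, Map.class);

    DataSchema schema = new DataSchema("testDataSource", parserMap,
            new AggregatorFactory[] { new LongSumAggregatorFactory("visited_sum", "visited_sum") },
            new UniformGranularitySpec(Granularity.DAY, QueryGranularities.NONE, null), objectMapper);

    // getParser() rebuilds the InputRowParser from the stored Map using the same ObjectMapper.
    InputRowParser recovered = schema.getParser();

Keeping the parser as a Map rather than a typed field is what lets the schema survive JSON round trips even when the concrete parser type comes from an extension.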

Example 2 with DataSchema

Use of io.druid.segment.indexing.DataSchema in project druid by druid-io.

In class RealtimeManagerTest, method setUp:

@Before
public void setUp() throws Exception {
    final List<TestInputRowHolder> rows = Arrays.asList(makeRow(new DateTime("9000-01-01").getMillis()), makeRow(new ParseException("parse error")), null, makeRow(new DateTime().getMillis()));
    ObjectMapper jsonMapper = new DefaultObjectMapper();
    schema = new DataSchema("test", null, new AggregatorFactory[] { new CountAggregatorFactory("rows") }, new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper);
    schema2 = new DataSchema("testV2", null, new AggregatorFactory[] { new CountAggregatorFactory("rows") }, new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper);
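    // ioConfig exercises the V1 Firehose API; ioConfig2 below pairs plumber2 with the V2 FirehoseFactoryV2 API.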
    RealtimeIOConfig ioConfig = new RealtimeIOConfig(new FirehoseFactory() {

        @Override
        public Firehose connect(InputRowParser parser) throws IOException {
            return new TestFirehose(rows.iterator());
        }
    }, new PlumberSchool() {

        @Override
        public Plumber findPlumber(DataSchema schema, RealtimeTuningConfig config, FireDepartmentMetrics metrics) {
            return plumber;
        }
    }, null);
    RealtimeIOConfig ioConfig2 = new RealtimeIOConfig(null, new PlumberSchool() {

        @Override
        public Plumber findPlumber(DataSchema schema, RealtimeTuningConfig config, FireDepartmentMetrics metrics) {
            return plumber2;
        }
    }, new FirehoseFactoryV2() {

        @Override
        public FirehoseV2 connect(InputRowParser parser, Object arg1) throws IOException, ParseException {
            return new TestFirehoseV2(rows.iterator());
        }
    });
    RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(1, new Period("P1Y"), null, null, null, null, null, null, null, null, 0, 0, null, null);
    plumber = new TestPlumber(new Sink(new Interval("0/P5000Y"), schema, tuningConfig.getShardSpec(), new DateTime().toString(), tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions()));
    realtimeManager = new RealtimeManager(Arrays.<FireDepartment>asList(new FireDepartment(schema, ioConfig, tuningConfig)), null);
    plumber2 = new TestPlumber(new Sink(new Interval("0/P5000Y"), schema2, tuningConfig.getShardSpec(), new DateTime().toString(), tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions()));
    realtimeManager2 = new RealtimeManager(Arrays.<FireDepartment>asList(new FireDepartment(schema2, ioConfig2, tuningConfig)), null);
    tuningConfig_0 = new RealtimeTuningConfig(1, new Period("P1Y"), null, null, null, null, null, new LinearShardSpec(0), null, null, 0, 0, null, null);
    tuningConfig_1 = new RealtimeTuningConfig(1, new Period("P1Y"), null, null, null, null, null, new LinearShardSpec(1), null, null, 0, 0, null, null);
    schema3 = new DataSchema("testing", null, new AggregatorFactory[] { new CountAggregatorFactory("ignore") }, new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper);
    FireDepartment department_0 = new FireDepartment(schema3, ioConfig, tuningConfig_0);
    FireDepartment department_1 = new FireDepartment(schema3, ioConfig2, tuningConfig_1);
    QueryRunnerFactoryConglomerate conglomerate = new QueryRunnerFactoryConglomerate() {

        @Override
        public <T, QueryType extends Query<T>> QueryRunnerFactory<T, QueryType> findFactory(QueryType query) {
            return factory;
        }
    };
    chiefStartedLatch = new CountDownLatch(2);
    RealtimeManager.FireChief fireChief_0 = new RealtimeManager.FireChief(department_0, conglomerate) {

        @Override
        public void run() {
            super.initPlumber();
            chiefStartedLatch.countDown();
        }
    };
    RealtimeManager.FireChief fireChief_1 = new RealtimeManager.FireChief(department_1, conglomerate) {

        @Override
        public void run() {
            super.initPlumber();
            chiefStartedLatch.countDown();
        }
    };
    realtimeManager3 = new RealtimeManager(Arrays.asList(department_0, department_1), conglomerate, ImmutableMap.<String, Map<Integer, RealtimeManager.FireChief>>of("testing", ImmutableMap.of(0, fireChief_0, 1, fireChief_1)));
    startFireChiefWithPartitionNum(fireChief_0, 0);
    startFireChiefWithPartitionNum(fireChief_1, 1);
}
Also used: com.fasterxml.jackson.databind.ObjectMapper, com.google.common.collect.ImmutableMap, io.druid.data.input.Firehose, io.druid.data.input.FirehoseFactory, io.druid.data.input.FirehoseFactoryV2, io.druid.data.input.FirehoseV2, io.druid.data.input.impl.InputRowParser, io.druid.jackson.DefaultObjectMapper, io.druid.java.util.common.parsers.ParseException, io.druid.query.BaseQuery, io.druid.query.Query, io.druid.query.QueryRunnerFactoryConglomerate, io.druid.query.aggregation.AggregatorFactory, io.druid.query.aggregation.CountAggregatorFactory, io.druid.query.aggregation.LongSumAggregatorFactory, io.druid.query.groupby.GroupByQuery, io.druid.segment.indexing.DataSchema, io.druid.segment.indexing.RealtimeIOConfig, io.druid.segment.indexing.RealtimeTuningConfig, io.druid.segment.indexing.granularity.UniformGranularitySpec, io.druid.segment.realtime.plumber.Plumber, io.druid.segment.realtime.plumber.PlumberSchool, io.druid.segment.realtime.plumber.Sink, io.druid.timeline.partition.LinearShardSpec, java.io.IOException, java.util.Map, java.util.concurrent.CountDownLatch, org.joda.time.DateTime, org.joda.time.Interval, org.joda.time.Period, org.junit.Before.
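TestFirehose and TestFirehoseV2 are not shown above. As a rough sketch of what the V1 contract requires (the class name IteratorFirehose is made up for illustration; io.druid.data.input.InputRow and java.util.Iterator imports are assumed), a Firehose just walks rows and hands back a commit runnable:

    // Hypothetical minimal Firehose over an iterator of rows.
    class IteratorFirehose implements Firehose {

        private final Iterator<InputRow> rows;

        IteratorFirehose(Iterator<InputRow> rows) {
            this.rows = rows;
        }

        @Override
        public boolean hasMore() {
            return rows.hasNext();
        }

        @Override
        public InputRow nextRow() {
            return rows.next();
        }

        @Override
        public Runnable commit() {
            // Nothing to persist in a test fixture; return a no-op.
            return new Runnable() {
                @Override
                public void run() {
                }
            };
        }

        @Override
        public void close() throws IOException {
        }
    }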

Example 3 with DataSchema

Use of io.druid.segment.indexing.DataSchema in project druid by druid-io.

In class RealtimePlumberSchoolTest, method setUp:

@Before
public void setUp() throws Exception {
    tmpDir = Files.createTempDir();
    ObjectMapper jsonMapper = new DefaultObjectMapper();
    schema = new DataSchema("test",
            jsonMapper.convertValue(new StringInputRowParser(
                    new JSONParseSpec(new TimestampSpec("timestamp", "auto", null), new DimensionsSpec(null, null, null), null, null), null), Map.class),
            new AggregatorFactory[] { new CountAggregatorFactory("rows") },
            new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper);
    schema2 = new DataSchema("test",
            jsonMapper.convertValue(new StringInputRowParser(
                    new JSONParseSpec(new TimestampSpec("timestamp", "auto", null), new DimensionsSpec(null, null, null), null, null), null), Map.class),
            new AggregatorFactory[] { new CountAggregatorFactory("rows") },
            new UniformGranularitySpec(Granularities.YEAR, Granularities.NONE, null), jsonMapper);
    announcer = EasyMock.createMock(DataSegmentAnnouncer.class);
    announcer.announceSegment(EasyMock.<DataSegment>anyObject());
    EasyMock.expectLastCall().anyTimes();
    segmentPublisher = EasyMock.createNiceMock(SegmentPublisher.class);
    dataSegmentPusher = EasyMock.createNiceMock(DataSegmentPusher.class);
    handoffNotifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
    handoffNotifier = EasyMock.createNiceMock(SegmentHandoffNotifier.class);
    EasyMock.expect(handoffNotifierFactory.createSegmentHandoffNotifier(EasyMock.anyString())).andReturn(handoffNotifier).anyTimes();
    EasyMock.expect(handoffNotifier.registerSegmentHandoffCallback(EasyMock.<SegmentDescriptor>anyObject(), EasyMock.<Executor>anyObject(), EasyMock.<Runnable>anyObject())).andReturn(true).anyTimes();
    emitter = EasyMock.createMock(ServiceEmitter.class);
    EasyMock.replay(announcer, segmentPublisher, dataSegmentPusher, handoffNotifierFactory, handoffNotifier, emitter);
    tuningConfig = new RealtimeTuningConfig(1, null, null, null, new IntervalStartVersioningPolicy(), rejectionPolicy, null, null, null, buildV9Directly, 0, 0, false, null);
    realtimePlumberSchool = new RealtimePlumberSchool(
            emitter,
            new DefaultQueryRunnerFactoryConglomerate(Maps.<Class<? extends Query>, QueryRunnerFactory>newHashMap()),
            dataSegmentPusher,
            announcer,
            segmentPublisher,
            handoffNotifierFactory,
            MoreExecutors.sameThreadExecutor(),
            TestHelper.getTestIndexMerger(),
            TestHelper.getTestIndexMergerV9(),
            TestHelper.getTestIndexIO(),
            MapCache.create(0),
            FireDepartmentTest.NO_CACHE_CONFIG,
            TestHelper.getObjectMapper());
    metrics = new FireDepartmentMetrics();
    plumber = (RealtimePlumber) realtimePlumberSchool.findPlumber(schema, tuningConfig, metrics);
}
Also used: com.fasterxml.jackson.databind.ObjectMapper, com.metamx.emitter.service.ServiceEmitter, io.druid.data.input.impl.DimensionsSpec, io.druid.data.input.impl.JSONParseSpec, io.druid.data.input.impl.StringInputRowParser, io.druid.data.input.impl.TimestampSpec, io.druid.jackson.DefaultObjectMapper, io.druid.query.DefaultQueryRunnerFactoryConglomerate, io.druid.query.SegmentDescriptor, io.druid.query.aggregation.AggregatorFactory, io.druid.query.aggregation.CountAggregatorFactory, io.druid.segment.indexing.DataSchema, io.druid.segment.indexing.RealtimeTuningConfig, io.druid.segment.indexing.granularity.UniformGranularitySpec, io.druid.segment.loading.DataSegmentPusher, io.druid.segment.realtime.FireDepartmentMetrics, io.druid.segment.realtime.SegmentPublisher, io.druid.server.coordination.DataSegmentAnnouncer, java.util.concurrent.Executor, org.junit.Before.
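A detail that is easy to miss in this setup: announcer and emitter are strict mocks (createMock), so every interaction must be recorded, while the pusher, publisher, and handoff notifiers are nice mocks (createNiceMock) that silently return defaults. A stripped-down sketch of the difference, using the same EasyMock API and types as above (the variable names are illustrative):

    // Strict mock: interactions must match the recording phase, or verify() fails.
    DataSegmentAnnouncer strictAnnouncer = EasyMock.createMock(DataSegmentAnnouncer.class);
    strictAnnouncer.announceSegment(EasyMock.<DataSegment>anyObject());
    EasyMock.expectLastCall().anyTimes();

    // Nice mock: unrecorded calls are tolerated and return null/0/false defaults.
    SegmentPublisher nicePublisher = EasyMock.createNiceMock(SegmentPublisher.class);

    // Both must be switched from recording to replay mode before use.
    EasyMock.replay(strictAnnouncer, nicePublisher);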

Example 4 with DataSchema

Use of io.druid.segment.indexing.DataSchema in project druid by druid-io.

In class DruidJsonValidatorTest, method testTaskValidator:

@Test
public void testTaskValidator() throws Exception {
    final ObjectMapper jsonMapper = new DefaultObjectMapper();
    for (final Module jacksonModule : new FirehoseModule().getJacksonModules()) {
        jsonMapper.registerModule(jacksonModule);
    }
    final RealtimeIndexTask task = new RealtimeIndexTask(
        null,
        new TaskResource("rofl", 2),
        new FireDepartment(
            new DataSchema("foo", null, new AggregatorFactory[0],
                new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper),
            new RealtimeIOConfig(new LocalFirehoseFactory(new File("lol"), "rofl", null), new PlumberSchool() {

                @Override
                public Plumber findPlumber(DataSchema schema, RealtimeTuningConfig config, FireDepartmentMetrics metrics) {
                    return null;
                }
            }, null),
            new RealtimeTuningConfig(1, new Period("PT10M"), null, null, null, null, 1, NoneShardSpec.instance(),
                new IndexSpec(), null, 0, 0, true, null)),
        null);
    File tmp = temporaryFolder.newFile("test_task.json");
    jsonMapper.writeValue(tmp, task);
    parseCommand("validator", "-f", tmp.getAbsolutePath(), "-t", "task").run();
}
Also used: com.fasterxml.jackson.databind.Module, com.fasterxml.jackson.databind.ObjectMapper, io.druid.guice.FirehoseModule, io.druid.indexing.common.task.RealtimeIndexTask, io.druid.indexing.common.task.TaskResource, io.druid.jackson.DefaultObjectMapper, io.druid.segment.IndexSpec, io.druid.segment.indexing.DataSchema, io.druid.segment.indexing.RealtimeIOConfig, io.druid.segment.indexing.RealtimeTuningConfig, io.druid.segment.indexing.granularity.UniformGranularitySpec, io.druid.segment.realtime.FireDepartment, io.druid.segment.realtime.FireDepartmentMetrics, io.druid.segment.realtime.firehose.LocalFirehoseFactory, io.druid.segment.realtime.plumber.Plumber, io.druid.segment.realtime.plumber.PlumberSchool, java.io.File, org.joda.time.Period, org.junit.Test.
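The validator run at the end essentially deserializes the file back into a Task. The same check can be made in-process; a hedged sketch, assuming Task is io.druid.indexing.common.task.Task, the Assert is the usual org.junit.Assert, and jsonMapper has the firehose module registered as above:

    // Sketch: write the task out and read it back through the registered modules.
    File roundTrip = temporaryFolder.newFile("round_trip_task.json");
    jsonMapper.writeValue(roundTrip, task);

    // Deserializing via the base Task type exercises the polymorphic type info;
    // an unserializable field would fail here rather than in the CLI run.
    Task reread = jsonMapper.readValue(roundTrip, Task.class);
    Assert.assertEquals(task.getId(), reread.getId());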

Example 5 with DataSchema

Use of io.druid.segment.indexing.DataSchema in project druid by druid-io.

In class KafkaSupervisorTest, method getDataSchema:

private DataSchema getDataSchema(String dataSource) {
    List<DimensionSchema> dimensions = new ArrayList<>();
    dimensions.add(StringDimensionSchema.create("dim1"));
    dimensions.add(StringDimensionSchema.create("dim2"));
    return new DataSchema(
            dataSource,
            objectMapper.convertValue(new StringInputRowParser(
                    new JSONParseSpec(
                            new TimestampSpec("timestamp", "iso", null),
                            new DimensionsSpec(dimensions, null, null),
                            new JSONPathSpec(true, ImmutableList.<JSONPathFieldSpec>of()),
                            ImmutableMap.<String, Boolean>of()),
                    Charsets.UTF_8.name()), Map.class),
            new AggregatorFactory[] { new CountAggregatorFactory("rows") },
            new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, ImmutableList.<Interval>of()),
            objectMapper);
}
Also used: com.google.common.collect.ImmutableMap, io.druid.data.input.impl.DimensionSchema, io.druid.data.input.impl.DimensionsSpec, io.druid.data.input.impl.JSONParseSpec, io.druid.data.input.impl.JSONPathSpec, io.druid.data.input.impl.StringDimensionSchema, io.druid.data.input.impl.StringInputRowParser, io.druid.data.input.impl.TimestampSpec, io.druid.query.aggregation.CountAggregatorFactory, io.druid.segment.indexing.DataSchema, io.druid.segment.indexing.granularity.UniformGranularitySpec, java.util.ArrayList, java.util.Map, org.easymock.EasyMock.anyString, org.joda.time.Interval.
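A quick way to sanity-check what getDataSchema returns is to re-materialize its parser and feed it one JSON line. A sketch; the sample record and the "metrics" data source name are made up for illustration, and io.druid.data.input.InputRow is assumed imported:

    DataSchema schema = getDataSchema("metrics");

    // DataSchema stores the parser as a Map; getParser() rebuilds the StringInputRowParser.
    StringInputRowParser parser = (StringInputRowParser) schema.getParser();
    InputRow row = parser.parse("{\"timestamp\": \"2017-01-01T00:00:00Z\", \"dim1\": \"a\", \"dim2\": \"b\"}");

    // The declared string dimensions come back by name; the "iso" spec parses the timestamp string.
    System.out.println(row.getDimension("dim1")); // [a]
    System.out.println(row.getTimestamp());       // 2017-01-01T00:00:00.000Z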

Aggregations

DataSchema (io.druid.segment.indexing.DataSchema): 34 usages
UniformGranularitySpec (io.druid.segment.indexing.granularity.UniformGranularitySpec): 29
Interval (org.joda.time.Interval): 18
Test (org.junit.Test): 18
RealtimeTuningConfig (io.druid.segment.indexing.RealtimeTuningConfig): 12
File (java.io.File): 11
DimensionsSpec (io.druid.data.input.impl.DimensionsSpec): 10
TimestampSpec (io.druid.data.input.impl.TimestampSpec): 10
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 10
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 9
DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper): 8
RealtimeIOConfig (io.druid.segment.indexing.RealtimeIOConfig): 8
StringInputRowParser (io.druid.data.input.impl.StringInputRowParser): 7
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 7
DoubleSumAggregatorFactory (io.druid.query.aggregation.DoubleSumAggregatorFactory): 7
Before (org.junit.Before): 7
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 6
ImmutableMap (com.google.common.collect.ImmutableMap): 6
FireDepartment (io.druid.segment.realtime.FireDepartment): 6
Period (org.joda.time.Period): 6