Example 31 with DimensionsSpec

Use of io.druid.data.input.impl.DimensionsSpec in project druid by druid-io.

From class IngestSegmentFirehoseFactoryTest, method constructorFeeder.

@Parameterized.Parameters(name = "{1}")
public static Collection<Object[]> constructorFeeder() throws IOException {
    final IndexSpec indexSpec = new IndexSpec();
    final HeapMemoryTaskStorage ts = new HeapMemoryTaskStorage(new TaskStorageConfig(null) {
    });
    final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
        .withQueryGranularity(Granularities.NONE)
        .withMinTimestamp(JodaUtils.MIN_INSTANT)
        .withDimensionsSpec(ROW_PARSER)
        .withMetrics(new AggregatorFactory[] {
            new LongSumAggregatorFactory(METRIC_LONG_NAME, DIM_LONG_NAME),
            new DoubleSumAggregatorFactory(METRIC_FLOAT_NAME, DIM_FLOAT_NAME)
        })
        .build();
    final OnheapIncrementalIndex index = new OnheapIncrementalIndex(schema, true, MAX_ROWS * MAX_SHARD_NUMBER);
    for (Integer i = 0; i < MAX_ROWS; ++i) {
        index.add(ROW_PARSER.parse(buildRow(i.longValue())));
    }
    if (!persistDir.mkdirs() && !persistDir.exists()) {
        throw new IOException(String.format("Could not create directory at [%s]", persistDir.getAbsolutePath()));
    }
    INDEX_MERGER.persist(index, persistDir, indexSpec);
    final TaskLockbox tl = new TaskLockbox(ts);
    final IndexerSQLMetadataStorageCoordinator mdc = new IndexerSQLMetadataStorageCoordinator(null, null, null) {

        private final Set<DataSegment> published = Sets.newHashSet();

        private final Set<DataSegment> nuked = Sets.newHashSet();

        @Override
        public List<DataSegment> getUsedSegmentsForInterval(String dataSource, Interval interval) throws IOException {
            return ImmutableList.copyOf(segmentSet);
        }

        @Override
        public List<DataSegment> getUsedSegmentsForIntervals(String dataSource, List<Interval> interval) throws IOException {
            return ImmutableList.copyOf(segmentSet);
        }

        @Override
        public List<DataSegment> getUnusedSegmentsForInterval(String dataSource, Interval interval) {
            return ImmutableList.of();
        }

        @Override
        public Set<DataSegment> announceHistoricalSegments(Set<DataSegment> segments) {
            Set<DataSegment> added = Sets.newHashSet();
            for (final DataSegment segment : segments) {
                if (published.add(segment)) {
                    added.add(segment);
                }
            }
            return ImmutableSet.copyOf(added);
        }

        @Override
        public void deleteSegments(Set<DataSegment> segments) {
            nuked.addAll(segments);
        }
    };
    final LocalTaskActionClientFactory tac = new LocalTaskActionClientFactory(ts, new TaskActionToolbox(tl, mdc, newMockEmitter(), EasyMock.createMock(SupervisorManager.class)));
    SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
    EasyMock.replay(notifierFactory);
    final TaskToolboxFactory taskToolboxFactory = new TaskToolboxFactory(new TaskConfig(tmpDir.getAbsolutePath(), null, null, 50000, null, false, null, null), tac, newMockEmitter(), new DataSegmentPusher() {

        @Deprecated
        @Override
        public String getPathForHadoop(String dataSource) {
            return getPathForHadoop();
        }

        @Override
        public String getPathForHadoop() {
            throw new UnsupportedOperationException();
        }

        @Override
        public DataSegment push(File file, DataSegment segment) throws IOException {
            return segment;
        }
    }, new DataSegmentKiller() {

        @Override
        public void kill(DataSegment segments) throws SegmentLoadingException {
        }

        @Override
        public void killAll() throws IOException {
            throw new UnsupportedOperationException("not implemented");
        }
    }, new DataSegmentMover() {

        @Override
        public DataSegment move(DataSegment dataSegment, Map<String, Object> targetLoadSpec) throws SegmentLoadingException {
            return dataSegment;
        }
    }, new DataSegmentArchiver() {

        @Override
        public DataSegment archive(DataSegment segment) throws SegmentLoadingException {
            return segment;
        }

        @Override
        public DataSegment restore(DataSegment segment) throws SegmentLoadingException {
            return segment;
        }
    }, // segment announcer
    null, notifierFactory, // query runner factory conglomerate corporation unionized collective
    null, // query executor service
    null, // monitor scheduler
    null, new SegmentLoaderFactory(new SegmentLoaderLocalCacheManager(null, new SegmentLoaderConfig() {

        @Override
        public List<StorageLocationConfig> getLocations() {
            return Lists.newArrayList();
        }
    }, MAPPER)), MAPPER, INDEX_MERGER, INDEX_IO, null, null, INDEX_MERGER_V9);
    Collection<Object[]> values = new LinkedList<>();
    for (InputRowParser parser : Arrays.<InputRowParser>asList(
        ROW_PARSER,
        new MapInputRowParser(
            new JSONParseSpec(
                new TimestampSpec(TIME_COLUMN, "auto", null),
                new DimensionsSpec(
                    DimensionsSpec.getDefaultSchemas(ImmutableList.<String>of()),
                    ImmutableList.of(DIM_FLOAT_NAME, DIM_LONG_NAME),
                    ImmutableList.<SpatialDimensionSchema>of()),
                null,
                null)))) {
        for (List<String> dim_names : Arrays.<List<String>>asList(null, ImmutableList.of(DIM_NAME))) {
            for (List<String> metric_names : Arrays.<List<String>>asList(null, ImmutableList.of(METRIC_LONG_NAME, METRIC_FLOAT_NAME))) {
                values.add(new Object[] { new IngestSegmentFirehoseFactory(DATA_SOURCE_NAME, FOREVER, new SelectorDimFilter(DIM_NAME, DIM_VALUE, null), dim_names, metric_names, Guice.createInjector(new Module() {

                    @Override
                    public void configure(Binder binder) {
                        binder.bind(TaskToolboxFactory.class).toInstance(taskToolboxFactory);
                    }
                }), INDEX_IO), String.format("DimNames[%s]MetricNames[%s]ParserDimNames[%s]", dim_names == null ? "null" : "dims", metric_names == null ? "null" : "metrics", parser == ROW_PARSER ? "dims" : "null"), parser });
            }
        }
    }
    return values;
}
Also used : DataSegmentArchiver(io.druid.segment.loading.DataSegmentArchiver) OnheapIncrementalIndex(io.druid.segment.incremental.OnheapIncrementalIndex) DataSegmentMover(io.druid.segment.loading.DataSegmentMover) List(java.util.List) ArrayList(java.util.ArrayList) ImmutableList(com.google.common.collect.ImmutableList) LinkedList(java.util.LinkedList) LocalTaskActionClientFactory(io.druid.indexing.common.actions.LocalTaskActionClientFactory) SegmentLoaderConfig(io.druid.segment.loading.SegmentLoaderConfig) SegmentLoaderFactory(io.druid.indexing.common.SegmentLoaderFactory) SegmentLoaderLocalCacheManager(io.druid.segment.loading.SegmentLoaderLocalCacheManager) JSONParseSpec(io.druid.data.input.impl.JSONParseSpec) IndexerSQLMetadataStorageCoordinator(io.druid.metadata.IndexerSQLMetadataStorageCoordinator) DoubleSumAggregatorFactory(io.druid.query.aggregation.DoubleSumAggregatorFactory) HeapMemoryTaskStorage(io.druid.indexing.overlord.HeapMemoryTaskStorage) MapInputRowParser(io.druid.data.input.impl.MapInputRowParser) InputRowParser(io.druid.data.input.impl.InputRowParser) Module(com.google.inject.Module) SimpleModule(com.fasterxml.jackson.databind.module.SimpleModule) File(java.io.File) Interval(org.joda.time.Interval) IndexSpec(io.druid.segment.IndexSpec) DataSegmentPusher(io.druid.segment.loading.DataSegmentPusher) ImmutableSet(com.google.common.collect.ImmutableSet) Set(java.util.Set) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) DataSegmentKiller(io.druid.segment.loading.DataSegmentKiller) TaskConfig(io.druid.indexing.common.config.TaskConfig) DataSegment(io.druid.timeline.DataSegment) Binder(com.google.inject.Binder) TaskToolboxFactory(io.druid.indexing.common.TaskToolboxFactory) SelectorDimFilter(io.druid.query.filter.SelectorDimFilter) TaskActionToolbox(io.druid.indexing.common.actions.TaskActionToolbox) TimestampSpec(io.druid.data.input.impl.TimestampSpec) IncrementalIndexSchema(io.druid.segment.incremental.IncrementalIndexSchema) SegmentLoadingException(io.druid.segment.loading.SegmentLoadingException) TaskStorageConfig(io.druid.indexing.common.config.TaskStorageConfig) IOException(java.io.IOException) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) SegmentHandoffNotifierFactory(io.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory) SpatialDimensionSchema(io.druid.data.input.impl.SpatialDimensionSchema) TaskLockbox(io.druid.indexing.overlord.TaskLockbox) DimensionsSpec(io.druid.data.input.impl.DimensionsSpec)
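
The DimensionsSpec work in this example happens in the parser loop near the end: the second parser is schemaless, pairing an empty list of dimension schemas with exclusions for the metric source columns so that every remaining JSON field is discovered as a dimension. The snippet below is a minimal sketch of only that construction, with made-up column names (ts, page, valueLong, valueFloat) standing in for the test's constants; it is illustrative, not part of the test.

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.druid.data.input.InputRow;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.JSONParseSpec;
import io.druid.data.input.impl.MapInputRowParser;
import io.druid.data.input.impl.SpatialDimensionSchema;
import io.druid.data.input.impl.TimestampSpec;

public class SchemalessDimensionsSketch {
    public static void main(String[] args) {
        // Empty schema list => dimensions are discovered from each input map; the exclusions keep
        // the metric source columns (and, defensively, the timestamp column) out of the dimension list.
        DimensionsSpec dimensionsSpec = new DimensionsSpec(
            DimensionsSpec.getDefaultSchemas(ImmutableList.<String>of()),
            ImmutableList.of("valueFloat", "valueLong", "ts"),
            ImmutableList.<SpatialDimensionSchema>of());

        MapInputRowParser parser = new MapInputRowParser(
            new JSONParseSpec(new TimestampSpec("ts", "auto", null), dimensionsSpec, null, null));

        InputRow row = parser.parse(ImmutableMap.<String, Object>of(
            "ts", "2013-01-01T00:00:00Z", "page", "home", "valueLong", 10L, "valueFloat", 1.5));

        // Only "page" is reported as a dimension; the excluded columns remain available as metric inputs.
        System.out.println(row.getDimensions());
    }
}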

Example 32 with DimensionsSpec

Use of io.druid.data.input.impl.DimensionsSpec in project druid by druid-io.

From class StringDimensionHandlerTest, method getAdapters.

private static Pair<IncrementalIndexAdapter, IncrementalIndexAdapter> getAdapters(List<String> dims, Map<String, Object> event1, Map<String, Object> event2) throws Exception {
    IncrementalIndex incrementalIndex1 = new OnheapIncrementalIndex(
        TEST_INTERVAL.getStartMillis(),
        Granularities.NONE,
        true,
        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(dims), null, null),
        new AggregatorFactory[] { new CountAggregatorFactory("count") },
        1000);
    IncrementalIndex incrementalIndex2 = new OnheapIncrementalIndex(
        TEST_INTERVAL.getStartMillis(),
        Granularities.NONE,
        true,
        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(dims), null, null),
        new AggregatorFactory[] { new CountAggregatorFactory("count") },
        1000);
    incrementalIndex1.add(new MapBasedInputRow(TEST_INTERVAL.getStartMillis(), dims, event1));
    incrementalIndex2.add(new MapBasedInputRow(TEST_INTERVAL.getStartMillis() + 3, dims, event2));
    IncrementalIndexAdapter adapter1 = new IncrementalIndexAdapter(TEST_INTERVAL, incrementalIndex1, INDEX_SPEC.getBitmapSerdeFactory().getBitmapFactory());
    IncrementalIndexAdapter adapter2 = new IncrementalIndexAdapter(TEST_INTERVAL, incrementalIndex2, INDEX_SPEC.getBitmapSerdeFactory().getBitmapFactory());
    return new Pair<>(adapter1, adapter2);
}
Also used : IncrementalIndexAdapter(io.druid.segment.incremental.IncrementalIndexAdapter) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) IncrementalIndex(io.druid.segment.incremental.IncrementalIndex) OnheapIncrementalIndex(io.druid.segment.incremental.OnheapIncrementalIndex) DimensionsSpec(io.druid.data.input.impl.DimensionsSpec) MapBasedInputRow(io.druid.data.input.MapBasedInputRow) Pair(io.druid.java.util.common.Pair)
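
Stripped of the adapter plumbing, the DimensionsSpec here simply pins an explicit dimension list (and therefore ordering) on an in-heap incremental index. The sketch below shows the same constructor shape with hypothetical dimension names and interval; the Granularities import path is an assumption about this Druid version and may need adjusting.

import java.util.Arrays;
import java.util.List;

import com.google.common.collect.ImmutableMap;
import io.druid.data.input.MapBasedInputRow;
import io.druid.data.input.impl.DimensionsSpec;
// Assumed location of Granularities in this Druid line; verify against your dependency.
import io.druid.java.util.common.granularity.Granularities;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.segment.IndexSpec;
import io.druid.segment.incremental.IncrementalIndexAdapter;
import io.druid.segment.incremental.OnheapIncrementalIndex;
import org.joda.time.Interval;

public class ExplicitDimensionsSketch {
    public static void main(String[] args) throws Exception {
        Interval interval = new Interval("2011-01-01/2011-01-02"); // hypothetical interval
        List<String> dims = Arrays.asList("product", "region");    // hypothetical dimension names

        // Explicit dimension schemas fix the dimension set up front instead of discovering it per row.
        OnheapIncrementalIndex index = new OnheapIncrementalIndex(
            interval.getStartMillis(),
            Granularities.NONE,
            true, // rollup
            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(dims), null, null),
            new AggregatorFactory[] { new CountAggregatorFactory("count") },
            1000);

        index.add(new MapBasedInputRow(
            interval.getStartMillis(),
            dims,
            ImmutableMap.<String, Object>of("product", "product_1", "region", "eu")));

        // Wrap the in-memory index so it can be inspected or merged like a persisted segment.
        IncrementalIndexAdapter adapter = new IncrementalIndexAdapter(
            interval,
            index,
            new IndexSpec().getBitmapSerdeFactory().getBitmapFactory());
        System.out.println(adapter.getNumRows());
    }
}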

Example 33 with DimensionsSpec

Use of io.druid.data.input.impl.DimensionsSpec in project druid by druid-io.

From class IndexMergerV9WithSpatialIndexTest, method makeMergedQueryableIndex.

private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) {
    try {
        IncrementalIndex first = new OnheapIncrementalIndex(
            new IncrementalIndexSchema.Builder()
                .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                .withQueryGranularity(Granularities.DAY)
                .withMetrics(METRIC_AGGS)
                .withDimensionsSpec(new DimensionsSpec(
                    null,
                    null,
                    Arrays.asList(
                        new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                        new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2")))))
                .build(),
            false,
            1000);
        IncrementalIndex second = new OnheapIncrementalIndex(
            new IncrementalIndexSchema.Builder()
                .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                .withQueryGranularity(Granularities.DAY)
                .withMetrics(METRIC_AGGS)
                .withDimensionsSpec(new DimensionsSpec(
                    null,
                    null,
                    Arrays.asList(
                        new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                        new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2")))))
                .build(),
            false,
            1000);
        IncrementalIndex third = new OnheapIncrementalIndex(
            new IncrementalIndexSchema.Builder()
                .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                .withQueryGranularity(Granularities.DAY)
                .withMetrics(METRIC_AGGS)
                .withDimensionsSpec(new DimensionsSpec(
                    null,
                    null,
                    Arrays.asList(
                        new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                        new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2")))))
                .build(),
            false,
            NUM_POINTS);
        first.add(new MapBasedInputRow(new DateTime("2013-01-01").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, "long", 0.0f, "val", 17L)));
        first.add(new MapBasedInputRow(new DateTime("2013-01-02").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, "long", 3.0f, "val", 29L)));
        first.add(new MapBasedInputRow(new DateTime("2013-01-03").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, "long", 2.0f, "val", 13L)));
        first.add(new MapBasedInputRow(new DateTime("2013-01-05").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", "long", "_mmx.unknown", "val", 101L)));
        first.add(new MapBasedInputRow(new DateTime("2013-01-05").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L)));
        second.add(new MapBasedInputRow(new DateTime("2013-01-04").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, "long", 3.0f, "val", 91L)));
        second.add(new MapBasedInputRow(new DateTime("2013-01-05").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, "long", 6.0f, "val", 47L)));
        second.add(new MapBasedInputRow(new DateTime("2013-01-05").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-05").toString(), "lat2", 0.0f, "long2", 0.0f, "val", 13L)));
        // Add a bunch of random points
        Random rand = new Random();
        for (int i = 8; i < NUM_POINTS; i++) {
            third.add(new MapBasedInputRow(new DateTime("2013-01-01").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), "long", (float) (rand.nextFloat() * 10 + 10.0), "val", i)));
        }
        File tmpFile = File.createTempFile("yay", "who");
        tmpFile.delete();
        File firstFile = new File(tmpFile, "first");
        File secondFile = new File(tmpFile, "second");
        File thirdFile = new File(tmpFile, "third");
        File mergedFile = new File(tmpFile, "merged");
        firstFile.mkdirs();
        secondFile.mkdirs();
        thirdFile.mkdirs();
        mergedFile.mkdirs();
        INDEX_MERGER_V9.persist(first, DATA_INTERVAL, firstFile, indexSpec);
        INDEX_MERGER_V9.persist(second, DATA_INTERVAL, secondFile, indexSpec);
        INDEX_MERGER_V9.persist(third, DATA_INTERVAL, thirdFile, indexSpec);
        try {
            QueryableIndex mergedRealtime = INDEX_IO.loadIndex(INDEX_MERGER_V9.mergeQueryableIndex(Arrays.asList(INDEX_IO.loadIndex(firstFile), INDEX_IO.loadIndex(secondFile), INDEX_IO.loadIndex(thirdFile)), true, METRIC_AGGS, mergedFile, indexSpec));
            return mergedRealtime;
        } finally {
            FileUtils.deleteDirectory(firstFile);
            FileUtils.deleteDirectory(secondFile);
            FileUtils.deleteDirectory(thirdFile);
            FileUtils.deleteDirectory(mergedFile);
        }
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
}
Also used : IncrementalIndex(io.druid.segment.incremental.IncrementalIndex) OnheapIncrementalIndex(io.druid.segment.incremental.OnheapIncrementalIndex) IOException(java.io.IOException) DateTime(org.joda.time.DateTime) Random(java.util.Random) SpatialDimensionSchema(io.druid.data.input.impl.SpatialDimensionSchema) DimensionsSpec(io.druid.data.input.impl.DimensionsSpec) MapBasedInputRow(io.druid.data.input.MapBasedInputRow) File(java.io.File)
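
The interesting part of this example is the third DimensionsSpec argument: a list of SpatialDimensionSchema entries that fold separate latitude/longitude columns into composite spatial dimensions before the three indexes are persisted and merged. Below is a minimal sketch of just that wiring, reusing the column names from the test but reducing the metrics to a single count aggregator.

import java.util.Arrays;

import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.SpatialDimensionSchema;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.segment.incremental.IncrementalIndexSchema;

public class SpatialDimensionsSketch {
    public static void main(String[] args) {
        // No explicit or excluded dimensions; only spatial schemas that combine
        // "lat"/"long" into "dim.geo" and "lat2"/"long2" into "spatialIsRad".
        DimensionsSpec spatialSpec = new DimensionsSpec(
            null,
            null,
            Arrays.asList(
                new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2"))));

        // The schema would then be handed to an OnheapIncrementalIndex, as in the test above.
        IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
            .withMinTimestamp(0L)
            .withDimensionsSpec(spatialSpec)
            .withMetrics(new AggregatorFactory[] { new CountAggregatorFactory("count") })
            .build();

        System.out.println(spatialSpec.getSpatialDimensions());
    }
}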

Example 34 with DimensionsSpec

Use of io.druid.data.input.impl.DimensionsSpec in project druid by druid-io.

From class MultiValuedDimensionTest, method setupClass.

@BeforeClass
public static void setupClass() throws Exception {
    incrementalIndex = new OnheapIncrementalIndex(0, Granularities.NONE, new AggregatorFactory[] { new CountAggregatorFactory("count") }, true, true, true, 5000);
    StringInputRowParser parser = new StringInputRowParser(
        new CSVParseSpec(
            new TimestampSpec("timestamp", "iso", null),
            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("product", "tags")), null, null),
            "\t",
            ImmutableList.of("timestamp", "product", "tags")),
        "UTF-8");
    String[] rows = new String[] { "2011-01-12T00:00:00.000Z,product_1,t1\tt2\tt3", "2011-01-13T00:00:00.000Z,product_2,t3\tt4\tt5", "2011-01-14T00:00:00.000Z,product_3,t5\tt6\tt7", "2011-01-14T00:00:00.000Z,product_4" };
    for (String row : rows) {
        incrementalIndex.add(parser.parse(row));
    }
    persistedSegmentDir = Files.createTempDir();
    TestHelper.getTestIndexMerger().persist(incrementalIndex, persistedSegmentDir, new IndexSpec());
    queryableIndex = TestHelper.getTestIndexIO().loadIndex(persistedSegmentDir);
}
Also used : IndexSpec(io.druid.segment.IndexSpec) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) CSVParseSpec(io.druid.data.input.impl.CSVParseSpec) OnheapIncrementalIndex(io.druid.segment.incremental.OnheapIncrementalIndex) StringInputRowParser(io.druid.data.input.impl.StringInputRowParser) TimestampSpec(io.druid.data.input.impl.TimestampSpec) DimensionsSpec(io.druid.data.input.impl.DimensionsSpec) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) BeforeClass(org.junit.BeforeClass)
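
Here the DimensionsSpec declares "product" and "tags" as the only dimensions for a CSV parser whose list delimiter is a tab, which is what turns "tags" into a multi-value dimension. A cut-down sketch of just the parsing step, reusing one data row from the test, might look like this:

import com.google.common.collect.ImmutableList;
import io.druid.data.input.InputRow;
import io.druid.data.input.impl.CSVParseSpec;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;

public class MultiValueDimensionSketch {
    public static void main(String[] args) {
        StringInputRowParser parser = new StringInputRowParser(
            new CSVParseSpec(
                new TimestampSpec("timestamp", "iso", null),
                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("product", "tags")), null, null),
                "\t", // list delimiter: tab-separated values inside one CSV field become a multi-value dimension
                ImmutableList.of("timestamp", "product", "tags")),
            "UTF-8");

        InputRow row = parser.parse("2011-01-12T00:00:00.000Z,product_1,t1\tt2\tt3");

        // "tags" parses to the multi-value list [t1, t2, t3].
        System.out.println(row.getDimension("tags"));
    }
}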

Example 35 with DimensionsSpec

Use of io.druid.data.input.impl.DimensionsSpec in project druid by druid-io.

From class ProtoBufInputRowParserTest, method testParse.

/*
   * eventType = 1;
   *
   * required uint64 id = 2;
   * required string timestamp = 3;
   * optional uint32 someOtherId = 4;
   * optional bool isValid = 5;
   * optional string description = 6;
   *
   * optional float someFloatColumn = 7;
   * optional uint32 someIntColumn = 8;
   * optional uint64 someLongColumn = 9;
   */
@Test
public void testParse() throws Exception {
    //configure parser with desc file
    ProtoBufInputRowParser parser = new ProtoBufInputRowParser(
        new TimeAndDimsParseSpec(
            new TimestampSpec("timestamp", "iso", null),
            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList(DIMENSIONS)), Arrays.<String>asList(), null)),
        "prototest.desc");
    //create binary of proto test event
    DateTime dateTime = new DateTime(2012, 07, 12, 9, 30);
    ProtoTestEventWrapper.ProtoTestEvent event = ProtoTestEventWrapper.ProtoTestEvent.newBuilder()
        .setDescription("description")
        .setEventType(ProtoTestEventWrapper.ProtoTestEvent.EventCategory.CATEGORY_ONE)
        .setId(4711L)
        .setIsValid(true)
        .setSomeOtherId(4712)
        .setTimestamp(dateTime.toString())
        .setSomeFloatColumn(47.11F)
        .setSomeIntColumn(815)
        .setSomeLongColumn(816L)
        .build();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    event.writeTo(out);
    InputRow row = parser.parse(ByteBuffer.wrap(out.toByteArray()));
    System.out.println(row);
    assertEquals(Arrays.asList(DIMENSIONS), row.getDimensions());
    assertEquals(dateTime.getMillis(), row.getTimestampFromEpoch());
    assertDimensionEquals(row, "id", "4711");
    assertDimensionEquals(row, "isValid", "true");
    assertDimensionEquals(row, "someOtherId", "4712");
    assertDimensionEquals(row, "description", "description");
    assertDimensionEquals(row, "eventType", ProtoTestEventWrapper.ProtoTestEvent.EventCategory.CATEGORY_ONE.name());
    assertEquals(47.11F, row.getFloatMetric("someFloatColumn"), 0.0);
    assertEquals(815.0F, row.getFloatMetric("someIntColumn"), 0.0);
    assertEquals(816.0F, row.getFloatMetric("someLongColumn"), 0.0);
}
Also used : TimeAndDimsParseSpec(io.druid.data.input.impl.TimeAndDimsParseSpec) TimestampSpec(io.druid.data.input.impl.TimestampSpec) DimensionsSpec(io.druid.data.input.impl.DimensionsSpec) ByteArrayOutputStream(java.io.ByteArrayOutputStream) DateTime(org.joda.time.DateTime) Test(org.junit.Test)
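
In this last example the DimensionsSpec carries an explicit dimension list (the test's DIMENSIONS constant) and an empty exclusion list, wrapped in a TimeAndDimsParseSpec so the protobuf parser only needs the timestamp column and the dimensions. The sketch below rebuilds just that spec with stand-in dimension names taken from the assertions above; the protobuf descriptor and parser themselves are not reproduced.

import java.util.Arrays;

import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.TimeAndDimsParseSpec;
import io.druid.data.input.impl.TimestampSpec;

public class TimeAndDimsSpecSketch {
    public static void main(String[] args) {
        // Stand-ins for the test's DIMENSIONS constant.
        String[] dimensions = { "eventType", "id", "someOtherId", "isValid", "description" };

        TimeAndDimsParseSpec parseSpec = new TimeAndDimsParseSpec(
            new TimestampSpec("timestamp", "iso", null),
            new DimensionsSpec(
                DimensionsSpec.getDefaultSchemas(Arrays.asList(dimensions)),
                Arrays.<String>asList(), // no exclusions
                null));                  // no spatial dimensions

        System.out.println(parseSpec.getDimensionsSpec().getDimensionNames());
    }
}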

Aggregations

DimensionsSpec (io.druid.data.input.impl.DimensionsSpec): 41 usages
TimestampSpec (io.druid.data.input.impl.TimestampSpec): 29 usages
JSONParseSpec (io.druid.data.input.impl.JSONParseSpec): 16 usages
StringInputRowParser (io.druid.data.input.impl.StringInputRowParser): 16 usages
OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex): 16 usages
Test (org.junit.Test): 14 usages
IncrementalIndex (io.druid.segment.incremental.IncrementalIndex): 13 usages
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 11 usages
MapBasedInputRow (io.druid.data.input.MapBasedInputRow): 10 usages
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 10 usages
DateTime (org.joda.time.DateTime): 9 usages
DoubleSumAggregatorFactory (io.druid.query.aggregation.DoubleSumAggregatorFactory): 8 usages
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 8 usages
DataSchema (io.druid.segment.indexing.DataSchema): 8 usages
UniformGranularitySpec (io.druid.segment.indexing.granularity.UniformGranularitySpec): 8 usages
File (java.io.File): 8 usages
Map (java.util.Map): 8 usages
SpatialDimensionSchema (io.druid.data.input.impl.SpatialDimensionSchema): 7 usages
CSVParseSpec (io.druid.data.input.impl.CSVParseSpec): 6 usages
StringDimensionSchema (io.druid.data.input.impl.StringDimensionSchema): 6 usages