
Example 41 with LinearShardSpec

Use of org.apache.druid.timeline.partition.LinearShardSpec in project druid by druid-io.

In class CalciteTests, method createMockWalker:

public static SpecificSegmentsQuerySegmentWalker createMockWalker(final QueryRunnerFactoryConglomerate conglomerate, final File tmpDir, final QueryScheduler scheduler, final JoinableFactory joinableFactory) {
    final QueryableIndex index1 = IndexBuilder.create().tmpDir(new File(tmpDir, "1")).segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance()).schema(INDEX_SCHEMA).rows(ROWS1).buildMMappedIndex();
    final QueryableIndex index2 = IndexBuilder.create().tmpDir(new File(tmpDir, "2")).segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance()).schema(INDEX_SCHEMA_DIFFERENT_DIM3_M1_TYPES).rows(ROWS2).buildMMappedIndex();
    final QueryableIndex forbiddenIndex = IndexBuilder.create().tmpDir(new File(tmpDir, "forbidden")).segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance()).schema(INDEX_SCHEMA).rows(FORBIDDEN_ROWS).buildMMappedIndex();
    final QueryableIndex indexNumericDims = IndexBuilder.create().tmpDir(new File(tmpDir, "3")).segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance()).schema(INDEX_SCHEMA_NUMERIC_DIMS).rows(ROWS1_WITH_NUMERIC_DIMS).buildMMappedIndex();
    final QueryableIndex index4 = IndexBuilder.create().tmpDir(new File(tmpDir, "4")).segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance()).schema(INDEX_SCHEMA).rows(ROWS1_WITH_FULL_TIMESTAMP).buildMMappedIndex();
    final QueryableIndex indexLotsOfColumns = IndexBuilder.create().tmpDir(new File(tmpDir, "5")).segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance()).schema(INDEX_SCHEMA_LOTS_O_COLUMNS).rows(ROWS_LOTS_OF_COLUMNS).buildMMappedIndex();
    final QueryableIndex someDatasourceIndex = IndexBuilder.create().tmpDir(new File(tmpDir, "6")).segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance()).schema(INDEX_SCHEMA).rows(ROWS1).buildMMappedIndex();
    final QueryableIndex someXDatasourceIndex = IndexBuilder.create().tmpDir(new File(tmpDir, "7")).segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance()).schema(INDEX_SCHEMA_WITH_X_COLUMNS).rows(RAW_ROWS1_X).buildMMappedIndex();
    final QueryableIndex userVisitIndex = IndexBuilder.create().tmpDir(new File(tmpDir, "8")).segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance()).schema(INDEX_SCHEMA).rows(USER_VISIT_ROWS).buildMMappedIndex();
    // Register one segment per datasource; every segment is partition 0 of a LinearShardSpec.
    return new SpecificSegmentsQuerySegmentWalker(conglomerate, INJECTOR.getInstance(LookupExtractorFactoryContainerProvider.class), joinableFactory, scheduler)
        .add(DataSegment.builder().dataSource(DATASOURCE1).interval(index1.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build(), index1)
        .add(DataSegment.builder().dataSource(DATASOURCE2).interval(index2.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build(), index2)
        .add(DataSegment.builder().dataSource(FORBIDDEN_DATASOURCE).interval(forbiddenIndex.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build(), forbiddenIndex)
        .add(DataSegment.builder().dataSource(DATASOURCE3).interval(indexNumericDims.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build(), indexNumericDims)
        .add(DataSegment.builder().dataSource(DATASOURCE4).interval(index4.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build(), index4)
        .add(DataSegment.builder().dataSource(DATASOURCE5).interval(indexLotsOfColumns.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build(), indexLotsOfColumns)
        .add(DataSegment.builder().dataSource(SOME_DATASOURCE).interval(indexLotsOfColumns.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build(), someDatasourceIndex)
        .add(DataSegment.builder().dataSource(SOMEXDATASOURCE).interval(indexLotsOfColumns.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build(), someXDatasourceIndex)
        .add(DataSegment.builder().dataSource(BROADCAST_DATASOURCE).interval(indexNumericDims.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build(), indexNumericDims)
        .add(DataSegment.builder().dataSource(USERVISITDATASOURCE).interval(userVisitIndex.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build(), userVisitIndex);
}
Also used: QueryableIndex (org.apache.druid.segment.QueryableIndex), LinearShardSpec (org.apache.druid.timeline.partition.LinearShardSpec), LookupExtractorFactoryContainerProvider (org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider), File (java.io.File)
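
For orientation, here is a minimal, self-contained sketch of the pattern createMockWalker repeats for every datasource: build a DataSegment whose shardSpec is a LinearShardSpec with partition number 0. The datasource name and interval below are hypothetical placeholders, not constants from CalciteTests.

import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;

public class LinearShardSpecSegmentSketch {

    // Builds one segment the same way createMockWalker does for each datasource.
    // "example" and the 2000/2001 interval are placeholders for illustration only.
    public static DataSegment exampleSegment() {
        return DataSegment.builder()
            .dataSource("example")
            .interval(Intervals.of("2000/2001"))
            .version("1")
            .shardSpec(new LinearShardSpec(0))
            .size(0)
            .build();
    }
}

LinearShardSpec carries only a partition number; using 0 everywhere, as the walker setup does, means each test datasource consists of a single linear partition.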

Example 42 with LinearShardSpec

Use of org.apache.druid.timeline.partition.LinearShardSpec in project druid by druid-io.

In class DefaultOfflineAppenderatorFactoryTest, method testBuild:

@Test
public void testBuild() throws IOException, SegmentNotWritableException {
    Injector injector = Initialization.makeInjectorWithModules(GuiceInjectors.makeStartupInjector(), ImmutableList.<Module>of(new Module() {

        @Override
        public void configure(Binder binder) {
            binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/tool");
            binder.bindConstant().annotatedWith(Names.named("servicePort")).to(9999);
            binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1);
            binder.bind(DruidProcessingConfig.class).toInstance(new DruidProcessingConfig() {

                @Override
                public String getFormatString() {
                    return "processing-%s";
                }

                @Override
                public int intermediateComputeSizeBytes() {
                    return 100 * 1024 * 1024;
                }

                @Override
                public int getNumThreads() {
                    return 1;
                }

                @Override
                public int columnCacheSizeBytes() {
                    return 25 * 1024 * 1024;
                }
            });
            binder.bind(ColumnConfig.class).to(DruidProcessingConfig.class);
        }
    }));
    ObjectMapper objectMapper = injector.getInstance(ObjectMapper.class);
    AppenderatorFactory defaultOfflineAppenderatorFactory = objectMapper.readerFor(AppenderatorFactory.class).readValue("{\"type\":\"offline\"}");
    final Map<String, Object> parserMap = objectMapper.convertValue(
            new MapInputRowParser(new JSONParseSpec(new TimestampSpec("ts", "auto", null), DimensionsSpec.EMPTY, null, null, null)),
            Map.class);
    DataSchema schema = new DataSchema(
            "dataSourceName",
            parserMap,
            new AggregatorFactory[] { new CountAggregatorFactory("count"), new LongSumAggregatorFactory("met", "met") },
            new UniformGranularitySpec(Granularities.MINUTE, Granularities.NONE, null),
            null,
            objectMapper);
    // 75000 is maxRowsInMemory; the remaining constructor arguments are left at their defaults.
    RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(
            null, 75000, null, null, null, null, temporaryFolder.newFolder(), null, null,
            null, null, null, null, 0, 0, null, null, null, null, null);
    Appenderator appenderator = defaultOfflineAppenderatorFactory.build(schema, tuningConfig, new FireDepartmentMetrics());
    try {
        Assert.assertEquals("dataSourceName", appenderator.getDataSource());
        Assert.assertEquals(null, appenderator.startJob());
        SegmentIdWithShardSpec identifier = new SegmentIdWithShardSpec("dataSourceName", Intervals.of("2000/2001"), "A", new LinearShardSpec(0));
        Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
        appenderator.add(identifier, StreamAppenderatorTest.ir("2000", "bar", 1), null);
        Assert.assertEquals(1, ((AppenderatorImpl) appenderator).getRowsInMemory());
        appenderator.add(identifier, StreamAppenderatorTest.ir("2000", "baz", 1), null);
        Assert.assertEquals(2, ((AppenderatorImpl) appenderator).getRowsInMemory());
        appenderator.close();
        Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
    } finally {
        appenderator.close();
    }
}
Also used: ColumnConfig (org.apache.druid.segment.column.ColumnConfig), MapInputRowParser (org.apache.druid.data.input.impl.MapInputRowParser), LinearShardSpec (org.apache.druid.timeline.partition.LinearShardSpec), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), RealtimeTuningConfig (org.apache.druid.segment.indexing.RealtimeTuningConfig), DataSchema (org.apache.druid.segment.indexing.DataSchema), Binder (com.google.inject.Binder), UniformGranularitySpec (org.apache.druid.segment.indexing.granularity.UniformGranularitySpec), FireDepartmentMetrics (org.apache.druid.segment.realtime.FireDepartmentMetrics), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), Injector (com.google.inject.Injector), TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec), Module (com.google.inject.Module), DruidProcessingConfig (org.apache.druid.query.DruidProcessingConfig), JSONParseSpec (org.apache.druid.data.input.impl.JSONParseSpec), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper), Test (org.junit.Test)
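
As a sketch of what the identifier in testBuild encodes: segments sharing a datasource, interval, and version are told apart solely by the LinearShardSpec partition number. The first identifier below reuses the exact constructor call from the test; the second, with partition 1, is a hypothetical follow-on segment added here for illustration.

import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec;
import org.apache.druid.timeline.partition.LinearShardSpec;

public class SegmentIdWithShardSpecSketch {
    public static void main(String[] args) {
        // Identifier from the test: partition 0 of version "A" for 2000/2001.
        SegmentIdWithShardSpec first = new SegmentIdWithShardSpec(
                "dataSourceName", Intervals.of("2000/2001"), "A", new LinearShardSpec(0));
        // Hypothetical next segment for the same interval: only the partition differs.
        SegmentIdWithShardSpec second = new SegmentIdWithShardSpec(
                "dataSourceName", Intervals.of("2000/2001"), "A", new LinearShardSpec(1));
        System.out.println(first);   // toString includes the partition number
        System.out.println(second);
    }
}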

Aggregations

LinearShardSpec (org.apache.druid.timeline.partition.LinearShardSpec): 42 usages
DataSegment (org.apache.druid.timeline.DataSegment): 30 usages
Test (org.junit.Test): 18 usages
QueryableIndex (org.apache.druid.segment.QueryableIndex): 14 usages
Interval (org.joda.time.Interval): 14 usages
GeneratorSchemaInfo (org.apache.druid.segment.generator.GeneratorSchemaInfo): 12 usages
SegmentGenerator (org.apache.druid.segment.generator.SegmentGenerator): 12 usages
SpecificSegmentsQuerySegmentWalker (org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker): 12 usages
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 11 usages
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory): 9 usages
Setup (org.openjdk.jmh.annotations.Setup): 9 usages
SegmentIdWithShardSpec (org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec): 8 usages
MetadataStorageTablesConfig (org.apache.druid.metadata.MetadataStorageTablesConfig): 7 usages
IndexBuilder (org.apache.druid.segment.IndexBuilder): 7 usages
DataSegmentPusher (org.apache.druid.segment.loading.DataSegmentPusher): 7 usages
HdfsDataSegmentPusher (org.apache.druid.storage.hdfs.HdfsDataSegmentPusher): 7 usages
HdfsDataSegmentPusherConfig (org.apache.druid.storage.hdfs.HdfsDataSegmentPusherConfig): 7 usages
LocalFileSystem (org.apache.hadoop.fs.LocalFileSystem): 7 usages
Path (org.apache.hadoop.fs.Path): 7 usages
File (java.io.File): 6 usages