
Example 26 with LinearShardSpec

Use of org.apache.druid.timeline.partition.LinearShardSpec in project druid by druid-io.

From the class ExpressionAggregationBenchmark, the setup method:

@Setup(Level.Trial)
public void setup() {
    this.closer = Closer.create();
    final GeneratorSchemaInfo schemaInfo = new GeneratorSchemaInfo(ImmutableList.of(GeneratorColumnSchema.makeNormal("x", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false), GeneratorColumnSchema.makeNormal("y", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false)), ImmutableList.of(), Intervals.of("2000/P1D"), false);
    final DataSegment dataSegment = DataSegment.builder().dataSource("foo").interval(schemaInfo.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build();
    final SegmentGenerator segmentGenerator = closer.register(new SegmentGenerator());
    this.index = closer.register(segmentGenerator.generate(dataSegment, schemaInfo, Granularities.NONE, rowsPerSegment));
    this.javaScriptAggregatorFactory = new JavaScriptAggregatorFactory("name", ImmutableList.of("x", "y"), "function(current,x,y) { if (x > 0) { return current + x + 1 } else { return current + y + 1 } }", "function() { return 0 }", "function(a,b) { return a + b }", JavaScriptConfig.getEnabledInstance());
    this.expressionAggregatorFactory = new DoubleSumAggregatorFactory("name", null, "if(x>0,1.0+x,y+1)", TestExprMacroTable.INSTANCE);
}
Also used: SegmentGenerator (org.apache.druid.segment.generator.SegmentGenerator), DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory), LinearShardSpec (org.apache.druid.timeline.partition.LinearShardSpec), GeneratorSchemaInfo (org.apache.druid.segment.generator.GeneratorSchemaInfo), JavaScriptAggregatorFactory (org.apache.druid.query.aggregation.JavaScriptAggregatorFactory), DataSegment (org.apache.druid.timeline.DataSegment), Setup (org.openjdk.jmh.annotations.Setup)
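
Every benchmark in this set builds its test segment the same way: a DataSegment.builder() chain that pins the shard spec to a single linear partition. A minimal, self-contained sketch of just that step is shown below; the TestSegmentFactory class and singlePartitionSegment method are placeholder names for illustration, not code from the Druid repository.

import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;

// Hypothetical helper that mirrors the builder calls used in the setup() methods above.
public class TestSegmentFactory {
    public static DataSegment singlePartitionSegment(String dataSource) {
        return DataSegment.builder()
                .dataSource(dataSource)
                .interval(Intervals.of("2000/P1D"))   // same one-day interval as the generator schemas above
                .version("1")
                .shardSpec(new LinearShardSpec(0))    // partition number 0: the only shard of this interval
                .size(0)
                .build();
    }
}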

Example 27 with LinearShardSpec

Use of org.apache.druid.timeline.partition.LinearShardSpec in project druid by druid-io.

From the class DruidSchemaInternRowSignatureBenchmark, the setup method:

@Setup
public void setup() {
    druidSchema = new DruidSchemaForBenchmark(EasyMock.mock(QueryLifecycleFactory.class), EasyMock.mock(TimelineServerView.class), null, null, EasyMock.mock(PlannerConfig.class), null, null);
    DruidServerMetadata serverMetadata = new DruidServerMetadata("dummy", "dummy", "dummy", 42, ServerType.HISTORICAL, "tier-0", 0);
    DataSegment.Builder builder = DataSegment.builder().dataSource("dummy").shardSpec(new LinearShardSpec(0)).dimensions(ImmutableList.of("col1", "col2", "col3", "col4")).version("1").size(0);
    for (int i = 0; i < 10000; ++i) {
        DataSegment dataSegment = builder.interval(Intervals.of(i + "/" + (i + 1))).build();
        druidSchema.addSegment(serverMetadata, dataSegment);
    }
}
Also used: LinearShardSpec (org.apache.druid.timeline.partition.LinearShardSpec), DruidServerMetadata (org.apache.druid.server.coordination.DruidServerMetadata), DataSegment (org.apache.druid.timeline.DataSegment), Setup (org.openjdk.jmh.annotations.Setup)
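
Because LinearShardSpec carries nothing but a partition number, tests that need several partitions of the same interval can simply count upward. The loop below is an illustrative sketch in the same spirit as the benchmark above; the interval, datasource name, and partition count are arbitrary placeholders.

// Three partitions of one interval, distinguished only by their LinearShardSpec partition numbers.
DataSegment.Builder builder = DataSegment.builder()
        .dataSource("dummy")
        .interval(Intervals.of("2000/P1D"))
        .version("1")
        .size(0);
for (int partitionNum = 0; partitionNum < 3; partitionNum++) {
    DataSegment segment = builder.shardSpec(new LinearShardSpec(partitionNum)).build();
    // e.g. druidSchema.addSegment(serverMetadata, segment), as in the benchmark above
}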

Example 28 with LinearShardSpec

Use of org.apache.druid.timeline.partition.LinearShardSpec in project druid by druid-io.

From the class ExpressionFilterBenchmark, the setup method:

@Setup(Level.Trial)
public void setup() {
    this.closer = Closer.create();
    final GeneratorSchemaInfo schemaInfo = new GeneratorSchemaInfo(ImmutableList.of(GeneratorColumnSchema.makeEnumerated("x", ValueType.STRING, false, 3, null, Arrays.asList("Apple", "Orange", "Xylophone", "Corundum", null), Arrays.asList(0.2, 0.25, 0.15, 0.10, 0.3)), GeneratorColumnSchema.makeEnumerated("y", ValueType.STRING, false, 4, null, Arrays.asList("Hello", "World", "Foo", "Bar", "Baz"), Arrays.asList(0.2, 0.25, 0.15, 0.10, 0.3))), ImmutableList.of(), Intervals.of("2000/P1D"), false);
    final DataSegment dataSegment = DataSegment.builder().dataSource("foo").interval(schemaInfo.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build();
    final SegmentGenerator segmentGenerator = closer.register(new SegmentGenerator());
    this.index = closer.register(segmentGenerator.generate(dataSegment, schemaInfo, Granularities.NONE, rowsPerSegment));
    expressionFilter = new ExpressionDimFilter("array_contains(x, ['Orange', 'Xylophone'])", TestExprMacroTable.INSTANCE);
    nativeFilter = new AndDimFilter(new SelectorDimFilter("x", "Orange", null), new SelectorDimFilter("x", "Xylophone", null));
}
Also used: SegmentGenerator (org.apache.druid.segment.generator.SegmentGenerator), AndDimFilter (org.apache.druid.query.filter.AndDimFilter), LinearShardSpec (org.apache.druid.timeline.partition.LinearShardSpec), SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter), GeneratorSchemaInfo (org.apache.druid.segment.generator.GeneratorSchemaInfo), DataSegment (org.apache.druid.timeline.DataSegment), ExpressionDimFilter (org.apache.druid.query.filter.ExpressionDimFilter), Setup (org.openjdk.jmh.annotations.Setup)

Example 29 with LinearShardSpec

Use of org.apache.druid.timeline.partition.LinearShardSpec in project druid by druid-io.

From the class SqlExpressionBenchmark, the setup method:

@Setup(Level.Trial)
public void setup() {
    final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench");
    final DataSegment dataSegment = DataSegment.builder().dataSource("foo").interval(schemaInfo.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build();
    final PlannerConfig plannerConfig = new PlannerConfig();
    final SegmentGenerator segmentGenerator = closer.register(new SegmentGenerator());
    log.info("Starting benchmark setup using cacheDir[%s], rows[%,d].", segmentGenerator.getCacheDir(), rowsPerSegment);
    final QueryableIndex index = segmentGenerator.generate(dataSegment, schemaInfo, Granularities.NONE, rowsPerSegment);
    final QueryRunnerFactoryConglomerate conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(closer, PROCESSING_CONFIG);
    final SpecificSegmentsQuerySegmentWalker walker = new SpecificSegmentsQuerySegmentWalker(conglomerate).add(dataSegment, index);
    closer.register(walker);
    final DruidSchemaCatalog rootSchema = CalciteTests.createMockRootSchema(conglomerate, walker, plannerConfig, AuthTestUtils.TEST_AUTHORIZER_MAPPER);
    plannerFactory = new PlannerFactory(rootSchema, CalciteTests.createMockQueryMakerFactory(walker, conglomerate), CalciteTests.createOperatorTable(), CalciteTests.createExprMacroTable(), plannerConfig, AuthTestUtils.TEST_AUTHORIZER_MAPPER, CalciteTests.getJsonMapper(), CalciteTests.DRUID_SCHEMA_NAME);
    try {
        SqlVectorizedExpressionSanityTest.sanityTestVectorizedSqlQueries(plannerFactory, QUERIES.get(Integer.parseInt(query)));
    } catch (Throwable ignored) {
    // the show must go on
    }
}
Also used: SegmentGenerator (org.apache.druid.segment.generator.SegmentGenerator), QueryRunnerFactoryConglomerate (org.apache.druid.query.QueryRunnerFactoryConglomerate), SpecificSegmentsQuerySegmentWalker (org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker), LinearShardSpec (org.apache.druid.timeline.partition.LinearShardSpec), QueryableIndex (org.apache.druid.segment.QueryableIndex), GeneratorSchemaInfo (org.apache.druid.segment.generator.GeneratorSchemaInfo), PlannerConfig (org.apache.druid.sql.calcite.planner.PlannerConfig), DruidSchemaCatalog (org.apache.druid.sql.calcite.schema.DruidSchemaCatalog), PlannerFactory (org.apache.druid.sql.calcite.planner.PlannerFactory), DataSegment (org.apache.druid.timeline.DataSegment), Setup (org.openjdk.jmh.annotations.Setup)

Example 30 with LinearShardSpec

Use of org.apache.druid.timeline.partition.LinearShardSpec in project druid by druid-io.

From the class DoublesSketchSqlAggregatorTest, the createQuerySegmentWalker method:

@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker() throws IOException {
    DoublesSketchModule.registerSerde();
    final QueryableIndex index = IndexBuilder.create(CalciteTests.getJsonMapper()).tmpDir(temporaryFolder.newFolder()).segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance()).schema(new IncrementalIndexSchema.Builder().withMetrics(new CountAggregatorFactory("cnt"), new DoubleSumAggregatorFactory("m1", "m1"), new DoublesSketchAggregatorFactory("qsketch_m1", "m1", 128)).withRollup(false).build()).rows(CalciteTests.ROWS1).buildMMappedIndex();
    return new SpecificSegmentsQuerySegmentWalker(conglomerate).add(DataSegment.builder().dataSource(CalciteTests.DATASOURCE1).interval(index.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build(), index);
}
Also used: CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory), SpecificSegmentsQuerySegmentWalker (org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker), QueryableIndex (org.apache.druid.segment.QueryableIndex), LinearShardSpec (org.apache.druid.timeline.partition.LinearShardSpec), IndexBuilder (org.apache.druid.segment.IndexBuilder), DoublesSketchAggregatorFactory (org.apache.druid.query.aggregation.datasketches.quantiles.DoublesSketchAggregatorFactory)
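
The same IndexBuilder-plus-walker pattern extends to multiple partitions: since SpecificSegmentsQuerySegmentWalker.add returns the walker, additional segments can be chained, each distinguished by its LinearShardSpec partition number. The snippet below is a hedged sketch reusing only the calls already shown in this example; registering one QueryableIndex under two partitions is purely illustrative.

// Illustrative: two linear partitions of the same datasource and interval on one walker.
return new SpecificSegmentsQuerySegmentWalker(conglomerate)
        .add(DataSegment.builder().dataSource(CalciteTests.DATASOURCE1).interval(index.getDataInterval())
                .version("1").shardSpec(new LinearShardSpec(0)).size(0).build(), index)
        .add(DataSegment.builder().dataSource(CalciteTests.DATASOURCE1).interval(index.getDataInterval())
                .version("1").shardSpec(new LinearShardSpec(1)).size(0).build(), index);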

Aggregations

LinearShardSpec (org.apache.druid.timeline.partition.LinearShardSpec): 42 usages
DataSegment (org.apache.druid.timeline.DataSegment): 30 usages
Test (org.junit.Test): 18 usages
QueryableIndex (org.apache.druid.segment.QueryableIndex): 14 usages
Interval (org.joda.time.Interval): 14 usages
GeneratorSchemaInfo (org.apache.druid.segment.generator.GeneratorSchemaInfo): 12 usages
SegmentGenerator (org.apache.druid.segment.generator.SegmentGenerator): 12 usages
SpecificSegmentsQuerySegmentWalker (org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker): 12 usages
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 11 usages
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory): 9 usages
Setup (org.openjdk.jmh.annotations.Setup): 9 usages
SegmentIdWithShardSpec (org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec): 8 usages
MetadataStorageTablesConfig (org.apache.druid.metadata.MetadataStorageTablesConfig): 7 usages
IndexBuilder (org.apache.druid.segment.IndexBuilder): 7 usages
DataSegmentPusher (org.apache.druid.segment.loading.DataSegmentPusher): 7 usages
HdfsDataSegmentPusher (org.apache.druid.storage.hdfs.HdfsDataSegmentPusher): 7 usages
HdfsDataSegmentPusherConfig (org.apache.druid.storage.hdfs.HdfsDataSegmentPusherConfig): 7 usages
LocalFileSystem (org.apache.hadoop.fs.LocalFileSystem): 7 usages
Path (org.apache.hadoop.fs.Path): 7 usages
File (java.io.File): 6 usages