
Example 6 with SegmentGenerator

use of org.apache.druid.segment.generator.SegmentGenerator in project druid by druid-io.

the class SqlVsNativeBenchmark method setup.

@Setup(Level.Trial)
public void setup() {
    this.closer = Closer.create();
    final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("basic");
    final DataSegment dataSegment = DataSegment.builder().dataSource("foo").interval(schemaInfo.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build();
    final SegmentGenerator segmentGenerator = closer.register(new SegmentGenerator());
    log.info("Starting benchmark setup using tmpDir[%s], rows[%,d].", segmentGenerator.getCacheDir(), rowsPerSegment);
    final QueryableIndex index = segmentGenerator.generate(dataSegment, schemaInfo, Granularities.NONE, rowsPerSegment);
    final QueryRunnerFactoryConglomerate conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(closer);
    final PlannerConfig plannerConfig = new PlannerConfig();
    this.walker = closer.register(new SpecificSegmentsQuerySegmentWalker(conglomerate).add(dataSegment, index));
    final DruidSchemaCatalog rootSchema = CalciteTests.createMockRootSchema(conglomerate, walker, plannerConfig, AuthTestUtils.TEST_AUTHORIZER_MAPPER);
    plannerFactory = new PlannerFactory(
        rootSchema,
        CalciteTests.createMockQueryMakerFactory(walker, conglomerate),
        CalciteTests.createOperatorTable(),
        CalciteTests.createExprMacroTable(),
        plannerConfig,
        AuthTestUtils.TEST_AUTHORIZER_MAPPER,
        CalciteTests.getJsonMapper(),
        CalciteTests.DRUID_SCHEMA_NAME
    );
    groupByQuery = GroupByQuery.builder()
        .setDataSource("foo")
        .setInterval(Intervals.ETERNITY)
        .setDimensions(
            new DefaultDimensionSpec("dimZipf", "d0"),
            new DefaultDimensionSpec("dimSequential", "d1")
        )
        .setAggregatorSpecs(new CountAggregatorFactory("c"))
        .setGranularity(Granularities.ALL)
        .build();
    sqlQuery = "SELECT\n"
        + "  dimZipf AS d0,"
        + "  dimSequential AS d1,\n"
        + "  COUNT(*) AS c\n"
        + "FROM druid.foo\n"
        + "GROUP BY dimZipf, dimSequential";
}
Also used : SegmentGenerator(org.apache.druid.segment.generator.SegmentGenerator) QueryRunnerFactoryConglomerate(org.apache.druid.query.QueryRunnerFactoryConglomerate) SpecificSegmentsQuerySegmentWalker(org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) LinearShardSpec(org.apache.druid.timeline.partition.LinearShardSpec) QueryableIndex(org.apache.druid.segment.QueryableIndex) GeneratorSchemaInfo(org.apache.druid.segment.generator.GeneratorSchemaInfo) PlannerConfig(org.apache.druid.sql.calcite.planner.PlannerConfig) DruidSchemaCatalog(org.apache.druid.sql.calcite.schema.DruidSchemaCatalog) PlannerFactory(org.apache.druid.sql.calcite.planner.PlannerFactory) DataSegment(org.apache.druid.timeline.DataSegment) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) Setup(org.openjdk.jmh.annotations.Setup)
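
A benchmark method built on this setup would typically run the prepared native query through the walker and drain the result sequence. The sketch below is illustrative only; it assumes Druid's QueryPlus, ResponseContext, Sequence, and ResultRow APIs from the same version and is not copied from the benchmark source.

@Benchmark
public void queryNative(Blackhole blackhole) {
    // Assumption: QueryPlus.run(QuerySegmentWalker, ResponseContext) yields a Sequence of ResultRow.
    final Sequence<ResultRow> results = QueryPlus.wrap(groupByQuery).run(walker, ResponseContext.createEmpty());
    // Drain the sequence so JMH measures full query execution, not just planning.
    final ResultRow lastRow = results.accumulate(null, (accumulated, in) -> in);
    blackhole.consume(lastRow);
}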

Example 7 with SegmentGenerator

use of org.apache.druid.segment.generator.SegmentGenerator in project druid by druid-io.

the class IndexedTableJoinCursorBenchmark method makeQueryableIndexSegment.

public static QueryableIndexSegment makeQueryableIndexSegment(Closer closer, String dataSource, int rowsPerSegment) {
    final List<GeneratorColumnSchema> schemaColumnsInfo = ImmutableList.of(
        GeneratorColumnSchema.makeSequential("stringKey", ValueType.STRING, false, 1, null, 0, rowsPerSegment),
        GeneratorColumnSchema.makeSequential("longKey", ValueType.LONG, false, 1, null, 0, rowsPerSegment),
        GeneratorColumnSchema.makeLazyZipf("string1", ValueType.STRING, false, 1, 0.1, 0, rowsPerSegment, 2.0),
        GeneratorColumnSchema.makeLazyZipf("string2", ValueType.STRING, false, 1, 0.3, 0, 1000000, 1.5),
        GeneratorColumnSchema.makeLazyZipf("string3", ValueType.STRING, false, 1, 0.12, 0, 1000, 1.25),
        GeneratorColumnSchema.makeLazyZipf("string4", ValueType.STRING, false, 1, 0.22, 0, 12000, 3.0),
        GeneratorColumnSchema.makeLazyZipf("string5", ValueType.STRING, false, 1, 0.05, 0, 33333, 1.8),
        GeneratorColumnSchema.makeLazyZipf("long1", ValueType.LONG, false, 1, 0.1, 0, 1001, 2.0),
        GeneratorColumnSchema.makeLazyZipf("long2", ValueType.LONG, false, 1, 0.01, 0, 666666, 2.2),
        GeneratorColumnSchema.makeLazyZipf("long3", ValueType.LONG, false, 1, 0.12, 0, 1000000, 2.5),
        GeneratorColumnSchema.makeLazyZipf("long4", ValueType.LONG, false, 1, 0.4, 0, 23, 1.2),
        GeneratorColumnSchema.makeLazyZipf("long5", ValueType.LONG, false, 1, 0.33, 0, 9999, 1.5),
        GeneratorColumnSchema.makeLazyZipf("double1", ValueType.DOUBLE, false, 1, 0.1, 0, 333, 2.2),
        GeneratorColumnSchema.makeLazyZipf("double2", ValueType.DOUBLE, false, 1, 0.01, 0, 4021, 2.5),
        GeneratorColumnSchema.makeLazyZipf("double3", ValueType.DOUBLE, false, 1, 0.41, 0, 90210, 4.0),
        GeneratorColumnSchema.makeLazyZipf("double4", ValueType.DOUBLE, false, 1, 0.5, 0, 5555555, 1.2),
        GeneratorColumnSchema.makeLazyZipf("double5", ValueType.DOUBLE, false, 1, 0.23, 0, 80, 1.8),
        GeneratorColumnSchema.makeLazyZipf("float1", ValueType.FLOAT, false, 1, 0.11, 0, 1000000, 1.7),
        GeneratorColumnSchema.makeLazyZipf("float2", ValueType.FLOAT, false, 1, 0.4, 0, 10, 1.5),
        GeneratorColumnSchema.makeLazyZipf("float3", ValueType.FLOAT, false, 1, 0.8, 0, 5000, 2.3),
        GeneratorColumnSchema.makeLazyZipf("float4", ValueType.FLOAT, false, 1, 0.999, 0, 14440, 2.0),
        GeneratorColumnSchema.makeLazyZipf("float5", ValueType.FLOAT, false, 1, 0.001, 0, 1029, 1.5)
    );
    final List<AggregatorFactory> aggs = new ArrayList<>();
    aggs.add(new CountAggregatorFactory("rows"));
    final Interval interval = Intervals.of("2000-01-01/P1D");
    final GeneratorSchemaInfo schema = new GeneratorSchemaInfo(schemaColumnsInfo, aggs, interval, false);
    final DataSegment dataSegment = DataSegment.builder().dataSource(dataSource).interval(schema.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build();
    final QueryableIndex index = closer.register(new SegmentGenerator()).generate(dataSegment, schema, Granularities.NONE, rowsPerSegment);
    return closer.register(new QueryableIndexSegment(index, SegmentId.dummy(dataSource)));
}
Also used : QueryableIndexSegment(org.apache.druid.segment.QueryableIndexSegment) LinearShardSpec(org.apache.druid.timeline.partition.LinearShardSpec) GeneratorSchemaInfo(org.apache.druid.segment.generator.GeneratorSchemaInfo) ArrayList(java.util.ArrayList) GeneratorColumnSchema(org.apache.druid.segment.generator.GeneratorColumnSchema) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) DataSegment(org.apache.druid.timeline.DataSegment) SegmentGenerator(org.apache.druid.segment.generator.SegmentGenerator) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) QueryableIndex(org.apache.druid.segment.QueryableIndex) Interval(org.joda.time.Interval)
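
A caller would typically pair this helper with a trial-scoped Closer so the generated segment and the SegmentGenerator's temporary files are released when the benchmark ends. The field names, data-source name, and row count below are hypothetical; only the helper's signature comes from the example above.

@Setup(Level.Trial)
public void setup() {
    this.closer = Closer.create();
    // 500_000 rows is an arbitrary illustrative value.
    this.segment = makeQueryableIndexSegment(closer, "join-benchmark-datasource", 500_000);
}

@TearDown(Level.Trial)
public void tearDown() throws Exception {
    // Closes the registered segment and deletes the generator's temp directory.
    closer.close();
}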

Example 8 with SegmentGenerator

use of org.apache.druid.segment.generator.SegmentGenerator in project druid by druid-io.

the class ExpressionSelectorBenchmark method setup.

@Setup(Level.Trial)
public void setup() {
    this.closer = Closer.create();
    final GeneratorSchemaInfo schemaInfo = new GeneratorSchemaInfo(
        ImmutableList.of(
            GeneratorColumnSchema.makeZipf("n", ValueType.LONG, false, 1, 0d, 1000, 10000, 3d),
            GeneratorColumnSchema.makeZipf("s", ValueType.STRING, false, 1, 0d, 1000, 10000, 3d)
        ),
        ImmutableList.of(),
        Intervals.of("2000/P1D"),
        false
    );
    final DataSegment dataSegment = DataSegment.builder().dataSource("foo").interval(schemaInfo.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build();
    final SegmentGenerator segmentGenerator = closer.register(new SegmentGenerator());
    this.index = closer.register(segmentGenerator.generate(dataSegment, schemaInfo, Granularities.HOUR, rowsPerSegment));
}
Also used : SegmentGenerator(org.apache.druid.segment.generator.SegmentGenerator) LinearShardSpec(org.apache.druid.timeline.partition.LinearShardSpec) GeneratorSchemaInfo(org.apache.druid.segment.generator.GeneratorSchemaInfo) DataSegment(org.apache.druid.timeline.DataSegment) Setup(org.openjdk.jmh.annotations.Setup)
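
The rowsPerSegment field referenced in these setups is typically a JMH parameter, so the same generated segment can be benchmarked at several sizes. A sketch of that class-level wiring follows; the annotation values and the single parameter value are illustrative, not taken from the benchmark source.

@State(Scope.Benchmark)
@Fork(1)
@Warmup(iterations = 3)
@Measurement(iterations = 5)
public class ExpressionSelectorBenchmark {
    // Illustrative size; the real benchmark may use different values.
    @Param({"1000000"})
    int rowsPerSegment;

    private Closer closer;
    private QueryableIndex index;

    // ... @Setup(Level.Trial) setup() as shown in the example above ...
}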

Example 9 with SegmentGenerator

use of org.apache.druid.segment.generator.SegmentGenerator in project druid by druid-io.

the class ExpressionVectorSelectorBenchmark method setup.

@Setup(Level.Trial)
public void setup() {
    this.closer = Closer.create();
    final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench");
    final DataSegment dataSegment = DataSegment.builder().dataSource("foo").interval(schemaInfo.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build();
    final SegmentGenerator segmentGenerator = closer.register(new SegmentGenerator());
    this.index = closer.register(segmentGenerator.generate(dataSegment, schemaInfo, Granularities.HOUR, rowsPerSegment));
    Expr parsed = Parser.parse(expression, ExprMacroTable.nil());
    outputType = parsed.getOutputType(new ColumnInspector() {

        @Nullable
        @Override
        public ColumnCapabilities getColumnCapabilities(String column) {
            return QueryableIndexStorageAdapter.getColumnCapabilities(index, column);
        }
    });
    checkSanity();
}
Also used : SegmentGenerator(org.apache.druid.segment.generator.SegmentGenerator) Expr(org.apache.druid.math.expr.Expr) LinearShardSpec(org.apache.druid.timeline.partition.LinearShardSpec) GeneratorSchemaInfo(org.apache.druid.segment.generator.GeneratorSchemaInfo) ColumnInspector(org.apache.druid.segment.ColumnInspector) DataSegment(org.apache.druid.timeline.DataSegment) Setup(org.openjdk.jmh.annotations.Setup)
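
Since the anonymous ColumnInspector above only implements getColumnCapabilities, the same delegation can be written more compactly, assuming ColumnInspector remains a single-abstract-method interface in this version of Druid:

    // Equivalent lambda form of the ColumnInspector above (sketch, not from the benchmark source).
    outputType = parsed.getOutputType(
        column -> QueryableIndexStorageAdapter.getColumnCapabilities(index, column)
    );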

Example 10 with SegmentGenerator

use of org.apache.druid.segment.generator.SegmentGenerator in project druid by druid-io.

the class ExpressionAggregationBenchmark method setup.

@Setup(Level.Trial)
public void setup() {
    this.closer = Closer.create();
    final GeneratorSchemaInfo schemaInfo = new GeneratorSchemaInfo(
        ImmutableList.of(
            GeneratorColumnSchema.makeNormal("x", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false),
            GeneratorColumnSchema.makeNormal("y", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false)
        ),
        ImmutableList.of(),
        Intervals.of("2000/P1D"),
        false
    );
    final DataSegment dataSegment = DataSegment.builder().dataSource("foo").interval(schemaInfo.getDataInterval()).version("1").shardSpec(new LinearShardSpec(0)).size(0).build();
    final SegmentGenerator segmentGenerator = closer.register(new SegmentGenerator());
    this.index = closer.register(segmentGenerator.generate(dataSegment, schemaInfo, Granularities.NONE, rowsPerSegment));
    this.javaScriptAggregatorFactory = new JavaScriptAggregatorFactory(
        "name",
        ImmutableList.of("x", "y"),
        "function(current,x,y) { if (x > 0) { return current + x + 1 } else { return current + y + 1 } }",
        "function() { return 0 }",
        "function(a,b) { return a + b }",
        JavaScriptConfig.getEnabledInstance()
    );
    this.expressionAggregatorFactory = new DoubleSumAggregatorFactory("name", null, "if(x>0,1.0+x,y+1)", TestExprMacroTable.INSTANCE);
}
Also used : SegmentGenerator(org.apache.druid.segment.generator.SegmentGenerator) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) LinearShardSpec(org.apache.druid.timeline.partition.LinearShardSpec) GeneratorSchemaInfo(org.apache.druid.segment.generator.GeneratorSchemaInfo) JavaScriptAggregatorFactory(org.apache.druid.query.aggregation.JavaScriptAggregatorFactory) DataSegment(org.apache.druid.timeline.DataSegment) Setup(org.openjdk.jmh.annotations.Setup)
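
The point of holding both factories is to benchmark the same aggregation expressed as JavaScript versus a native Druid expression. The measurement pattern might look like the sketch below; compute(...) is a hypothetical placeholder for the cursor-scanning helper such a benchmark needs and is not shown here.

@Benchmark
public void queryUsingJavaScript(Blackhole blackhole) {
    // compute(...) is a hypothetical helper that scans the generated index with the given factory.
    blackhole.consume(compute(javaScriptAggregatorFactory));
}

@Benchmark
public void queryUsingExpression(Blackhole blackhole) {
    blackhole.consume(compute(expressionAggregatorFactory));
}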

Aggregations

SegmentGenerator (org.apache.druid.segment.generator.SegmentGenerator): 13 usages
GeneratorSchemaInfo (org.apache.druid.segment.generator.GeneratorSchemaInfo): 12 usages
DataSegment (org.apache.druid.timeline.DataSegment): 12 usages
LinearShardSpec (org.apache.druid.timeline.partition.LinearShardSpec): 12 usages
Setup (org.openjdk.jmh.annotations.Setup): 8 usages
QueryableIndex (org.apache.druid.segment.QueryableIndex): 5 usages
PlannerConfig (org.apache.druid.sql.calcite.planner.PlannerConfig): 4 usages
PlannerFactory (org.apache.druid.sql.calcite.planner.PlannerFactory): 4 usages
DruidSchemaCatalog (org.apache.druid.sql.calcite.schema.DruidSchemaCatalog): 4 usages
SpecificSegmentsQuerySegmentWalker (org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker): 4 usages
QueryRunnerFactoryConglomerate (org.apache.druid.query.QueryRunnerFactoryConglomerate): 3 usages
BeforeClass (org.junit.BeforeClass): 3 usages
CachingClusteredClient (org.apache.druid.client.CachingClusteredClient): 2 usages
CacheConfig (org.apache.druid.client.cache.CacheConfig): 2 usages
CachePopulatorStats (org.apache.druid.client.cache.CachePopulatorStats): 2 usages
ForegroundCachePopulator (org.apache.druid.client.cache.ForegroundCachePopulator): 2 usages
DruidHttpClientConfig (org.apache.druid.guice.http.DruidHttpClientConfig): 2 usages
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 2 usages
MapJoinableFactory (org.apache.druid.segment.join.MapJoinableFactory): 2 usages
NoopServiceEmitter (org.apache.druid.server.metrics.NoopServiceEmitter): 2 usages