Use of org.apache.druid.timeline.partition.LinearShardSpec in project druid by druid-io.
The class ExpressionAggregationBenchmark, method setup:
@Setup(Level.Trial)
public void setup() {
  this.closer = Closer.create();

  final GeneratorSchemaInfo schemaInfo = new GeneratorSchemaInfo(
      ImmutableList.of(
          GeneratorColumnSchema.makeNormal("x", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false),
          GeneratorColumnSchema.makeNormal("y", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false)
      ),
      ImmutableList.of(),
      Intervals.of("2000/P1D"),
      false
  );

  final DataSegment dataSegment = DataSegment.builder()
                                             .dataSource("foo")
                                             .interval(schemaInfo.getDataInterval())
                                             .version("1")
                                             .shardSpec(new LinearShardSpec(0))
                                             .size(0)
                                             .build();

  final SegmentGenerator segmentGenerator = closer.register(new SegmentGenerator());
  this.index = closer.register(segmentGenerator.generate(dataSegment, schemaInfo, Granularities.NONE, rowsPerSegment));

  this.javaScriptAggregatorFactory = new JavaScriptAggregatorFactory(
      "name",
      ImmutableList.of("x", "y"),
      "function(current,x,y) { if (x > 0) { return current + x + 1 } else { return current + y + 1 } }",
      "function() { return 0 }",
      "function(a,b) { return a + b }",
      JavaScriptConfig.getEnabledInstance()
  );

  this.expressionAggregatorFactory = new DoubleSumAggregatorFactory(
      "name",
      null,
      "if(x>0,1.0+x,y+1)",
      TestExprMacroTable.INSTANCE
  );
}
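For context, a LinearShardSpec is constructed from just a partition number, so splitting the same interval into several linear partitions would reuse the builder above with increasing partition ids. A minimal sketch under that assumption (the three-partition count and the segments list are illustrative, not part of the benchmark):

// Illustrative only: three linearly partitioned segments over the same interval.
final List<DataSegment> segments = new ArrayList<>();
for (int partitionNum = 0; partitionNum < 3; partitionNum++) {
  segments.add(
      DataSegment.builder()
                 .dataSource("foo")
                 .interval(schemaInfo.getDataInterval())
                 .version("1")
                 .shardSpec(new LinearShardSpec(partitionNum)) // partition id within the interval
                 .size(0)
                 .build()
  );
}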
Use of org.apache.druid.timeline.partition.LinearShardSpec in project druid by druid-io.
The class DruidSchemaInternRowSignatureBenchmark, method setup:
@Setup
public void setup() {
  druidSchema = new DruidSchemaForBenchmark(
      EasyMock.mock(QueryLifecycleFactory.class),
      EasyMock.mock(TimelineServerView.class),
      null,
      null,
      EasyMock.mock(PlannerConfig.class),
      null,
      null
  );

  DruidServerMetadata serverMetadata = new DruidServerMetadata(
      "dummy",
      "dummy",
      "dummy",
      42,
      ServerType.HISTORICAL,
      "tier-0",
      0
  );

  DataSegment.Builder builder = DataSegment.builder()
                                           .dataSource("dummy")
                                           .shardSpec(new LinearShardSpec(0))
                                           .dimensions(ImmutableList.of("col1", "col2", "col3", "col4"))
                                           .version("1")
                                           .size(0);

  for (int i = 0; i < 10000; ++i) {
    DataSegment dataSegment = builder.interval(Intervals.of(i + "/" + (i + 1))).build();
    druidSchema.addSegment(serverMetadata, dataSegment);
  }
}
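For reference, the partition number supplied by LinearShardSpec is retained on each built segment and contributes to its identity; a small sketch under that assumption (the local variables are illustrative, not part of the benchmark):

// Illustrative only: the shard spec can be read back from the built segment.
DataSegment firstSegment = builder.interval(Intervals.of("0/1")).build(); // same interval as the i == 0 iteration
int partitionNum = firstSegment.getShardSpec().getPartitionNum();         // 0 for every segment built above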
Use of org.apache.druid.timeline.partition.LinearShardSpec in project druid by druid-io.
The class ExpressionFilterBenchmark, method setup:
@Setup(Level.Trial)
public void setup() {
  this.closer = Closer.create();

  final GeneratorSchemaInfo schemaInfo = new GeneratorSchemaInfo(
      ImmutableList.of(
          GeneratorColumnSchema.makeEnumerated(
              "x",
              ValueType.STRING,
              false,
              3,
              null,
              Arrays.asList("Apple", "Orange", "Xylophone", "Corundum", null),
              Arrays.asList(0.2, 0.25, 0.15, 0.10, 0.3)
          ),
          GeneratorColumnSchema.makeEnumerated(
              "y",
              ValueType.STRING,
              false,
              4,
              null,
              Arrays.asList("Hello", "World", "Foo", "Bar", "Baz"),
              Arrays.asList(0.2, 0.25, 0.15, 0.10, 0.3)
          )
      ),
      ImmutableList.of(),
      Intervals.of("2000/P1D"),
      false
  );

  final DataSegment dataSegment = DataSegment.builder()
                                             .dataSource("foo")
                                             .interval(schemaInfo.getDataInterval())
                                             .version("1")
                                             .shardSpec(new LinearShardSpec(0))
                                             .size(0)
                                             .build();

  final SegmentGenerator segmentGenerator = closer.register(new SegmentGenerator());
  this.index = closer.register(segmentGenerator.generate(dataSegment, schemaInfo, Granularities.NONE, rowsPerSegment));

  expressionFilter = new ExpressionDimFilter("array_contains(x, ['Orange', 'Xylophone'])", TestExprMacroTable.INSTANCE);
  nativeFilter = new AndDimFilter(
      new SelectorDimFilter("x", "Orange", null),
      new SelectorDimFilter("x", "Xylophone", null)
  );
}
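The benchmark pairs an expression filter with its native equivalent. For comparison, a plain single-value equality check would pair up the same way; a hedged sketch (these two filters are illustrative and not part of the benchmark):

// Illustrative only: expression form vs. native form of a simple equality filter.
DimFilter expressionEquality = new ExpressionDimFilter("x == 'Orange'", TestExprMacroTable.INSTANCE);
DimFilter nativeEquality = new SelectorDimFilter("x", "Orange", null);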
Use of org.apache.druid.timeline.partition.LinearShardSpec in project druid by druid-io.
The class SqlExpressionBenchmark, method setup:
@Setup(Level.Trial)
public void setup() {
  final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench");

  final DataSegment dataSegment = DataSegment.builder()
                                             .dataSource("foo")
                                             .interval(schemaInfo.getDataInterval())
                                             .version("1")
                                             .shardSpec(new LinearShardSpec(0))
                                             .size(0)
                                             .build();

  final PlannerConfig plannerConfig = new PlannerConfig();

  final SegmentGenerator segmentGenerator = closer.register(new SegmentGenerator());
  log.info("Starting benchmark setup using cacheDir[%s], rows[%,d].", segmentGenerator.getCacheDir(), rowsPerSegment);
  final QueryableIndex index = segmentGenerator.generate(dataSegment, schemaInfo, Granularities.NONE, rowsPerSegment);

  final QueryRunnerFactoryConglomerate conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(closer, PROCESSING_CONFIG);

  final SpecificSegmentsQuerySegmentWalker walker = new SpecificSegmentsQuerySegmentWalker(conglomerate).add(dataSegment, index);
  closer.register(walker);

  final DruidSchemaCatalog rootSchema = CalciteTests.createMockRootSchema(conglomerate, walker, plannerConfig, AuthTestUtils.TEST_AUTHORIZER_MAPPER);

  plannerFactory = new PlannerFactory(
      rootSchema,
      CalciteTests.createMockQueryMakerFactory(walker, conglomerate),
      CalciteTests.createOperatorTable(),
      CalciteTests.createExprMacroTable(),
      plannerConfig,
      AuthTestUtils.TEST_AUTHORIZER_MAPPER,
      CalciteTests.getJsonMapper(),
      CalciteTests.DRUID_SCHEMA_NAME
  );

  try {
    SqlVectorizedExpressionSanityTest.sanityTestVectorizedSqlQueries(plannerFactory, QUERIES.get(Integer.parseInt(query)));
  } catch (Throwable ignored) {
    // the show must go on
  }
}
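For context, query here is a JMH benchmark parameter whose string value indexes into QUERIES. A hedged sketch of how such a parameter is typically declared elsewhere in the class (the values listed are illustrative; the real benchmark enumerates its own query indexes):

// Illustrative only: JMH injects one QUERIES index per benchmark run.
@Param({"0", "1", "2"})
private String query;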
Use of org.apache.druid.timeline.partition.LinearShardSpec in project druid by druid-io.
The class DoublesSketchSqlAggregatorTest, method createQuerySegmentWalker:
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker() throws IOException {
  DoublesSketchModule.registerSerde();

  final QueryableIndex index = IndexBuilder
      .create(CalciteTests.getJsonMapper())
      .tmpDir(temporaryFolder.newFolder())
      .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
      .schema(
          new IncrementalIndexSchema.Builder()
              .withMetrics(
                  new CountAggregatorFactory("cnt"),
                  new DoubleSumAggregatorFactory("m1", "m1"),
                  new DoublesSketchAggregatorFactory("qsketch_m1", "m1", 128)
              )
              .withRollup(false)
              .build()
      )
      .rows(CalciteTests.ROWS1)
      .buildMMappedIndex();

  return new SpecificSegmentsQuerySegmentWalker(conglomerate).add(
      DataSegment.builder()
                 .dataSource(CalciteTests.DATASOURCE1)
                 .interval(index.getDataInterval())
                 .version("1")
                 .shardSpec(new LinearShardSpec(0))
                 .size(0)
                 .build(),
      index
  );
}
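The qsketch_m1 metric registered above is the pre-aggregated column that the DataSketches quantile SQL functions read. A hedged usage sketch (this query string is illustrative; it is not the exact SQL issued by the test):

// Illustrative only: a quantile query over the pre-aggregated sketch column.
final String sql = "SELECT APPROX_QUANTILE_DS(qsketch_m1, 0.5) FROM foo";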