Use of org.apache.druid.sql.calcite.planner.PlannerResult in project druid by druid-io.
From the class SqlVectorizedExpressionSanityTest, method sanityTestVectorizedSqlQueries:
public static void sanityTestVectorizedSqlQueries(PlannerFactory plannerFactory, String query)
    throws ValidationException, RelConversionException, SqlParseException
{
  // Plan the same SQL twice: once with vectorization forced, once with it disabled.
  final Map<String, Object> vector = ImmutableMap.of(
      QueryContexts.VECTORIZE_KEY, "force",
      QueryContexts.VECTORIZE_VIRTUAL_COLUMNS_KEY, "force"
  );
  final Map<String, Object> nonvector = ImmutableMap.of(
      QueryContexts.VECTORIZE_KEY, "false",
      QueryContexts.VECTORIZE_VIRTUAL_COLUMNS_KEY, "false"
  );
  try (
      final DruidPlanner vectorPlanner = plannerFactory.createPlannerForTesting(vector, query);
      final DruidPlanner nonVectorPlanner = plannerFactory.createPlannerForTesting(nonvector, query)
  ) {
    final PlannerResult vectorPlan = vectorPlanner.plan();
    final PlannerResult nonVectorPlan = nonVectorPlanner.plan();
    final Sequence<Object[]> vectorSequence = vectorPlan.run();
    final Sequence<Object[]> nonVectorSequence = nonVectorPlan.run();
    Yielder<Object[]> vectorizedYielder = Yielders.each(vectorSequence);
    Yielder<Object[]> nonVectorizedYielder = Yielders.each(nonVectorSequence);
    int row = 0;
    int misMatch = 0;
    // Walk both result sets in lockstep and compare them row by row.
    while (!vectorizedYielder.isDone() && !nonVectorizedYielder.isDone()) {
      Object[] vectorGet = vectorizedYielder.get();
      Object[] nonVectorizedGet = nonVectorizedYielder.get();
      try {
        Assert.assertEquals(vectorGet.length, nonVectorizedGet.length);
        for (int i = 0; i < vectorGet.length; i++) {
          Object nonVectorObject = nonVectorizedGet[i];
          Object vectorObject = vectorGet[i];
          if (vectorObject instanceof Float || vectorObject instanceof Double) {
            // Floating-point results are compared with a small tolerance.
            Assert.assertEquals(
                StringUtils.format("Double results differed at row %s (%s : %s)", row, nonVectorObject, vectorObject),
                ((Double) nonVectorObject).doubleValue(),
                ((Double) vectorObject).doubleValue(),
                0.01
            );
          } else {
            Assert.assertEquals(
                StringUtils.format("Results differed at row %s (%s : %s)", row, nonVectorObject, vectorObject),
                nonVectorObject,
                vectorObject
            );
          }
        }
      }
      catch (Throwable t) {
        // Record the mismatch but keep scanning so every differing row gets logged.
        log.warn(t.getMessage());
        misMatch++;
      }
      vectorizedYielder = vectorizedYielder.next(vectorGet);
      nonVectorizedYielder = nonVectorizedYielder.next(nonVectorizedGet);
      row++;
    }
    Assert.assertEquals("Expected no mismatched results", 0, misMatch);
    Assert.assertTrue(vectorizedYielder.isDone());
    Assert.assertTrue(nonVectorizedYielder.isDone());
  }
}
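A caller would typically drive this helper from a parameterized test, passing a planner factory wired to test data and one SQL string per test case. A minimal sketch, assuming plannerFactory and query are fixtures supplied by the surrounding test class (they are not part of the snippet above):

@Test
public void testQueryVectorizedVsNonVectorized() throws Exception {
  // plannerFactory and query are assumed test fixtures.
  SqlVectorizedExpressionSanityTest.sanityTestVectorizedSqlQueries(plannerFactory, query);
}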
Use of org.apache.druid.sql.calcite.planner.PlannerResult in project druid by druid-io.
From the class SqlExpressionBenchmark, method querySql:
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
public void querySql(Blackhole blackhole) throws Exception {
  final Map<String, Object> context = ImmutableMap.of(
      QueryContexts.VECTORIZE_KEY, vectorize,
      QueryContexts.VECTORIZE_VIRTUAL_COLUMNS_KEY, vectorize
  );
  final String sql = QUERIES.get(Integer.parseInt(query));
  try (final DruidPlanner planner = plannerFactory.createPlannerForTesting(context, sql)) {
    final PlannerResult plannerResult = planner.plan();
    final Sequence<Object[]> resultSequence = plannerResult.run();
    final Object[] lastRow = resultSequence.accumulate(null, (accumulated, in) -> in);
    blackhole.consume(lastRow);
  }
}
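The accumulate(null, (accumulated, in) -> in) call drains the entire result Sequence while keeping only the final row, which is then handed to the JMH Blackhole so the query work cannot be eliminated by the JIT. The same idiom can drain a sequence while computing something else; for example, a row count could be kept instead (a sketch, not taken from the Druid source):

final int rowCount = resultSequence.accumulate(0, (count, row) -> count + 1);
blackhole.consume(rowCount);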
Use of org.apache.druid.sql.calcite.planner.PlannerResult in project druid by druid-io.
From the class SqlBenchmark, method planSql:
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
public void planSql(Blackhole blackhole) throws Exception {
  final Map<String, Object> context = ImmutableMap.of(
      QueryContexts.VECTORIZE_KEY, vectorize,
      QueryContexts.VECTORIZE_VIRTUAL_COLUMNS_KEY, vectorize
  );
  final String sql = QUERIES.get(Integer.parseInt(query));
  try (final DruidPlanner planner = plannerFactory.createPlannerForTesting(context, sql)) {
    final PlannerResult plannerResult = planner.plan();
    blackhole.consume(plannerResult);
  }
}
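Unlike querySql, this method consumes the PlannerResult without calling run(), so it measures planning time only. The vectorize and query fields referenced above are not shown in these snippets; in a JMH benchmark they would typically be @Param fields along these lines (the values here are illustrative assumptions, not copied from SqlBenchmark):

@Param({"false", "force"})
private String vectorize;

@Param({"0", "1", "2"})
private String query;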
Use of org.apache.druid.sql.calcite.planner.PlannerResult in project druid by druid-io.
From the class SqlVsNativeBenchmark, method queryPlanner:
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
public void queryPlanner(Blackhole blackhole) throws Exception {
  try (final DruidPlanner planner = plannerFactory.createPlannerForTesting(null, sqlQuery)) {
    final PlannerResult plannerResult = planner.plan();
    final Sequence<Object[]> resultSequence = plannerResult.run();
    final Object[] lastRow = resultSequence.accumulate(null, (accumulated, in) -> in);
    blackhole.consume(lastRow);
  }
}
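This variant passes a null query context, so planning and execution run with whatever defaults the test planner applies rather than an explicit vectorization setting. As the class name suggests, it presumably sits alongside a companion benchmark method (not shown here) that issues the equivalent hand-built native query, so the end-to-end SQL path can be compared against native execution.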
Use of org.apache.druid.sql.calcite.planner.PlannerResult in project druid by druid-io.
From the class SqlBenchmark, method querySql:
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
public void querySql(Blackhole blackhole) throws Exception {
  final Map<String, Object> context = ImmutableMap.of(
      QueryContexts.VECTORIZE_KEY, vectorize,
      QueryContexts.VECTORIZE_VIRTUAL_COLUMNS_KEY, vectorize
  );
  final String sql = QUERIES.get(Integer.parseInt(query));
  try (final DruidPlanner planner = plannerFactory.createPlannerForTesting(context, sql)) {
    final PlannerResult plannerResult = planner.plan();
    final Sequence<Object[]> resultSequence = plannerResult.run();
    final Object[] lastRow = resultSequence.accumulate(null, (accumulated, in) -> in);
    blackhole.consume(lastRow);
  }
}
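The query benchmarks above share the same plan-run-drain shape. If one wanted to factor it out, a helper along the following lines would cover all of them; this is a sketch using only the APIs visible in the snippets, and runToLastRow is a hypothetical name, not part of the Druid source:

// Hypothetical helper capturing the shared pattern shown above.
private static Object[] runToLastRow(PlannerFactory plannerFactory, Map<String, Object> context, String sql) throws Exception {
  try (final DruidPlanner planner = plannerFactory.createPlannerForTesting(context, sql)) {
    final PlannerResult plannerResult = planner.plan();
    final Sequence<Object[]> resultSequence = plannerResult.run();
    // Drain the sequence, keeping only the last row.
    return resultSequence.accumulate(null, (accumulated, in) -> in);
  }
}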