Use of io.prestosql.spi.relation.CallExpression in project hetu-core by openlookeng.
From class ClickHouseRowExpressionConverter, method handleOperatorFunction:
private String handleOperatorFunction(CallExpression call, FunctionMetadata functionMetadata, JdbcConverterContext context)
{
    Optional<OperatorType> operatorTypeOptional = functionMetadata.getOperatorType();
    OperatorType type = operatorTypeOptional.get();
    if (type.equals(OperatorType.CAST)) {
        // CAST has its own handler
        return handleCastOperator(call.getArguments().get(0), call.getType(), context);
    }
    // Render each argument to its ClickHouse SQL form before combining them
    List<String> argumentList = call.getArguments().stream()
            .map(expr -> expr.accept(this, context))
            .collect(Collectors.toList());
    if (type.isArithmeticOperator()) {
        return format("(%s %s %s)", argumentList.get(0), type.getOperator(), argumentList.get(1));
    }
    if (type.isComparisonOperator()) {
        final String[] clickHouseCompareOperators = new String[] {"=", ">", "<", ">=", "<=", "!=", "<>"};
        if (Arrays.asList(clickHouseCompareOperators).contains(type.getOperator())) {
            return format("(%s %s %s)", argumentList.get(0), type.getOperator(), argumentList.get(1));
        }
        else {
            String exceptionInfo = "ClickHouse Connector does not support comparison operator " + type.getOperator();
            throw new PrestoException(NOT_SUPPORTED, exceptionInfo);
        }
    }
    if (type.equals(OperatorType.SUBSCRIPT)) {
        throw new PrestoException(NOT_SUPPORTED, "ClickHouse Connector does not support subscript now");
    }
    /*
     * "Negative" needs to be tested
     */
    if (call.getArguments().size() == 1 && type.equals(OperatorType.NEGATION)) {
        String value = argumentList.get(0);
        // Nested negation: insert a space so the result is "- -x" rather than "--x"
        String separator = value.startsWith("-") ? " " : "";
        return format("-%s%s", separator, value);
    }
    throw new PrestoException(NOT_SUPPORTED, String.format("Unknown operator %s in push down", type.getOperator()));
}
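For reference, here is a minimal, standalone sketch of the string patterns this converter emits for binary and negation operators. It is plain Java with illustrative column names (l_quantity, l_discount) and is not part of the connector:

import static java.lang.String.format;

public class ClickHouseOperatorFormatSketch
{
    // Mirrors the "(left op right)" pattern used for arithmetic and supported comparison operators.
    static String formatBinary(String left, String operator, String right)
    {
        return format("(%s %s %s)", left, operator, right);
    }

    // Mirrors the negation branch: a space before an already-negative argument
    // produces "- -x" instead of "--x".
    static String formatNegation(String value)
    {
        String separator = value.startsWith("-") ? " " : "";
        return format("-%s%s", separator, value);
    }

    public static void main(String[] args)
    {
        System.out.println(formatBinary("l_quantity", ">=", "10")); // (l_quantity >= 10)
        System.out.println(formatNegation("-l_discount"));          // - -l_discount
    }
}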
Use of io.prestosql.spi.relation.CallExpression in project hetu-core by openlookeng.
From class CubeOptimizer, method rewriteAggregationNode:
private PlanNode rewriteAggregationNode(CubeRewriteResult cubeRewriteResult, PlanNode inputPlanNode)
{
    TypeProvider typeProvider = context.getSymbolAllocator().getTypes();
    // Add group by
    List<Symbol> groupings = aggregationNode.getGroupingKeys().stream()
            .map(Symbol::getName)
            .map(columnRewritesMap::get)
            .map(optimizedPlanMappings::get)
            .collect(Collectors.toList());
    Map<Symbol, Symbol> cubeScanToAggOutputMap = new HashMap<>();
    // Rewrite AggregationNode using Cube table
    ImmutableMap.Builder<Symbol, AggregationNode.Aggregation> aggregationsBuilder = ImmutableMap.builder();
    for (CubeRewriteResult.AggregatorSource aggregatorSource : cubeRewriteResult.getAggregationColumns()) {
        Type type = cubeRewriteResult.getSymbolMetadataMap().get(aggregatorSource.getOriginalAggSymbol()).getType();
        TypeSignature typeSignature = type.getTypeSignature();
        ColumnHandle cubeColHandle = cubeRewriteResult.getTableScanNode().getAssignments().get(aggregatorSource.getScanSymbol());
        ColumnMetadata cubeColumnMetadata = metadata.getColumnMetadata(context.getSession(), cubeTableHandle, cubeColHandle);
        AggregationSignature aggregationSignature = cubeMetadata.getAggregationSignature(cubeColumnMetadata.getName())
                .orElseThrow(() -> new ColumnNotFoundException(new SchemaTableName("", ""), cubeColHandle.getColumnName()));
        String aggFunction = COUNT.getName().equals(aggregationSignature.getFunction()) ? SUM.getName() : aggregationSignature.getFunction();
        SymbolReference argument = toSymbolReference(aggregatorSource.getScanSymbol());
        FunctionHandle functionHandle = metadata.getFunctionAndTypeManager().lookupFunction(aggFunction, TypeSignatureProvider.fromTypeSignatures(typeSignature));
        cubeScanToAggOutputMap.put(aggregatorSource.getScanSymbol(), aggregatorSource.getOriginalAggSymbol());
        aggregationsBuilder.put(aggregatorSource.getOriginalAggSymbol(),
                new AggregationNode.Aggregation(
                        new CallExpression(aggFunction, functionHandle, type, ImmutableList.of(castToRowExpression(argument))),
                        ImmutableList.of(castToRowExpression(argument)),
                        false,
                        Optional.empty(),
                        Optional.empty(),
                        Optional.empty()));
    }
    PlanNode planNode = inputPlanNode;
    AggregationNode aggNode = new AggregationNode(
            context.getIdAllocator().getNextId(),
            planNode,
            aggregationsBuilder.build(),
            singleGroupingSet(groupings),
            ImmutableList.of(),
            AggregationNode.Step.SINGLE,
            Optional.empty(),
            Optional.empty(),
            AggregationNode.AggregationType.HASH,
            Optional.empty());
    if (cubeRewriteResult.getAvgAggregationColumns().isEmpty()) {
        return aggNode;
    }
    if (!cubeRewriteResult.getComputeAvgDividingSumByCount()) {
        Map<Symbol, Expression> aggregateAssignments = new HashMap<>();
        for (CubeRewriteResult.AggregatorSource aggregatorSource : cubeRewriteResult.getAggregationColumns()) {
            aggregateAssignments.put(aggregatorSource.getOriginalAggSymbol(), toSymbolReference(aggregatorSource.getScanSymbol()));
        }
        planNode = new ProjectNode(
                context.getIdAllocator().getNextId(),
                aggNode,
                new Assignments(aggregateAssignments.entrySet().stream()
                        .collect(Collectors.toMap(Map.Entry::getKey, entry -> castToRowExpression(entry.getValue())))));
    }
    else {
        // If there was an AVG aggregation, map it to AVG = SUM/COUNT
        Map<Symbol, Expression> projections = new HashMap<>();
        aggNode.getGroupingKeys().forEach(symbol -> projections.put(symbol, toSymbolReference(symbol)));
        aggNode.getAggregations().keySet().stream()
                .filter(originalAggregationsMap::containsValue)
                .forEach(aggSymbol -> projections.put(aggSymbol, toSymbolReference(aggSymbol)));
        // Add AVG = SUM / COUNT
        for (CubeRewriteResult.AverageAggregatorSource avgAggSource : cubeRewriteResult.getAvgAggregationColumns()) {
            Symbol sumSymbol = cubeScanToAggOutputMap.get(avgAggSource.getSum());
            Symbol countSymbol = cubeScanToAggOutputMap.get(avgAggSource.getCount());
            Type avgResultType = typeProvider.get(avgAggSource.getOriginalAggSymbol());
            ArithmeticBinaryExpression division = new ArithmeticBinaryExpression(
                    ArithmeticBinaryExpression.Operator.DIVIDE,
                    new Cast(toSymbolReference(sumSymbol), avgResultType.getTypeSignature().toString()),
                    new Cast(toSymbolReference(countSymbol), avgResultType.getTypeSignature().toString()));
            projections.put(avgAggSource.getOriginalAggSymbol(), division);
        }
        return new ProjectNode(
                context.getIdAllocator().getNextId(),
                aggNode,
                new Assignments(projections.entrySet().stream()
                        .collect(Collectors.toMap(Map.Entry::getKey, entry -> castToRowExpression(entry.getValue())))));
    }
    return planNode;
}
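As a rough illustration of the AVG-to-SUM/COUNT rewrite above, the following sketch builds the division expression with the same sql.tree constructors. The symbol names (sum_price, count_price) and the double target type are assumptions for the example only, not values taken from the optimizer:

import io.prestosql.sql.tree.ArithmeticBinaryExpression;
import io.prestosql.sql.tree.Cast;
import io.prestosql.sql.tree.Expression;
import io.prestosql.sql.tree.SymbolReference;

public class AvgRewriteSketch
{
    public static void main(String[] args)
    {
        // Hypothetical cube columns backing avg(price): a pre-aggregated sum and count.
        SymbolReference sum = new SymbolReference("sum_price");
        SymbolReference count = new SymbolReference("count_price");

        // avg(price) re-expressed as CAST(sum_price AS double) / CAST(count_price AS double),
        // mirroring the division built in rewriteAggregationNode.
        Expression avg = new ArithmeticBinaryExpression(
                ArithmeticBinaryExpression.Operator.DIVIDE,
                new Cast(sum, "double"),
                new Cast(count, "double"));
        System.out.println("built: " + avg.getClass().getSimpleName());
    }
}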
Use of io.prestosql.spi.relation.CallExpression in project hetu-core by openlookeng.
From class CubeOptimizerUtil, method extractMappedValue:
private static Optional<Object> extractMappedValue(Symbol symbol, ProjectNode projectNode)
{
    Map<Symbol, RowExpression> assignments = projectNode.getAssignments().getMap();
    RowExpression rowExpression = assignments.get(symbol);
    if (rowExpression == null) {
        return Optional.empty();
    }
    if (OriginalExpressionUtils.isExpression(rowExpression)) {
        Expression expression = castToExpression(rowExpression);
        if (expression instanceof Cast) {
            expression = ((Cast) expression).getExpression();
        }
        if (expression instanceof SymbolReference) {
            return Optional.of(((SymbolReference) expression).getName());
        }
        else if (expression instanceof Literal) {
            return Optional.of(expression);
        }
    }
    else {
        if (rowExpression instanceof CallExpression) {
            // Extract the column symbols from CAST expressions
            while (rowExpression instanceof CallExpression) {
                rowExpression = ((CallExpression) rowExpression).getArguments().get(0);
            }
        }
        if (!(rowExpression instanceof VariableReferenceExpression)) {
            return Optional.empty();
        }
        return Optional.of(((VariableReferenceExpression) rowExpression).getName());
    }
    return Optional.empty();
}
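The loop above strips nested calls (typically CAST wrappers) down to their first argument. The following is a distilled sketch of just that step, using the same SPI accessors; the class and method names are made up for the example:

import java.util.Optional;

import io.prestosql.spi.relation.CallExpression;
import io.prestosql.spi.relation.RowExpression;
import io.prestosql.spi.relation.VariableReferenceExpression;

public final class UnwrapColumnSketch
{
    private UnwrapColumnSketch() {}

    // Walks through nested CallExpressions, e.g. CAST(CAST(orderkey AS bigint) AS varchar),
    // and returns the name of the underlying column reference, if any.
    public static Optional<String> unwrapColumnName(RowExpression rowExpression)
    {
        while (rowExpression instanceof CallExpression) {
            rowExpression = ((CallExpression) rowExpression).getArguments().get(0);
        }
        if (rowExpression instanceof VariableReferenceExpression) {
            return Optional.of(((VariableReferenceExpression) rowExpression).getName());
        }
        return Optional.empty();
    }
}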
Use of io.prestosql.spi.relation.CallExpression in project hetu-core by openlookeng.
From class TestPageProcessorCompiler, method testSanityFilterOnRLE:
@Test
public void testSanityFilterOnRLE()
{
    Signature lessThan = internalOperator(LESS_THAN, BOOLEAN, ImmutableList.of(BIGINT, BIGINT));
    CallExpression filter = new CallExpression(
            lessThan.getName().getObjectName(),
            new BuiltInFunctionHandle(lessThan),
            BOOLEAN,
            ImmutableList.of(field(0, BIGINT), constant(10L, BIGINT)),
            Optional.empty());
    PageProcessor processor = compiler.compilePageProcessor(Optional.of(filter), ImmutableList.of(field(0, BIGINT)), MAX_BATCH_SIZE).get();
    Page page = new Page(createRLEBlock(5L, 100));
    Page outputPage = getOnlyElement(processor.process(null, new DriverYieldSignal(), newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()), page))
            .orElseThrow(() -> new AssertionError("page is not present"));
    assertEquals(outputPage.getPositionCount(), 100);
    assertTrue(outputPage.getBlock(0) instanceof RunLengthEncodedBlock);
    RunLengthEncodedBlock rle = (RunLengthEncodedBlock) outputPage.getBlock(0);
    assertEquals(BIGINT.getLong(rle.getValue(), 0), 5L);
}
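createRLEBlock is a helper from the test fixtures. As an assumption-labeled sketch, an equivalent run-length encoded input page (the value 5 repeated over 100 positions) could be built directly from the SPI block types roughly like this:

import io.prestosql.spi.Page;
import io.prestosql.spi.block.Block;
import io.prestosql.spi.block.BlockBuilder;
import io.prestosql.spi.block.RunLengthEncodedBlock;

import static io.prestosql.spi.type.BigintType.BIGINT;

public class RlePageSketch
{
    public static void main(String[] args)
    {
        // Single-position block holding the repeated value 5
        BlockBuilder builder = BIGINT.createBlockBuilder(null, 1);
        BIGINT.writeLong(builder, 5L);
        Block value = builder.build();

        // 100 positions that all share the same underlying value block
        Page page = new Page(new RunLengthEncodedBlock(value, 100));
        System.out.println(page.getPositionCount()); // 100
    }
}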
Use of io.prestosql.spi.relation.CallExpression in project hetu-core by openlookeng.
From class TestPageProcessorCompiler, method testSanityFilterOnDictionary:
@Test
public void testSanityFilterOnDictionary()
{
    CallExpression lengthVarchar = new CallExpression(
            QualifiedObjectName.valueOfDefaultFunction("length").getObjectName(),
            new BuiltInFunctionHandle(new Signature(QualifiedObjectName.valueOfDefaultFunction("length"), SCALAR, parseTypeSignature(StandardTypes.BIGINT), parseTypeSignature(StandardTypes.VARCHAR))),
            BIGINT,
            ImmutableList.of(field(0, VARCHAR)),
            Optional.empty());
    Signature lessThan = internalOperator(LESS_THAN, BOOLEAN, ImmutableList.of(BIGINT, BIGINT));
    CallExpression filter = new CallExpression(
            lessThan.getName().getObjectName(),
            new BuiltInFunctionHandle(lessThan),
            BOOLEAN,
            ImmutableList.of(lengthVarchar, constant(10L, BIGINT)),
            Optional.empty());
    PageProcessor processor = compiler.compilePageProcessor(Optional.of(filter), ImmutableList.of(field(0, VARCHAR)), MAX_BATCH_SIZE).get();
    Page page = new Page(createDictionaryBlock(createExpectedValues(10), 100));
    Page outputPage = getOnlyElement(processor.process(null, new DriverYieldSignal(), newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()), page))
            .orElseThrow(() -> new AssertionError("page is not present"));
    assertEquals(outputPage.getPositionCount(), 100);
    assertTrue(outputPage.getBlock(0) instanceof DictionaryBlock);
    DictionaryBlock dictionaryBlock = (DictionaryBlock) outputPage.getBlock(0);
    assertEquals(dictionaryBlock.getDictionary().getPositionCount(), 10);

    // test filter caching
    Page outputPage2 = getOnlyElement(processor.process(null, new DriverYieldSignal(), newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()), page))
            .orElseThrow(() -> new AssertionError("page is not present"));
    assertEquals(outputPage2.getPositionCount(), 100);
    assertTrue(outputPage2.getBlock(0) instanceof DictionaryBlock);
    DictionaryBlock dictionaryBlock2 = (DictionaryBlock) outputPage2.getBlock(0);

    // both output pages must have the same dictionary
    assertEquals(dictionaryBlock2.getDictionary(), dictionaryBlock.getDictionary());
}
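createDictionaryBlock and createExpectedValues are likewise test helpers. A hedged sketch of an input page with 100 positions backed by a 10-entry VARCHAR dictionary, built from the SPI block types, might look like this:

import io.airlift.slice.Slices;
import io.prestosql.spi.Page;
import io.prestosql.spi.block.Block;
import io.prestosql.spi.block.BlockBuilder;
import io.prestosql.spi.block.DictionaryBlock;

import static io.prestosql.spi.type.VarcharType.VARCHAR;

public class DictionaryPageSketch
{
    public static void main(String[] args)
    {
        // 10 distinct dictionary entries
        BlockBuilder builder = VARCHAR.createBlockBuilder(null, 10);
        for (int i = 0; i < 10; i++) {
            VARCHAR.writeSlice(builder, Slices.utf8Slice("value-" + i));
        }
        Block dictionary = builder.build();

        // 100 positions, each an index into the 10-entry dictionary
        int[] ids = new int[100];
        for (int i = 0; i < ids.length; i++) {
            ids[i] = i % 10;
        }
        Page page = new Page(new DictionaryBlock(dictionary, ids));
        System.out.println(page.getPositionCount()); // 100
    }
}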