Use of io.prestosql.spi.function.FunctionHandle in project hetu-core by openlookeng.
In class DynamicFilters, method createDynamicFilterRowExpression:
public static RowExpression createDynamicFilterRowExpression(Session session, Metadata metadata, TypeManager typeManager, String id, Type inputType, SymbolReference input, Optional<RowExpression> filter) {
    ConstantExpression string = new ConstantExpression(utf8Slice(id), VarcharType.VARCHAR);
    VariableReferenceExpression expression = new VariableReferenceExpression(input.getName(), inputType);
    FunctionHandle handle = metadata.getFunctionAndTypeManager().resolveFunction(session.getTransactionId(), QualifiedObjectName.valueOfDefaultFunction(Function.NAME), fromTypes(VarcharType.VARCHAR, inputType));
    return call(Function.NAME, handle, string.getType(), Arrays.asList(string, expression), filter);
}
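A minimal sketch of how this helper might be invoked from planner code, assuming a Session, Metadata, and TypeManager are already in scope; the filter id and column name below are illustrative, not taken from hetu-core:

    // Hypothetical call site: build a dynamic filter expression over a BIGINT probe column.
    // "df_1" and "orderkey" are made-up values for illustration only.
    SymbolReference probeColumn = new SymbolReference("orderkey");
    RowExpression dynamicFilter = DynamicFilters.createDynamicFilterRowExpression(
            session, metadata, typeManager, "df_1", BigintType.BIGINT, probeColumn, Optional.empty());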
Use of io.prestosql.spi.function.FunctionHandle in project hetu-core by openlookeng.
In class OracleRowExpressionConverter, method visitCall:
@Override
public String visitCall(CallExpression call, JdbcConverterContext context) {
    FunctionHandle functionHandle = call.getFunctionHandle();
    String functionName = functionMetadataManager.getFunctionMetadata(functionHandle).getName().getObjectName();
    if (timeExtractFields.contains(functionName)) {
        if (call.getArguments().size() == 1) {
            try {
                Time.ExtractField field = Time.ExtractField.valueOf(functionName.toUpperCase(ENGLISH));
                return format("EXTRACT(%s FROM %s)", field, call.getArguments().get(0).accept(this, context));
            } catch (IllegalArgumentException e) {
                throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Illegal argument: " + e);
            }
        } else {
            throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Illegal argument num of function " + functionName);
        }
    }
    if (functionName.equals(AT_TIMEZONE_FUNCTION_NAME)) {
        if (call.getArguments().size() == 2) {
            return format("%s AT TIME ZONE %s", call.getArguments().get(0).accept(this, context), call.getArguments().get(1).accept(this, context));
        } else {
            throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Illegal argument num of function " + functionName);
        }
    }
    if (standardFunctionResolution.isArrayConstructor(functionHandle)) {
        throw new PrestoException(NOT_SUPPORTED, "Oracle connector does not support array constructor");
    }
    if (standardFunctionResolution.isSubscriptFunction(functionHandle)) {
        throw new PrestoException(NOT_SUPPORTED, "Oracle connector does not support subscript expression");
    }
    if (standardFunctionResolution.isCastFunction(functionHandle)) {
        // Deal with literals: when a generic literal expression is translated to a RowExpression,
        // it becomes a 'CAST' RowExpression over a varchar-typed 'CONSTANT' RowExpression.
        // In some cases the 'CAST' is superfluous and can be dropped.
        RowExpression argument = call.getArguments().get(0);
        Type type = call.getType();
        if (argument instanceof ConstantExpression && argument.getType() instanceof VarcharType) {
            String value = argument.accept(this, context);
            if (type instanceof DateType) {
                return format("date %s", value);
            }
            if (type instanceof VarcharType || type instanceof CharType || type instanceof VarbinaryType || type instanceof DecimalType || type instanceof RealType || type instanceof DoubleType) {
                return value;
            }
        }
        if (call.getType().getDisplayName().equals(LIKE_PATTERN_NAME)) {
            return call.getArguments().get(0).accept(this, context);
        }
        return getCastExpression(call.getArguments().get(0).accept(this, context), call.getType());
    }
    return super.visitCall(call, context);
}
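As a rough illustration of what this visitor produces, paraphrased from the branches above rather than taken from hetu-core tests, and assuming "year" is a member of timeExtractFields and the argument expressions render to the identifiers and literals shown:

    // Illustrative input -> output translations (names are made up):
    //   year(order_date)                 ->  EXTRACT(YEAR FROM order_date)
    //   at-timezone call on (ts, tz)     ->  ts AT TIME ZONE tz
    //   CAST('2020-01-01' AS date)       ->  date '2020-01-01'
    //   CAST('1.5' AS double)            ->  '1.5'   (the cast around a varchar constant is dropped)
    //   any other cast                   ->  rendered via getCastExpression(...)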
Use of io.prestosql.spi.function.FunctionHandle in project hetu-core by openlookeng.
In class RowEqualOperator, method resolveEqualOperator:
private static MethodHandle resolveEqualOperator(Type type, FunctionAndTypeManager functionAndTypeManager) {
    FunctionHandle operator = functionAndTypeManager.resolveOperatorFunctionHandle(EQUAL, TypeSignatureProvider.fromTypes(type, type));
    BuiltInScalarFunctionImplementation implementation = functionAndTypeManager.getBuiltInScalarFunctionImplementation(operator);
    return implementation.getMethodHandle();
}
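The same resolution can be written inline; a minimal sketch for BIGINT, assuming a FunctionAndTypeManager instance is already available:

    // Resolve the EQUAL operator for (BIGINT, BIGINT) and fetch its MethodHandle.
    FunctionHandle equalHandle = functionAndTypeManager.resolveOperatorFunctionHandle(
            OperatorType.EQUAL, TypeSignatureProvider.fromTypes(BigintType.BIGINT, BigintType.BIGINT));
    MethodHandle bigintEqual = functionAndTypeManager
            .getBuiltInScalarFunctionImplementation(equalHandle)
            .getMethodHandle();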
Use of io.prestosql.spi.function.FunctionHandle in project hetu-core by openlookeng.
In class AggregationRewriteWithCube, method rewrite:
public PlanNode rewrite(AggregationNode originalAggregationNode, PlanNode filterNode) {
    QualifiedObjectName starTreeTableName = QualifiedObjectName.valueOf(cubeMetadata.getCubeName());
    TableHandle cubeTableHandle = metadata.getTableHandle(session, starTreeTableName).orElseThrow(() -> new CubeNotFoundException(starTreeTableName.toString()));
    Map<String, ColumnHandle> cubeColumnsMap = metadata.getColumnHandles(session, cubeTableHandle);
    TableMetadata cubeTableMetadata = metadata.getTableMetadata(session, cubeTableHandle);
    List<ColumnMetadata> cubeColumnMetadataList = cubeTableMetadata.getColumns();
    // Add group by
    List<Symbol> groupings = new ArrayList<>(originalAggregationNode.getGroupingKeys().size());
    for (Symbol symbol : originalAggregationNode.getGroupingKeys()) {
        Object column = symbolMappings.get(symbol.getName());
        if (column instanceof ColumnHandle) {
            groupings.add(new Symbol(((ColumnHandle) column).getColumnName()));
        }
    }
    Set<String> cubeGroups = cubeMetadata.getGroup();
    boolean exactGroupsMatch = false;
    if (groupings.size() == cubeGroups.size()) {
        exactGroupsMatch = groupings.stream().map(Symbol::getName).map(String::toLowerCase).allMatch(cubeGroups::contains);
    }
    CubeRewriteResult cubeRewriteResult = createScanNode(originalAggregationNode, filterNode, cubeTableHandle, cubeColumnsMap, cubeColumnMetadataList, exactGroupsMatch);
    PlanNode planNode = cubeRewriteResult.getTableScanNode();
    // Add filter node
    if (filterNode != null) {
        Expression expression = castToExpression(((FilterNode) filterNode).getPredicate());
        expression = rewriteExpression(expression, rewrittenMappings);
        planNode = new FilterNode(idAllocator.getNextId(), planNode, castToRowExpression(expression));
    }
    if (!exactGroupsMatch) {
        Map<Symbol, Symbol> cubeScanToAggOutputMap = new HashMap<>();
        // Rewrite AggregationNode using Cube table
        ImmutableMap.Builder<Symbol, AggregationNode.Aggregation> aggregationsBuilder = ImmutableMap.builder();
        for (CubeRewriteResult.AggregatorSource aggregatorSource : cubeRewriteResult.getAggregationColumns()) {
            ColumnHandle cubeColHandle = cubeRewriteResult.getTableScanNode().getAssignments().get(aggregatorSource.getScanSymbol());
            ColumnMetadata cubeColumnMetadata = cubeRewriteResult.getSymbolMetadataMap().get(aggregatorSource.getScanSymbol());
            Type type = cubeColumnMetadata.getType();
            AggregationSignature aggregationSignature = cubeMetadata.getAggregationSignature(cubeColumnMetadata.getName())
                    .orElseThrow(() -> new ColumnNotFoundException(new SchemaTableName(starTreeTableName.getSchemaName(), starTreeTableName.getObjectName()), cubeColHandle.getColumnName()));
            String aggFunction = COUNT.getName().equals(aggregationSignature.getFunction()) ? "sum" : aggregationSignature.getFunction();
            SymbolReference argument = toSymbolReference(aggregatorSource.getScanSymbol());
            FunctionHandle functionHandle = metadata.getFunctionAndTypeManager().lookupFunction(aggFunction, TypeSignatureProvider.fromTypeSignatures(type.getTypeSignature()));
            cubeScanToAggOutputMap.put(aggregatorSource.getScanSymbol(), aggregatorSource.getOriginalAggSymbol());
            aggregationsBuilder.put(aggregatorSource.getOriginalAggSymbol(),
                    new AggregationNode.Aggregation(
                            new CallExpression(aggFunction, functionHandle, type, ImmutableList.of(OriginalExpressionUtils.castToRowExpression(argument))),
                            ImmutableList.of(OriginalExpressionUtils.castToRowExpression(argument)),
                            false,
                            Optional.empty(),
                            Optional.empty(),
                            Optional.empty()));
        }
        List<Symbol> groupingKeys = originalAggregationNode.getGroupingKeys().stream().map(Symbol::getName).map(rewrittenMappings::get).collect(Collectors.toList());
        planNode = new AggregationNode(idAllocator.getNextId(), planNode, aggregationsBuilder.build(), singleGroupingSet(groupingKeys), ImmutableList.of(), AggregationNode.Step.SINGLE, Optional.empty(), Optional.empty(), AggregationNode.AggregationType.HASH, Optional.empty());
        AggregationNode aggNode = (AggregationNode) planNode;
        if (!cubeRewriteResult.getAvgAggregationColumns().isEmpty()) {
            if (!cubeRewriteResult.getComputeAvgDividingSumByCount()) {
                Map<Symbol, Expression> aggregateAssignments = new HashMap<>();
                for (CubeRewriteResult.AggregatorSource aggregatorSource : cubeRewriteResult.getAggregationColumns()) {
                    aggregateAssignments.put(aggregatorSource.getOriginalAggSymbol(), toSymbolReference(aggregatorSource.getScanSymbol()));
                }
                planNode = new ProjectNode(idAllocator.getNextId(), aggNode, new Assignments(aggregateAssignments.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> castToRowExpression(entry.getValue())))));
            } else {
                // If there was an AVG aggregation, map it to AVG = SUM/COUNT
                Map<Symbol, Expression> projections = new HashMap<>();
                aggNode.getGroupingKeys().forEach(symbol -> projections.put(symbol, toSymbolReference(symbol)));
                aggNode.getAggregations().keySet().stream().filter(symbol -> symbolMappings.containsValue(symbol.getName())).forEach(aggSymbol -> projections.put(aggSymbol, toSymbolReference(aggSymbol)));
                // Add AVG = SUM / COUNT
                for (CubeRewriteResult.AverageAggregatorSource avgAggSource : cubeRewriteResult.getAvgAggregationColumns()) {
                    Symbol sumSymbol = cubeScanToAggOutputMap.get(avgAggSource.getSum());
                    Symbol countSymbol = cubeScanToAggOutputMap.get(avgAggSource.getCount());
                    Type avgResultType = typeProvider.get(avgAggSource.getOriginalAggSymbol());
                    ArithmeticBinaryExpression division = new ArithmeticBinaryExpression(
                            ArithmeticBinaryExpression.Operator.DIVIDE,
                            new Cast(toSymbolReference(sumSymbol), avgResultType.getTypeSignature().toString()),
                            new Cast(toSymbolReference(countSymbol), avgResultType.getTypeSignature().toString()));
                    projections.put(avgAggSource.getOriginalAggSymbol(), division);
                }
                planNode = new ProjectNode(idAllocator.getNextId(), aggNode, new Assignments(projections.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> castToRowExpression(entry.getValue())))));
            }
        }
    }
    // Safety check to remove redundant symbols and rename original column names to intermediate names
    if (!planNode.getOutputSymbols().equals(originalAggregationNode.getOutputSymbols())) {
        // Map new symbol names to the old symbols
        Map<Symbol, Expression> assignments = new HashMap<>();
        Set<Symbol> planNodeOutput = new HashSet<>(planNode.getOutputSymbols());
        for (Symbol originalAggOutputSymbol : originalAggregationNode.getOutputSymbols()) {
            if (!planNodeOutput.contains(originalAggOutputSymbol)) {
                // Must be grouping key
                assignments.put(originalAggOutputSymbol, toSymbolReference(rewrittenMappings.get(originalAggOutputSymbol.getName())));
            } else {
                // Should be an expression and must have the same name in the new plan node
                assignments.put(originalAggOutputSymbol, toSymbolReference(originalAggOutputSymbol));
            }
        }
        planNode = new ProjectNode(idAllocator.getNextId(), planNode, new Assignments(assignments.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> castToRowExpression(entry.getValue())))));
    }
    return planNode;
}
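The FunctionHandle lookup inside the rewrite loop can be seen in isolation; a minimal sketch, assuming the rewritten cube column is a BIGINT sum column (function name and argument type are illustrative):

    // Resolve the built-in "sum" aggregation over a single BIGINT argument,
    // mirroring the lookupFunction(...) call used when rewriting each aggregator.
    FunctionHandle sumHandle = metadata.getFunctionAndTypeManager().lookupFunction(
            "sum", TypeSignatureProvider.fromTypeSignatures(BigintType.BIGINT.getTypeSignature()));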
Use of io.prestosql.spi.function.FunctionHandle in project hetu-core by openlookeng.
In class PushPartialAggregationThroughExchange, method split:
private PlanNode split(AggregationNode node, Context context) {
    // otherwise, add a partial and final with an exchange in between
    Map<Symbol, AggregationNode.Aggregation> intermediateAggregation = new HashMap<>();
    Map<Symbol, AggregationNode.Aggregation> finalAggregation = new HashMap<>();
    for (Map.Entry<Symbol, AggregationNode.Aggregation> entry : node.getAggregations().entrySet()) {
        AggregationNode.Aggregation originalAggregation = entry.getValue();
        String functionName = metadata.getFunctionAndTypeManager().getFunctionMetadata(originalAggregation.getFunctionHandle()).getName().getObjectName();
        FunctionHandle functionHandle = originalAggregation.getFunctionHandle();
        InternalAggregationFunction function = metadata.getFunctionAndTypeManager().getAggregateFunctionImplementation(functionHandle);
        Symbol intermediateSymbol = context.getSymbolAllocator().newSymbol(functionName, function.getIntermediateType());
        checkState(!originalAggregation.getOrderingScheme().isPresent(), "Aggregate with ORDER BY does not support partial aggregation");
        intermediateAggregation.put(intermediateSymbol, new AggregationNode.Aggregation(new CallExpression(functionName, functionHandle, function.getIntermediateType(), originalAggregation.getArguments(), Optional.empty()), originalAggregation.getArguments(), originalAggregation.isDistinct(), originalAggregation.getFilter(), originalAggregation.getOrderingScheme(), originalAggregation.getMask()));
        // rewrite final aggregation in terms of intermediate function
        finalAggregation.put(entry.getKey(), new AggregationNode.Aggregation(new CallExpression(functionName, functionHandle, function.getFinalType(), ImmutableList.<RowExpression>builder().add(new VariableReferenceExpression(intermediateSymbol.getName(), function.getIntermediateType())).addAll(originalAggregation.getArguments().stream().filter(PushPartialAggregationThroughExchange::isLambda).collect(toImmutableList())).build(), Optional.empty()), ImmutableList.<RowExpression>builder().add(new VariableReferenceExpression(intermediateSymbol.getName(), function.getIntermediateType())).addAll(originalAggregation.getArguments().stream().filter(PushPartialAggregationThroughExchange::isLambda).collect(toImmutableList())).build(), false, Optional.empty(), Optional.empty(), Optional.empty()));
    }
    PlanNode partial = new AggregationNode(context.getIdAllocator().getNextId(), node.getSource(), intermediateAggregation, node.getGroupingSets(),
            // preGroupedSymbols reflect properties of the source; pushing the partial aggregation
            // through the exchange may or may not preserve these properties. Hence, it is safest to drop preGroupedSymbols here.
            ImmutableList.of(), PARTIAL, node.getHashSymbol(), node.getGroupIdSymbol(), node.getAggregationType(), node.getFinalizeSymbol());
    return new AggregationNode(node.getId(), partial, finalAggregation, node.getGroupingSets(),
            // preGroupedSymbols reflect properties of the source; pushing the partial aggregation
            // through the exchange may or may not preserve these properties. Hence, it is safest to drop preGroupedSymbols here.
            ImmutableList.of(), FINAL, node.getHashSymbol(), node.getGroupIdSymbol(), node.getAggregationType(), node.getFinalizeSymbol());
}
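A rough picture of what the split produces for a single aggregation such as count(x); the symbol names below are made up, the real intermediate symbol comes from the SymbolAllocator:

    // Before:  count_out := count(x)                      -- single-step aggregation
    // After:   FINAL   step:  count_out := count(count_partial)
    //          PARTIAL step:  count_partial := count(x)    -- typed with the function's intermediate state type
    // The final step consumes the intermediate symbol produced by the partial step.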