Use of io.prestosql.sql.tree.FunctionCall in project hetu-core by openlookeng.
The class WindowFunctionMatcher, method getAssignedSymbol.
@Override
public Optional<Symbol> getAssignedSymbol(PlanNode node, Session session, Metadata metadata, SymbolAliases symbolAliases) {
Optional<Symbol> result = Optional.empty();
if (!(node instanceof WindowNode)) {
return result;
}
WindowNode windowNode = (WindowNode) node;
FunctionCall expectedCall = callMaker.getExpectedValue(symbolAliases);
Optional<WindowNode.Frame> expectedFrame = frameMaker.map(maker -> maker.getExpectedValue(symbolAliases));
for (Map.Entry<Symbol, Function> assignment : windowNode.getWindowFunctions().entrySet()) {
Function function = assignment.getValue();
boolean signatureMatches = functionHandle.map(assignment.getValue().getFunctionHandle()::equals).orElse(true);
if (signatureMatches && windowFunctionMatches(function, expectedCall, expectedFrame, metadata)) {
checkState(!result.isPresent(), "Ambiguous function calls in %s", windowNode);
result = Optional.of(assignment.getKey());
}
}
return result;
}
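The loop above returns the output symbol of the single window function whose handle and call match the expectation; the checkState call rejects ambiguous matches. A minimal standalone sketch of that find-unique-or-fail pattern, using plain string keys and values in place of WindowNode assignments (it only assumes Guava on the classpath, which the snippet above already uses):
import static com.google.common.base.Preconditions.checkState;

import java.util.Map;
import java.util.Optional;

public class UniqueMatchSketch
{
    // Returns the single key whose value matches, empty if nothing matches, and fails if the match is ambiguous.
    static Optional<String> findUniqueAssignment(Map<String, String> assignments, String expected)
    {
        Optional<String> result = Optional.empty();
        for (Map.Entry<String, String> entry : assignments.entrySet()) {
            if (entry.getValue().equals(expected)) {
                checkState(!result.isPresent(), "Ambiguous match for %s", expected);
                result = Optional.of(entry.getKey());
            }
        }
        return result;
    }

    public static void main(String[] args)
    {
        Map<String, String> assignments = Map.of("rank_0", "rank()", "sum_1", "sum(x)");
        System.out.println(findUniqueAssignment(assignments, "rank()")); // prints Optional[rank_0]
    }
}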
Use of io.prestosql.sql.tree.FunctionCall in project hetu-core by openlookeng.
The class TestShadowing, method testCreateTableAsSelect.
@Test
public void testCreateTableAsSelect() throws Exception {
handle.execute("CREATE TABLE \"my_test_table\" (column1 BIGINT, column2 DOUBLE)");
SqlParser parser = new SqlParser();
Query query = new Query(CATALOG, SCHEMA, ImmutableList.of(), "CREATE TABLE my_test_table AS SELECT 1 column1, CAST('2.0' AS DOUBLE) column2 LIMIT 1", ImmutableList.of(), null, null, ImmutableMap.of());
QueryRewriter rewriter = new QueryRewriter(parser, URL, QualifiedName.of("tmp_"), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), 1, new Duration(10, SECONDS));
Query rewrittenQuery = rewriter.shadowQuery(query);
assertEquals(rewrittenQuery.getPreQueries().size(), 1);
assertEquals(rewrittenQuery.getPostQueries().size(), 1);
CreateTableAsSelect createTableAs = (CreateTableAsSelect) parser.createStatement(rewrittenQuery.getPreQueries().get(0));
assertEquals(createTableAs.getName().getParts().size(), 1);
assertTrue(createTableAs.getName().getSuffix().startsWith("tmp_"));
assertFalse(createTableAs.getName().getSuffix().contains("my_test_table"));
assertEquals(statementToQueryType(parser, rewrittenQuery.getQuery()), READ);
Table table = new Table(createTableAs.getName());
SingleColumn column1 = new SingleColumn(new FunctionCall(QualifiedName.of("checksum"), ImmutableList.of(new Identifier("COLUMN1"))));
SingleColumn column2 = new SingleColumn(new FunctionCall(QualifiedName.of("checksum"), ImmutableList.of(new FunctionCall(QualifiedName.of("round"), ImmutableList.of(new Identifier("COLUMN2"), new LongLiteral("1"))))));
Select select = new Select(false, ImmutableList.of(column1, column2));
QuerySpecification querySpecification = new QuerySpecification(select, Optional.of(table), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty());
assertEquals(parser.createStatement(rewrittenQuery.getQuery()), new io.prestosql.sql.tree.Query(Optional.empty(), querySpecification, Optional.empty(), Optional.empty(), Optional.empty()));
assertEquals(parser.createStatement(rewrittenQuery.getPostQueries().get(0)), new DropTable(createTableAs.getName(), true));
}
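For orientation, the three statements this test expects the rewriter to produce have roughly the shapes below; QueryRewriter generates a random table name with the tmp_ prefix, so the name used here is illustrative only:
public class ShadowedQueryShapes
{
    public static void main(String[] args)
    {
        // pre-query: materialize the shadowed CTAS into a temporary table (illustrative name)
        String preQuery = "CREATE TABLE tmp_xxx AS SELECT 1 column1, CAST('2.0' AS DOUBLE) column2 LIMIT 1";
        // shadow read: checksum every column, rounding the DOUBLE column before checksumming
        String shadowRead = "SELECT checksum(COLUMN1), checksum(round(COLUMN2, 1)) FROM tmp_xxx";
        // post-query: clean up the temporary table
        String postQuery = "DROP TABLE IF EXISTS tmp_xxx";
        System.out.println(preQuery);
        System.out.println(shadowRead);
        System.out.println(postQuery);
    }
}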
Use of io.prestosql.sql.tree.FunctionCall in project hetu-core by openlookeng.
The class QueryPlanner, method aggregate.
private PlanBuilder aggregate(PlanBuilder inputSubPlan, QuerySpecification node) {
PlanBuilder subPlan = inputSubPlan;
if (!analysis.isAggregation(node)) {
return subPlan;
}
// 1. Pre-project all scalar inputs (arguments and non-trivial group by expressions)
Set<Expression> groupByExpressions = ImmutableSet.copyOf(analysis.getGroupByExpressions(node));
ImmutableList.Builder<Expression> arguments = ImmutableList.builder();
// lambda expressions are generated at execution time
analysis.getAggregates(node).stream().map(FunctionCall::getArguments).flatMap(List::stream).filter(exp -> !(exp instanceof LambdaExpression)).forEach(arguments::add);
analysis.getAggregates(node).stream().map(FunctionCall::getOrderBy).filter(Optional::isPresent).map(Optional::get).map(OrderBy::getSortItems).flatMap(List::stream).map(SortItem::getSortKey).forEach(arguments::add);
// filter expressions need to be projected first
analysis.getAggregates(node).stream().map(FunctionCall::getFilter).filter(Optional::isPresent).map(Optional::get).forEach(arguments::add);
Iterable<Expression> inputs = Iterables.concat(groupByExpressions, arguments.build());
subPlan = handleSubqueries(subPlan, node, inputs);
if (!Iterables.isEmpty(inputs)) {
// avoid an empty projection if the only aggregation is COUNT (which has no arguments)
subPlan = project(subPlan, inputs);
}
// 2. Aggregate
// 2.a. Rewrite aggregate arguments
TranslationMap argumentTranslations = new TranslationMap(subPlan.getRelationPlan(), analysis, lambdaDeclarationToSymbolMap);
ImmutableList.Builder<Symbol> aggregationArgumentsBuilder = ImmutableList.builder();
for (Expression argument : arguments.build()) {
Symbol symbol = subPlan.translate(argument);
argumentTranslations.put(argument, symbol);
aggregationArgumentsBuilder.add(symbol);
}
List<Symbol> aggregationArguments = aggregationArgumentsBuilder.build();
// 2.b. Rewrite grouping columns
TranslationMap groupingTranslations = new TranslationMap(subPlan.getRelationPlan(), analysis, lambdaDeclarationToSymbolMap);
Map<Symbol, Symbol> groupingSetMappings = new LinkedHashMap<>();
for (Expression expression : groupByExpressions) {
Symbol input = subPlan.translate(expression);
Symbol output = planSymbolAllocator.newSymbol(expression, analysis.getTypeWithCoercions(expression), "gid");
groupingTranslations.put(expression, output);
groupingSetMappings.put(output, input);
}
// This tracks the grouping sets before complex expressions are considered (see comments below)
// It's also used to compute the descriptors needed to implement grouping()
List<Set<FieldId>> columnOnlyGroupingSets = ImmutableList.of(ImmutableSet.of());
List<List<Symbol>> groupingSets = ImmutableList.of(ImmutableList.of());
if (node.getGroupBy().isPresent()) {
// For the purpose of "distinct", we need to canonicalize column references that may have varying
// syntactic forms (e.g., "t.a" vs "a"). Thus we need to enumerate grouping sets based on the underlying
// fieldId associated with each column reference expression.
// The catch is that simple group-by expressions can be arbitrary expressions (this is a departure from the SQL specification).
// But they don't affect the number of grouping sets or the behavior of "distinct". We can compute all the candidate
// grouping sets in terms of fieldId, dedup as appropriate and then cross-join them with the complex expressions.
Analysis.GroupingSetAnalysis groupingSetAnalysis = analysis.getGroupingSets(node);
columnOnlyGroupingSets = enumerateGroupingSets(groupingSetAnalysis);
if (node.getGroupBy().get().isDistinct()) {
columnOnlyGroupingSets = columnOnlyGroupingSets.stream().distinct().collect(toImmutableList());
}
// add in the complex expressions and then materialize the grouping sets in terms of plan columns
ImmutableList.Builder<List<Symbol>> groupingSetBuilder = ImmutableList.builder();
for (Set<FieldId> groupingSet : columnOnlyGroupingSets) {
ImmutableList.Builder<Symbol> columns = ImmutableList.builder();
groupingSetAnalysis.getComplexExpressions().stream().map(groupingTranslations::get).forEach(columns::add);
groupingSet.stream().map(field -> groupingTranslations.get(new FieldReference(field.getFieldIndex()))).forEach(columns::add);
groupingSetBuilder.add(columns.build());
}
groupingSets = groupingSetBuilder.build();
}
// 2.c. Generate GroupIdNode (multiple grouping sets) or ProjectNode (single grouping set)
Optional<Symbol> groupIdSymbol = Optional.empty();
if (groupingSets.size() > 1) {
groupIdSymbol = Optional.of(planSymbolAllocator.newSymbol("groupId", BIGINT));
GroupIdNode groupId = new GroupIdNode(idAllocator.getNextId(), subPlan.getRoot(), groupingSets, groupingSetMappings, aggregationArguments, groupIdSymbol.get());
subPlan = new PlanBuilder(groupingTranslations, groupId);
} else {
Assignments.Builder assignments = Assignments.builder();
aggregationArguments.forEach(symbol -> assignments.put(symbol, castToRowExpression(toSymbolReference(symbol))));
groupingSetMappings.forEach((key, value) -> assignments.put(key, castToRowExpression(toSymbolReference(value))));
ProjectNode project = new ProjectNode(idAllocator.getNextId(), subPlan.getRoot(), assignments.build());
subPlan = new PlanBuilder(groupingTranslations, project);
}
TranslationMap aggregationTranslations = new TranslationMap(subPlan.getRelationPlan(), analysis, lambdaDeclarationToSymbolMap);
aggregationTranslations.copyMappingsFrom(groupingTranslations);
// 2.d. Rewrite aggregates
ImmutableMap.Builder<Symbol, Aggregation> aggregationsBuilder = ImmutableMap.builder();
boolean needPostProjectionCoercion = false;
for (FunctionCall aggregate : analysis.getAggregates(node)) {
Expression rewritten = argumentTranslations.rewrite(aggregate);
Symbol newSymbol = planSymbolAllocator.newSymbol(rewritten, analysis.getType(aggregate));
// TODO: this is a hack, because coercions are applied to the output of expressions rather than to their arguments.
// Therefore we can end up with this implicit cast, and have to move it into a post-projection
if (rewritten instanceof Cast) {
rewritten = ((Cast) rewritten).getExpression();
needPostProjectionCoercion = true;
}
aggregationTranslations.put(aggregate, newSymbol);
FunctionCall functionCall = (FunctionCall) rewritten;
aggregationsBuilder.put(newSymbol, new Aggregation(
call(aggregate.getName().getSuffix(), analysis.getFunctionHandle(aggregate), analysis.getType(aggregate), functionCall.getArguments().stream().map(OriginalExpressionUtils::castToRowExpression).collect(toImmutableList())),
functionCall.getArguments().stream().map(OriginalExpressionUtils::castToRowExpression).collect(toImmutableList()),
functionCall.isDistinct(),
functionCall.getFilter().map(SymbolUtils::from),
functionCall.getOrderBy().map(OrderingSchemeUtils::fromOrderBy),
Optional.empty()));
}
Map<Symbol, Aggregation> aggregations = aggregationsBuilder.build();
ImmutableSet.Builder<Integer> globalGroupingSets = ImmutableSet.builder();
for (int i = 0; i < groupingSets.size(); i++) {
if (groupingSets.get(i).isEmpty()) {
globalGroupingSets.add(i);
}
}
ImmutableList.Builder<Symbol> groupingKeys = ImmutableList.builder();
groupingSets.stream().flatMap(List::stream).distinct().forEach(groupingKeys::add);
groupIdSymbol.ifPresent(groupingKeys::add);
AggregationNode aggregationNode = new AggregationNode(idAllocator.getNextId(), subPlan.getRoot(), aggregations, groupingSets(groupingKeys.build(), groupingSets.size(), globalGroupingSets.build()), ImmutableList.of(), AggregationNode.Step.SINGLE, Optional.empty(), groupIdSymbol, AggregationNode.AggregationType.HASH, Optional.empty());
subPlan = new PlanBuilder(aggregationTranslations, aggregationNode);
// TODO: this is a hack, we should change type coercions to coerce the inputs to functions/operators instead of coercing the output
if (needPostProjectionCoercion) {
ImmutableList.Builder<Expression> alreadyCoerced = ImmutableList.builder();
alreadyCoerced.addAll(groupByExpressions);
groupIdSymbol.map(SymbolUtils::toSymbolReference).ifPresent(alreadyCoerced::add);
subPlan = explicitCoercionFields(subPlan, alreadyCoerced.build(), analysis.getAggregates(node));
}
// 4. Project and re-write all grouping functions
return handleGroupingOperations(subPlan, node, groupIdSymbol, columnOnlyGroupingSets);
}
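To make step 2.c and the global-aggregation detection concrete: a plain GROUP BY yields a single grouping set and takes the ProjectNode branch, multiple grouping sets take the GroupIdNode branch (and get the synthetic groupId symbol), and any empty grouping set is recorded as a global aggregation. A minimal sketch of that empty-set index computation, with symbol names standing in for Symbol objects:
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;

import java.util.List;
import java.util.Set;

public class GlobalGroupingSetsSketch
{
    // Indices of grouping sets with no grouping keys, i.e. global aggregations.
    static Set<Integer> globalGroupingSets(List<List<String>> groupingSets)
    {
        ImmutableSet.Builder<Integer> global = ImmutableSet.builder();
        for (int i = 0; i < groupingSets.size(); i++) {
            if (groupingSets.get(i).isEmpty()) {
                global.add(i);
            }
        }
        return global.build();
    }

    public static void main(String[] args)
    {
        // GROUP BY GROUPING SETS ((), (a), (a, b)) -> set 0 is the global aggregation
        List<List<String>> groupingSets = ImmutableList.of(ImmutableList.of(), ImmutableList.of("a"), ImmutableList.of("a", "b"));
        System.out.println(globalGroupingSets(groupingSets)); // prints [0]
    }
}
Because that example has three grouping sets, the planner would take the GroupIdNode branch and append the groupId symbol to the grouping keys.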
Use of io.prestosql.sql.tree.FunctionCall in project hetu-core by openlookeng.
The class LogicalPlanner, method noTruncationCast.
private Expression noTruncationCast(Expression expression, Type fromType, Type toType) {
// cast a larger varchar/char to a smaller varchar/char
if ((fromType instanceof VarcharType || fromType instanceof CharType) && (toType instanceof VarcharType || toType instanceof CharType)) {
int targetLength;
if (toType instanceof VarcharType) {
if (((VarcharType) toType).isUnbounded()) {
return new Cast(expression, toType.getTypeSignature().toString());
}
targetLength = ((VarcharType) toType).getBoundedLength();
} else {
targetLength = ((CharType) toType).getLength();
}
Signature spaceTrimmedLength = metadata.getFunctionAndTypeManager().resolveBuiltInFunction(QualifiedName.of("$space_trimmed_length"), fromTypes(VARCHAR));
Signature fail = metadata.getFunctionAndTypeManager().resolveBuiltInFunction(QualifiedName.of("fail"), fromTypes(VARCHAR));
// check if the trimmed value fits in the target type
return new IfExpression(
new ComparisonExpression(GREATER_THAN_OR_EQUAL, new GenericLiteral("BIGINT", Integer.toString(targetLength)), new FunctionCall(QualifiedName.of("$space_trimmed_length"), ImmutableList.of(new Cast(expression, VARCHAR.getTypeSignature().toString())))),
new Cast(expression, toType.getTypeSignature().toString()),
new Cast(new FunctionCall(QualifiedName.of("fail"), ImmutableList.of(new Cast(new StringLiteral(format("Out of range for insert query type: Table: %s, Query: %s", toType.toString(), fromType.toString())), VARCHAR.getTypeSignature().toString()))), toType.getTypeSignature().toString()));
}
return new Cast(expression, toType.getTypeSignature().toString());
}
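The generated expression only performs the cast when $space_trimmed_length of the value fits in targetLength, so a value that overflows the target type by trailing spaces alone still casts cleanly, while anything longer fails at execution time instead of being silently truncated. A plain-Java sketch of the same rule (the method name and exception are illustrative, not the engine's behavior verbatim):
public class NoTruncationCheck
{
    // Mirrors the planner's rule: trailing spaces may be dropped, any other truncation is an error.
    static String castVarchar(String value, int targetLength)
    {
        // length with trailing spaces removed, as $space_trimmed_length would compute it
        int trimmedLength = value.replaceAll(" +$", "").length();
        if (targetLength >= trimmedLength) {
            return value.length() <= targetLength ? value : value.substring(0, targetLength);
        }
        throw new IllegalArgumentException("Out of range for insert query type: varchar(" + targetLength + ")");
    }

    public static void main(String[] args)
    {
        System.out.println("[" + castVarchar("abc   ", 3) + "]"); // prints [abc]: only trailing spaces are dropped
        // castVarchar("abcdef", 3) would throw instead of silently truncating
    }
}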
Use of io.prestosql.sql.tree.FunctionCall in project hetu-core by openlookeng.
The class CubeConsole, method createCubeCommand.
/**
* Process the Create Cube Query
*
* @param query query
* @param queryRunner queryRunner
* @param outputFormat outputFormat
* @param schemaChanged schemaChanged
* @param usePager usePager
* @param showProgress showProgress
* @param terminal terminal
* @param out out
* @param errorChannel errorChannel
* @return boolean after processing the create cube query command.
*/
public boolean createCubeCommand(String query, QueryRunner queryRunner, ClientOptions.OutputFormat outputFormat, Runnable schemaChanged, boolean usePager, boolean showProgress, Terminal terminal, PrintStream out, PrintStream errorChannel) {
boolean success = true;
SqlParser parser = new SqlParser();
QualifiedName cubeName = null;
try {
CreateCube createCube = (CreateCube) parser.createStatement(query, new ParsingOptions(ParsingOptions.DecimalLiteralTreatment.AS_DOUBLE));
cubeName = createCube.getCubeName();
QualifiedName sourceTableName = createCube.getSourceTableName();
String whereClause = createCube.getWhere().get().toString();
Set<FunctionCall> aggregations = createCube.getAggregations();
List<Identifier> groupingSet = createCube.getGroupingSet();
List<Property> properties = createCube.getProperties();
boolean notExists = createCube.isNotExists();
CreateCube modifiedCreateCube = new CreateCube(cubeName, sourceTableName, groupingSet, aggregations, notExists, properties, Optional.empty(), createCube.getSourceFilter().orElse(null));
String queryCreateCube = SqlFormatter.formatSql(modifiedCreateCube, Optional.empty());
if (!console.runQuery(queryRunner, queryCreateCube, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel)) {
return false;
}
// we check whether the create cube expression can be processed
if (isSupportedExpression(createCube, queryRunner, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel)) {
if (createCube.getWhere().get() instanceof BetweenPredicate) {
// we process the between predicate in the create cube query where clause
success = processBetweenPredicate(createCube, queryRunner, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel, parser);
}
if (createCube.getWhere().get() instanceof ComparisonExpression) {
// we process the comparison expression in the create cube query where clause
success = processComparisonExpression(createCube, queryRunner, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel, parser);
}
} else {
// if we do not support processing the create cube query with multiple inserts, then only a single insert is run internally.
String queryInsert = String.format(INSERT_INTO_CUBE_STRING, cubeName, whereClause);
success = console.runQuery(queryRunner, queryInsert, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel);
}
if (!success) {
// roll back mechanism for unsuccessful create cube query
String dropCubeQuery = String.format(DROP_CUBE_STRING, cubeName);
console.runQuery(queryRunner, dropCubeQuery, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel);
}
} catch (ParsingException e) {
if (cubeName != null) {
// roll back mechanism for unsuccessful create cube query
String dropCubeQuery = String.format(DROP_CUBE_STRING, cubeName);
console.runQuery(queryRunner, dropCubeQuery, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel);
}
System.out.println(e.getMessage());
Query.renderErrorLocation(query, new ErrorLocation(e.getLineNumber(), e.getColumnNumber()), errorChannel);
success = false;
} catch (Exception e) {
if (cubeName != null) {
// roll back mechanism for unsuccessful create cube query
String dropCubeQuery = String.format(DROP_CUBE_STRING, cubeName);
console.runQuery(queryRunner, dropCubeQuery, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel);
}
// Add blank line after progress bar
System.out.println();
System.out.println(e.getMessage());
success = false;
}
return success;
}
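Both the single-insert fallback and the rollback come down to filling a format string with the cube name and, for the insert, the original WHERE clause. A minimal sketch with hypothetical stand-ins for CubeConsole's INSERT_INTO_CUBE_STRING and DROP_CUBE_STRING constants (their real text is not part of this excerpt):
public class CubeCommandSketch
{
    // Hypothetical stand-ins; the actual format constants live in CubeConsole.
    private static final String INSERT_INTO_CUBE_STRING = "INSERT INTO CUBE %s WHERE %s";
    private static final String DROP_CUBE_STRING = "DROP CUBE IF EXISTS %s";

    public static void main(String[] args)
    {
        String cubeName = "sales_cube";
        String whereClause = "(orderdate BETWEEN DATE '2021-01-01' AND DATE '2021-12-31')";
        // fallback when the WHERE clause cannot be split into multiple inserts
        String queryInsert = String.format(INSERT_INTO_CUBE_STRING, cubeName, whereClause);
        // rollback issued when the insert (or the create) fails
        String dropCubeQuery = String.format(DROP_CUBE_STRING, cubeName);
        System.out.println(queryInsert);
        System.out.println(dropCubeQuery);
    }
}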