Use of com.facebook.presto.sql.tree.OrderBy in project presto by prestodb.
In class AstBuilder, method visitQueryNoWith:
@Override
public Node visitQueryNoWith(SqlBaseParser.QueryNoWithContext context)
{
    QueryBody term = (QueryBody) visit(context.queryTerm());

    Optional<OrderBy> orderBy = Optional.empty();
    if (context.ORDER() != null) {
        orderBy = Optional.of(new OrderBy(getLocation(context.ORDER()), visit(context.sortItem(), SortItem.class)));
    }

    Optional<Offset> offset = Optional.empty();
    if (context.OFFSET() != null) {
        offset = Optional.of(new Offset(
                Optional.of(getLocation(context.OFFSET())),
                getTextIfPresent(context.offset)
                        .orElseThrow(() -> new IllegalStateException("Missing OFFSET row count"))));
    }

    if (term instanceof QuerySpecification) {
        // When a simple query specification is followed by ORDER BY, OFFSET and LIMIT,
        // fold the order by, offset and limit clauses into the query specification
        // (the analyzer/planner expects this structure to resolve references with respect
        // to columns defined in the query specification)
        QuerySpecification query = (QuerySpecification) term;
        return new Query(
                getLocation(context),
                Optional.empty(),
                new QuerySpecification(
                        getLocation(context),
                        query.getSelect(),
                        query.getFrom(),
                        query.getWhere(),
                        query.getGroupBy(),
                        query.getHaving(),
                        orderBy,
                        offset,
                        getTextIfPresent(context.limit)),
                Optional.empty(),
                Optional.empty(),
                Optional.empty());
    }

    return new Query(
            getLocation(context),
            Optional.empty(),
            term,
            orderBy,
            offset,
            getTextIfPresent(context.limit));
}
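For reference, a minimal usage sketch (not part of the snippet above) showing where the folded ORDER BY ends up after parsing. It assumes the SqlParser and ParsingOptions API from the presto-parser module; exact signatures may differ between versions.

// Sketch only: parse a simple query and observe that the ORDER BY is attached to the
// QuerySpecification, not to the enclosing Query (because of the folding above).
import com.facebook.presto.sql.parser.ParsingOptions;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.tree.OrderBy;
import com.facebook.presto.sql.tree.Query;
import com.facebook.presto.sql.tree.QuerySpecification;

import java.util.Optional;

public class OrderByFoldingExample
{
    public static void main(String[] args)
    {
        SqlParser parser = new SqlParser();
        Query query = (Query) parser.createStatement("SELECT a FROM t ORDER BY a LIMIT 10", new ParsingOptions());

        QuerySpecification spec = (QuerySpecification) query.getQueryBody();
        Optional<OrderBy> onSpec = spec.getOrderBy();
        Optional<OrderBy> onQuery = query.getOrderBy();

        System.out.println("ORDER BY on QuerySpecification: " + onSpec.isPresent()); // expected: true
        System.out.println("ORDER BY on Query: " + onQuery.isPresent());             // expected: false
    }
}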
Use of com.facebook.presto.sql.tree.OrderBy in project presto by prestodb.
In class AstBuilder, method visitFunctionCall:
@Override
public Node visitFunctionCall(SqlBaseParser.FunctionCallContext context)
{
    Optional<Expression> filter = visitIfPresent(context.filter(), Expression.class);
    Optional<Window> window = visitIfPresent(context.over(), Window.class);

    Optional<OrderBy> orderBy = Optional.empty();
    if (context.ORDER() != null) {
        orderBy = Optional.of(new OrderBy(visit(context.sortItem(), SortItem.class)));
    }

    QualifiedName name = getQualifiedName(context.qualifiedName());
    boolean distinct = isDistinct(context.setQuantifier());
    boolean ignoreNulls = context.nullTreatment() != null && context.nullTreatment().IGNORE() != null;

    if (name.toString().equalsIgnoreCase("if")) {
        check(context.expression().size() == 2 || context.expression().size() == 3, "Invalid number of arguments for 'if' function", context);
        check(!window.isPresent(), "OVER clause not valid for 'if' function", context);
        check(!distinct, "DISTINCT not valid for 'if' function", context);
        check(!filter.isPresent(), "FILTER not valid for 'if' function", context);
        Expression elseExpression = null;
        if (context.expression().size() == 3) {
            elseExpression = (Expression) visit(context.expression(2));
        }
        return new IfExpression(getLocation(context), (Expression) visit(context.expression(0)), (Expression) visit(context.expression(1)), elseExpression);
    }

    if (name.toString().equalsIgnoreCase("nullif")) {
        check(context.expression().size() == 2, "Invalid number of arguments for 'nullif' function", context);
        check(!window.isPresent(), "OVER clause not valid for 'nullif' function", context);
        check(!distinct, "DISTINCT not valid for 'nullif' function", context);
        check(!filter.isPresent(), "FILTER not valid for 'nullif' function", context);
        return new NullIfExpression(getLocation(context), (Expression) visit(context.expression(0)), (Expression) visit(context.expression(1)));
    }

    if (name.toString().equalsIgnoreCase("coalesce")) {
        check(context.expression().size() >= 2, "The 'coalesce' function must have at least two arguments", context);
        check(!window.isPresent(), "OVER clause not valid for 'coalesce' function", context);
        check(!distinct, "DISTINCT not valid for 'coalesce' function", context);
        check(!filter.isPresent(), "FILTER not valid for 'coalesce' function", context);
        return new CoalesceExpression(getLocation(context), visit(context.expression(), Expression.class));
    }

    if (name.toString().equalsIgnoreCase("try")) {
        check(context.expression().size() == 1, "The 'try' function must have exactly one argument", context);
        check(!window.isPresent(), "OVER clause not valid for 'try' function", context);
        check(!distinct, "DISTINCT not valid for 'try' function", context);
        check(!filter.isPresent(), "FILTER not valid for 'try' function", context);
        return new TryExpression(getLocation(context), (Expression) visit(getOnlyElement(context.expression())));
    }

    if (name.toString().equalsIgnoreCase("$internal$bind")) {
        check(context.expression().size() >= 1, "The '$internal$bind' function must have at least one argument", context);
        check(!window.isPresent(), "OVER clause not valid for '$internal$bind' function", context);
        check(!distinct, "DISTINCT not valid for '$internal$bind' function", context);
        check(!filter.isPresent(), "FILTER not valid for '$internal$bind' function", context);
        int numValues = context.expression().size() - 1;
        List<Expression> arguments = context.expression().stream()
                .map(this::visit)
                .map(Expression.class::cast)
                .collect(toImmutableList());
        return new BindExpression(getLocation(context), arguments.subList(0, numValues), arguments.get(numValues));
    }

    return new FunctionCall(
            getLocation(context),
            getQualifiedName(context.qualifiedName()),
            window,
            filter,
            orderBy,
            distinct,
            ignoreNulls,
            visit(context.expression(), Expression.class));
}
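The ORDER BY handled here is the one that may appear inside an aggregate call, e.g. array_agg(x ORDER BY y). A minimal sketch, assuming SqlParser#createExpression from presto-parser (signatures may vary between versions), showing that the parsed FunctionCall carries the clause:

// Sketch only: the parser attaches the ORDER BY clause directly to the FunctionCall node.
import com.facebook.presto.sql.parser.ParsingOptions;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.tree.FunctionCall;
import com.facebook.presto.sql.tree.OrderBy;

public class FunctionCallOrderByExample
{
    public static void main(String[] args)
    {
        SqlParser parser = new SqlParser();
        FunctionCall call = (FunctionCall) parser.createExpression("array_agg(x ORDER BY y)", new ParsingOptions());

        OrderBy orderBy = call.getOrderBy().orElseThrow(IllegalStateException::new);
        System.out.println(orderBy.getSortItems()); // one SortItem over column y
    }
}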
Use of com.facebook.presto.sql.tree.OrderBy in project presto by prestodb.
In class TestSqlParser, method testSelectWithOffset:
@Test
public void testSelectWithOffset()
{
    assertStatement("SELECT * FROM table1 OFFSET 2 ROWS",
            simpleQuery(
                    selectList(new AllColumns()),
                    new Table(QualifiedName.of("table1")),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.of(new Offset("2")),
                    Optional.empty()));
    assertStatement("SELECT * FROM table1 OFFSET 2",
            new Query(
                    Optional.empty(),
                    new QuerySpecification(
                            selectList(new AllColumns()),
                            Optional.of(new Table(QualifiedName.of("table1"))),
                            Optional.empty(),
                            Optional.empty(),
                            Optional.empty(),
                            Optional.empty(),
                            Optional.of(new Offset("2")),
                            Optional.empty()),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.empty()));
    assertStatement("SELECT * FROM table1 order by x OFFSET 2 limit 10",
            new Query(
                    Optional.empty(),
                    new QuerySpecification(
                            selectList(new AllColumns()),
                            Optional.of(new Table(QualifiedName.of("table1"))),
                            Optional.empty(),
                            Optional.empty(),
                            Optional.empty(),
                            Optional.of(new OrderBy(ImmutableList.of(new SortItem(new Identifier("x"), ASCENDING, UNDEFINED)))),
                            Optional.of(new Offset("2")),
                            Optional.of("10")),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.empty()));

    Query valuesQuery = query(values(
            row(new LongLiteral("1"), new StringLiteral("1")),
            row(new LongLiteral("2"), new StringLiteral("2"))));
    assertStatement("SELECT * FROM (VALUES (1, '1'), (2, '2')) OFFSET 2 ROWS",
            simpleQuery(
                    selectList(new AllColumns()),
                    subquery(valuesQuery),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.of(new Offset("2")),
                    Optional.empty()));
    assertStatement("SELECT * FROM (VALUES (1, '1'), (2, '2')) OFFSET 2",
            simpleQuery(
                    selectList(new AllColumns()),
                    subquery(valuesQuery),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.of(new Offset("2")),
                    Optional.empty()));
}
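A minimal companion sketch (assuming the Offset#getRowCount accessor and the parser API from the same source tree; both are assumptions here) showing that the "OFFSET 2 ROWS" and "OFFSET 2" forms asserted above parse to the same Offset node:

// Sketch only: both OFFSET spellings yield an Offset with row count "2" on the query specification.
import com.facebook.presto.sql.parser.ParsingOptions;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.tree.Offset;
import com.facebook.presto.sql.tree.Query;
import com.facebook.presto.sql.tree.QuerySpecification;

public class OffsetParsingExample
{
    private static Offset parseOffset(SqlParser parser, String sql)
    {
        Query query = (Query) parser.createStatement(sql, new ParsingOptions());
        QuerySpecification spec = (QuerySpecification) query.getQueryBody();
        return spec.getOffset().orElseThrow(IllegalStateException::new);
    }

    public static void main(String[] args)
    {
        SqlParser parser = new SqlParser();
        System.out.println(parseOffset(parser, "SELECT * FROM table1 OFFSET 2 ROWS").getRowCount()); // 2
        System.out.println(parseOffset(parser, "SELECT * FROM table1 OFFSET 2").getRowCount());      // 2
    }
}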
Use of com.facebook.presto.sql.tree.OrderBy in project presto by prestodb.
In class LimitQueryDeterminismAnalyzer, method analyzeQuerySpecification:
private LimitQueryDeterminismAnalysis analyzeQuerySpecification(Optional<With> with, QuerySpecification querySpecification)
{
    if (!querySpecification.getLimit().isPresent()) {
        return NOT_RUN;
    }
    if (isLimitAll(querySpecification.getLimit().get())) {
        return NOT_RUN;
    }
    long limit = parseLong(querySpecification.getLimit().get());
    if (rowCount < limit) {
        return DETERMINISTIC;
    }

    Optional<String> newLimit = Optional.of(Long.toString(limit + 1));
    Optional<OrderBy> orderBy = querySpecification.getOrderBy();
    if (orderBy.isPresent()) {
        List<SelectItem> selectItems = new ArrayList<>(querySpecification.getSelect().getSelectItems());
        List<ColumnNameOrIndex> orderByKeys = populateSelectItems(selectItems, orderBy.get());
        return analyzeLimitOrderBy(
                new Query(
                        with,
                        new QuerySpecification(
                                new Select(false, selectItems),
                                querySpecification.getFrom(),
                                querySpecification.getWhere(),
                                querySpecification.getGroupBy(),
                                querySpecification.getHaving(),
                                orderBy,
                                querySpecification.getOffset(),
                                newLimit),
                        Optional.empty(),
                        Optional.empty(),
                        Optional.empty()),
                orderByKeys,
                limit);
    }

    Query newLimitQuery = new Query(
            with,
            new QuerySpecification(
                    querySpecification.getSelect(),
                    querySpecification.getFrom(),
                    querySpecification.getWhere(),
                    querySpecification.getGroupBy(),
                    querySpecification.getHaving(),
                    Optional.empty(),
                    querySpecification.getOffset(),
                    newLimit),
            Optional.empty(),
            Optional.empty(),
            Optional.empty());
    return analyzeLimitNoOrderBy(newLimitQuery, limit);
}
Use of com.facebook.presto.sql.tree.OrderBy in project presto by prestodb.
In class QueryPlanner, method aggregate:
private PlanBuilder aggregate(PlanBuilder subPlan, QuerySpecification node)
{
    if (!analysis.isAggregation(node)) {
        return subPlan;
    }

    // 1. Pre-project all scalar inputs (arguments and non-trivial group by expressions)
    Set<Expression> groupByExpressions = ImmutableSet.copyOf(analysis.getGroupByExpressions(node));

    ImmutableList.Builder<Expression> arguments = ImmutableList.builder();
    analysis.getAggregates(node).stream()
            .map(FunctionCall::getArguments)
            .flatMap(List::stream)
            .filter(exp -> !(exp instanceof LambdaExpression)) // lambda expression is generated at execution time
            .forEach(arguments::add);

    analysis.getAggregates(node).stream()
            .map(FunctionCall::getOrderBy)
            .filter(Optional::isPresent)
            .map(Optional::get)
            .map(OrderBy::getSortItems)
            .flatMap(List::stream)
            .map(SortItem::getSortKey)
            .forEach(arguments::add);

    // filter expressions need to be projected first
    analysis.getAggregates(node).stream()
            .map(FunctionCall::getFilter)
            .filter(Optional::isPresent)
            .map(Optional::get)
            .forEach(arguments::add);

    Iterable<Expression> inputs = Iterables.concat(groupByExpressions, arguments.build());
    subPlan = handleSubqueries(subPlan, node, inputs);
    if (!Iterables.isEmpty(inputs)) {
        // avoid an empty projection if the only aggregation is COUNT (which has no arguments)
        subPlan = project(subPlan, inputs);
    }
    // 2. Aggregate

    // 2.a. Rewrite aggregate arguments
    TranslationMap argumentTranslations = new TranslationMap(subPlan.getRelationPlan(), analysis, lambdaDeclarationToVariableMap);
    ImmutableList.Builder<VariableReferenceExpression> aggregationArgumentsBuilder = ImmutableList.builder();
    for (Expression argument : arguments.build()) {
        VariableReferenceExpression variable = subPlan.translate(argument);
        argumentTranslations.put(argument, variable);
        aggregationArgumentsBuilder.add(variable);
    }
    List<VariableReferenceExpression> aggregationArguments = aggregationArgumentsBuilder.build();

    // 2.b. Rewrite grouping columns
    TranslationMap groupingTranslations = new TranslationMap(subPlan.getRelationPlan(), analysis, lambdaDeclarationToVariableMap);
    Map<VariableReferenceExpression, VariableReferenceExpression> groupingSetMappings = new LinkedHashMap<>();

    for (Expression expression : groupByExpressions) {
        VariableReferenceExpression input = subPlan.translate(expression);
        VariableReferenceExpression output = variableAllocator.newVariable(expression, analysis.getTypeWithCoercions(expression), "gid");
        groupingTranslations.put(expression, output);
        groupingSetMappings.put(output, input);
    }

    // This tracks the grouping sets before complex expressions are considered (see comments below).
    // It's also used to compute the descriptors needed to implement grouping().
    List<Set<FieldId>> columnOnlyGroupingSets = ImmutableList.of(ImmutableSet.of());
    List<List<VariableReferenceExpression>> groupingSets = ImmutableList.of(ImmutableList.of());

    if (node.getGroupBy().isPresent()) {
        // For the purpose of "distinct", we need to canonicalize column references that may have varying
        // syntactic forms (e.g., "t.a" vs "a"). Thus we need to enumerate grouping sets based on the underlying
        // fieldId associated with each column reference expression.
        // The catch is that simple group-by expressions can be arbitrary expressions (this is a departure from the SQL specification).
        // But they don't affect the number of grouping sets or the behavior of "distinct". We can compute all the candidate
        // grouping sets in terms of fieldId, dedup as appropriate and then cross-join them with the complex expressions.
        Analysis.GroupingSetAnalysis groupingSetAnalysis = analysis.getGroupingSets(node);
        columnOnlyGroupingSets = enumerateGroupingSets(groupingSetAnalysis);
        if (node.getGroupBy().get().isDistinct()) {
            columnOnlyGroupingSets = columnOnlyGroupingSets.stream()
                    .distinct()
                    .collect(toImmutableList());
        }

        // add in the complex expressions and then materialize the grouping sets in terms of plan columns
        ImmutableList.Builder<List<VariableReferenceExpression>> groupingSetBuilder = ImmutableList.builder();
        for (Set<FieldId> groupingSet : columnOnlyGroupingSets) {
            ImmutableList.Builder<VariableReferenceExpression> columns = ImmutableList.builder();
            groupingSetAnalysis.getComplexExpressions().stream()
                    .map(groupingTranslations::get)
                    .forEach(columns::add);
            groupingSet.stream()
                    .map(field -> groupingTranslations.get(new FieldReference(field.getFieldIndex())))
                    .forEach(columns::add);
            groupingSetBuilder.add(columns.build());
        }
        groupingSets = groupingSetBuilder.build();
    }
    // 2.c. Generate GroupIdNode (multiple grouping sets) or ProjectNode (single grouping set)
    Optional<VariableReferenceExpression> groupIdVariable = Optional.empty();
    if (groupingSets.size() > 1) {
        groupIdVariable = Optional.of(variableAllocator.newVariable("groupId", BIGINT));
        GroupIdNode groupId = new GroupIdNode(
                subPlan.getRoot().getSourceLocation(),
                idAllocator.getNextId(),
                subPlan.getRoot(),
                groupingSets,
                groupingSetMappings,
                aggregationArguments,
                groupIdVariable.get());
        subPlan = new PlanBuilder(groupingTranslations, groupId);
    }
    else {
        Assignments.Builder assignments = Assignments.builder();
        aggregationArguments.stream().map(AssignmentUtils::identityAsSymbolReference).forEach(assignments::put);
        groupingSetMappings.forEach((key, value) -> assignments.put(key, castToRowExpression(asSymbolReference(value))));

        ProjectNode project = new ProjectNode(subPlan.getRoot().getSourceLocation(), idAllocator.getNextId(), subPlan.getRoot(), assignments.build(), LOCAL);
        subPlan = new PlanBuilder(groupingTranslations, project);
    }

    TranslationMap aggregationTranslations = new TranslationMap(subPlan.getRelationPlan(), analysis, lambdaDeclarationToVariableMap);
    aggregationTranslations.copyMappingsFrom(groupingTranslations);

    // 2.d. Rewrite aggregates
    ImmutableMap.Builder<VariableReferenceExpression, Aggregation> aggregationsBuilder = ImmutableMap.builder();
    boolean needPostProjectionCoercion = false;
    for (FunctionCall aggregate : analysis.getAggregates(node)) {
        Expression rewritten = argumentTranslations.rewrite(aggregate);
        VariableReferenceExpression newVariable = variableAllocator.newVariable(rewritten, analysis.getType(aggregate));

        // Therefore we can end up with this implicit cast, and have to move it into a post-projection
        if (rewritten instanceof Cast) {
            rewritten = ((Cast) rewritten).getExpression();
            needPostProjectionCoercion = true;
        }
        aggregationTranslations.put(aggregate, newVariable);

        FunctionCall rewrittenFunction = (FunctionCall) rewritten;
        aggregationsBuilder.put(newVariable, new Aggregation(
                new CallExpression(
                        getSourceLocation(rewrittenFunction),
                        aggregate.getName().getSuffix(),
                        analysis.getFunctionHandle(aggregate),
                        analysis.getType(aggregate),
                        rewrittenFunction.getArguments().stream().map(OriginalExpressionUtils::castToRowExpression).collect(toImmutableList())),
                rewrittenFunction.getFilter().map(OriginalExpressionUtils::castToRowExpression),
                rewrittenFunction.getOrderBy().map(orderBy -> toOrderingScheme(orderBy, variableAllocator.getTypes())),
                rewrittenFunction.isDistinct(),
                Optional.empty()));
    }
    Map<VariableReferenceExpression, Aggregation> aggregations = aggregationsBuilder.build();

    ImmutableSet.Builder<Integer> globalGroupingSets = ImmutableSet.builder();
    for (int i = 0; i < groupingSets.size(); i++) {
        if (groupingSets.get(i).isEmpty()) {
            globalGroupingSets.add(i);
        }
    }

    ImmutableList.Builder<VariableReferenceExpression> groupingKeys = ImmutableList.builder();
    groupingSets.stream()
            .flatMap(List::stream)
            .distinct()
            .forEach(groupingKeys::add);
    groupIdVariable.ifPresent(groupingKeys::add);

    AggregationNode aggregationNode = new AggregationNode(
            subPlan.getRoot().getSourceLocation(),
            idAllocator.getNextId(),
            subPlan.getRoot(),
            aggregations,
            groupingSets(groupingKeys.build(), groupingSets.size(), globalGroupingSets.build()),
            ImmutableList.of(),
            AggregationNode.Step.SINGLE,
            Optional.empty(),
            groupIdVariable);

    subPlan = new PlanBuilder(aggregationTranslations, aggregationNode);

    // TODO: this is a hack, we should change type coercions to coerce the inputs to functions/operators instead of coercing the output
    if (needPostProjectionCoercion) {
        ImmutableList.Builder<Expression> alreadyCoerced = ImmutableList.builder();
        alreadyCoerced.addAll(groupByExpressions);
        groupIdVariable.map(ExpressionTreeUtils::createSymbolReference).ifPresent(alreadyCoerced::add);

        subPlan = explicitCoercionFields(subPlan, alreadyCoerced.build(), analysis.getAggregates(node));
    }

    // 4. Project and re-write all grouping functions
    return handleGroupingOperations(subPlan, node, groupIdVariable, columnOnlyGroupingSets);
}
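Step 1 above is where the ORDER BY clauses attached to aggregate calls matter: their sort keys must be pre-projected together with the ordinary arguments. A minimal sketch of that extraction, written against the AST types only (the surrounding Analysis and PlanBuilder machinery is omitted):

// Sketch only: for aggregates such as array_agg(x ORDER BY y), the ORDER BY sort keys (y)
// must be available as inputs to the aggregation, so they are collected for pre-projection
// alongside the ordinary arguments (x).
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.FunctionCall;
import com.facebook.presto.sql.tree.OrderBy;
import com.facebook.presto.sql.tree.SortItem;

import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

public class AggregateOrderByArguments
{
    static List<Expression> orderBySortKeys(List<FunctionCall> aggregates)
    {
        return aggregates.stream()
                .map(FunctionCall::getOrderBy)
                .filter(Optional::isPresent)
                .map(Optional::get)
                .map(OrderBy::getSortItems)
                .flatMap(List::stream)
                .map(SortItem::getSortKey)
                .collect(Collectors.toList());
    }
}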