Use of io.prestosql.sql.tree.Cast in project hetu-core by openlookeng.
The class QueryPlanner, method window.
private PlanBuilder window(PlanBuilder inputSubPlan, List<FunctionCall> windowFunctions)
{
    PlanBuilder subPlan = inputSubPlan;
    if (windowFunctions.isEmpty()) {
        return subPlan;
    }

    for (FunctionCall windowFunction : windowFunctions) {
        Window window = windowFunction.getWindow().get();

        // Extract frame
        WindowFrameType frameType = RANGE;
        FrameBoundType frameStartType = UNBOUNDED_PRECEDING;
        FrameBoundType frameEndType = CURRENT_ROW;
        Expression frameStart = null;
        Expression frameEnd = null;

        if (window.getFrame().isPresent()) {
            WindowFrame frame = window.getFrame().get();
            frameType = frame.getType();

            frameStartType = frame.getStart().getType();
            frameStart = frame.getStart().getValue().orElse(null);

            if (frame.getEnd().isPresent()) {
                frameEndType = frame.getEnd().get().getType();
                frameEnd = frame.getEnd().get().getValue().orElse(null);
            }
        }

        // Pre-project inputs
        ImmutableList.Builder<Expression> inputs = ImmutableList.<Expression>builder()
                .addAll(windowFunction.getArguments())
                .addAll(window.getPartitionBy())
                .addAll(Iterables.transform(getSortItemsFromOrderBy(window.getOrderBy()), SortItem::getSortKey));

        if (frameStart != null) {
            inputs.add(frameStart);
        }
        if (frameEnd != null) {
            inputs.add(frameEnd);
        }

        subPlan = subPlan.appendProjections(inputs.build(), planSymbolAllocator, idAllocator);

        // Rewrite PARTITION BY in terms of pre-projected inputs
        ImmutableList.Builder<Symbol> partitionBySymbols = ImmutableList.builder();
        for (Expression expression : window.getPartitionBy()) {
            partitionBySymbols.add(subPlan.translate(expression));
        }

        // Rewrite ORDER BY in terms of pre-projected inputs
        LinkedHashMap<Symbol, SortOrder> orderings = new LinkedHashMap<>();
        for (SortItem item : getSortItemsFromOrderBy(window.getOrderBy())) {
            Symbol symbol = subPlan.translate(item.getSortKey());
            // don't override existing keys, i.e. when "ORDER BY a ASC, a DESC" is specified
            orderings.putIfAbsent(symbol, sortItemToSortOrder(item));
        }

        // Rewrite frame bounds in terms of pre-projected inputs
        Optional<Symbol> frameStartSymbol = Optional.empty();
        Optional<Symbol> frameEndSymbol = Optional.empty();
        if (frameStart != null) {
            frameStartSymbol = Optional.of(subPlan.translate(frameStart));
        }
        if (frameEnd != null) {
            frameEndSymbol = Optional.of(subPlan.translate(frameEnd));
        }

        WindowNode.Frame frame = new WindowNode.Frame(
                frameType,
                frameStartType,
                frameStartSymbol,
                frameEndType,
                frameEndSymbol,
                Optional.ofNullable(frameStart).map(Expression::toString),
                Optional.ofNullable(frameEnd).map(Expression::toString));

        TranslationMap outputTranslations = subPlan.copyTranslations();

        // Rewrite function call in terms of pre-projected inputs
        Expression rewritten = subPlan.rewrite(windowFunction);

        boolean needCoercion = rewritten instanceof Cast;
        // Strip out the cast and add it back as a post-projection
        if (rewritten instanceof Cast) {
            rewritten = ((Cast) rewritten).getExpression();
        }

        // If refers to existing symbol, don't create another PlanNode
        if (rewritten instanceof SymbolReference) {
            if (needCoercion) {
                subPlan = explicitCoercionSymbols(subPlan, subPlan.getRoot().getOutputSymbols(), ImmutableList.of(windowFunction));
            }
            continue;
        }

        Type returnType = analysis.getType(windowFunction);
        Symbol newSymbol = planSymbolAllocator.newSymbol(rewritten, analysis.getType(windowFunction));
        outputTranslations.put(windowFunction, newSymbol);

        List<RowExpression> arguments = new ArrayList<>();
        for (int i = 0; i < ((FunctionCall) rewritten).getArguments().size(); i++) {
            arguments.add(castToRowExpression(((FunctionCall) rewritten).getArguments().get(i)));
        }

        WindowNode.Function function = new WindowNode.Function(
                call(
                        windowFunction.getName().toString(),
                        analysis.getFunctionHandle(windowFunction),
                        returnType,
                        ((FunctionCall) rewritten).getArguments().stream()
                                .map(OriginalExpressionUtils::castToRowExpression)
                                .collect(toImmutableList())),
                arguments,
                frame);

        List<Symbol> sourceSymbols = subPlan.getRoot().getOutputSymbols();

        ImmutableList.Builder<Symbol> orderBySymbols = ImmutableList.builder();
        orderBySymbols.addAll(orderings.keySet());

        Optional<OrderingScheme> orderingScheme = Optional.empty();
        if (!orderings.isEmpty()) {
            orderingScheme = Optional.of(new OrderingScheme(orderBySymbols.build(), orderings));
        }

        // create window node
        subPlan = new PlanBuilder(
                outputTranslations,
                new WindowNode(
                        idAllocator.getNextId(),
                        subPlan.getRoot(),
                        new WindowNode.Specification(partitionBySymbols.build(), orderingScheme),
                        ImmutableMap.of(newSymbol, function),
                        Optional.empty(),
                        ImmutableSet.of(),
                        0));

        if (needCoercion) {
            subPlan = explicitCoercionSymbols(subPlan, sourceSymbols, ImmutableList.of(windowFunction));
        }
    }

    return subPlan;
}
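The Cast handling worth noting above is the strip-and-reapply pattern: when the analyzer has recorded a coercion for the window function, the rewritten expression arrives wrapped in a Cast, the planner unwraps it so the WindowNode computes the raw value, and the coercion is re-added afterwards as a post-projection. The following minimal sketch isolates just that unwrapping step; the symbol name "a" and the bigint target type are illustrative, not taken from QueryPlanner.

import io.prestosql.sql.tree.Cast;
import io.prestosql.sql.tree.Expression;
import io.prestosql.sql.tree.SymbolReference;

public final class CastStripSketch
{
    public static void main(String[] args)
    {
        // Pretend the analyzer coerced the window function output "a" to bigint (illustrative only).
        Expression rewritten = new Cast(new SymbolReference("a"), "bigint");

        // Same check-and-unwrap logic as in QueryPlanner.window():
        boolean needCoercion = rewritten instanceof Cast;
        Expression stripped = needCoercion ? ((Cast) rewritten).getExpression() : rewritten;

        System.out.println("rewritten:    " + rewritten);
        System.out.println("stripped:     " + stripped);
        System.out.println("needCoercion: " + needCoercion);
    }
}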
Use of io.prestosql.sql.tree.Cast in project hetu-core by openlookeng.
The class RelationPlanner, method planJoinUsing.
private RelationPlan planJoinUsing(Join node, RelationPlan left, RelationPlan right)
{
    /* Given: l JOIN r USING (k1, ..., kn)

       produces:

        - project
                coalesce(l.k1, r.k1)
                ...,
                coalesce(l.kn, r.kn)
                l.v1,
                ...,
                l.vn,
                r.v1,
                ...,
                r.vn
          - join (l.k1 = r.k1 and ... l.kn = r.kn)
                - project
                        cast(l.k1 as commonType(l.k1, r.k1))
                        ...
                - project
                        cast(r.k1 as commonType(l.k1, r.k1))

       If casts are redundant (due to column type and common type being equal),
       they will be removed by optimization passes.
     */
    List<Identifier> joinColumns = ((JoinUsing) node.getCriteria().get()).getColumns();
    Analysis.JoinUsingAnalysis joinAnalysis = analysis.getJoinUsing(node);

    ImmutableList.Builder<JoinNode.EquiJoinClause> clauses = ImmutableList.builder();

    Map<Identifier, Symbol> leftJoinColumns = new HashMap<>();
    Map<Identifier, Symbol> rightJoinColumns = new HashMap<>();

    Assignments.Builder leftCoercions = Assignments.builder();
    Assignments.Builder rightCoercions = Assignments.builder();
    leftCoercions.putAll(AssignmentUtils.identityAsSymbolReferences(left.getRoot().getOutputSymbols()));
    rightCoercions.putAll(AssignmentUtils.identityAsSymbolReferences(right.getRoot().getOutputSymbols()));

    for (int i = 0; i < joinColumns.size(); i++) {
        Identifier identifier = joinColumns.get(i);
        Type type = analysis.getType(identifier);

        // compute the coercion for the field on the left to the common supertype of left & right
        Symbol leftOutput = planSymbolAllocator.newSymbol(identifier, type);
        int leftField = joinAnalysis.getLeftJoinFields().get(i);
        leftCoercions.put(leftOutput, castToRowExpression(new Cast(
                toSymbolReference(left.getSymbol(leftField)),
                type.getTypeSignature().toString(),
                false,
                typeCoercion.isTypeOnlyCoercion(left.getDescriptor().getFieldByIndex(leftField).getType(), type))));
        leftJoinColumns.put(identifier, leftOutput);

        // compute the coercion for the field on the right to the common supertype of left & right
        Symbol rightOutput = planSymbolAllocator.newSymbol(identifier, type);
        int rightField = joinAnalysis.getRightJoinFields().get(i);
        rightCoercions.put(rightOutput, castToRowExpression(new Cast(
                toSymbolReference(right.getSymbol(rightField)),
                type.getTypeSignature().toString(),
                false,
                typeCoercion.isTypeOnlyCoercion(right.getDescriptor().getFieldByIndex(rightField).getType(), type))));
        rightJoinColumns.put(identifier, rightOutput);

        clauses.add(new JoinNode.EquiJoinClause(leftOutput, rightOutput));
    }

    ProjectNode leftCoercion = new ProjectNode(idAllocator.getNextId(), left.getRoot(), leftCoercions.build());
    ProjectNode rightCoercion = new ProjectNode(idAllocator.getNextId(), right.getRoot(), rightCoercions.build());

    JoinNode join = new JoinNode(
            idAllocator.getNextId(),
            JoinNodeUtils.typeConvert(node.getType()),
            leftCoercion,
            rightCoercion,
            clauses.build(),
            ImmutableList.<Symbol>builder()
                    .addAll(leftCoercion.getOutputSymbols())
                    .addAll(rightCoercion.getOutputSymbols())
                    .build(),
            Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(),
            ImmutableMap.of());

    // Add a projection to produce the outputs of the columns in the USING clause,
    // which are defined as coalesce(l.k, r.k)
    Assignments.Builder assignments = Assignments.builder();
    ImmutableList.Builder<Symbol> outputs = ImmutableList.builder();
    for (Identifier column : joinColumns) {
        Symbol output = planSymbolAllocator.newSymbol(column, analysis.getType(column));
        outputs.add(output);
        assignments.put(output, castToRowExpression(new CoalesceExpression(
                toSymbolReference(leftJoinColumns.get(column)),
                toSymbolReference(rightJoinColumns.get(column)))));
    }

    for (int field : joinAnalysis.getOtherLeftFields()) {
        Symbol symbol = left.getFieldMappings().get(field);
        outputs.add(symbol);
        assignments.put(symbol, castToRowExpression(toSymbolReference(symbol)));
    }

    for (int field : joinAnalysis.getOtherRightFields()) {
        Symbol symbol = right.getFieldMappings().get(field);
        outputs.add(symbol);
        assignments.put(symbol, castToRowExpression(toSymbolReference(symbol)));
    }

    return new RelationPlan(new ProjectNode(idAllocator.getNextId(), join, assignments.build()), analysis.getScope(node), outputs.build());
}
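Here each join key in the USING clause is coerced to the common supertype on both sides by wrapping a symbol reference in a Cast (marked type-only when no runtime conversion is actually needed), and the visible output column is a COALESCE over the two coerced keys. The sketch below builds those expressions directly at the AST level, outside the planner; the symbol names l_k1 and r_k1 and the bigint supertype are assumptions for illustration.

import io.prestosql.sql.tree.Cast;
import io.prestosql.sql.tree.CoalesceExpression;
import io.prestosql.sql.tree.Expression;
import io.prestosql.sql.tree.SymbolReference;

public final class JoinUsingCastSketch
{
    public static void main(String[] args)
    {
        // Assume l.k1 is integer and r.k1 is already bigint, so the common supertype is bigint (illustrative).
        Expression leftCoerced = new Cast(new SymbolReference("l_k1"), "bigint", false, false);
        // Type-only coercion: the right key already has the target type, so the cast is only a relabeling.
        Expression rightCoerced = new Cast(new SymbolReference("r_k1"), "bigint", false, true);

        // The USING output column is defined as coalesce over the coerced keys.
        Expression usingColumn = new CoalesceExpression(leftCoerced, rightCoerced);
        System.out.println(usingColumn);
    }
}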
Use of io.prestosql.sql.tree.Cast in project hetu-core by openlookeng.
The class LiteralEncoder, method toExpression.
public Expression toExpression(Object inputObject, Type type)
{
    requireNonNull(type, "type is null");
    Object object = inputObject;

    if (object instanceof Expression) {
        return (Expression) object;
    }

    if (object == null) {
        if (type.equals(UNKNOWN)) {
            return new NullLiteral();
        }
        return new Cast(new NullLiteral(), type.getTypeSignature().toString(), false, true);
    }

    // AbstractIntType internally uses long as javaType. So specially handled for AbstractIntType types.
    Class<?> wrap = Primitives.wrap(type.getJavaType());
    checkArgument(
            wrap.isInstance(object) || (type instanceof AbstractIntType && wrap == Long.class && Integer.class.isInstance(object)),
            "object.getClass (%s) and type.getJavaType (%s) do not agree",
            object.getClass(),
            type.getJavaType());

    if (type.equals(TINYINT)) {
        return new GenericLiteral("TINYINT", object.toString());
    }

    if (type.equals(SMALLINT)) {
        return new GenericLiteral("SMALLINT", object.toString());
    }

    if (type.equals(INTEGER)) {
        return new LongLiteral(object.toString());
    }

    if (type.equals(BIGINT)) {
        LongLiteral expression = new LongLiteral(object.toString());
        if (expression.getValue() >= Integer.MIN_VALUE && expression.getValue() <= Integer.MAX_VALUE) {
            return new GenericLiteral("BIGINT", object.toString());
        }
        return new LongLiteral(object.toString());
    }

    if (type.equals(DOUBLE)) {
        Double value = (Double) object;
        // When changing this, don't forget about similar code for REAL below
        if (value.isNaN()) {
            return new FunctionCallBuilder(metadata).setName(QualifiedName.of("nan")).build();
        }
        if (value.equals(Double.NEGATIVE_INFINITY)) {
            return ArithmeticUnaryExpression.negative(new FunctionCallBuilder(metadata).setName(QualifiedName.of("infinity")).build());
        }
        if (value.equals(Double.POSITIVE_INFINITY)) {
            return new FunctionCallBuilder(metadata).setName(QualifiedName.of("infinity")).build();
        }
        return new DoubleLiteral(object.toString());
    }

    if (type.equals(REAL)) {
        Float value = intBitsToFloat(((Long) object).intValue());
        // WARNING for ORC predicate code as above (for double)
        if (value.isNaN()) {
            return new Cast(new FunctionCallBuilder(metadata).setName(QualifiedName.of("nan")).build(), StandardTypes.REAL);
        }
        if (value.equals(Float.NEGATIVE_INFINITY)) {
            return ArithmeticUnaryExpression.negative(new Cast(new FunctionCallBuilder(metadata).setName(QualifiedName.of("infinity")).build(), StandardTypes.REAL));
        }
        if (value.equals(Float.POSITIVE_INFINITY)) {
            return new Cast(new FunctionCallBuilder(metadata).setName(QualifiedName.of("infinity")).build(), StandardTypes.REAL);
        }
        return new GenericLiteral("REAL", value.toString());
    }

    if (type instanceof DecimalType) {
        String string;
        if (isShortDecimal(type)) {
            string = Decimals.toString((long) object, ((DecimalType) type).getScale());
        }
        else {
            string = Decimals.toString((Slice) object, ((DecimalType) type).getScale());
        }
        return new Cast(new DecimalLiteral(string), type.getDisplayName());
    }

    if (type instanceof VarcharType) {
        VarcharType varcharType = (VarcharType) type;
        Slice value = (Slice) object;
        StringLiteral stringLiteral = new StringLiteral(value.toStringUtf8());
        if (!varcharType.isUnbounded() && varcharType.getBoundedLength() == SliceUtf8.countCodePoints(value)) {
            return stringLiteral;
        }
        return new Cast(stringLiteral, type.getDisplayName(), false, true);
    }

    if (type instanceof CharType) {
        StringLiteral stringLiteral = new StringLiteral(((Slice) object).toStringUtf8());
        return new Cast(stringLiteral, type.getDisplayName(), false, true);
    }

    if (type.equals(BOOLEAN)) {
        return new BooleanLiteral(object.toString());
    }

    if (type.equals(DATE)) {
        return new GenericLiteral("DATE", new SqlDate(toIntExact((Long) object)).toString());
    }

    if (type.equals(TIMESTAMP)) {
        return new GenericLiteral("TIMESTAMP", new SqlTimestamp((Long) object).toString());
    }

    if (object instanceof Block) {
        SliceOutput output = new DynamicSliceOutput(toIntExact(((Block) object).getSizeInBytes()));
        BlockSerdeUtil.writeBlock(metadata.getFunctionAndTypeManager().getBlockEncodingSerde(), output, (Block) object);
        object = output.slice();
        // This if condition will evaluate to true: object instanceof Slice && !type.equals(VARCHAR)
    }

    Signature signature = LiteralFunction.getLiteralFunctionSignature(type);
    Type argumentType = typeForLiteralFunctionArgument(type);

    Expression argument;
    if (object instanceof Slice) {
        // HACK: we need to serialize VARBINARY in a format that can be embedded in an expression to be
        // able to encode it in the plan that gets sent to workers.
        // We do this by transforming the in-memory varbinary into a call to from_base64(<base64-encoded value>)
        Slice encoded = VarbinaryFunctions.toBase64((Slice) object);
        argument = new FunctionCallBuilder(metadata)
                .setName(QualifiedName.of("from_base64"))
                .addArgument(VARCHAR, new StringLiteral(encoded.toStringUtf8()))
                .build();
    }
    else {
        argument = toExpression(object, argumentType);
    }

    return new FunctionCallBuilder(metadata)
            .setName(QualifiedName.of(signature.getNameSuffix()))
            .addArgument(argumentType, argument)
            .build();
}
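Most branches above can express the value with a plain literal, but a Cast is used whenever the literal syntax alone cannot carry the full type: typed NULLs, decimals with an exact precision and scale, and bounded varchar or char values. A small sketch of those three cases follows; the concrete type strings (bigint, decimal(4,2), varchar(10)) are illustrative stand-ins for what getTypeSignature().toString() or getDisplayName() would produce.

import io.prestosql.sql.tree.Cast;
import io.prestosql.sql.tree.DecimalLiteral;
import io.prestosql.sql.tree.Expression;
import io.prestosql.sql.tree.NullLiteral;
import io.prestosql.sql.tree.StringLiteral;

public final class LiteralCastSketch
{
    public static void main(String[] args)
    {
        // Typed NULL: the last constructor argument marks the cast as type-only, since no conversion happens at runtime.
        Expression typedNull = new Cast(new NullLiteral(), "bigint", false, true);

        // Decimal literal wrapped in a cast to pin the exact precision and scale.
        Expression decimal = new Cast(new DecimalLiteral("12.30"), "decimal(4,2)");

        // Bounded varchar whose value is shorter than the declared length, again a type-only cast.
        Expression varchar = new Cast(new StringLiteral("ab"), "varchar(10)", false, true);

        System.out.println(typedNull);
        System.out.println(decimal);
        System.out.println(varchar);
    }
}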
Use of io.prestosql.sql.tree.Cast in project hetu-core by openlookeng.
The class ParameterRewriter, method coerceIfNecessary.
private Expression coerceIfNecessary(Expression original, Expression rewritten)
{
    if (analysis == null) {
        return rewritten;
    }

    Type coercion = analysis.getCoercion(original);
    Expression rewrittenExpression = rewritten;
    if (coercion != null) {
        rewrittenExpression = new Cast(
                rewrittenExpression,
                coercion.getTypeSignature().toString(),
                false,
                analysis.isTypeOnlyCoercion(original));
    }
    return rewrittenExpression;
}
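ParameterRewriter applies the same rule to bound parameter values: if the analysis recorded a coercion for the original expression, the rewritten value is wrapped in a Cast to the target type, marked type-only when the analysis says so. A minimal sketch of that conditional wrapping, with a plain String standing in for the Type lookup, is shown below; the helper name and the bigint target are illustrative.

import io.prestosql.sql.tree.Cast;
import io.prestosql.sql.tree.Expression;
import io.prestosql.sql.tree.LongLiteral;

public final class CoerceIfNecessarySketch
{
    // Stand-in for analysis.getCoercion(original): null means no coercion was recorded.
    static Expression coerceIfNecessary(Expression rewritten, String targetType, boolean typeOnly)
    {
        if (targetType == null) {
            return rewritten;
        }
        return new Cast(rewritten, targetType, false, typeOnly);
    }

    public static void main(String[] args)
    {
        Expression boundValue = new LongLiteral("1");
        System.out.println(coerceIfNecessary(boundValue, "bigint", false)); // wrapped in a cast
        System.out.println(coerceIfNecessary(boundValue, null, false)); // returned unchanged
    }
}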
Use of io.prestosql.sql.tree.Cast in project hetu-core by openlookeng.
The class TestScalarStatsCalculator, method testCastDoubleToShortRange.
@Test
public void testCastDoubleToShortRange()
{
    PlanNodeStatsEstimate inputStatistics = PlanNodeStatsEstimate.builder()
            .addSymbolStatistics(new Symbol("a"), SymbolStatsEstimate.builder()
                    .setNullsFraction(0.3).setLowValue(1.6).setHighValue(3.3).setDistinctValuesCount(10).setAverageRowSize(2.0).build())
            .build();
    assertCalculate(new Cast(new SymbolReference("a"), "bigint"), inputStatistics)
            .lowValue(2.0).highValue(3.0).distinctValuesCount(2).nullsFraction(0.3).dataSizeUnknown();
}
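The expected bounds follow from how a DOUBLE range collapses under a cast to BIGINT: assuming the round-to-nearest semantics the stats calculator models, the endpoints 1.6 and 3.3 map to 2 and 3, so only two distinct integer values are possible. A quick check of that rounding in plain Java is below.

public final class DoubleToBigintRangeSketch
{
    public static void main(String[] args)
    {
        // Endpoints of the input range [1.6, 3.3] after round-to-nearest, matching the 2.0 / 3.0 bounds asserted above.
        System.out.println(Math.round(1.6)); // 2
        System.out.println(Math.round(3.3)); // 3
    }
}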