use of io.trino.sql.tree.Cast in project trino by trinodb.
From the class ExtractSpatialJoins, the method addPartitioningNodes:
private static PlanNode addPartitioningNodes(PlannerContext plannerContext, Context context, PlanNode node, Symbol partitionSymbol, KdbTree kdbTree, Expression geometry, Optional<Expression> radius) {
    Assignments.Builder projections = Assignments.builder();
    for (Symbol outputSymbol : node.getOutputSymbols()) {
        projections.putIdentity(outputSymbol);
    }
    TypeSignature typeSignature = new TypeSignature(KDB_TREE_TYPENAME);
    FunctionCallBuilder spatialPartitionsCall = FunctionCallBuilder.resolve(context.getSession(), plannerContext.getMetadata())
            .setName(QualifiedName.of("spatial_partitions"))
            .addArgument(typeSignature, new Cast(new StringLiteral(KdbTreeUtils.toJson(kdbTree)), toSqlType(plannerContext.getTypeManager().getType(typeSignature))))
            .addArgument(GEOMETRY_TYPE_SIGNATURE, geometry);
    radius.ifPresent(value -> spatialPartitionsCall.addArgument(DOUBLE, value));
    FunctionCall partitioningFunction = spatialPartitionsCall.build();
    Symbol partitionsSymbol = context.getSymbolAllocator().newSymbol(partitioningFunction, new ArrayType(INTEGER));
    projections.put(partitionsSymbol, partitioningFunction);
    return new UnnestNode(
            context.getIdAllocator().getNextId(),
            new ProjectNode(context.getIdAllocator().getNextId(), node, projections.build()),
            node.getOutputSymbols(),
            ImmutableList.of(new UnnestNode.Mapping(partitionsSymbol, ImmutableList.of(partitionSymbol))),
            Optional.empty(),
            INNER,
            Optional.empty());
}
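The Cast here exists only so that spatial_partitions receives the serialized KDB tree as a KdbTree-typed value rather than a plain varchar. In isolation the fragment reads roughly as follows (a sketch reusing the identifiers from the method above, not standalone code):

// Sketch only: builds the AST equivalent of CAST('<kdb-tree-json>' AS KdbTree).
// kdbTree and plannerContext come from the surrounding method.
TypeSignature kdbTreeSignature = new TypeSignature(KDB_TREE_TYPENAME);
Expression serializedTree = new Cast(
        new StringLiteral(KdbTreeUtils.toJson(kdbTree)),                       // KDB tree serialized to JSON
        toSqlType(plannerContext.getTypeManager().getType(kdbTreeSignature))); // target SQL type: KdbTree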
use of io.trino.sql.tree.Cast in project trino by trinodb.
From the class LiteralEncoder, the method toExpression:
public Expression toExpression(Session session, Object object, Type type) {
    requireNonNull(type, "type is null");

    if (object instanceof Expression) {
        return (Expression) object;
    }

    if (object == null) {
        if (type.equals(UNKNOWN)) {
            return new NullLiteral();
        }
        return new Cast(new NullLiteral(), toSqlType(type), false, true);
    }

    checkArgument(Primitives.wrap(type.getJavaType()).isInstance(object), "object.getClass (%s) and type.getJavaType (%s) do not agree", object.getClass(), type.getJavaType());

    if (type.equals(TINYINT)) {
        return new GenericLiteral("TINYINT", object.toString());
    }
    if (type.equals(SMALLINT)) {
        return new GenericLiteral("SMALLINT", object.toString());
    }
    if (type.equals(INTEGER)) {
        return new LongLiteral(object.toString());
    }
    if (type.equals(BIGINT)) {
        LongLiteral expression = new LongLiteral(object.toString());
        if (expression.getValue() >= Integer.MIN_VALUE && expression.getValue() <= Integer.MAX_VALUE) {
            return new GenericLiteral("BIGINT", object.toString());
        }
        return new LongLiteral(object.toString());
    }
    if (type.equals(DOUBLE)) {
        Double value = (Double) object;
        if (value.isNaN()) {
            return FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
                    .setName(QualifiedName.of("nan"))
                    .build();
        }
        if (value.equals(Double.NEGATIVE_INFINITY)) {
            return ArithmeticUnaryExpression.negative(FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
                    .setName(QualifiedName.of("infinity"))
                    .build());
        }
        if (value.equals(Double.POSITIVE_INFINITY)) {
            return FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
                    .setName(QualifiedName.of("infinity"))
                    .build();
        }
        return new DoubleLiteral(object.toString());
    }
    if (type.equals(REAL)) {
        Float value = intBitsToFloat(((Long) object).intValue());
        if (value.isNaN()) {
            return new Cast(
                    FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
                            .setName(QualifiedName.of("nan"))
                            .build(),
                    toSqlType(REAL));
        }
        if (value.equals(Float.NEGATIVE_INFINITY)) {
            return ArithmeticUnaryExpression.negative(new Cast(
                    FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
                            .setName(QualifiedName.of("infinity"))
                            .build(),
                    toSqlType(REAL)));
        }
        if (value.equals(Float.POSITIVE_INFINITY)) {
            return new Cast(
                    FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
                            .setName(QualifiedName.of("infinity"))
                            .build(),
                    toSqlType(REAL));
        }
        return new GenericLiteral("REAL", value.toString());
    }
    if (type instanceof DecimalType) {
        String string;
        if (isShortDecimal(type)) {
            string = Decimals.toString((long) object, ((DecimalType) type).getScale());
        } else {
            string = Decimals.toString((Int128) object, ((DecimalType) type).getScale());
        }
        return new Cast(new DecimalLiteral(string), toSqlType(type));
    }
    if (type instanceof VarcharType) {
        VarcharType varcharType = (VarcharType) type;
        Slice value = (Slice) object;
        if (varcharType.isUnbounded()) {
            return new GenericLiteral("VARCHAR", value.toStringUtf8());
        }
        StringLiteral stringLiteral = new StringLiteral(value.toStringUtf8());
        int boundedLength = varcharType.getBoundedLength();
        int valueLength = SliceUtf8.countCodePoints(value);
        if (boundedLength == valueLength) {
            return stringLiteral;
        }
        if (boundedLength > valueLength) {
            return new Cast(stringLiteral, toSqlType(type), false, true);
        }
        throw new IllegalArgumentException(format("Value [%s] does not fit in type %s", value.toStringUtf8(), varcharType));
    }
    if (type instanceof CharType) {
        StringLiteral stringLiteral = new StringLiteral(((Slice) object).toStringUtf8());
        return new Cast(stringLiteral, toSqlType(type), false, true);
    }
    if (type.equals(BOOLEAN)) {
        return new BooleanLiteral(object.toString());
    }
    if (type.equals(DATE)) {
        return new GenericLiteral("DATE", new SqlDate(toIntExact((Long) object)).toString());
    }
    if (type instanceof TimestampType) {
        TimestampType timestampType = (TimestampType) type;
        String representation;
        if (timestampType.isShort()) {
            representation = TimestampToVarcharCast.cast(timestampType.getPrecision(), (Long) object).toStringUtf8();
        } else {
            representation = TimestampToVarcharCast.cast(timestampType.getPrecision(), (LongTimestamp) object).toStringUtf8();
        }
        return new TimestampLiteral(representation);
    }
    if (type instanceof TimestampWithTimeZoneType) {
        TimestampWithTimeZoneType timestampWithTimeZoneType = (TimestampWithTimeZoneType) type;
        String representation;
        if (timestampWithTimeZoneType.isShort()) {
            representation = TimestampWithTimeZoneToVarcharCast.cast(timestampWithTimeZoneType.getPrecision(), (long) object).toStringUtf8();
        } else {
            representation = TimestampWithTimeZoneToVarcharCast.cast(timestampWithTimeZoneType.getPrecision(), (LongTimestampWithTimeZone) object).toStringUtf8();
        }
        if (!object.equals(parseTimestampWithTimeZone(timestampWithTimeZoneType.getPrecision(), representation))) {
            // Certain (point in time, time zone) pairs cannot be represented as a TIMESTAMP literal, as the literal uses local date/time in given time zone.
            // Thus, during DST backwards change by e.g. 1 hour, the local time is "repeated" twice and thus one local date/time logically corresponds to two
            // points in time, leaving one of them non-referencable.
            // TODO (https://github.com/trinodb/trino/issues/5781) consider treating such values as illegal
        } else {
            return new TimestampLiteral(representation);
        }
    }

    // If the stack value is not a simple type, encode the stack value in a block
    if (!type.getJavaType().isPrimitive() && type.getJavaType() != Slice.class && type.getJavaType() != Block.class) {
        object = nativeValueToBlock(type, object);
    }

    if (object instanceof Block) {
        SliceOutput output = new DynamicSliceOutput(toIntExact(((Block) object).getSizeInBytes()));
        BlockSerdeUtil.writeBlock(plannerContext.getBlockEncodingSerde(), output, (Block) object);
        object = output.slice();
        // This if condition will evaluate to true: object instanceof Slice && !type.equals(VARCHAR)
    }

    Type argumentType = typeForMagicLiteral(type);
    Expression argument;
    if (object instanceof Slice) {
        // HACK: we need to serialize VARBINARY in a format that can be embedded in an expression to be
        // able to encode it in the plan that gets sent to workers.
        // We do this by transforming the in-memory varbinary into a call to from_base64(<base64-encoded value>)
        Slice encoded = VarbinaryFunctions.toBase64((Slice) object);
        argument = FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
                .setName(QualifiedName.of("from_base64"))
                .addArgument(VARCHAR, new StringLiteral(encoded.toStringUtf8()))
                .build();
    } else {
        argument = toExpression(session, object, argumentType);
    }

    ResolvedFunction resolvedFunction = plannerContext.getMetadata().getCoercion(session, QualifiedName.of(LITERAL_FUNCTION_NAME), argumentType, type);
    return FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
            .setName(resolvedFunction.toQualifiedName())
            .addArgument(argumentType, argument)
            .build();
}
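As a rough illustration of where this method emits a Cast (a sketch, assuming a LiteralEncoder instance named literalEncoder and a Session named session are in scope; both names are hypothetical):

// Typed NULL: becomes CAST(null AS bigint) with safe = false, typeOnly = true.
Expression nullBigint = literalEncoder.toExpression(session, null, BIGINT);
// Short decimal: the unscaled long 12345 with type decimal(5, 2)
// becomes CAST(DECIMAL '123.45' AS decimal(5, 2)).
Expression shortDecimal = literalEncoder.toExpression(session, 12345L, createDecimalType(5, 2));

Other branches (TINYINT, exactly fitting VARCHAR, DOUBLE, and so on) return plain literals without a Cast.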
use of io.trino.sql.tree.Cast in project trino by trinodb.
From the class DynamicFilters, the method extractSourceSymbol:
private static Symbol extractSourceSymbol(DynamicFilters.Descriptor descriptor) {
    Expression dynamicFilterExpression = descriptor.getInput();
    if (dynamicFilterExpression instanceof SymbolReference) {
        return Symbol.from(dynamicFilterExpression);
    }
    checkState(dynamicFilterExpression instanceof Cast);
    checkState(((Cast) dynamicFilterExpression).getExpression() instanceof SymbolReference);
    return Symbol.from(((Cast) dynamicFilterExpression).getExpression());
}
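In other words, a dynamic filter descriptor may carry either a bare symbol reference or a symbol reference wrapped in a Cast, and both resolve to the same source symbol (a sketch with a hypothetical symbol name):

// Both inputs below resolve to the symbol "orderkey".
Expression direct = new SymbolReference("orderkey");
Expression casted = new Cast(new SymbolReference("orderkey"), toSqlType(BIGINT));
// extractSourceSymbol(...) returns Symbol "orderkey" for either form.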
use of io.trino.sql.tree.Cast in project trino by trinodb.
From the class SetOperationNodeTranslator, the method appendMarkers:
private static PlanNode appendMarkers(PlanNodeIdAllocator idAllocator, SymbolAllocator symbolAllocator, PlanNode source, int markerIndex, List<Symbol> markers, Map<Symbol, SymbolReference> projections) {
    Assignments.Builder assignments = Assignments.builder();
    // add existing intersect symbols to projection
    for (Map.Entry<Symbol, SymbolReference> entry : projections.entrySet()) {
        Symbol symbol = symbolAllocator.newSymbol(entry.getKey().getName(), symbolAllocator.getTypes().get(entry.getKey()));
        assignments.put(symbol, entry.getValue());
    }
    // add extra marker fields to the projection
    for (int i = 0; i < markers.size(); ++i) {
        Expression expression = (i == markerIndex) ? TRUE_LITERAL : new Cast(new NullLiteral(), toSqlType(BOOLEAN));
        assignments.put(symbolAllocator.newSymbol(markers.get(i).getName(), BOOLEAN), expression);
    }
    return new ProjectNode(idAllocator.getNextId(), source, assignments.build());
}
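For a set operation over three inputs, the markers projected for the second input (markerIndex = 1) would come out roughly as follows (a sketch of the expressions produced by the loop above; the marker symbol names are illustrative):

// marker_0 -> CAST(null AS boolean)
// marker_1 -> true
// marker_2 -> CAST(null AS boolean)
Expression ownMarker = TRUE_LITERAL;
Expression otherMarker = new Cast(new NullLiteral(), toSqlType(BOOLEAN));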
use of io.trino.sql.tree.Cast in project trino by trinodb.
From the class TransformCorrelatedScalarSubquery, the method apply:
@Override
public Result apply(CorrelatedJoinNode correlatedJoinNode, Captures captures, Context context) {
    PlanNode subquery = context.getLookup().resolve(correlatedJoinNode.getSubquery());
    if (!searchFrom(subquery, context.getLookup()).where(EnforceSingleRowNode.class::isInstance).recurseOnlyWhen(ProjectNode.class::isInstance).matches()) {
        return Result.empty();
    }
    PlanNode rewrittenSubquery = searchFrom(subquery, context.getLookup()).where(EnforceSingleRowNode.class::isInstance).recurseOnlyWhen(ProjectNode.class::isInstance).removeFirst();
    Range<Long> subqueryCardinality = extractCardinality(rewrittenSubquery, context.getLookup());
    boolean producesAtMostOneRow = Range.closed(0L, 1L).encloses(subqueryCardinality);
    if (producesAtMostOneRow) {
        boolean producesSingleRow = Range.singleton(1L).encloses(subqueryCardinality);
        return Result.ofPlanNode(new CorrelatedJoinNode(
                context.getIdAllocator().getNextId(),
                correlatedJoinNode.getInput(),
                rewrittenSubquery,
                correlatedJoinNode.getCorrelation(),
                producesSingleRow ? correlatedJoinNode.getType() : LEFT,
                correlatedJoinNode.getFilter(),
                correlatedJoinNode.getOriginSubquery()));
    }
    Symbol unique = context.getSymbolAllocator().newSymbol("unique", BigintType.BIGINT);
    CorrelatedJoinNode rewrittenCorrelatedJoinNode = new CorrelatedJoinNode(
            context.getIdAllocator().getNextId(),
            new AssignUniqueId(context.getIdAllocator().getNextId(), correlatedJoinNode.getInput(), unique),
            rewrittenSubquery,
            correlatedJoinNode.getCorrelation(),
            LEFT,
            correlatedJoinNode.getFilter(),
            correlatedJoinNode.getOriginSubquery());
    Symbol isDistinct = context.getSymbolAllocator().newSymbol("is_distinct", BOOLEAN);
    MarkDistinctNode markDistinctNode = new MarkDistinctNode(
            context.getIdAllocator().getNextId(),
            rewrittenCorrelatedJoinNode,
            isDistinct,
            rewrittenCorrelatedJoinNode.getInput().getOutputSymbols(),
            Optional.empty());
    FilterNode filterNode = new FilterNode(
            context.getIdAllocator().getNextId(),
            markDistinctNode,
            new SimpleCaseExpression(
                    isDistinct.toSymbolReference(),
                    ImmutableList.of(new WhenClause(TRUE_LITERAL, TRUE_LITERAL)),
                    Optional.of(new Cast(
                            FunctionCallBuilder.resolve(context.getSession(), metadata)
                                    .setName(QualifiedName.of("fail"))
                                    .addArgument(INTEGER, new LongLiteral(Integer.toString(SUBQUERY_MULTIPLE_ROWS.toErrorCode().getCode())))
                                    .addArgument(VARCHAR, new StringLiteral("Scalar sub-query has returned multiple rows"))
                                    .build(),
                            toSqlType(BOOLEAN)))));
    return Result.ofPlanNode(new ProjectNode(context.getIdAllocator().getNextId(), filterNode, Assignments.identity(correlatedJoinNode.getOutputSymbols())));
}
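The Cast in this rule coerces the fail() call to BOOLEAN so that both branches of the surrounding CASE agree on type; the filter predicate is roughly CASE is_distinct WHEN true THEN true ELSE CAST(fail(<error code>, 'Scalar sub-query has returned multiple rows') AS boolean) END. The fragment in isolation (a sketch reusing the names from the rule above, not standalone code):

// Sketch: fail(...) wrapped in a Cast to BOOLEAN to match the WHEN branch of the CASE.
Expression failOnMultipleRows = new Cast(
        FunctionCallBuilder.resolve(context.getSession(), metadata)
                .setName(QualifiedName.of("fail"))
                .addArgument(INTEGER, new LongLiteral(Integer.toString(SUBQUERY_MULTIPLE_ROWS.toErrorCode().getCode())))
                .addArgument(VARCHAR, new StringLiteral("Scalar sub-query has returned multiple rows"))
                .build(),
        toSqlType(BOOLEAN));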