Use of io.trino.metadata.ResolvedFunction in project trino by trinodb.
Class LiteralEncoder, method toExpression:
public Expression toExpression(Session session, Object object, Type type)
{
    requireNonNull(type, "type is null");

    if (object instanceof Expression) {
        return (Expression) object;
    }

    if (object == null) {
        if (type.equals(UNKNOWN)) {
            return new NullLiteral();
        }
        return new Cast(new NullLiteral(), toSqlType(type), false, true);
    }

    checkArgument(
            Primitives.wrap(type.getJavaType()).isInstance(object),
            "object.getClass (%s) and type.getJavaType (%s) do not agree",
            object.getClass(),
            type.getJavaType());

    if (type.equals(TINYINT)) {
        return new GenericLiteral("TINYINT", object.toString());
    }

    if (type.equals(SMALLINT)) {
        return new GenericLiteral("SMALLINT", object.toString());
    }

    if (type.equals(INTEGER)) {
        return new LongLiteral(object.toString());
    }

    if (type.equals(BIGINT)) {
        LongLiteral expression = new LongLiteral(object.toString());
        if (expression.getValue() >= Integer.MIN_VALUE && expression.getValue() <= Integer.MAX_VALUE) {
            return new GenericLiteral("BIGINT", object.toString());
        }
        return new LongLiteral(object.toString());
    }

    if (type.equals(DOUBLE)) {
        Double value = (Double) object;
        if (value.isNaN()) {
            return FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
                    .setName(QualifiedName.of("nan"))
                    .build();
        }
        if (value.equals(Double.NEGATIVE_INFINITY)) {
            return ArithmeticUnaryExpression.negative(FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
                    .setName(QualifiedName.of("infinity"))
                    .build());
        }
        if (value.equals(Double.POSITIVE_INFINITY)) {
            return FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
                    .setName(QualifiedName.of("infinity"))
                    .build();
        }
        return new DoubleLiteral(object.toString());
    }

    if (type.equals(REAL)) {
        Float value = intBitsToFloat(((Long) object).intValue());
        if (value.isNaN()) {
            return new Cast(
                    FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
                            .setName(QualifiedName.of("nan"))
                            .build(),
                    toSqlType(REAL));
        }
        if (value.equals(Float.NEGATIVE_INFINITY)) {
            return ArithmeticUnaryExpression.negative(new Cast(
                    FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
                            .setName(QualifiedName.of("infinity"))
                            .build(),
                    toSqlType(REAL)));
        }
        if (value.equals(Float.POSITIVE_INFINITY)) {
            return new Cast(
                    FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
                            .setName(QualifiedName.of("infinity"))
                            .build(),
                    toSqlType(REAL));
        }
        return new GenericLiteral("REAL", value.toString());
    }

    if (type instanceof DecimalType) {
        String string;
        if (isShortDecimal(type)) {
            string = Decimals.toString((long) object, ((DecimalType) type).getScale());
        }
        else {
            string = Decimals.toString((Int128) object, ((DecimalType) type).getScale());
        }
        return new Cast(new DecimalLiteral(string), toSqlType(type));
    }

    if (type instanceof VarcharType) {
        VarcharType varcharType = (VarcharType) type;
        Slice value = (Slice) object;
        if (varcharType.isUnbounded()) {
            return new GenericLiteral("VARCHAR", value.toStringUtf8());
        }
        StringLiteral stringLiteral = new StringLiteral(value.toStringUtf8());
        int boundedLength = varcharType.getBoundedLength();
        int valueLength = SliceUtf8.countCodePoints(value);
        if (boundedLength == valueLength) {
            return stringLiteral;
        }
        if (boundedLength > valueLength) {
            return new Cast(stringLiteral, toSqlType(type), false, true);
        }
        throw new IllegalArgumentException(format("Value [%s] does not fit in type %s", value.toStringUtf8(), varcharType));
    }

    if (type instanceof CharType) {
        StringLiteral stringLiteral = new StringLiteral(((Slice) object).toStringUtf8());
        return new Cast(stringLiteral, toSqlType(type), false, true);
    }

    if (type.equals(BOOLEAN)) {
        return new BooleanLiteral(object.toString());
    }

    if (type.equals(DATE)) {
        return new GenericLiteral("DATE", new SqlDate(toIntExact((Long) object)).toString());
    }

    if (type instanceof TimestampType) {
        TimestampType timestampType = (TimestampType) type;
        String representation;
        if (timestampType.isShort()) {
            representation = TimestampToVarcharCast.cast(timestampType.getPrecision(), (Long) object).toStringUtf8();
        }
        else {
            representation = TimestampToVarcharCast.cast(timestampType.getPrecision(), (LongTimestamp) object).toStringUtf8();
        }
        return new TimestampLiteral(representation);
    }

    if (type instanceof TimestampWithTimeZoneType) {
        TimestampWithTimeZoneType timestampWithTimeZoneType = (TimestampWithTimeZoneType) type;
        String representation;
        if (timestampWithTimeZoneType.isShort()) {
            representation = TimestampWithTimeZoneToVarcharCast.cast(timestampWithTimeZoneType.getPrecision(), (long) object).toStringUtf8();
        }
        else {
            representation = TimestampWithTimeZoneToVarcharCast.cast(timestampWithTimeZoneType.getPrecision(), (LongTimestampWithTimeZone) object).toStringUtf8();
        }
        if (!object.equals(parseTimestampWithTimeZone(timestampWithTimeZoneType.getPrecision(), representation))) {
            // Certain (point in time, time zone) pairs cannot be represented as a TIMESTAMP literal, as the literal uses local date/time in given time zone.
            // Thus, during DST backwards change by e.g. 1 hour, the local time is "repeated" twice and thus one local date/time logically corresponds to two
            // points in time, leaving one of them non-referencable.
            // TODO (https://github.com/trinodb/trino/issues/5781) consider treating such values as illegal
        }
        else {
            return new TimestampLiteral(representation);
        }
    }

    // If the stack value is not a simple type, encode the stack value in a block
    if (!type.getJavaType().isPrimitive() && type.getJavaType() != Slice.class && type.getJavaType() != Block.class) {
        object = nativeValueToBlock(type, object);
    }

    if (object instanceof Block) {
        SliceOutput output = new DynamicSliceOutput(toIntExact(((Block) object).getSizeInBytes()));
        BlockSerdeUtil.writeBlock(plannerContext.getBlockEncodingSerde(), output, (Block) object);
        object = output.slice();
        // This if condition will evaluate to true: object instanceof Slice && !type.equals(VARCHAR)
    }

    Type argumentType = typeForMagicLiteral(type);
    Expression argument;
    if (object instanceof Slice) {
        // HACK: we need to serialize VARBINARY in a format that can be embedded in an expression to be
        // able to encode it in the plan that gets sent to workers.
        // We do this by transforming the in-memory varbinary into a call to from_base64(<base64-encoded value>)
        Slice encoded = VarbinaryFunctions.toBase64((Slice) object);
        argument = FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
                .setName(QualifiedName.of("from_base64"))
                .addArgument(VARCHAR, new StringLiteral(encoded.toStringUtf8()))
                .build();
    }
    else {
        argument = toExpression(session, object, argumentType);
    }

    ResolvedFunction resolvedFunction = plannerContext.getMetadata().getCoercion(session, QualifiedName.of(LITERAL_FUNCTION_NAME), argumentType, type);
    return FunctionCallBuilder.resolve(session, plannerContext.getMetadata())
            .setName(resolvedFunction.toQualifiedName())
            .addArgument(argumentType, argument)
            .build();
}
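The from_base64 trick in the final branch is easiest to see outside Trino. Below is a minimal, self-contained sketch that uses only java.util.Base64 (not Trino's VarbinaryFunctions) to show that encoding arbitrary bytes as a base64 string literal and decoding them again is a lossless round trip, which is what makes the literal safe to embed in a plan sent to workers.

import java.util.Arrays;
import java.util.Base64;

public class Base64RoundTrip
{
    public static void main(String[] args)
    {
        // Arbitrary binary payload with no printable representation
        byte[] varbinaryValue = {0x00, 0x1F, (byte) 0xFF, 0x42};

        // Encode to a plain ASCII string that can be embedded in an expression,
        // mirroring the from_base64(<base64-encoded value>) call built above
        String literal = Base64.getEncoder().encodeToString(varbinaryValue);

        // Evaluating from_base64 on the literal recovers the original bytes
        byte[] decoded = Base64.getDecoder().decode(literal);

        System.out.println(literal);                                // AB//Qg==
        System.out.println(Arrays.equals(varbinaryValue, decoded)); // true
    }
}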
Use of io.trino.metadata.ResolvedFunction in project trino by trinodb.
Class PushPartialAggregationThroughExchange, method split:
private PlanNode split(AggregationNode node, Context context)
{
    // otherwise, add a partial and final with an exchange in between
    Map<Symbol, AggregationNode.Aggregation> intermediateAggregation = new HashMap<>();
    Map<Symbol, AggregationNode.Aggregation> finalAggregation = new HashMap<>();
    for (Map.Entry<Symbol, AggregationNode.Aggregation> entry : node.getAggregations().entrySet()) {
        AggregationNode.Aggregation originalAggregation = entry.getValue();
        ResolvedFunction resolvedFunction = originalAggregation.getResolvedFunction();
        AggregationFunctionMetadata functionMetadata = plannerContext.getMetadata().getAggregationFunctionMetadata(resolvedFunction);
        List<Type> intermediateTypes = functionMetadata.getIntermediateTypes().stream()
                .map(plannerContext.getTypeManager()::getType)
                .collect(toImmutableList());
        Type intermediateType = intermediateTypes.size() == 1 ? intermediateTypes.get(0) : RowType.anonymous(intermediateTypes);
        Symbol intermediateSymbol = context.getSymbolAllocator().newSymbol(resolvedFunction.getSignature().getName(), intermediateType);

        checkState(originalAggregation.getOrderingScheme().isEmpty(), "Aggregate with ORDER BY does not support partial aggregation");
        intermediateAggregation.put(intermediateSymbol, new AggregationNode.Aggregation(
                resolvedFunction,
                originalAggregation.getArguments(),
                originalAggregation.isDistinct(),
                originalAggregation.getFilter(),
                originalAggregation.getOrderingScheme(),
                originalAggregation.getMask()));

        // rewrite final aggregation in terms of intermediate function
        finalAggregation.put(entry.getKey(), new AggregationNode.Aggregation(
                resolvedFunction,
                ImmutableList.<Expression>builder()
                        .add(intermediateSymbol.toSymbolReference())
                        .addAll(originalAggregation.getArguments().stream()
                                .filter(LambdaExpression.class::isInstance)
                                .collect(toImmutableList()))
                        .build(),
                false,
                Optional.empty(),
                Optional.empty(),
                Optional.empty()));
    }
    PlanNode partial = new AggregationNode(
            context.getIdAllocator().getNextId(),
            node.getSource(),
            intermediateAggregation,
            node.getGroupingSets(),
            // preGroupedSymbols reflect properties of the output of node's source. Passing data
            // through the exchange may or may not preserve these properties. Hence, it is safest to drop preGroupedSymbols here.
            ImmutableList.of(),
            PARTIAL,
            node.getHashSymbol(),
            node.getGroupIdSymbol());

    return new AggregationNode(
            node.getId(),
            partial,
            finalAggregation,
            node.getGroupingSets(),
            // preGroupedSymbols reflect properties of the output of node's source. Passing data
            // through the exchange may or may not preserve these properties. Hence, it is safest to drop preGroupedSymbols here.
            ImmutableList.of(),
            FINAL,
            node.getHashSymbol(),
            node.getGroupIdSymbol());
}
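A conceptual sketch of the partial/final split above, using nothing from Trino: an average computed through a (sum, count) intermediate state. The PARTIAL step runs on each partition before the exchange, and the FINAL step combines the intermediate states afterwards, much as intermediateAggregation feeds finalAggregation through the intermediate symbol.

import java.util.List;

public class PartialFinalAvg
{
    // Intermediate state produced by the PARTIAL step, analogous to the single
    // intermediate type (or anonymous row of types) assembled in split()
    record SumCount(long sum, long count) {}

    // PARTIAL step: aggregate one partition into an intermediate state
    static SumCount partial(List<Long> partition)
    {
        long sum = 0;
        for (long value : partition) {
            sum += value;
        }
        return new SumCount(sum, partition.size());
    }

    // FINAL step: combine intermediate states that arrived through the exchange
    static double finalStep(List<SumCount> states)
    {
        long sum = 0;
        long count = 0;
        for (SumCount state : states) {
            sum += state.sum();
            count += state.count();
        }
        return (double) sum / count;
    }

    public static void main(String[] args)
    {
        List<SumCount> states = List.of(
                partial(List.of(1L, 2L, 3L)),
                partial(List.of(10L, 20L)));
        System.out.println(finalStep(states)); // 7.2
    }
}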
Use of io.trino.metadata.ResolvedFunction in project trino by trinodb.
Class PruneCountAggregationOverScalar, method apply:
@Override
public Result apply(AggregationNode parent, Captures captures, Context context)
{
    if (!parent.hasDefaultOutput() || parent.getOutputSymbols().size() != 1) {
        return Result.empty();
    }
    FunctionId countFunctionId = metadata.resolveFunction(context.getSession(), QualifiedName.of("count"), ImmutableList.of()).getFunctionId();
    Map<Symbol, AggregationNode.Aggregation> assignments = parent.getAggregations();
    for (Map.Entry<Symbol, AggregationNode.Aggregation> entry : assignments.entrySet()) {
        AggregationNode.Aggregation aggregation = entry.getValue();
        requireNonNull(aggregation, "aggregation is null");
        ResolvedFunction resolvedFunction = aggregation.getResolvedFunction();
        if (!countFunctionId.equals(resolvedFunction.getFunctionId())) {
            return Result.empty();
        }
    }
    if (!assignments.isEmpty() && isScalar(parent.getSource(), context.getLookup())) {
        return Result.ofPlanNode(new ValuesNode(
                parent.getId(),
                parent.getOutputSymbols(),
                ImmutableList.of(new Row(ImmutableList.of(new GenericLiteral("BIGINT", "1"))))));
    }
    return Result.empty();
}
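The rule above only fires when every aggregate is a zero-argument count() and the source is known to produce exactly one row. A simplified, hypothetical stand-in sketches that decision logic; the Agg record and rewriteToLiteral helper below are illustrations, not Trino APIs.

import java.util.Map;
import java.util.Optional;

public class PruneCountSketch
{
    // Hypothetical, simplified stand-in for AggregationNode.Aggregation:
    // just the resolved function name and its argument count
    record Agg(String name, int argumentCount) {}

    // Mirrors the decision above: only rewrite when every aggregate is a
    // zero-argument count() and the input produces exactly one row
    static Optional<Long> rewriteToLiteral(Map<String, Agg> assignments, boolean sourceIsScalar)
    {
        for (Agg agg : assignments.values()) {
            if (!agg.name().equals("count") || agg.argumentCount() != 0) {
                return Optional.empty();
            }
        }
        if (!assignments.isEmpty() && sourceIsScalar) {
            // count(*) over exactly one row is always 1
            return Optional.of(1L);
        }
        return Optional.empty();
    }

    public static void main(String[] args)
    {
        System.out.println(rewriteToLiteral(Map.of("c", new Agg("count", 0)), true)); // Optional[1]
        System.out.println(rewriteToLiteral(Map.of("c", new Agg("count", 1)), true)); // Optional.empty
    }
}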
Use of io.trino.metadata.ResolvedFunction in project trino by trinodb.
Class RewriteSpatialPartitioningAggregation, method apply:
@Override
public Result apply(AggregationNode node, Captures captures, Context context)
{
    ResolvedFunction spatialPartitioningFunction = plannerContext.getMetadata().resolveFunction(context.getSession(), QualifiedName.of(NAME), fromTypeSignatures(GEOMETRY_TYPE_SIGNATURE, INTEGER.getTypeSignature()));
    ResolvedFunction stEnvelopeFunction = plannerContext.getMetadata().resolveFunction(context.getSession(), QualifiedName.of("ST_Envelope"), fromTypeSignatures(GEOMETRY_TYPE_SIGNATURE));

    ImmutableMap.Builder<Symbol, Aggregation> aggregations = ImmutableMap.builder();
    Symbol partitionCountSymbol = context.getSymbolAllocator().newSymbol("partition_count", INTEGER);
    ImmutableMap.Builder<Symbol, Expression> envelopeAssignments = ImmutableMap.builder();
    for (Map.Entry<Symbol, Aggregation> entry : node.getAggregations().entrySet()) {
        Aggregation aggregation = entry.getValue();
        String name = aggregation.getResolvedFunction().getSignature().getName();
        if (name.equals(NAME) && aggregation.getArguments().size() == 1) {
            Expression geometry = getOnlyElement(aggregation.getArguments());
            Symbol envelopeSymbol = context.getSymbolAllocator().newSymbol("envelope", plannerContext.getTypeManager().getType(GEOMETRY_TYPE_SIGNATURE));
            if (isStEnvelopeFunctionCall(geometry, stEnvelopeFunction)) {
                envelopeAssignments.put(envelopeSymbol, geometry);
            }
            else {
                envelopeAssignments.put(envelopeSymbol, FunctionCallBuilder.resolve(context.getSession(), plannerContext.getMetadata())
                        .setName(QualifiedName.of("ST_Envelope"))
                        .addArgument(GEOMETRY_TYPE_SIGNATURE, geometry)
                        .build());
            }
            aggregations.put(entry.getKey(), new Aggregation(
                    spatialPartitioningFunction,
                    ImmutableList.of(envelopeSymbol.toSymbolReference(), partitionCountSymbol.toSymbolReference()),
                    false,
                    Optional.empty(),
                    Optional.empty(),
                    aggregation.getMask()));
        }
        else {
            aggregations.put(entry);
        }
    }
    return Result.ofPlanNode(new AggregationNode(
            node.getId(),
            new ProjectNode(
                    context.getIdAllocator().getNextId(),
                    node.getSource(),
                    Assignments.builder()
                            .putIdentities(node.getSource().getOutputSymbols())
                            .put(partitionCountSymbol, new LongLiteral(Integer.toString(getHashPartitionCount(context.getSession()))))
                            .putAll(envelopeAssignments.buildOrThrow())
                            .build()),
            aggregations.buildOrThrow(),
            node.getGroupingSets(),
            node.getPreGroupedSymbols(),
            node.getStep(),
            node.getHashSymbol(),
            node.getGroupIdSymbol()));
}
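The loop above reuses an argument that is already an ST_Envelope call and otherwise projects a fresh envelope symbol computed from the geometry. A hypothetical, simplified sketch of that reuse-or-wrap decision follows; the Expr record, the string-prefix check, and the helper names are illustrations, not Trino types or the real isStEnvelopeFunctionCall logic.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class EnvelopeProjectionSketch
{
    // Hypothetical, simplified expression: just its textual form
    record Expr(String text) {}

    static Expr stEnvelope(Expr argument)
    {
        return new Expr("ST_Envelope(" + argument.text() + ")");
    }

    // Reuse an argument that is already an ST_Envelope call,
    // otherwise project a new envelope symbol computed from it
    static Map<String, Expr> buildEnvelopeAssignments(List<Expr> geometryArguments)
    {
        Map<String, Expr> assignments = new LinkedHashMap<>();
        int counter = 0;
        for (Expr geometry : geometryArguments) {
            Expr envelope = geometry.text().startsWith("ST_Envelope(")
                    ? geometry
                    : stEnvelope(geometry);
            assignments.put("envelope_" + counter++, envelope);
        }
        return assignments;
    }

    public static void main(String[] args)
    {
        // Both entries end up referring to ST_Envelope(geom)
        System.out.println(buildEnvelopeAssignments(List.of(
                new Expr("geom"),
                new Expr("ST_Envelope(geom)"))));
    }
}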
Use of io.trino.metadata.ResolvedFunction in project trino by trinodb.
Class SimplifyCountOverConstant, method apply:
@Override
public Result apply(AggregationNode parent, Captures captures, Context context)
{
    ProjectNode child = captures.get(CHILD);

    boolean changed = false;
    Map<Symbol, AggregationNode.Aggregation> aggregations = new LinkedHashMap<>(parent.getAggregations());

    ResolvedFunction countFunction = plannerContext.getMetadata().resolveFunction(context.getSession(), QualifiedName.of("count"), ImmutableList.of());

    for (Entry<Symbol, AggregationNode.Aggregation> entry : parent.getAggregations().entrySet()) {
        Symbol symbol = entry.getKey();
        AggregationNode.Aggregation aggregation = entry.getValue();

        if (isCountOverConstant(context.getSession(), aggregation, child.getAssignments())) {
            changed = true;
            aggregations.put(symbol, new AggregationNode.Aggregation(
                    countFunction,
                    ImmutableList.of(),
                    false,
                    Optional.empty(),
                    Optional.empty(),
                    aggregation.getMask()));
        }
    }

    if (!changed) {
        return Result.empty();
    }

    return Result.ofPlanNode(new AggregationNode(
            parent.getId(),
            child,
            aggregations,
            parent.getGroupingSets(),
            ImmutableList.of(),
            parent.getStep(),
            parent.getHashSymbol(),
            parent.getGroupIdSymbol()));
}
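The rewrite relies on count(<expr>) counting only rows where <expr> is non-null, so when the argument is a non-null constant it counts every row, exactly like count(*). A small standalone sketch (plain Java, not Trino's aggregation code) illustrating why the argument can be dropped in that case:

import java.util.Arrays;
import java.util.Objects;

public class CountOverConstantSketch
{
    // count(<expr>) counts rows where <expr> is non-null; when <expr> is a
    // non-null constant, that is every row, which matches count(*)
    static long count(Object[] column)
    {
        return Arrays.stream(column).filter(Objects::nonNull).count();
    }

    public static void main(String[] args)
    {
        Object[] constantColumn = {1, 1, 1, 1};      // e.g. count(1)
        Object[] nullableColumn = {1, null, 1, null};
        System.out.println(count(constantColumn));   // 4, same as count(*)
        System.out.println(count(nullableColumn));   // 2, argument cannot be dropped
    }
}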