Use of org.apache.flink.table.expressions.ResolvedExpression in project flink by apache.
The class OperationTreeBuilder, method windowAggregate.
/**
 * Builds a window-aggregate query operation on top of {@code child}.
 *
 * <p>The group window is resolved first with an aggregate-aware resolver; the grouping,
 * aggregate and window-property expressions are then resolved against a second resolver
 * that additionally knows the window's alias as a local reference.
 *
 * @param groupingExpressions expressions to group by
 * @param window the unresolved group window definition
 * @param windowProperties window properties (e.g. window start/end) to emit
 * @param aggregates aggregate call expressions
 * @param child the input operation
 * @return the resolved window-aggregate operation
 */
public QueryOperation windowAggregate(List<Expression> groupingExpressions, GroupWindow window, List<Expression> windowProperties, List<Expression> aggregates, QueryOperation child) {
    // Resolver used only for turning the window definition into a ResolvedGroupWindow.
    final ExpressionResolver aggregateResolver = getAggResolver(child, groupingExpressions);
    final ResolvedGroupWindow groupWindow =
            aggregateOperationFactory.createResolvedWindow(window, aggregateResolver);

    // A second resolver that can see the window alias as a local reference, so
    // expressions may refer to the window (e.g. for window properties).
    final ExpressionResolver windowAwareResolver =
            getResolverBuilder(child)
                    .withLocalReferences(
                            localRef(
                                    groupWindow.getAlias(),
                                    groupWindow.getTimeAttribute().getOutputDataType()))
                    .build();

    final List<ResolvedExpression> resolvedGroupings =
            windowAwareResolver.resolve(groupingExpressions);
    final List<ResolvedExpression> resolvedAggregates =
            windowAwareResolver.resolve(aggregates);
    final List<ResolvedExpression> resolvedProperties =
            windowAwareResolver.resolve(windowProperties);

    return aggregateOperationFactory.createWindowAggregate(
            resolvedGroupings, resolvedAggregates, resolvedProperties, groupWindow, child);
}
Use of org.apache.flink.table.expressions.ResolvedExpression in project flink by apache.
The class DeclarativeExpressionResolver, method defaultMethod.
/**
 * Resolves an arbitrary expression into a {@link ResolvedExpression}.
 *
 * <p>Unresolved references are mapped either to aggregate-buffer accesses or to input
 * accesses (merge inputs when {@code isMerge} is set, accumulate inputs otherwise).
 * Unresolved calls are resolved recursively; already-resolved expressions pass through
 * unchanged; anything else is delegated to the underlying resolver.
 */
@Override
protected ResolvedExpression defaultMethod(Expression expression) {
    if (expression instanceof UnresolvedReferenceExpression) {
        final UnresolvedReferenceExpression ref = (UnresolvedReferenceExpression) expression;
        final String refName = ref.getName();
        final int bufferIndex = ArrayUtils.indexOf(function.aggBufferAttributes(), ref);
        if (bufferIndex >= 0) {
            // The reference points at one of this function's aggregate buffer attributes.
            return toAggBufferExpr(refName, bufferIndex);
        }
        // Not a buffer attribute: the reference is an input. In the merge phase the
        // inputs are aggregate buffers produced by a local (partial) aggregation.
        return isMerge
                ? toMergeInputExpr(refName, ArrayUtils.indexOf(function.mergeOperands(), ref))
                : toAccInputExpr(refName, ArrayUtils.indexOf(function.operands(), ref));
    }
    if (expression instanceof UnresolvedCallExpression) {
        // Resolve the children first (recursively through this visitor), then
        // re-resolve the rebuilt call.
        final UnresolvedCallExpression call = (UnresolvedCallExpression) expression;
        final List<Expression> resolvedChildren =
                call.getChildren().stream()
                        .map(child -> child.accept(DeclarativeExpressionResolver.this))
                        .collect(Collectors.toList());
        return resolver.resolve(
                ApiExpressionUtils.unresolvedCall(call.getFunctionDefinition(), resolvedChildren));
    }
    if (expression instanceof ResolvedExpression) {
        // Already resolved — nothing to do.
        return (ResolvedExpression) expression;
    }
    return resolver.resolve(expression);
}
Use of org.apache.flink.table.expressions.ResolvedExpression in project flink by apache.
The class HiveTableUtilTest, method testMakePartitionFilter.
/** Verifies that resolved filter expressions are rendered as Hive partition-filter strings. */
@Test
public void testMakePartitionFilter() {
    final List<String> partColNames = Arrays.asList("p1", "p2", "p3");

    // Field references for the three partition columns (partition fields start at index 2).
    final ResolvedExpression p1Field = new FieldReferenceExpression("p1", DataTypes.INT(), 0, 2);
    final ResolvedExpression p2Field = new FieldReferenceExpression("p2", DataTypes.STRING(), 0, 3);
    final ResolvedExpression p3Field = new FieldReferenceExpression("p3", DataTypes.DOUBLE(), 0, 4);

    // Equality predicates over each partition column.
    final ResolvedExpression p1Equals =
            CallExpression.permanent(
                    BuiltInFunctionDefinitions.EQUALS,
                    Arrays.asList(p1Field, valueLiteral(1)),
                    DataTypes.BOOLEAN());
    final ResolvedExpression p2Equals =
            CallExpression.permanent(
                    BuiltInFunctionDefinitions.EQUALS,
                    Arrays.asList(p2Field, valueLiteral("a", DataTypes.STRING().notNull())),
                    DataTypes.BOOLEAN());
    final ResolvedExpression p3Equals =
            CallExpression.permanent(
                    BuiltInFunctionDefinitions.EQUALS,
                    Arrays.asList(p3Field, valueLiteral(1.1)),
                    DataTypes.BOOLEAN());

    // Single predicate.
    Optional<String> filter =
            HiveTableUtil.makePartitionFilter(2, partColNames, Arrays.asList(p1Equals), hiveShim);
    assertEquals("(p1 = 1)", filter.orElse(null));

    // Conjunction of two predicates.
    filter =
            HiveTableUtil.makePartitionFilter(
                    2, partColNames, Arrays.asList(p1Equals, p3Equals), hiveShim);
    assertEquals("(p1 = 1) and (p3 = 1.1)", filter.orElse(null));

    // Conjunction with a nested disjunction.
    filter =
            HiveTableUtil.makePartitionFilter(
                    2,
                    partColNames,
                    Arrays.asList(
                            p2Equals,
                            CallExpression.permanent(
                                    BuiltInFunctionDefinitions.OR,
                                    Arrays.asList(p1Equals, p3Equals),
                                    DataTypes.BOOLEAN())),
                    hiveShim);
    assertEquals("(p2 = 'a') and ((p1 = 1) or (p3 = 1.1))", filter.orElse(null));
}
Use of org.apache.flink.table.expressions.ResolvedExpression in project flink by apache.
The class OrcFileSystemFilterTest, method testApplyPredicate.
/** Verifies the translation of Flink call expressions into ORC predicate objects. */
@Test
@SuppressWarnings("unchecked")
public void testApplyPredicate() {
    // Common operands: the BIGINT column "long1" compared against the literal 10.
    final FieldReferenceExpression longField =
            new FieldReferenceExpression("long1", DataTypes.BIGINT(), 0, 0);
    final ValueLiteralExpression tenLiteral = new ValueLiteralExpression(10);
    final List<ResolvedExpression> args = new ArrayList<>();
    args.add(longField);
    args.add(tenLiteral);

    // long1 = 10  ->  Equals leaf
    final CallExpression equalExpression =
            CallExpression.permanent(BuiltInFunctionDefinitions.EQUALS, args, DataTypes.BOOLEAN());
    final OrcFilters.Predicate actualEquals = OrcFilters.toOrcPredicate(equalExpression);
    final OrcFilters.Predicate expectedEquals =
            new OrcFilters.Equals("long1", PredicateLeaf.Type.LONG, 10);
    assertTrue(actualEquals.toString().equals(expectedEquals.toString()));

    // long1 > 10  ->  Not(LessThanEquals) leaf
    final CallExpression greaterExpression =
            CallExpression.permanent(
                    BuiltInFunctionDefinitions.GREATER_THAN, args, DataTypes.BOOLEAN());
    final OrcFilters.Predicate actualGreater = OrcFilters.toOrcPredicate(greaterExpression);
    final OrcFilters.Predicate expectedGreater =
            new OrcFilters.Not(new OrcFilters.LessThanEquals("long1", PredicateLeaf.Type.LONG, 10));
    assertTrue(actualGreater.toString().equals(expectedGreater.toString()));

    // long1 < 10  ->  LessThan leaf
    final CallExpression lessExpression =
            CallExpression.permanent(
                    BuiltInFunctionDefinitions.LESS_THAN, args, DataTypes.BOOLEAN());
    final OrcFilters.Predicate actualLess = OrcFilters.toOrcPredicate(lessExpression);
    final OrcFilters.Predicate expectedLess =
            new OrcFilters.LessThan("long1", PredicateLeaf.Type.LONG, 10);
    assertTrue(actualLess.toString().equals(expectedLess.toString()));
}
Use of org.apache.flink.table.expressions.ResolvedExpression in project flink by apache.
The class ValuesOperationFactory, method findCommonTypeAtPosition.
/**
 * Finds the common super type of column {@code i} across all resolved rows.
 *
 * @param resolvedRows the resolved rows of a fromValues(...) call
 * @param i the column position to inspect
 * @return the common {@link DataType} for that column
 * @throws ValidationException if the column's types have no common super type
 */
private DataType findCommonTypeAtPosition(List<List<ResolvedExpression>> resolvedRows, int i) {
    final List<LogicalType> columnTypes = extractLogicalTypesAtPosition(resolvedRows, i);
    final LogicalType commonType =
            LogicalTypeMerging.findCommonType(columnTypes)
                    .orElseThrow(() -> noCommonTypeError(resolvedRows, i));
    return TypeConversions.fromLogicalToDataType(commonType);
}

/** Builds the validation error reported when column {@code i} has no common super type. */
private ValidationException noCommonTypeError(List<List<ResolvedExpression>> resolvedRows, int i) {
    // LinkedHashSet keeps the first-seen order of the offending types in the message.
    final Set<DataType> columnTypes =
            resolvedRows.stream()
                    .map(row -> row.get(i).getOutputDataType())
                    .collect(Collectors.toCollection(LinkedHashSet::new));
    return new ValidationException(
            String.format(
                    "Types in fromValues(...) must have a common super type. Could not find a common type"
                            + " for all rows at column %d.\n"
                            + "Could not find a common super type for types: %s",
                    i, columnTypes));
}
Aggregations