Use of org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalMatch in project flink by apache.
The visitMatch method of the class RelTimeIndicatorConverter, which rewrites a FlinkLogicalMatch node so that time indicator fields in its pattern definitions, measures, and interval are materialized.
private RelNode visitMatch(FlinkLogicalMatch match) {
    RelNode newInput = match.getInput().accept(this);
    RexTimeIndicatorMaterializer materializer = new RexTimeIndicatorMaterializer(newInput);
    // materializes time indicators in every expression of a (name -> RexNode) map,
    // preserving the original field order
    Function<Map<String, RexNode>, Map<String, RexNode>> materializeExprs =
            rexNodesMap ->
                    rexNodesMap.entrySet().stream()
                            .collect(Collectors.toMap(
                                    Map.Entry::getKey,
                                    e -> e.getValue().accept(materializer),
                                    (e1, e2) -> e1,
                                    LinkedHashMap::new));
    // update input expressions
    Map<String, RexNode> newPatternDefs = materializeExprs.apply(match.getPatternDefinitions());
    Map<String, RexNode> newMeasures = materializeExprs.apply(match.getMeasures());
    RexNode newInterval = null;
    if (match.getInterval() != null) {
        newInterval = match.getInterval().accept(materializer);
    }
    // a measure is no longer a time indicator if materialization changed its type
    Predicate<String> isNoLongerTimeIndicator = fieldName -> {
        RexNode newMeasure = newMeasures.get(fieldName);
        if (newMeasure == null) {
            return false;
        } else {
            return !isTimeIndicatorType(newMeasure.getType());
        }
    };
    // materialize all output types
    RelDataType newOutputType =
            getRowTypeWithoutTimeIndicator(match.getRowType(), isNoLongerTimeIndicator);
    return new FlinkLogicalMatch(
            match.getCluster(), match.getTraitSet(), newInput, newOutputType,
            match.getPattern(), match.isStrictStart(), match.isStrictEnd(),
            newPatternDefs, newMeasures, match.getAfter(), match.getSubsets(),
            match.isAllRows(), match.getPartitionKeys(), match.getOrderKeys(), newInterval);
}
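The materializeExprs helper relies on a general Java idiom: transforming every value of a map with Collectors.toMap while keeping the original key order by supplying LinkedHashMap::new as the map factory. Below is a minimal standalone sketch of that idiom, with plain String values standing in for RexNode expressions; the class, variable, and value names here are hypothetical and only illustrate the pattern.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class OrderPreservingTransform {
    public static void main(String[] args) {
        // hypothetical input: pattern variable names mapped to expression strings
        Map<String, String> defs = new LinkedHashMap<>();
        defs.put("A", "price > 10");
        defs.put("B", "price < 5");

        // rewrite every value while keeping the original key order,
        // mirroring the shape of materializeExprs above
        Function<Map<String, String>, Map<String, String>> rewriteExprs =
                exprs -> exprs.entrySet().stream()
                        .collect(Collectors.toMap(
                                Map.Entry::getKey,
                                e -> "materialized(" + e.getValue() + ")",
                                (e1, e2) -> e1,         // merge function (keys are unique here)
                                LinkedHashMap::new));   // preserve insertion order

        System.out.println(rewriteExprs.apply(defs));
        // prints: {A=materialized(price > 10), B=materialized(price < 5)}
    }
}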