Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.core.Project in the Apache Flink project:
class HiveParserDMLHelper, method replaceDistForStaticParts.
private RelNode replaceDistForStaticParts(
        LogicalDistribution hiveDist,
        Table destTable,
        Map<String, String> staticPartSpec,
        Map<String, RelDataType> targetColToType) {
    // The distribution node is expected to sit directly on top of a Project.
    Project inputProject = (Project) hiveDist.getInput();
    final int shift = staticPartSpec.size();
    final int dynamicParts = destTable.getTTable().getPartitionKeys().size() - shift;
    // Rewrite the Project so the static partition values appear as expressions.
    RelNode rewrittenProject =
            replaceProjectForStaticPart(inputProject, staticPartSpec, destTable, targetColToType);
    // Detach the old input from the original distribution node.
    hiveDist.replaceInput(0, null);
    // Recreate the distribution with collation and distribution keys shifted to
    // account for the injected static partition columns.
    return LogicalDistribution.create(
            rewrittenProject,
            shiftRelCollation(hiveDist.getCollation(), inputProject, shift, dynamicParts),
            shiftDistKeys(hiveDist.getDistKeys(), inputProject, shift, dynamicParts));
}
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.core.Project in the Apache Flink project:
class HiveParserDMLHelper, method addTypeConversions.
static RelNode addTypeConversions(
        RexBuilder rexBuilder,
        RelNode queryRelNode,
        List<RelDataType> targetCalcTypes,
        List<TypeInfo> targetHiveTypes,
        SqlFunctionConverter funcConverter)
        throws SemanticException {
    // A Project holds the output expressions, so it can be rewritten in place.
    if (queryRelNode instanceof Project) {
        return replaceProjectForTypeConversion(
                rexBuilder, (Project) queryRelNode, targetCalcTypes, targetHiveTypes, funcConverter);
    }
    // Not a Project: recurse into the first input until one is found, then
    // splice the converted subtree back into this node.
    queryRelNode.replaceInput(
            0,
            addTypeConversions(
                    rexBuilder,
                    queryRelNode.getInput(0),
                    targetCalcTypes,
                    targetHiveTypes,
                    funcConverter));
    return queryRelNode;
}
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.core.Project in the Apache Beam project:
class ArrayScanToJoinConverter, method convert.
/**
 * Converts an array scan whose array expression is a column reference into a
 * {@code LogicalJoin} between the left input and an uncollected (unnested)
 * projection of that array column.
 *
 * @param zetaNode the resolved array scan to convert
 * @param inputs converted inputs; the first element is the left side of the join
 * @return a LogicalJoin of the left input with the unnested array column
 */
@Override
public RelNode convert(ResolvedArrayScan zetaNode, List<RelNode> inputs) {
    List<RexNode> projects = new ArrayList<>();

    RelNode leftInput = inputs.get(0);
    ResolvedColumnRef columnRef = (ResolvedColumnRef) zetaNode.getArrayExpr();

    // Correlate the right side with the left input so the array column can be
    // referenced from inside the uncollect subtree.
    CorrelationId correlationId = getCluster().createCorrel();
    getCluster().getQuery().mapCorrel(correlationId.getName(), leftInput);

    String columnName =
        String.format(
            "%s%s",
            zetaNode.getElementColumn().getTableName(),
            zetaNode.getElementColumn().getName());

    projects.add(
        getCluster()
            .getRexBuilder()
            .makeFieldAccess(
                getCluster().getRexBuilder().makeCorrel(leftInput.getRowType(), correlationId),
                getExpressionConverter()
                    .indexOfProjectionColumnRef(
                        columnRef.getColumn().getId(), zetaNode.getInputScan().getColumnList())));

    RelNode projectNode =
        LogicalProject.create(
            createOneRow(getCluster()), ImmutableList.of(), projects, ImmutableList.of(columnName));

    // Create an UnCollect over the single-column projection.
    boolean ordinality = (zetaNode.getArrayOffsetColumn() != null);

    // If these don't hold we need a Project to reorder columns.
    assert zetaNode.getElementColumn().getId() == 1;
    assert !ordinality || zetaNode.getArrayOffsetColumn().getColumn().getId() == 2;

    ZetaSqlUnnest uncollectNode =
        ZetaSqlUnnest.create(projectNode.getTraitSet(), projectNode, ordinality);

    List<RexInputRef> rightProjects = new ArrayList<>();
    List<String> rightNames = new ArrayList<>();
    rightProjects.add(getCluster().getRexBuilder().makeInputRef(uncollectNode, 0));
    rightNames.add(columnName);
    if (ordinality) {
      rightProjects.add(getCluster().getRexBuilder().makeInputRef(uncollectNode, 1));
      // BUG FIX: the table name was previously passed as the format string
      // (String.format(tableName, name)), which silently drops the column name
      // and throws IllegalFormatException if the table name contains '%'.
      // Use an explicit "%s%s" pattern, consistent with columnName above.
      rightNames.add(
          String.format(
              "%s%s",
              zetaNode.getArrayOffsetColumn().getColumn().getTableName(),
              zetaNode.getArrayOffsetColumn().getColumn().getName()));
    }

    RelNode rightInput =
        LogicalProject.create(uncollectNode, ImmutableList.of(), rightProjects, rightNames);

    // Join condition should be a RexNode converted from join_expr.
    RexNode condition =
        getExpressionConverter().convertRexNodeFromResolvedExpr(zetaNode.getJoinExpr());
    JoinRelType joinRelType = zetaNode.getIsOuter() ? JoinRelType.LEFT : JoinRelType.INNER;

    return LogicalJoin.create(
        leftInput,
        rightInput,
        ImmutableList.of(),
        condition,
        ImmutableSet.of(),
        joinRelType,
        false,
        ImmutableList.of());
}
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.core.Project in the Apache Beam project:
class ArrayScanLiteralToUncollectConverter, method convert.
@Override
public RelNode convert(ResolvedArrayScan zetaNode, List<RelNode> inputs) {
    // Convert the literal array expression, then wrap it in a single-column
    // Project over a one-row relation so it can be uncollected.
    RexNode arrayLiteral =
        getExpressionConverter().convertRexNodeFromResolvedExpr(zetaNode.getArrayExpr());
    String fieldName =
        String.format(
            "%s%s",
            zetaNode.getElementColumn().getTableName(),
            zetaNode.getElementColumn().getName());
    RelNode projectNode =
        LogicalProject.create(
            createOneRow(getCluster()),
            ImmutableList.of(),
            Collections.singletonList(arrayLiteral),
            ImmutableList.of(fieldName));

    boolean withOrdinality = zetaNode.getArrayOffsetColumn() != null;
    // If these don't hold we would need to add a Project to reorder columns.
    assert zetaNode.getElementColumn().getId() == 1;
    assert !withOrdinality || zetaNode.getArrayOffsetColumn().getColumn().getId() == 2;

    return ZetaSqlUnnest.create(projectNode.getTraitSet(), projectNode, withOrdinality);
}
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.core.Project in the Druid project (druid-io):
class DruidRelsTest, method test_isScanOrMapping_mapping.
@Test
public void test_isScanOrMapping_mapping() {
    // A SELECT_PROJECT stage whose Project is a pure column mapping (here a
    // permutation of 2 columns) should be treated as "scan or mapping" for
    // both values of the boolean flag.
    final Project mappingProject = mockMappingProject(ImmutableList.of(1, 0), 2);
    final DruidRel<?> druidRel =
        mockDruidRel(
            DruidQueryRel.class,
            PartialDruidQuery.Stage.SELECT_PROJECT,
            null,
            mappingProject,
            null);

    Assert.assertTrue(DruidRels.isScanOrMapping(druidRel, true));
    Assert.assertTrue(DruidRels.isScanOrMapping(druidRel, false));

    // Confirm all expected interactions on the mocks were exercised.
    EasyMock.verify(druidRel, druidRel.getPartialDruidQuery(), mappingProject);
}
Aggregations