Use of org.apache.flink.optimizer.CompilerException in project flink by apache.
The class OuterJoinNode, method createRightOuterJoinDescriptors.
private List<OperatorDescriptorDual> createRightOuterJoinDescriptors(JoinHint hint) {
    List<OperatorDescriptorDual> list = new ArrayList<>();
    switch (hint) {
        case OPTIMIZER_CHOOSES:
            list.add(new SortMergeRightOuterJoinDescriptor(this.keys1, this.keys2, true));
            list.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, true, true));
            break;
        case REPARTITION_SORT_MERGE:
            list.add(new SortMergeRightOuterJoinDescriptor(this.keys1, this.keys2, false));
            break;
        case REPARTITION_HASH_FIRST:
            list.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, false, true));
            break;
        case BROADCAST_HASH_FIRST:
            list.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, true, false));
            break;
        case REPARTITION_HASH_SECOND:
            list.add(new HashRightOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, false, true));
            break;
        case BROADCAST_HASH_SECOND:
        default:
            throw new CompilerException("Invalid join hint: " + hint + " for right outer join");
    }
    return list;
}
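To make the dispatch structure above easier to try outside of Flink's optimizer, here is a minimal, self-contained sketch in plain Java. The Hint enum, the InvalidHintException class, and the strategy labels are illustrative stand-ins rather than Flink API; only the switch-with-default-throw shape mirrors createRightOuterJoinDescriptors.

import java.util.ArrayList;
import java.util.List;

public class RightOuterJoinHintDemo {

    // Stand-in for Flink's JoinHint enum.
    enum Hint { OPTIMIZER_CHOOSES, REPARTITION_SORT_MERGE, REPARTITION_HASH_FIRST,
                BROADCAST_HASH_FIRST, REPARTITION_HASH_SECOND, BROADCAST_HASH_SECOND }

    // Stand-in for CompilerException: an unchecked exception signalling an invalid plan choice.
    static class InvalidHintException extends RuntimeException {
        InvalidHintException(String message) { super(message); }
    }

    // Every legal hint yields at least one strategy label; the default branch rejects
    // hints that are not applicable to a right outer join.
    static List<String> strategiesFor(Hint hint) {
        List<String> list = new ArrayList<>();
        switch (hint) {
            case OPTIMIZER_CHOOSES:
                list.add("sort-merge");
                list.add("hash-build-first");
                break;
            case REPARTITION_SORT_MERGE:
                list.add("sort-merge");
                break;
            case REPARTITION_HASH_FIRST:
            case BROADCAST_HASH_FIRST:
                list.add("hash-build-first");
                break;
            case REPARTITION_HASH_SECOND:
                list.add("hash-build-second");
                break;
            case BROADCAST_HASH_SECOND:
            default:
                throw new InvalidHintException("Invalid join hint: " + hint + " for right outer join");
        }
        return list;
    }

    public static void main(String[] args) {
        System.out.println(strategiesFor(Hint.OPTIMIZER_CHOOSES)); // [sort-merge, hash-build-first]
        try {
            strategiesFor(Hint.BROADCAST_HASH_SECOND);             // rejected by the default branch
        } catch (InvalidHintException e) {
            System.out.println(e.getMessage());
        }
    }
}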
Use of org.apache.flink.optimizer.CompilerException in project flink by apache.
The class SingleInputNode, method setInput.
@Override
public void setInput(Map<Operator<?>, OptimizerNode> contractToNode, ExecutionMode defaultExchangeMode) throws CompilerException {
    // see if an internal hint dictates the strategy to use
    final Configuration conf = getOperator().getParameters();
    final String shipStrategy = conf.getString(Optimizer.HINT_SHIP_STRATEGY, null);
    final ShipStrategyType preSet;
    if (shipStrategy != null) {
        if (shipStrategy.equalsIgnoreCase(Optimizer.HINT_SHIP_STRATEGY_REPARTITION_HASH)) {
            preSet = ShipStrategyType.PARTITION_HASH;
        } else if (shipStrategy.equalsIgnoreCase(Optimizer.HINT_SHIP_STRATEGY_REPARTITION_RANGE)) {
            preSet = ShipStrategyType.PARTITION_RANGE;
        } else if (shipStrategy.equalsIgnoreCase(Optimizer.HINT_SHIP_STRATEGY_FORWARD)) {
            preSet = ShipStrategyType.FORWARD;
        } else if (shipStrategy.equalsIgnoreCase(Optimizer.HINT_SHIP_STRATEGY_REPARTITION)) {
            preSet = ShipStrategyType.PARTITION_RANDOM;
        } else {
            throw new CompilerException("Unrecognized ship strategy hint: " + shipStrategy);
        }
    } else {
        preSet = null;
    }
    // get the predecessor node
    Operator<?> children = ((SingleInputOperator<?, ?, ?>) getOperator()).getInput();
    OptimizerNode pred;
    DagConnection conn;
    if (children == null) {
        throw new CompilerException("Error: Node for '" + getOperator().getName() + "' has no input.");
    } else {
        pred = contractToNode.get(children);
        conn = new DagConnection(pred, this, defaultExchangeMode);
        if (preSet != null) {
            conn.setShipStrategy(preSet);
        }
    }
    // create the connection and add it
    setIncomingConnection(conn);
    pred.addOutgoingConnection(conn);
}
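The hint-resolution step at the top of setInput (map a string hint to a ship strategy, or fail) can be sketched in isolation as follows. The Strategy enum, the UnknownHintException class, and the literal hint strings are assumptions chosen for the example; the real method compares against the Optimizer.HINT_SHIP_STRATEGY_* constants shown above.

import java.util.Locale;

public class ShipStrategyHintDemo {

    // Stand-in for ShipStrategyType; values mirror the ones the method above recognizes.
    enum Strategy { PARTITION_HASH, PARTITION_RANGE, FORWARD, PARTITION_RANDOM }

    static class UnknownHintException extends RuntimeException {
        UnknownHintException(String message) { super(message); }
    }

    // A null hint means "no preset", a known hint is mapped case-insensitively,
    // anything else is rejected.
    static Strategy resolve(String hint) {
        if (hint == null) {
            return null; // optimizer is free to choose
        }
        switch (hint.toLowerCase(Locale.ROOT)) {
            case "repartition-hash":  return Strategy.PARTITION_HASH;
            case "repartition-range": return Strategy.PARTITION_RANGE;
            case "forward":           return Strategy.FORWARD;
            case "repartition":       return Strategy.PARTITION_RANDOM;
            default:
                throw new UnknownHintException("Unrecognized ship strategy hint: " + hint);
        }
    }

    public static void main(String[] args) {
        System.out.println(resolve("FORWARD")); // FORWARD
        System.out.println(resolve(null));      // null (optimizer chooses)
        try {
            resolve("broadcast");               // not a recognized hint
        } catch (UnknownHintException e) {
            System.out.println(e.getMessage());
        }
    }
}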
Use of org.apache.flink.optimizer.CompilerException in project flink by apache.
The class OperatorDescriptorDual, method checkSameOrdering.
protected boolean checkSameOrdering(LocalProperties produced1, LocalProperties produced2, int numRelevantFields) {
    Ordering prod1 = produced1.getOrdering();
    Ordering prod2 = produced2.getOrdering();
    if (prod1 == null || prod2 == null) {
        throw new CompilerException("The given properties do not meet this operators requirements.");
    }
    // check that order of fields is equivalent
    if (!checkEquivalentFieldPositionsInKeyFields(prod1.getInvolvedIndexes(), prod2.getInvolvedIndexes(), numRelevantFields)) {
        return false;
    }
    // check that both inputs have the same directions of order
    for (int i = 0; i < numRelevantFields; i++) {
        if (prod1.getOrder(i) != prod2.getOrder(i)) {
            return false;
        }
    }
    return true;
}
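A self-contained sketch of the same ordering comparison, with LocalProperties reduced to plain arrays of sort directions. The Order enum and the method name sameOrdering are stand-ins; the field-position equivalence check performed by checkEquivalentFieldPositionsInKeyFields is omitted here.

public class OrderingCheckDemo {

    enum Order { ASCENDING, DESCENDING }

    // Stand-in for LocalProperties.getOrdering(): an ordering is a list of sort directions
    // over key fields, possibly null when no ordering was produced.
    static boolean sameOrdering(Order[] produced1, Order[] produced2, int numRelevantFields) {
        if (produced1 == null || produced2 == null) {
            throw new IllegalStateException("The given properties do not meet this operator's requirements.");
        }
        // Both inputs must agree on the direction of every relevant key field.
        for (int i = 0; i < numRelevantFields; i++) {
            if (produced1[i] != produced2[i]) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        Order[] left  = { Order.ASCENDING, Order.DESCENDING };
        Order[] right = { Order.ASCENDING, Order.ASCENDING };
        System.out.println(sameOrdering(left, right, 1)); // true: first field agrees
        System.out.println(sameOrdering(left, right, 2)); // false: second field differs
    }
}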
Use of org.apache.flink.optimizer.CompilerException in project flink by apache.
The class BulkIterationPlanNode, method mergeBranchPlanMaps.
private void mergeBranchPlanMaps() {
    for (OptimizerNode.UnclosedBranchDescriptor desc : template.getOpenBranches()) {
        OptimizerNode brancher = desc.getBranchingNode();
        if (branchPlan == null) {
            branchPlan = new HashMap<OptimizerNode, PlanNode>(6);
        }
        if (!branchPlan.containsKey(brancher)) {
            PlanNode selectedCandidate = null;
            if (rootOfStepFunction.branchPlan != null) {
                selectedCandidate = rootOfStepFunction.branchPlan.get(brancher);
            }
            if (selectedCandidate == null) {
                throw new CompilerException("Candidates for a node with open branches are missing information about the selected candidate ");
            }
            this.branchPlan.put(brancher, selectedCandidate);
        }
    }
}
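The merge logic can be illustrated with ordinary maps keyed by strings instead of OptimizerNode/PlanNode instances. All names in this sketch are stand-ins; only the lazy map creation, the containsKey guard, and the missing-candidate failure mirror mergeBranchPlanMaps.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class BranchPlanMergeDemo {

    // Merge the step function's selected candidates for open branches into the target map.
    static Map<String, String> mergeBranchPlans(List<String> openBranches,
                                                Map<String, String> stepFunctionPlan,
                                                Map<String, String> target) {
        for (String brancher : openBranches) {
            if (target == null) {
                target = new HashMap<>();
            }
            if (!target.containsKey(brancher)) {
                String selected = (stepFunctionPlan != null) ? stepFunctionPlan.get(brancher) : null;
                if (selected == null) {
                    // Same failure mode as the CompilerException above: an open branch
                    // without a selected candidate cannot be planned.
                    throw new IllegalStateException(
                            "Candidates for a node with open branches are missing information about the selected candidate");
                }
                target.put(brancher, selected);
            }
        }
        return target;
    }

    public static void main(String[] args) {
        Map<String, String> stepPlan = new HashMap<>();
        stepPlan.put("branchA", "candidate1");
        System.out.println(mergeBranchPlans(List.of("branchA"), stepPlan, null)); // {branchA=candidate1}
    }
}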
Use of org.apache.flink.optimizer.CompilerException in project flink by apache.
The class CoGroupRawDescriptor, method areCoFulfilled.
@Override
public boolean areCoFulfilled(RequestedLocalProperties requested1, RequestedLocalProperties requested2, LocalProperties produced1, LocalProperties produced2) {
    int numRelevantFields = this.keys1.size();
    Ordering prod1 = produced1.getOrdering();
    Ordering prod2 = produced2.getOrdering();
    if (prod1 == null || prod2 == null || prod1.getNumberOfFields() < numRelevantFields || prod2.getNumberOfFields() < numRelevantFields) {
        throw new CompilerException("The given properties do not meet this operators requirements.");
    }
    for (int i = 0; i < numRelevantFields; i++) {
        if (prod1.getOrder(i) != prod2.getOrder(i)) {
            return false;
        }
    }
    return true;
}
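A minimal sketch of the two conditions areCoFulfilled enforces (enough produced key fields, matching sort directions for each of them), again with orderings reduced to plain arrays. The names coFulfilled and Order are illustrative, not Flink API.

public class CoGroupOrderingDemo {

    enum Order { ASCENDING, DESCENDING }

    // Both produced orderings must cover at least the number of co-group key fields,
    // and agree on the sort direction for each of them.
    static boolean coFulfilled(Order[] prod1, Order[] prod2, int numKeyFields) {
        if (prod1 == null || prod2 == null
                || prod1.length < numKeyFields || prod2.length < numKeyFields) {
            throw new IllegalStateException("The given properties do not meet this operator's requirements.");
        }
        for (int i = 0; i < numKeyFields; i++) {
            if (prod1[i] != prod2[i]) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        Order[] left  = { Order.ASCENDING };
        Order[] right = { Order.ASCENDING, Order.DESCENDING };
        System.out.println(coFulfilled(left, right, 1)); // true: the one relevant field agrees
        try {
            coFulfilled(left, right, 2);                 // left covers too few key fields
        } catch (IllegalStateException e) {
            System.out.println(e.getMessage());
        }
    }
}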