Use of org.apache.flink.optimizer.operators.OperatorDescriptorSingle in project flink by apache.
The class SingleInputNode, method getAlternativePlans.
@Override
public List<PlanNode> getAlternativePlans(CostEstimator estimator) {
    // check if we have a cached version
    if (this.cachedPlans != null) {
        return this.cachedPlans;
    }

    boolean childrenSkippedDueToReplicatedInput = false;

    // calculate alternative sub-plans for predecessor
    final List<? extends PlanNode> subPlans = getPredecessorNode().getAlternativePlans(estimator);
    final Set<RequestedGlobalProperties> intGlobal = this.inConn.getInterestingProperties().getGlobalProperties();

    // calculate alternative sub-plans for broadcast inputs
    final List<Set<? extends NamedChannel>> broadcastPlanChannels = new ArrayList<Set<? extends NamedChannel>>();
    List<DagConnection> broadcastConnections = getBroadcastConnections();
    List<String> broadcastConnectionNames = getBroadcastConnectionNames();

    for (int i = 0; i < broadcastConnections.size(); i++) {
        DagConnection broadcastConnection = broadcastConnections.get(i);
        String broadcastConnectionName = broadcastConnectionNames.get(i);
        List<PlanNode> broadcastPlanCandidates = broadcastConnection.getSource().getAlternativePlans(estimator);

        // wrap the plan candidates in named channels
        HashSet<NamedChannel> broadcastChannels = new HashSet<NamedChannel>(broadcastPlanCandidates.size());
        for (PlanNode plan : broadcastPlanCandidates) {
            NamedChannel c = new NamedChannel(broadcastConnectionName, plan);
            DataExchangeMode exMode = DataExchangeMode.select(broadcastConnection.getDataExchangeMode(), ShipStrategyType.BROADCAST, broadcastConnection.isBreakingPipeline());
            c.setShipStrategy(ShipStrategyType.BROADCAST, exMode);
            broadcastChannels.add(c);
        }
        broadcastPlanChannels.add(broadcastChannels);
    }

    final RequestedGlobalProperties[] allValidGlobals;
    {
        Set<RequestedGlobalProperties> pairs = new HashSet<RequestedGlobalProperties>();
        for (OperatorDescriptorSingle ods : getPossibleProperties()) {
            pairs.addAll(ods.getPossibleGlobalProperties());
        }
        allValidGlobals = pairs.toArray(new RequestedGlobalProperties[pairs.size()]);
    }

    final ArrayList<PlanNode> outputPlans = new ArrayList<PlanNode>();

    final ExecutionMode executionMode = this.inConn.getDataExchangeMode();

    final int parallelism = getParallelism();
    final int inParallelism = getPredecessorNode().getParallelism();
    final boolean parallelismChange = inParallelism != parallelism;
    final boolean breaksPipeline = this.inConn.isBreakingPipeline();
    // create all candidates
    for (PlanNode child : subPlans) {

        if (child.getGlobalProperties().isFullyReplicated()) {
            // fully replicated input is always locally forwarded if the parallelism is not changed
            if (parallelismChange) {
                // can not continue with this child
                childrenSkippedDueToReplicatedInput = true;
                continue;
            } else {
                this.inConn.setShipStrategy(ShipStrategyType.FORWARD);
            }
        }

        if (this.inConn.getShipStrategy() == null) {
            // pick the strategy ourselves
            for (RequestedGlobalProperties igps : intGlobal) {
                final Channel c = new Channel(child, this.inConn.getMaterializationMode());
                igps.parameterizeChannel(c, parallelismChange, executionMode, breaksPipeline);

                // if the parallelism changes, cancel out the global properties, unless the
                // ship strategy preserves/establishes them even under changing parallelisms
                if (parallelismChange && !c.getShipStrategy().isNetworkStrategy()) {
                    c.getGlobalProperties().reset();
                }

                // check whether we meet any of the requested global properties
                for (RequestedGlobalProperties rgps : allValidGlobals) {
                    if (rgps.isMetBy(c.getGlobalProperties())) {
                        c.setRequiredGlobalProps(rgps);
                        addLocalCandidates(c, broadcastPlanChannels, igps, outputPlans, estimator);
                        break;
                    }
                }
            }
        } else {
            // hint fixed the strategy
            final Channel c = new Channel(child, this.inConn.getMaterializationMode());
            final ShipStrategyType shipStrategy = this.inConn.getShipStrategy();
            final DataExchangeMode exMode = DataExchangeMode.select(executionMode, shipStrategy, breaksPipeline);

            if (this.keys != null) {
                c.setShipStrategy(shipStrategy, this.keys.toFieldList(), exMode);
            } else {
                c.setShipStrategy(shipStrategy, exMode);
            }

            if (parallelismChange) {
                c.adjustGlobalPropertiesForFullParallelismChange();
            }

            // check whether we meet any of the accepted properties
            for (RequestedGlobalProperties rgps : allValidGlobals) {
                if (rgps.isMetBy(c.getGlobalProperties())) {
                    addLocalCandidates(c, broadcastPlanChannels, rgps, outputPlans, estimator);
                    break;
                }
            }
        }
    }
    if (outputPlans.isEmpty()) {
        if (childrenSkippedDueToReplicatedInput) {
            throw new CompilerException("No plan meeting the requirements could be created @ " + this + ". Most likely reason: Invalid use of replicated input.");
        } else {
            throw new CompilerException("No plan meeting the requirements could be created @ " + this + ". Most likely reason: Too restrictive plan hints.");
        }
    }

    // cost and prune the plans
    for (PlanNode node : outputPlans) {
        estimator.costOperator(node);
    }
    prunePlanAlternatives(outputPlans);
    outputPlans.trimToSize();

    this.cachedPlans = outputPlans;
    return outputPlans;
}
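The method only keeps a channel when one of the descriptors' requested global properties is satisfied by what the channel actually delivers. Below is a minimal, illustrative sketch of that isMetBy(...) check in isolation, assuming the setter names of RequestedGlobalProperties and GlobalProperties from the flink-optimizer dataproperties package and the FieldSet/FieldList utilities; the field index 0 and the choice of hash partitioning are made-up example values, not taken from the snippet.

    RequestedGlobalProperties requested = new RequestedGlobalProperties();
    requested.setHashPartitioned(new FieldSet(0));      // require hash partitioning on (hypothetical) field 0
    GlobalProperties delivered = new GlobalProperties();
    delivered.setHashPartitioned(new FieldList(0));     // what a candidate channel provides
    boolean met = requested.isMetBy(delivered);         // true here, so addLocalCandidates(...) would be invoked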
Use of org.apache.flink.optimizer.operators.OperatorDescriptorSingle in project flink by apache.
The class SingleInputNode, method computeInterestingPropertiesForInputs.
@Override
public void computeInterestingPropertiesForInputs(CostEstimator estimator) {
    // get what we inherit and what is preserved by our user code
    final InterestingProperties props = getInterestingProperties().filterByCodeAnnotations(this, 0);

    // add all properties relevant to this node
    for (OperatorDescriptorSingle dps : getPossibleProperties()) {
        for (RequestedGlobalProperties gp : dps.getPossibleGlobalProperties()) {
            if (gp.getPartitioning().isPartitionedOnKey()) {
                for (RequestedGlobalProperties contained : props.getGlobalProperties()) {
                    if (contained.getPartitioning() == gp.getPartitioning() && gp.getPartitionedFields().isValidSubset(contained.getPartitionedFields())) {
                        props.getGlobalProperties().remove(contained);
                        break;
                    }
                }
            }
            props.addGlobalProperties(gp);
        }
        for (RequestedLocalProperties lp : dps.getPossibleLocalProperties()) {
            props.addLocalProperties(lp);
        }
    }
    this.inConn.setInterestingProperties(props);

    for (DagConnection conn : getBroadcastConnections()) {
        conn.setInterestingProperties(new InterestingProperties());
    }
}
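For orientation, the two inner loops typically contribute a pair like the following for a key-based descriptor; this is a hedged sketch that assumes the usual setters of RequestedGlobalProperties and RequestedLocalProperties from the dataproperties package, with field 0 as a made-up key field.

    RequestedGlobalProperties gp = new RequestedGlobalProperties();
    gp.setAnyPartitioning(new FieldSet(0));   // "partition on the key, any partitioning scheme"
    RequestedLocalProperties lp = new RequestedLocalProperties();
    lp.setGroupedFields(new FieldSet(0));     // "records grouped on the key within each partition"
    // these would then be added via props.addGlobalProperties(gp) / props.addLocalProperties(lp) as above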
Use of org.apache.flink.optimizer.operators.OperatorDescriptorSingle in project flink by apache.
The class GroupCombineNode, method initPossibleProperties.
private List<OperatorDescriptorSingle> initPossibleProperties() {
    // check if we can work with a grouping (simple reducer), or if we need ordering because of a group order
    Ordering groupOrder = getOperator().getGroupOrder();
    if (groupOrder != null && groupOrder.getNumberOfFields() == 0) {
        groupOrder = null;
    }

    OperatorDescriptorSingle props = (this.keys == null ? new AllGroupCombineProperties() : new GroupCombineProperties(this.keys, groupOrder));
    return Collections.singletonList(props);
}
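The single descriptor returned here is what the SingleInputNode methods shown earlier iterate over. A small hedged sketch of that consumption pattern, using only methods that already appear on this page; it would have to live inside the node itself, since getPossibleProperties() is not public API.

    for (OperatorDescriptorSingle ods : getPossibleProperties()) {
        for (RequestedGlobalProperties gp : ods.getPossibleGlobalProperties()) {
            // the descriptor's requested partitioning (may be trivial for a local combine)
        }
        for (RequestedLocalProperties lp : ods.getPossibleLocalProperties()) {
            // the descriptor's requested grouping or ordering on the combine keys
        }
    }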
Use of org.apache.flink.optimizer.operators.OperatorDescriptorSingle in project flink by apache.
The class GroupReduceNode, method initPossibleProperties.
private List<OperatorDescriptorSingle> initPossibleProperties(Partitioner<?> customPartitioner) {
    // see if an internal hint dictates the strategy to use
    final Configuration conf = getOperator().getParameters();
    final String localStrategy = conf.getString(Optimizer.HINT_LOCAL_STRATEGY, null);

    final boolean useCombiner;
    if (localStrategy != null) {
        if (Optimizer.HINT_LOCAL_STRATEGY_SORT.equals(localStrategy)) {
            useCombiner = false;
        } else if (Optimizer.HINT_LOCAL_STRATEGY_COMBINING_SORT.equals(localStrategy)) {
            if (!isCombineable()) {
                Optimizer.LOG.warn("Strategy hint for GroupReduce '" + getOperator().getName() + "' requires combinable reduce, but user function is not marked combinable.");
            }
            useCombiner = true;
        } else {
            throw new CompilerException("Invalid local strategy hint for match contract: " + localStrategy);
        }
    } else {
        useCombiner = isCombineable();
    }

    // check if we can work with a grouping (simple reducer), or if we need ordering because of a group order
    Ordering groupOrder = null;
    if (getOperator() instanceof GroupReduceOperatorBase) {
        groupOrder = getOperator().getGroupOrder();
        if (groupOrder != null && groupOrder.getNumberOfFields() == 0) {
            groupOrder = null;
        }
    }

    OperatorDescriptorSingle props = useCombiner ? (this.keys == null ? new AllGroupWithPartialPreGroupProperties() : new GroupReduceWithCombineProperties(this.keys, groupOrder, customPartitioner)) : (this.keys == null ? new AllGroupReduceProperties() : new GroupReduceProperties(this.keys, groupOrder, customPartitioner));
    return Collections.singletonList(props);
}
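The final assignment packs four cases into one nested ternary. Spelled out as an if/else cascade it is behaviourally equivalent (same descriptor classes, no new logic) and may be easier to scan; this rewrite is for illustration only and is not part of the Flink source.

    final OperatorDescriptorSingle props;
    if (useCombiner) {
        // combinable: a partial pre-aggregation can run before the shuffle
        props = (this.keys == null)
                ? new AllGroupWithPartialPreGroupProperties()
                : new GroupReduceWithCombineProperties(this.keys, groupOrder, customPartitioner);
    } else {
        // not combinable: plain (all-)group-reduce
        props = (this.keys == null)
                ? new AllGroupReduceProperties()
                : new GroupReduceProperties(this.keys, groupOrder, customPartitioner);
    }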
Use of org.apache.flink.optimizer.operators.OperatorDescriptorSingle in project flink by apache.
The class SingleInputNode, method addLocalCandidates.
protected void addLocalCandidates(Channel template, List<Set<? extends NamedChannel>> broadcastPlanChannels, RequestedGlobalProperties rgps, List<PlanNode> target, CostEstimator estimator) {
    for (RequestedLocalProperties ilp : this.inConn.getInterestingProperties().getLocalProperties()) {
        final Channel in = template.clone();
        ilp.parameterizeChannel(in);

        // instantiate a candidate, if the instantiated local properties meet one possible local property set
        outer:
        for (OperatorDescriptorSingle dps : getPossibleProperties()) {
            for (RequestedLocalProperties ilps : dps.getPossibleLocalProperties()) {
                if (ilps.isMetBy(in.getLocalProperties())) {
                    in.setRequiredLocalProps(ilps);
                    instantiateCandidate(dps, in, broadcastPlanChannels, target, estimator, rgps, ilp);
                    break outer;
                }
            }
        }
    }
}
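The inner check mirrors the global-property matching in getAlternativePlans, but on the local (per-partition) level. A minimal hedged sketch of the ilps.isMetBy(...) test in isolation, assuming the LocalProperties.forGrouping(...) factory and the setGroupedFields(...) setter from the dataproperties package; field 0 is an illustrative key, not taken from the snippet.

    RequestedLocalProperties wanted = new RequestedLocalProperties();
    wanted.setGroupedFields(new FieldSet(0));                                  // the descriptor's requirement
    LocalProperties produced = LocalProperties.forGrouping(new FieldList(0));  // what the channel delivers
    boolean met = wanted.isMetBy(produced);                                    // true here, so the candidate is instantiated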