Use of org.apache.flink.optimizer.dataproperties.GlobalProperties in project flink by apache: class FeedbackPropertiesMatchTest, method testTwoOperatorsBothDependent.
@Test
public void testTwoOperatorsBothDependent() {
    try {
        SourcePlanNode target = new SourcePlanNode(getSourceNode(), "Partial Solution");

        Channel toMap1 = new Channel(target);
        toMap1.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
        toMap1.setLocalStrategy(LocalStrategy.NONE);
        SingleInputPlanNode map1 = new SingleInputPlanNode(getMapNode(), "Mapper 1", toMap1, DriverStrategy.MAP);

        Channel toMap2 = new Channel(target);
        toMap2.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
        toMap2.setLocalStrategy(LocalStrategy.NONE);
        SingleInputPlanNode map2 = new SingleInputPlanNode(getMapNode(), "Mapper 2", toMap2, DriverStrategy.MAP);

        Channel toJoin1 = new Channel(map1);
        toJoin1.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
        toJoin1.setLocalStrategy(LocalStrategy.NONE);

        Channel toJoin2 = new Channel(map2);
        toJoin2.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
        toJoin2.setLocalStrategy(LocalStrategy.NONE);

        DualInputPlanNode join = new DualInputPlanNode(getJoinNode(), "Join", toJoin1, toJoin2, DriverStrategy.HYBRIDHASH_BUILD_FIRST);

        Channel toAfterJoin = new Channel(join);
        toAfterJoin.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
        toAfterJoin.setLocalStrategy(LocalStrategy.NONE);
        SingleInputPlanNode afterJoin = new SingleInputPlanNode(getMapNode(), "After Join Mapper", toAfterJoin, DriverStrategy.MAP);

        // no properties from the partial solution, no required properties
        {
            GlobalProperties gp = new GlobalProperties();
            LocalProperties lp = LocalProperties.EMPTY;
            FeedbackPropertiesMeetRequirementsReport report = afterJoin.checkPartialSolutionPropertiesMet(target, gp, lp);
            assertTrue(report != null && report != NO_PARTIAL_SOLUTION && report != NOT_MET);
        }

        // some properties from the partial solution, no required properties
        {
            GlobalProperties gp = new GlobalProperties();
            gp.setHashPartitioned(new FieldList(0));
            LocalProperties lp = LocalProperties.forGrouping(new FieldList(2, 1));
            FeedbackPropertiesMeetRequirementsReport report = afterJoin.checkPartialSolutionPropertiesMet(target, gp, lp);
            assertTrue(report != null && report != NO_PARTIAL_SOLUTION && report != NOT_MET);
        }

        // test requirements on one input and met
        {
            GlobalProperties gp = new GlobalProperties();
            gp.setHashPartitioned(new FieldList(0));
            LocalProperties lp = LocalProperties.forGrouping(new FieldList(2, 1));

            RequestedGlobalProperties rgp = new RequestedGlobalProperties();
            rgp.setHashPartitioned(new FieldList(0));

            RequestedLocalProperties rlp = new RequestedLocalProperties();
            rlp.setGroupedFields(new FieldList(2, 1));

            toJoin1.setRequiredGlobalProps(rgp);
            toJoin1.setRequiredLocalProps(rlp);

            FeedbackPropertiesMeetRequirementsReport report = afterJoin.checkPartialSolutionPropertiesMet(target, gp, lp);
            assertTrue(report != null && report != NO_PARTIAL_SOLUTION && report != NOT_MET);
        }

        // test requirements on both inputs and met
        {
            GlobalProperties gp = new GlobalProperties();
            gp.setHashPartitioned(new FieldList(0));
            LocalProperties lp = LocalProperties.forGrouping(new FieldList(2, 1));

            RequestedGlobalProperties rgp = new RequestedGlobalProperties();
            rgp.setHashPartitioned(new FieldList(0));

            RequestedLocalProperties rlp = new RequestedLocalProperties();
            rlp.setGroupedFields(new FieldList(2, 1));

            toJoin1.setRequiredGlobalProps(rgp);
            toJoin1.setRequiredLocalProps(rlp);

            toJoin2.setRequiredGlobalProps(rgp);
            toJoin2.setRequiredLocalProps(rlp);

            FeedbackPropertiesMeetRequirementsReport report = afterJoin.checkPartialSolutionPropertiesMet(target, gp, lp);
            assertTrue(report != null && report != NO_PARTIAL_SOLUTION && report != NOT_MET);
        }

        // test requirements on both inputs, one not met
        {
            GlobalProperties gp = new GlobalProperties();
            gp.setHashPartitioned(new FieldList(0));
            LocalProperties lp = LocalProperties.forGrouping(new FieldList(2, 1));

            RequestedGlobalProperties rgp1 = new RequestedGlobalProperties();
            rgp1.setHashPartitioned(new FieldList(0));

            RequestedLocalProperties rlp1 = new RequestedLocalProperties();
            rlp1.setGroupedFields(new FieldList(2, 1));

            RequestedGlobalProperties rgp2 = new RequestedGlobalProperties();
            rgp2.setHashPartitioned(new FieldList(1));

            RequestedLocalProperties rlp2 = new RequestedLocalProperties();
            rlp2.setGroupedFields(new FieldList(0, 3));

            toJoin1.setRequiredGlobalProps(rgp1);
            toJoin1.setRequiredLocalProps(rlp1);

            toJoin2.setRequiredGlobalProps(rgp2);
            toJoin2.setRequiredLocalProps(rlp2);

            FeedbackPropertiesMeetRequirementsReport report = afterJoin.checkPartialSolutionPropertiesMet(target, gp, lp);
            assertEquals(NOT_MET, report);
        }

        // test override on both inputs, later requirement ignored
        {
            GlobalProperties gp = new GlobalProperties();
            gp.setHashPartitioned(new FieldList(0));
            LocalProperties lp = LocalProperties.forGrouping(new FieldList(2, 1));

            RequestedGlobalProperties rgp = new RequestedGlobalProperties();
            rgp.setHashPartitioned(new FieldList(1));

            RequestedLocalProperties rlp = new RequestedLocalProperties();
            rlp.setGroupedFields(new FieldList(0, 3));

            toJoin1.setRequiredGlobalProps(null);
            toJoin1.setRequiredLocalProps(null);

            toJoin2.setRequiredGlobalProps(null);
            toJoin2.setRequiredLocalProps(null);

            toJoin1.setShipStrategy(ShipStrategyType.PARTITION_HASH, new FieldList(88), DataExchangeMode.PIPELINED);
            toJoin2.setShipStrategy(ShipStrategyType.BROADCAST, DataExchangeMode.PIPELINED);

            toAfterJoin.setRequiredGlobalProps(rgp);
            toAfterJoin.setRequiredLocalProps(rlp);

            FeedbackPropertiesMeetRequirementsReport report = afterJoin.checkPartialSolutionPropertiesMet(target, gp, lp);
            assertEquals(MET, report);
        }

        // test override on one input, later requirement met
        {
            GlobalProperties gp = new GlobalProperties();
            gp.setHashPartitioned(new FieldList(0));
            LocalProperties lp = LocalProperties.forGrouping(new FieldList(2, 1));

            RequestedGlobalProperties rgp = new RequestedGlobalProperties();
            rgp.setHashPartitioned(new FieldList(0));

            RequestedLocalProperties rlp = new RequestedLocalProperties();
            rlp.setGroupedFields(new FieldList(2, 1));

            toJoin1.setShipStrategy(ShipStrategyType.PARTITION_HASH, new FieldList(88), DataExchangeMode.PIPELINED);
            toJoin2.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);

            toAfterJoin.setRequiredGlobalProps(rgp);
            toAfterJoin.setRequiredLocalProps(rlp);

            FeedbackPropertiesMeetRequirementsReport report = afterJoin.checkPartialSolutionPropertiesMet(target, gp, lp);
            assertEquals(PENDING, report);
        }

        // test override on one input, later requirement not met
        {
            GlobalProperties gp = new GlobalProperties();
            gp.setHashPartitioned(new FieldList(0));
            LocalProperties lp = LocalProperties.forGrouping(new FieldList(2, 1));

            RequestedGlobalProperties rgp = new RequestedGlobalProperties();
            rgp.setHashPartitioned(new FieldList(3));

            RequestedLocalProperties rlp = new RequestedLocalProperties();
            rlp.setGroupedFields(new FieldList(77, 69));

            toJoin1.setShipStrategy(ShipStrategyType.PARTITION_HASH, new FieldList(88), DataExchangeMode.PIPELINED);
            toJoin2.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);

            toAfterJoin.setRequiredGlobalProps(rgp);
            toAfterJoin.setRequiredLocalProps(rlp);

            FeedbackPropertiesMeetRequirementsReport report = afterJoin.checkPartialSolutionPropertiesMet(target, gp, lp);
            assertEquals(NOT_MET, report);
        }

        // test override on one input locally, later global requirement not met
        {
            GlobalProperties gp = new GlobalProperties();
            gp.setHashPartitioned(new FieldList(0));
            LocalProperties lp = LocalProperties.forGrouping(new FieldList(2, 1));

            RequestedGlobalProperties rgp = new RequestedGlobalProperties();
            rgp.setHashPartitioned(new FieldList(3));

            toJoin1.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
            toJoin1.setLocalStrategy(LocalStrategy.SORT, new FieldList(3), new boolean[] { false });

            toJoin2.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
            toJoin1.setLocalStrategy(LocalStrategy.NONE);

            toAfterJoin.setRequiredGlobalProps(rgp);
            toAfterJoin.setRequiredLocalProps(null);

            FeedbackPropertiesMeetRequirementsReport report = afterJoin.checkPartialSolutionPropertiesMet(target, gp, lp);
            assertEquals(NOT_MET, report);
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
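
The blocks above all follow the same three-step pattern: build the GlobalProperties and LocalProperties that the iteration feedback would deliver, attach RequestedGlobalProperties / RequestedLocalProperties to the channels that consume it, and ask the downstream plan node whether the feedback satisfies those requests. A condensed sketch of that pattern, using only the calls already shown in the test (getSourceNode(), getMapNode(), and the statically imported report constants are the test's own helpers):

// Condensed sketch of the check pattern exercised by the test above
// (assumes the same helper methods and static imports as the surrounding test class).
SourcePlanNode target = new SourcePlanNode(getSourceNode(), "Partial Solution");

Channel toMapper = new Channel(target);
toMapper.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
toMapper.setLocalStrategy(LocalStrategy.NONE);
SingleInputPlanNode mapper = new SingleInputPlanNode(getMapNode(), "Mapper", toMapper, DriverStrategy.MAP);

// what the feedback (partial solution) would actually provide
GlobalProperties feedbackGp = new GlobalProperties();
feedbackGp.setHashPartitioned(new FieldList(0));
LocalProperties feedbackLp = LocalProperties.forGrouping(new FieldList(2, 1));

// what the consuming channel requires
RequestedGlobalProperties requiredGp = new RequestedGlobalProperties();
requiredGp.setHashPartitioned(new FieldList(0));
toMapper.setRequiredGlobalProps(requiredGp);

FeedbackPropertiesMeetRequirementsReport report =
        mapper.checkPartialSolutionPropertiesMet(target, feedbackGp, feedbackLp);
assertTrue(report != NO_PARTIAL_SOLUTION && report != NOT_MET);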
Use of org.apache.flink.optimizer.dataproperties.GlobalProperties in project flink by apache: class JsonMapper, method getOptimizerPropertiesJson.
public static String getOptimizerPropertiesJson(JsonFactory jsonFactory, PlanNode node) {
    try {
        final StringWriter writer = new StringWriter(256);
        final JsonGenerator gen = jsonFactory.createGenerator(writer);
        final OptimizerNode optNode = node.getOptimizerNode();

        gen.writeStartObject();

        // global properties
        if (node.getGlobalProperties() != null) {
            GlobalProperties gp = node.getGlobalProperties();
            gen.writeArrayFieldStart("global_properties");

            addProperty(gen, "Partitioning", gp.getPartitioning().name());
            if (gp.getPartitioningFields() != null) {
                addProperty(gen, "Partitioned on", gp.getPartitioningFields().toString());
            }
            if (gp.getPartitioningOrdering() != null) {
                addProperty(gen, "Partitioning Order", gp.getPartitioningOrdering().toString());
            } else {
                addProperty(gen, "Partitioning Order", "(none)");
            }
            if (optNode.getUniqueFields() == null || optNode.getUniqueFields().size() == 0) {
                addProperty(gen, "Uniqueness", "not unique");
            } else {
                addProperty(gen, "Uniqueness", optNode.getUniqueFields().toString());
            }

            gen.writeEndArray();
        }

        // local properties
        if (node.getLocalProperties() != null) {
            LocalProperties lp = node.getLocalProperties();
            gen.writeArrayFieldStart("local_properties");

            if (lp.getOrdering() != null) {
                addProperty(gen, "Order", lp.getOrdering().toString());
            } else {
                addProperty(gen, "Order", "(none)");
            }
            if (lp.getGroupedFields() != null && lp.getGroupedFields().size() > 0) {
                addProperty(gen, "Grouped on", lp.getGroupedFields().toString());
            } else {
                addProperty(gen, "Grouping", "not grouped");
            }
            if (optNode.getUniqueFields() == null || optNode.getUniqueFields().size() == 0) {
                addProperty(gen, "Uniqueness", "not unique");
            } else {
                addProperty(gen, "Uniqueness", optNode.getUniqueFields().toString());
            }

            gen.writeEndArray();
        }

        // output size estimates
        {
            gen.writeArrayFieldStart("estimates");
            addProperty(gen, "Est. Output Size", optNode.getEstimatedOutputSize() == -1 ? "(unknown)" : formatNumber(optNode.getEstimatedOutputSize(), "B"));
            addProperty(gen, "Est. Cardinality", optNode.getEstimatedNumRecords() == -1 ? "(unknown)" : formatNumber(optNode.getEstimatedNumRecords()));
            gen.writeEndArray();
        }

        // output node cost
        if (node.getNodeCosts() != null) {
            gen.writeArrayFieldStart("costs");
            addProperty(gen, "Network", node.getNodeCosts().getNetworkCost() == -1 ? "(unknown)" : formatNumber(node.getNodeCosts().getNetworkCost(), "B"));
            addProperty(gen, "Disk I/O", node.getNodeCosts().getDiskCost() == -1 ? "(unknown)" : formatNumber(node.getNodeCosts().getDiskCost(), "B"));
            addProperty(gen, "CPU", node.getNodeCosts().getCpuCost() == -1 ? "(unknown)" : formatNumber(node.getNodeCosts().getCpuCost(), ""));
            addProperty(gen, "Cumulative Network", node.getCumulativeCosts().getNetworkCost() == -1 ? "(unknown)" : formatNumber(node.getCumulativeCosts().getNetworkCost(), "B"));
            addProperty(gen, "Cumulative Disk I/O", node.getCumulativeCosts().getDiskCost() == -1 ? "(unknown)" : formatNumber(node.getCumulativeCosts().getDiskCost(), "B"));
            addProperty(gen, "Cumulative CPU", node.getCumulativeCosts().getCpuCost() == -1 ? "(unknown)" : formatNumber(node.getCumulativeCosts().getCpuCost(), ""));
            gen.writeEndArray();
        }

        // compiler hints
        if (optNode.getOperator().getCompilerHints() != null) {
            CompilerHints hints = optNode.getOperator().getCompilerHints();
            CompilerHints defaults = new CompilerHints();

            String size = hints.getOutputSize() == defaults.getOutputSize() ? "(none)" : String.valueOf(hints.getOutputSize());
            String card = hints.getOutputCardinality() == defaults.getOutputCardinality() ? "(none)" : String.valueOf(hints.getOutputCardinality());
            String width = hints.getAvgOutputRecordSize() == defaults.getAvgOutputRecordSize() ? "(none)" : String.valueOf(hints.getAvgOutputRecordSize());
            String filter = hints.getFilterFactor() == defaults.getFilterFactor() ? "(none)" : String.valueOf(hints.getFilterFactor());

            gen.writeArrayFieldStart("compiler_hints");
            addProperty(gen, "Output Size (bytes)", size);
            addProperty(gen, "Output Cardinality", card);
            addProperty(gen, "Avg. Output Record Size (bytes)", width);
            addProperty(gen, "Filter Factor", filter);
            gen.writeEndArray();
        }

        gen.writeEndObject();
        gen.close();
        return writer.toString();
    } catch (Exception e) {
        return "{}";
    }
}
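
The addProperty(...) and formatNumber(...) helpers referenced above are private members of JsonMapper that are not shown on this page. A plausible sketch of addProperty, assuming each entry is emitted as a small {"name": ..., "value": ...} object inside the currently open array (an illustration only, not the actual Flink implementation):

// Hypothetical sketch of the addProperty(...) helper used above.
private static void addProperty(JsonGenerator gen, String name, String value) throws IOException {
    // emit one {"name": ..., "value": ...} element into the array opened by writeArrayFieldStart(...)
    gen.writeStartObject();
    gen.writeStringField("name", name);
    gen.writeStringField("value", value);
    gen.writeEndObject();
}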
Use of org.apache.flink.optimizer.dataproperties.GlobalProperties in project flink by apache: class CoGroupRawDescriptor, method computeGlobalProperties.
@Override
public GlobalProperties computeGlobalProperties(GlobalProperties in1, GlobalProperties in2) {
    GlobalProperties gp = GlobalProperties.combine(in1, in2);
    if (gp.getUniqueFieldCombination() != null
            && gp.getUniqueFieldCombination().size() > 0
            && gp.getPartitioning() == PartitioningProperty.RANDOM_PARTITIONED) {
        gp.setAnyPartitioning(gp.getUniqueFieldCombination().iterator().next().toFieldList());
    }
    gp.clearUniqueFieldCombinations();
    return gp;
}
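
The RANDOM_PARTITIONED check above relies on the defaults of GlobalProperties: a freshly constructed instance carries no partitioning constraint until one of the narrowing setters is called. A minimal, self-contained sketch of those setters; the expected outputs in the comments are assumptions about the defaults, not taken from a Flink test:

import org.apache.flink.api.common.operators.util.FieldList;
import org.apache.flink.optimizer.dataproperties.GlobalProperties;

public class GlobalPropertiesSketch {

    public static void main(String[] args) {
        GlobalProperties gp = new GlobalProperties();
        // a fresh instance is expected to report RANDOM_PARTITIONED and no partitioning fields
        System.out.println(gp.getPartitioning() + " / " + gp.getPartitioningFields());

        // narrow to hash partitioning on field 0, as done throughout the snippets on this page
        gp.setHashPartitioned(new FieldList(0));
        System.out.println(gp.getPartitioning() + " / " + gp.getPartitioningFields());

        // the weaker form used by computeGlobalProperties above when a unique field combination exists
        GlobalProperties any = new GlobalProperties();
        any.setAnyPartitioning(new FieldList(0));
        System.out.println(any.getPartitioning() + " / " + any.getPartitioningFields());
    }
}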
Use of org.apache.flink.optimizer.dataproperties.GlobalProperties in project flink by apache: class WorksetIterationNode, method instantiate.
@SuppressWarnings("unchecked")
@Override
protected void instantiate(OperatorDescriptorDual operator, Channel solutionSetIn, Channel worksetIn, List<Set<? extends NamedChannel>> broadcastPlanChannels, List<PlanNode> target, CostEstimator estimator, RequestedGlobalProperties globPropsReqSolutionSet, RequestedGlobalProperties globPropsReqWorkset, RequestedLocalProperties locPropsReqSolutionSet, RequestedLocalProperties locPropsReqWorkset) {
    // check for pipeline breaking using hash join with build on the solution set side
    placePipelineBreakersIfNecessary(DriverStrategy.HYBRIDHASH_BUILD_FIRST, solutionSetIn, worksetIn);

    // NOTES ON THE ENUMERATION OF THE STEP FUNCTION PLANS:
    // Whenever we instantiate the iteration, we enumerate new candidates for the step function.
    // That way, we make sure we have an appropriate plan for each candidate for the initial
    // partial solution, and we have a fitting candidate for the step function (often, work is
    // pushed out of the step function). Among the candidates of the step function, we keep only
    // those that meet the requested properties of the current candidate initial partial solution.
    // That makes sure these properties exist at the beginning of every iteration.

    // 1) Because we enumerate multiple times, we may need to clean the cached plans
    //    before starting another enumeration
    this.nextWorkset.accept(PlanCacheCleaner.INSTANCE);
    this.solutionSetDelta.accept(PlanCacheCleaner.INSTANCE);

    // 2) Give the partial solution the properties of the current candidate for the initial
    //    partial solution. This concerns currently only the workset.
    this.worksetNode.setCandidateProperties(worksetIn.getGlobalProperties(), worksetIn.getLocalProperties(), worksetIn);
    this.solutionSetNode.setCandidateProperties(this.partitionedProperties, new LocalProperties(), solutionSetIn);

    final SolutionSetPlanNode sspn = this.solutionSetNode.getCurrentSolutionSetPlanNode();
    final WorksetPlanNode wspn = this.worksetNode.getCurrentWorksetPlanNode();

    // 3) Get the alternative plans
    List<PlanNode> solutionSetDeltaCandidates = this.solutionSetDelta.getAlternativePlans(estimator);
    List<PlanNode> worksetCandidates = this.nextWorkset.getAlternativePlans(estimator);

    // 4) Throw away all that are not compatible with the properties currently requested to the
    //    initial partial solution.
    //    Make sure that the workset candidates fulfill the input requirements.
    {
        List<PlanNode> newCandidates = new ArrayList<PlanNode>();
        for (Iterator<PlanNode> planDeleter = worksetCandidates.iterator(); planDeleter.hasNext(); ) {
            PlanNode candidate = planDeleter.next();

            GlobalProperties atEndGlobal = candidate.getGlobalProperties();
            LocalProperties atEndLocal = candidate.getLocalProperties();

            FeedbackPropertiesMeetRequirementsReport report = candidate.checkPartialSolutionPropertiesMet(wspn, atEndGlobal, atEndLocal);
            if (report == FeedbackPropertiesMeetRequirementsReport.NO_PARTIAL_SOLUTION) {
                // depends only through broadcast variable on the workset solution
            } else if (report == FeedbackPropertiesMeetRequirementsReport.NOT_MET) {
                // attach a no-op node through which we create the properties of the original input
                Channel toNoOp = new Channel(candidate);
                globPropsReqWorkset.parameterizeChannel(toNoOp, false, nextWorksetRootConnection.getDataExchangeMode(), false);
                locPropsReqWorkset.parameterizeChannel(toNoOp);

                NoOpUnaryUdfOp noOpUnaryUdfOp = new NoOpUnaryUdfOp<>();
                noOpUnaryUdfOp.setInput(candidate.getProgramOperator());

                UnaryOperatorNode rebuildWorksetPropertiesNode = new UnaryOperatorNode("Rebuild Workset Properties", noOpUnaryUdfOp, true);
                rebuildWorksetPropertiesNode.setParallelism(candidate.getParallelism());

                SingleInputPlanNode rebuildWorksetPropertiesPlanNode = new SingleInputPlanNode(rebuildWorksetPropertiesNode, "Rebuild Workset Properties", toNoOp, DriverStrategy.UNARY_NO_OP);
                rebuildWorksetPropertiesPlanNode.initProperties(toNoOp.getGlobalProperties(), toNoOp.getLocalProperties());
                estimator.costOperator(rebuildWorksetPropertiesPlanNode);

                GlobalProperties atEndGlobalModified = rebuildWorksetPropertiesPlanNode.getGlobalProperties();
                LocalProperties atEndLocalModified = rebuildWorksetPropertiesPlanNode.getLocalProperties();

                if (!(atEndGlobalModified.equals(atEndGlobal) && atEndLocalModified.equals(atEndLocal))) {
                    FeedbackPropertiesMeetRequirementsReport report2 = candidate.checkPartialSolutionPropertiesMet(wspn, atEndGlobalModified, atEndLocalModified);
                    if (report2 != FeedbackPropertiesMeetRequirementsReport.NOT_MET) {
                        newCandidates.add(rebuildWorksetPropertiesPlanNode);
                    }
                }

                // remove the original operator and add the modified candidate
                planDeleter.remove();
            }
        }
        worksetCandidates.addAll(newCandidates);
    }

    if (worksetCandidates.isEmpty()) {
        return;
    }

    // sanity check the solution set delta
    for (PlanNode solutionSetDeltaCandidate : solutionSetDeltaCandidates) {
        SingleInputPlanNode candidate = (SingleInputPlanNode) solutionSetDeltaCandidate;
        GlobalProperties gp = candidate.getGlobalProperties();
        if (gp.getPartitioning() != PartitioningProperty.HASH_PARTITIONED || gp.getPartitioningFields() == null || !gp.getPartitioningFields().equals(this.solutionSetKeyFields)) {
            throw new CompilerException("Bug: The solution set delta is not partitioned.");
        }
    }

    // 5) Create a candidate for the iteration node for every remaining plan of the step function
    final GlobalProperties gp = new GlobalProperties();
    gp.setHashPartitioned(this.solutionSetKeyFields);
    gp.addUniqueFieldCombination(this.solutionSetKeyFields);

    LocalProperties lp = LocalProperties.EMPTY.addUniqueFields(this.solutionSetKeyFields);

    // take all combinations of solution set delta and workset plans
    for (PlanNode worksetCandidate : worksetCandidates) {
        for (PlanNode solutionSetCandidate : solutionSetDeltaCandidates) {
            // check whether they have the same operator at their latest branching point
            if (this.singleRoot.areBranchCompatible(solutionSetCandidate, worksetCandidate)) {
                SingleInputPlanNode siSolutionDeltaCandidate = (SingleInputPlanNode) solutionSetCandidate;
                boolean immediateDeltaUpdate;

                // can update on the fly
                if (siSolutionDeltaCandidate.getInput().getShipStrategy() == ShipStrategyType.FORWARD && this.solutionDeltaImmediatelyAfterSolutionJoin) {
                    // sanity check the node and connection
                    if (siSolutionDeltaCandidate.getDriverStrategy() != DriverStrategy.UNARY_NO_OP || siSolutionDeltaCandidate.getInput().getLocalStrategy() != LocalStrategy.NONE) {
                        throw new CompilerException("Invalid Solution set delta node.");
                    }
                    solutionSetCandidate = siSolutionDeltaCandidate.getInput().getSource();
                    immediateDeltaUpdate = true;
                } else {
                    // was not partitioned, we need to keep this node;
                    // mark that we materialize the input
                    siSolutionDeltaCandidate.getInput().setTempMode(TempMode.PIPELINE_BREAKER);
                    immediateDeltaUpdate = false;
                }

                WorksetIterationPlanNode wsNode = new WorksetIterationPlanNode(this, this.getOperator().getName(), solutionSetIn, worksetIn, sspn, wspn, worksetCandidate, solutionSetCandidate);
                wsNode.setImmediateSolutionSetUpdate(immediateDeltaUpdate);
                wsNode.initProperties(gp, lp);
                target.add(wsNode);
            }
        }
    }
}
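
Stripped of the repair logic, the candidate filtering in step 4 boils down to one rule: keep a workset candidate only if the properties it produces at the end of the step function still satisfy what the workset plan node requires at the beginning of the next iteration. A condensed, hypothetical helper capturing just that rule (the import paths are assumptions, and the real method additionally tries to rescue a rejected candidate by appending the no-op re-partitioning node shown above):

import java.util.Iterator;
import java.util.List;

import org.apache.flink.optimizer.plan.PlanNode;
import org.apache.flink.optimizer.plan.PlanNode.FeedbackPropertiesMeetRequirementsReport;
import org.apache.flink.optimizer.plan.WorksetPlanNode;

final class WorksetCandidateFilter {

    /** Drops every candidate whose end-of-iteration properties do not meet the workset requirements. */
    static void dropUnmetCandidates(List<PlanNode> worksetCandidates, WorksetPlanNode wspn) {
        for (Iterator<PlanNode> it = worksetCandidates.iterator(); it.hasNext(); ) {
            PlanNode candidate = it.next();
            FeedbackPropertiesMeetRequirementsReport report =
                    candidate.checkPartialSolutionPropertiesMet(
                            wspn, candidate.getGlobalProperties(), candidate.getLocalProperties());
            if (report == FeedbackPropertiesMeetRequirementsReport.NOT_MET) {
                it.remove();
            }
        }
    }
}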
Use of org.apache.flink.optimizer.dataproperties.GlobalProperties in project flink by apache: class SingleInputNode, method instantiateCandidate.
protected void instantiateCandidate(OperatorDescriptorSingle dps, Channel in, List<Set<? extends NamedChannel>> broadcastPlanChannels, List<PlanNode> target, CostEstimator estimator, RequestedGlobalProperties globPropsReq, RequestedLocalProperties locPropsReq) {
    final PlanNode inputSource = in.getSource();

    for (List<NamedChannel> broadcastChannelsCombination : Sets.cartesianProduct(broadcastPlanChannels)) {
        boolean validCombination = true;
        boolean requiresPipelinebreaker = false;

        // check whether the broadcast inputs use the same plan candidate at the branching point
        for (int i = 0; i < broadcastChannelsCombination.size(); i++) {
            NamedChannel nc = broadcastChannelsCombination.get(i);
            PlanNode bcSource = nc.getSource();

            // check branch compatibility against input
            if (!areBranchCompatible(bcSource, inputSource)) {
                validCombination = false;
                break;
            }

            // check branch compatibility against all other broadcast variables
            for (int k = 0; k < i; k++) {
                PlanNode otherBcSource = broadcastChannelsCombination.get(k).getSource();
                if (!areBranchCompatible(bcSource, otherBcSource)) {
                    validCombination = false;
                    break;
                }
            }

            // all common predecessors
            if (in.isOnDynamicPath() && this.hereJoinedBranches != null) {
                for (OptimizerNode brancher : this.hereJoinedBranches) {
                    PlanNode candAtBrancher = in.getSource().getCandidateAtBranchPoint(brancher);
                    if (candAtBrancher == null) {
                        // closed branch between two broadcast variables
                        continue;
                    }
                    SourceAndDamReport res = in.getSource().hasDamOnPathDownTo(candAtBrancher);
                    if (res == NOT_FOUND) {
                        throw new CompilerException("Bug: Tracing dams for deadlock detection is broken.");
                    } else if (res == FOUND_SOURCE) {
                        requiresPipelinebreaker = true;
                        break;
                    } else if (res == FOUND_SOURCE_AND_DAM) {
                        // good
                    } else {
                        throw new CompilerException();
                    }
                }
            }
        }

        if (!validCombination) {
            continue;
        }

        if (requiresPipelinebreaker) {
            in.setTempMode(in.getTempMode().makePipelineBreaker());
        }

        final SingleInputPlanNode node = dps.instantiate(in, this);
        node.setBroadcastInputs(broadcastChannelsCombination);

        // compute how the strategy affects the properties
        GlobalProperties gProps = in.getGlobalProperties().clone();
        LocalProperties lProps = in.getLocalProperties().clone();
        gProps = dps.computeGlobalProperties(gProps);
        lProps = dps.computeLocalProperties(lProps);

        // filter by the user code field copies
        gProps = gProps.filterBySemanticProperties(getSemanticPropertiesForGlobalPropertyFiltering(), 0);
        lProps = lProps.filterBySemanticProperties(getSemanticPropertiesForLocalPropertyFiltering(), 0);

        // apply
        node.initProperties(gProps, lProps);
        node.updatePropertiesWithUniqueSets(getUniqueFields());
        target.add(node);
    }
}
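
The semantic-property filtering step above is what decides whether a partitioning survives a user function: hash partitioning on a field is only kept if the function declares that it forwards that field unchanged. A small illustration of that idea using SingleInputSemanticProperties; the forwarded-field call and the expected outcome in the comments are assumptions, not taken from a Flink test:

import org.apache.flink.api.common.operators.SingleInputSemanticProperties;
import org.apache.flink.api.common.operators.util.FieldList;
import org.apache.flink.optimizer.dataproperties.GlobalProperties;

public class SemanticFilteringSketch {

    public static void main(String[] args) {
        GlobalProperties gProps = new GlobalProperties();
        gProps.setHashPartitioned(new FieldList(0));

        // the user function declares that it copies field 0 to field 0 unchanged
        SingleInputSemanticProperties semProps = new SingleInputSemanticProperties();
        semProps.addForwardedField(0, 0);

        // with the forwarded field, the hash partitioning is expected to survive the filter;
        // without it, the optimizer has to assume the partitioning is destroyed
        GlobalProperties filtered = gProps.filterBySemanticProperties(semProps, 0);
        System.out.println(filtered.getPartitioning() + " / " + filtered.getPartitioningFields());
    }
}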