Example 1 with IPartitioningRequirementsCoordinator

Use of org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator in project asterixdb by apache.

From the class EnforceStructuralPropertiesRule, the method physOptimizeOp:

private boolean physOptimizeOp(Mutable<ILogicalOperator> opRef, IPhysicalPropertiesVector required, boolean nestedPlan, IOptimizationContext context) throws AlgebricksException {
    boolean changed = false;
    AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
    optimizeUsingConstraintsAndEquivClasses(op);
    PhysicalRequirements pr = op.getRequiredPhysicalPropertiesForChildren(required, context);
    IPhysicalPropertiesVector[] reqdProperties = null;
    if (pr != null) {
        reqdProperties = pr.getRequiredProperties();
    }
    boolean opIsRedundantSort = false;
    // compute properties and figure out the domain
    INodeDomain childrenDomain = null;
    {
        int j = 0;
        for (Mutable<ILogicalOperator> childRef : op.getInputs()) {
            AbstractLogicalOperator child = (AbstractLogicalOperator) childRef.getValue();
            // recursive call
            if (physOptimizeOp(childRef, reqdProperties[j], nestedPlan, context)) {
                changed = true;
            }
            child.computeDeliveredPhysicalProperties(context);
            IPhysicalPropertiesVector delivered = child.getDeliveredPhysicalProperties();
            if (childrenDomain == null) {
                childrenDomain = delivered.getPartitioningProperty().getNodeDomain();
            } else {
                INodeDomain dom2 = delivered.getPartitioningProperty().getNodeDomain();
                if (!childrenDomain.sameAs(dom2)) {
                    childrenDomain = context.getComputationNodeDomain();
                }
            }
            j++;
        }
    }
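    // Fill in any missing node domain on the required partitioning properties, using the domain derived from the children.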
    if (reqdProperties != null) {
        for (int k = 0; k < reqdProperties.length; k++) {
            IPhysicalPropertiesVector pv = reqdProperties[k];
            IPartitioningProperty pp = pv.getPartitioningProperty();
            if (pp != null && pp.getNodeDomain() == null) {
                pp.setNodeDomain(childrenDomain);
            }
        }
    }
    // The child index of the child operator to optimize first.
    int startChildIndex = getStartChildIndex(op, pr, nestedPlan, context);
    IPartitioningProperty firstDeliveredPartitioning = null;
    // Enforce data properties in a top-down manner.
    for (int j = 0; j < op.getInputs().size(); j++) {
        // Starts from a partitioning-compatible child if any to loop over all children.
        int childIndex = (j + startChildIndex) % op.getInputs().size();
        IPhysicalPropertiesVector requiredProperty = reqdProperties[childIndex];
        AbstractLogicalOperator child = (AbstractLogicalOperator) op.getInputs().get(childIndex).getValue();
        IPhysicalPropertiesVector delivered = child.getDeliveredPhysicalProperties();
        AlgebricksConfig.ALGEBRICKS_LOGGER.finest(">>>> Properties delivered by " + child.getPhysicalOperator() + ": " + delivered + "\n");
        IPartitioningRequirementsCoordinator prc = pr.getPartitioningCoordinator();
        // Coordinates requirements by looking at the firstDeliveredPartitioning.
        Pair<Boolean, IPartitioningProperty> pbpp = prc.coordinateRequirements(requiredProperty.getPartitioningProperty(), firstDeliveredPartitioning, op, context);
        boolean mayExpandPartitioningProperties = pbpp.first;
        IPhysicalPropertiesVector rqd = new StructuralPropertiesVector(pbpp.second, requiredProperty.getLocalProperties());
        AlgebricksConfig.ALGEBRICKS_LOGGER.finest(">>>> Required properties for " + child.getPhysicalOperator() + ": " + rqd + "\n");
        // The partitioning property of reqdProperties[childIndex] could be updated here because
        // rqd.getPartitioningProperty() is the same object instance as requiredProperty.getPartitioningProperty().
        IPhysicalPropertiesVector diff = delivered.getUnsatisfiedPropertiesFrom(rqd, mayExpandPartitioningProperties, context.getEquivalenceClassMap(child), context.getFDList(child));
        if (isRedundantSort(opRef, delivered, diff, context)) {
            opIsRedundantSort = true;
        }
        if (diff != null) {
            changed = true;
            addEnforcers(op, childIndex, diff, rqd, delivered, childrenDomain, nestedPlan, context);
            AbstractLogicalOperator newChild = (AbstractLogicalOperator) op.getInputs().get(childIndex).getValue();
            if (newChild != child) {
                delivered = newChild.getDeliveredPhysicalProperties();
                IPhysicalPropertiesVector newDiff = newPropertiesDiff(newChild, rqd, mayExpandPartitioningProperties, context);
                AlgebricksConfig.ALGEBRICKS_LOGGER.finest(">>>> New properties diff: " + newDiff + "\n");
                if (isRedundantSort(opRef, delivered, newDiff, context)) {
                    opIsRedundantSort = true;
                    break;
                }
            }
        }
        if (firstDeliveredPartitioning == null) {
            firstDeliveredPartitioning = delivered.getPartitioningProperty();
        }
    }
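    // Recursively optimize any nested plans of this operator against the same required properties.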
    if (op.hasNestedPlans()) {
        AbstractOperatorWithNestedPlans nested = (AbstractOperatorWithNestedPlans) op;
        for (ILogicalPlan p : nested.getNestedPlans()) {
            if (physOptimizePlan(p, required, true, context)) {
                changed = true;
            }
        }
    }
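    // The sort is redundant: splice it (and a duplicate exchange, if present) out of the plan, transfer its annotations, and re-optimize the remaining subtree.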
    if (opIsRedundantSort) {
        if (AlgebricksConfig.DEBUG) {
            AlgebricksConfig.ALGEBRICKS_LOGGER.fine(">>>> Removing redundant SORT operator " + op.getPhysicalOperator() + "\n");
            printOp(op);
        }
        changed = true;
        AbstractLogicalOperator nextOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
        if (nextOp.getOperatorTag() == LogicalOperatorTag.PROJECT) {
            nextOp = (AbstractLogicalOperator) nextOp.getInputs().get(0).getValue();
        }
        opRef.setValue(nextOp);
        // Now, transfer annotations from the original sort op. to this one.
        AbstractLogicalOperator transferTo = nextOp;
        if (transferTo.getOperatorTag() == LogicalOperatorTag.EXCHANGE) {
            // remove duplicate exchange operator
            transferTo = (AbstractLogicalOperator) transferTo.getInputs().get(0).getValue();
        }
        transferTo.getAnnotations().putAll(op.getAnnotations());
        physOptimizeOp(opRef, required, nestedPlan, context);
    }
    return changed;
}
Also used:
StructuralPropertiesVector (org.apache.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector)
AbstractLogicalOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator)
INodeDomain (org.apache.hyracks.algebricks.core.algebra.properties.INodeDomain)
IPartitioningRequirementsCoordinator (org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator)
IPhysicalPropertiesVector (org.apache.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector)
PhysicalRequirements (org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements)
IPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningProperty)
Mutable (org.apache.commons.lang3.mutable.Mutable)
ILogicalPlan (org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan)
AbstractOperatorWithNestedPlans (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans)
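
The call sites above interact with the coordinator through a single method, coordinateRequirements, whose signature is visible in Example 2 below. As a minimal sketch, assuming only that signature, a coordinator that performs no coordination simply echoes the requirement back and reports (via the Boolean component of the result) that the caller may still expand the partitioning property; this mirrors the fallback branch of the LEFT_OUTER coordinator in Example 2.

// Minimal sketch of a "do nothing" coordinator; the variable name passThrough is illustrative.
IPartitioningRequirementsCoordinator passThrough = new IPartitioningRequirementsCoordinator() {

    @Override
    public Pair<Boolean, IPartitioningProperty> coordinateRequirements(IPartitioningProperty requirements, IPartitioningProperty firstDeliveredPartitioning, ILogicalOperator op, IOptimizationContext context) throws AlgebricksException {
        // Keep the original requirement and allow the caller to expand it.
        return new Pair<>(true, requirements);
    }
};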

Example 2 with IPartitioningRequirementsCoordinator

Use of org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator in project asterixdb by apache.

From the class AbstractHashJoinPOperator, the method getRequiredPropertiesForChildren:

@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op, IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
    // In a cost-based optimizer, we would also try to propagate the
    // parent's partitioning requirements.
    IPartitioningProperty pp1;
    IPartitioningProperty pp2;
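    // PAIRWISE requires each branch to be partitioned on its join keys; BROADCAST replicates the right branch to every partition while the left branch may be randomly partitioned.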
    switch(partitioningType) {
        case PAIRWISE:
            pp1 = new UnorderedPartitionedProperty(new ListSet<>(keysLeftBranch), context.getComputationNodeDomain());
            pp2 = new UnorderedPartitionedProperty(new ListSet<>(keysRightBranch), context.getComputationNodeDomain());
            break;
        case BROADCAST:
            pp1 = new RandomPartitioningProperty(context.getComputationNodeDomain());
            pp2 = new BroadcastPartitioningProperty(context.getComputationNodeDomain());
            break;
        default:
            throw new IllegalStateException();
    }
    StructuralPropertiesVector[] pv = new StructuralPropertiesVector[2];
    pv[0] = OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op, new StructuralPropertiesVector(pp1, null));
    pv[1] = OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op, new StructuralPropertiesVector(pp2, null));
    IPartitioningRequirementsCoordinator prc;
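    // INNER joins can coordinate the two branches' requirements via equivalence classes; LEFT_OUTER joins use the custom coordinator defined below.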
    switch(kind) {
        case INNER:
            {
                prc = IPartitioningRequirementsCoordinator.EQCLASS_PARTITIONING_COORDINATOR;
                break;
            }
        case LEFT_OUTER:
            {
                prc = new IPartitioningRequirementsCoordinator() {

                    @Override
                    public Pair<Boolean, IPartitioningProperty> coordinateRequirements(IPartitioningProperty requirements, IPartitioningProperty firstDeliveredPartitioning, ILogicalOperator op, IOptimizationContext context) throws AlgebricksException {
                        if (firstDeliveredPartitioning != null && requirements != null && firstDeliveredPartitioning.getPartitioningType() == requirements.getPartitioningType()) {
                            switch(requirements.getPartitioningType()) {
                                case UNORDERED_PARTITIONED:
                                    {
                                        UnorderedPartitionedProperty upp1 = (UnorderedPartitionedProperty) firstDeliveredPartitioning;
                                        Set<LogicalVariable> set1 = upp1.getColumnSet();
                                        UnorderedPartitionedProperty uppreq = (UnorderedPartitionedProperty) requirements;
                                        Set<LogicalVariable> modifuppreq = new ListSet<LogicalVariable>();
                                        Map<LogicalVariable, EquivalenceClass> eqmap = context.getEquivalenceClassMap(op);
                                        Set<LogicalVariable> covered = new ListSet<LogicalVariable>();
                                        Set<LogicalVariable> keysCurrent = uppreq.getColumnSet();
                                        List<LogicalVariable> keysFirst = (keysRightBranch.containsAll(keysCurrent)) ? keysRightBranch : keysLeftBranch;
                                        List<LogicalVariable> keysSecond = keysFirst == keysRightBranch ? keysLeftBranch : keysRightBranch;
                                        for (LogicalVariable r : uppreq.getColumnSet()) {
                                            EquivalenceClass ecSnd = eqmap.get(r);
                                            boolean found = false;
                                            int j = 0;
                                            for (LogicalVariable rvar : keysFirst) {
                                                if (rvar == r || ecSnd != null && eqmap.get(rvar) == ecSnd) {
                                                    found = true;
                                                    break;
                                                }
                                                j++;
                                            }
                                            if (!found) {
                                                throw new IllegalStateException("Did not find a variable equivalent to " + r + " among " + keysFirst);
                                            }
                                            LogicalVariable v2 = keysSecond.get(j);
                                            EquivalenceClass ecFst = eqmap.get(v2);
                                            for (LogicalVariable vset1 : set1) {
                                                if (vset1 == v2 || ecFst != null && eqmap.get(vset1) == ecFst) {
                                                    covered.add(vset1);
                                                    modifuppreq.add(r);
                                                    break;
                                                }
                                            }
                                            if (covered.equals(set1)) {
                                                break;
                                            }
                                        }
                                        if (!covered.equals(set1)) {
                                            throw new AlgebricksException("Could not modify " + requirements + " to agree with partitioning property " + firstDeliveredPartitioning + " delivered by previous input operator.");
                                        }
                                        UnorderedPartitionedProperty upp2 = new UnorderedPartitionedProperty(modifuppreq, requirements.getNodeDomain());
                                        return new Pair<>(false, upp2);
                                    }
                                case ORDERED_PARTITIONED:
                                    {
                                        throw new NotImplementedException();
                                    }
                            }
                        }
                        return new Pair<>(true, requirements);
                    }
                };
                break;
            }
        default:
            {
                throw new IllegalStateException();
            }
    }
    return new PhysicalRequirements(pv, prc);
}
Also used:
StructuralPropertiesVector (org.apache.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector)
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable)
UnorderedPartitionedProperty (org.apache.hyracks.algebricks.core.algebra.properties.UnorderedPartitionedProperty)
BroadcastPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty)
Set (java.util.Set)
ListSet (org.apache.hyracks.algebricks.common.utils.ListSet)
IOptimizationContext (org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext)
ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator)
NotImplementedException (org.apache.hyracks.algebricks.common.exceptions.NotImplementedException)
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)
IPartitioningRequirementsCoordinator (org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator)
IPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningProperty)
PhysicalRequirements (org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements)
List (java.util.List)
RandomPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.RandomPartitioningProperty)
Map (java.util.Map)
EquivalenceClass (org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass)
Pair (org.apache.hyracks.algebricks.common.utils.Pair)
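
For context, the PhysicalRequirements returned here packages one required-properties vector per join input together with the coordinator, and the enforcement rule in Example 1 unpacks both. The following hypothetical helper (not part of the asterixdb source; parameter names are borrowed from Example 1) sketches that consumption pattern using only calls that appear in the two examples.

// Hypothetical helper illustrating how a child's requirement is coordinated with the
// partitioning already delivered by the first child (see Example 1 for the real code).
private static IPhysicalPropertiesVector coordinateChildRequirement(AbstractLogicalOperator op, IPhysicalPropertiesVector required, int childIndex, IPartitioningProperty firstDeliveredPartitioning, IOptimizationContext context) throws AlgebricksException {
    PhysicalRequirements pr = op.getRequiredPhysicalPropertiesForChildren(required, context);
    // One required-properties vector per input of the operator.
    IPhysicalPropertiesVector[] reqdProperties = pr.getRequiredProperties();
    IPartitioningRequirementsCoordinator prc = pr.getPartitioningCoordinator();
    Pair<Boolean, IPartitioningProperty> coordinated = prc.coordinateRequirements(reqdProperties[childIndex].getPartitioningProperty(), firstDeliveredPartitioning, op, context);
    // coordinated.first indicates whether the partitioning property may still be expanded.
    return new StructuralPropertiesVector(coordinated.second, reqdProperties[childIndex].getLocalProperties());
}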

Aggregations

IPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningProperty): 2
IPartitioningRequirementsCoordinator (org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator): 2
PhysicalRequirements (org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements): 2
StructuralPropertiesVector (org.apache.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector): 2
List (java.util.List): 1
Map (java.util.Map): 1
Set (java.util.Set): 1
Mutable (org.apache.commons.lang3.mutable.Mutable): 1
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 1
NotImplementedException (org.apache.hyracks.algebricks.common.exceptions.NotImplementedException): 1
ListSet (org.apache.hyracks.algebricks.common.utils.ListSet): 1
Pair (org.apache.hyracks.algebricks.common.utils.Pair): 1
EquivalenceClass (org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass): 1
ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator): 1
ILogicalPlan (org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan): 1
IOptimizationContext (org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext): 1
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable): 1
AbstractLogicalOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator): 1
AbstractOperatorWithNestedPlans (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans): 1
BroadcastPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty): 1