
Example 1 with PhysicalRequirements

Use of org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements in project asterixdb by Apache.

Class EnforceStructuralPropertiesRule, method physOptimizeOp:

private boolean physOptimizeOp(Mutable<ILogicalOperator> opRef, IPhysicalPropertiesVector required, boolean nestedPlan, IOptimizationContext context) throws AlgebricksException {
    boolean changed = false;
    AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
    optimizeUsingConstraintsAndEquivClasses(op);
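    // Ask the operator what it requires from each input: a PhysicalRequirements bundles one
    // IPhysicalPropertiesVector per child plus a coordinator used to reconcile the children's
    // partitioning requirements against each other.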
    PhysicalRequirements pr = op.getRequiredPhysicalPropertiesForChildren(required, context);
    IPhysicalPropertiesVector[] reqdProperties = null;
    if (pr != null) {
        reqdProperties = pr.getRequiredProperties();
    }
    boolean opIsRedundantSort = false;
    // compute properties and figure out the domain
    INodeDomain childrenDomain = null;
    {
        int j = 0;
        for (Mutable<ILogicalOperator> childRef : op.getInputs()) {
            AbstractLogicalOperator child = (AbstractLogicalOperator) childRef.getValue();
            // recursive call
            if (physOptimizeOp(childRef, reqdProperties[j], nestedPlan, context)) {
                changed = true;
            }
            child.computeDeliveredPhysicalProperties(context);
            IPhysicalPropertiesVector delivered = child.getDeliveredPhysicalProperties();
            if (childrenDomain == null) {
                childrenDomain = delivered.getPartitioningProperty().getNodeDomain();
            } else {
                INodeDomain dom2 = delivered.getPartitioningProperty().getNodeDomain();
                if (!childrenDomain.sameAs(dom2)) {
                    childrenDomain = context.getComputationNodeDomain();
                }
            }
            j++;
        }
    }
    if (reqdProperties != null) {
        for (int k = 0; k < reqdProperties.length; k++) {
            IPhysicalPropertiesVector pv = reqdProperties[k];
            IPartitioningProperty pp = pv.getPartitioningProperty();
            if (pp != null && pp.getNodeDomain() == null) {
                pp.setNodeDomain(childrenDomain);
            }
        }
    }
    // The child index of the child operator to optimize first.
    int startChildIndex = getStartChildIndex(op, pr, nestedPlan, context);
    IPartitioningProperty firstDeliveredPartitioning = null;
    // Enforce data properties in a top-down manner.
    for (int j = 0; j < op.getInputs().size(); j++) {
        // Starts from a partitioning-compatible child if any to loop over all children.
        int childIndex = (j + startChildIndex) % op.getInputs().size();
        IPhysicalPropertiesVector requiredProperty = reqdProperties[childIndex];
        AbstractLogicalOperator child = (AbstractLogicalOperator) op.getInputs().get(childIndex).getValue();
        IPhysicalPropertiesVector delivered = child.getDeliveredPhysicalProperties();
        AlgebricksConfig.ALGEBRICKS_LOGGER.finest(">>>> Properties delivered by " + child.getPhysicalOperator() + ": " + delivered + "\n");
        IPartitioningRequirementsCoordinator prc = pr.getPartitioningCoordinator();
        // Coordinates requirements by looking at the firstDeliveredPartitioning.
        Pair<Boolean, IPartitioningProperty> pbpp = prc.coordinateRequirements(requiredProperty.getPartitioningProperty(), firstDeliveredPartitioning, op, context);
        boolean mayExpandPartitioningProperties = pbpp.first;
        IPhysicalPropertiesVector rqd = new StructuralPropertiesVector(pbpp.second, requiredProperty.getLocalProperties());
        AlgebricksConfig.ALGEBRICKS_LOGGER.finest(">>>> Required properties for " + child.getPhysicalOperator() + ": " + rqd + "\n");
        // The partitioning property of reqdProperties[childIndex] could be updated here because
        // rqd.getPartitioningProperty() is the same object instance as requiredProperty.getPartitioningProperty().
        IPhysicalPropertiesVector diff = delivered.getUnsatisfiedPropertiesFrom(rqd, mayExpandPartitioningProperties, context.getEquivalenceClassMap(child), context.getFDList(child));
        if (isRedundantSort(opRef, delivered, diff, context)) {
            opIsRedundantSort = true;
        }
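        // A non-null diff means this child does not yet satisfy the (coordinated) requirement;
        // insert enforcer operators between op and the child to supply the missing properties.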
        if (diff != null) {
            changed = true;
            addEnforcers(op, childIndex, diff, rqd, delivered, childrenDomain, nestedPlan, context);
            AbstractLogicalOperator newChild = (AbstractLogicalOperator) op.getInputs().get(childIndex).getValue();
            if (newChild != child) {
                delivered = newChild.getDeliveredPhysicalProperties();
                IPhysicalPropertiesVector newDiff = newPropertiesDiff(newChild, rqd, mayExpandPartitioningProperties, context);
                AlgebricksConfig.ALGEBRICKS_LOGGER.finest(">>>> New properties diff: " + newDiff + "\n");
                if (isRedundantSort(opRef, delivered, newDiff, context)) {
                    opIsRedundantSort = true;
                    break;
                }
            }
        }
        if (firstDeliveredPartitioning == null) {
            firstDeliveredPartitioning = delivered.getPartitioningProperty();
        }
    }
    if (op.hasNestedPlans()) {
        AbstractOperatorWithNestedPlans nested = (AbstractOperatorWithNestedPlans) op;
        for (ILogicalPlan p : nested.getNestedPlans()) {
            if (physOptimizePlan(p, required, true, context)) {
                changed = true;
            }
        }
    }
    if (opIsRedundantSort) {
        if (AlgebricksConfig.DEBUG) {
            AlgebricksConfig.ALGEBRICKS_LOGGER.fine(">>>> Removing redundant SORT operator " + op.getPhysicalOperator() + "\n");
            printOp(op);
        }
        changed = true;
        AbstractLogicalOperator nextOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
        if (nextOp.getOperatorTag() == LogicalOperatorTag.PROJECT) {
            nextOp = (AbstractLogicalOperator) nextOp.getInputs().get(0).getValue();
        }
        opRef.setValue(nextOp);
        // Now, transfer annotations from the original sort op. to this one.
        AbstractLogicalOperator transferTo = nextOp;
        if (transferTo.getOperatorTag() == LogicalOperatorTag.EXCHANGE) {
            // remove duplicate exchange operator
            transferTo = (AbstractLogicalOperator) transferTo.getInputs().get(0).getValue();
        }
        transferTo.getAnnotations().putAll(op.getAnnotations());
        physOptimizeOp(opRef, required, nestedPlan, context);
    }
    return changed;
}
Also used: StructuralPropertiesVector (org.apache.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector), AbstractLogicalOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator), INodeDomain (org.apache.hyracks.algebricks.core.algebra.properties.INodeDomain), IPartitioningRequirementsCoordinator (org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator), IPhysicalPropertiesVector (org.apache.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector), PhysicalRequirements (org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements), IPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningProperty), Mutable (org.apache.commons.lang3.mutable.Mutable), ILogicalPlan (org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan), AbstractOperatorWithNestedPlans (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans)
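
For reference, the following is a minimal sketch (illustrative only, not part of EnforceStructuralPropertiesRule; the helper name inspectRequirements is made up) of how the PhysicalRequirements returned by getRequiredPhysicalPropertiesForChildren is consumed: one IPhysicalPropertiesVector per input via getRequiredProperties(), plus a coordinator via getPartitioningCoordinator() that may rewrite a child's partitioning requirement based on what an earlier child delivered.

private static void inspectRequirements(AbstractLogicalOperator op, IPhysicalPropertiesVector requiredByParent, IOptimizationContext context) throws AlgebricksException {
    // Ask the operator what it requires from its children.
    PhysicalRequirements pr = op.getRequiredPhysicalPropertiesForChildren(requiredByParent, context);
    if (pr == null) {
        // The operator imposes no requirements on its inputs.
        return;
    }
    // One required-properties vector per input, in input order.
    IPhysicalPropertiesVector[] perChild = pr.getRequiredProperties();
    IPartitioningRequirementsCoordinator prc = pr.getPartitioningCoordinator();
    IPartitioningProperty firstDelivered = null;
    for (int i = 0; i < op.getInputs().size(); i++) {
        AbstractLogicalOperator child = (AbstractLogicalOperator) op.getInputs().get(i).getValue();
        child.computeDeliveredPhysicalProperties(context);
        IPhysicalPropertiesVector delivered = child.getDeliveredPhysicalProperties();
        // The coordinator may rewrite this child's partitioning requirement so that it
        // agrees with the partitioning already delivered by an earlier child.
        Pair<Boolean, IPartitioningProperty> coordinated = prc.coordinateRequirements(perChild[i].getPartitioningProperty(), firstDelivered, op, context);
        AlgebricksConfig.ALGEBRICKS_LOGGER.finest(">>>> Child " + i + " requires " + coordinated.second + ", delivers " + delivered + "\n");
        if (firstDelivered == null) {
            firstDelivered = delivered.getPartitioningProperty();
        }
    }
}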

Example 2 with PhysicalRequirements

Use of org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements in project asterixdb by Apache.

Class AbstractPreclusteredGroupByPOperator, method getRequiredPropertiesForChildren:

@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op, IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
    GroupByOperator gby = (GroupByOperator) op;
    StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
    if (gby.isGroupAll() && gby.isGlobal()) {
        if (op.getExecutionMode() == ExecutionMode.UNPARTITIONED) {
            pv[0] = new StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED, null);
            return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
        } else {
            return emptyUnaryRequirements();
        }
    }
    List<ILocalStructuralProperty> localProps = new ArrayList<>();
    Set<LogicalVariable> gbvars = new ListSet<>(columnList);
    LocalGroupingProperty groupProp = new LocalGroupingProperty(gbvars, new ArrayList<>(columnList));
    boolean goon = true;
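    // If a nested plan's aggregate sits directly on top of another pre-clustered group-by, extend
    // the required grouping with that inner group-by's key columns, so a single input ordering can
    // serve both group-bys.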
    for (ILogicalPlan p : gby.getNestedPlans()) {
        // groupings
        for (Mutable<ILogicalOperator> r : p.getRoots()) {
            AbstractLogicalOperator op1 = (AbstractLogicalOperator) r.getValue();
            if (op1.getOperatorTag() == LogicalOperatorTag.AGGREGATE) {
                AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
                IPhysicalOperator pop2 = op2.getPhysicalOperator();
                if (pop2 instanceof AbstractPreclusteredGroupByPOperator) {
                    List<LogicalVariable> gbyColumns = ((AbstractPreclusteredGroupByPOperator) pop2).getGbyColumns();
                    List<LogicalVariable> sndOrder = new ArrayList<>();
                    sndOrder.addAll(gbyColumns);
                    Set<LogicalVariable> freeVars = new HashSet<>();
                    try {
                        OperatorPropertiesUtil.getFreeVariablesInSelfOrDesc(op2, freeVars);
                    } catch (AlgebricksException e) {
                        throw new IllegalStateException(e);
                    }
                    // Only considers group key variables defined out-side the outer-most group-by operator.
                    sndOrder.retainAll(freeVars);
                    groupProp.getColumnSet().addAll(sndOrder);
                    groupProp.getPreferredOrderEnforcer().addAll(sndOrder);
                    goon = false;
                    break;
                }
            }
        }
        if (!goon) {
            break;
        }
    }
    localProps.add(groupProp);
    if (reqdByParent != null) {
        // propagate parent requirements
        List<ILocalStructuralProperty> lpPar = reqdByParent.getLocalProperties();
        if (lpPar != null) {
            boolean allOk = true;
            List<ILocalStructuralProperty> props = new ArrayList<>(lpPar.size());
            for (ILocalStructuralProperty prop : lpPar) {
                if (prop.getPropertyType() != PropertyType.LOCAL_ORDER_PROPERTY) {
                    allOk = false;
                    break;
                }
                LocalOrderProperty lop = (LocalOrderProperty) prop;
                List<OrderColumn> orderColumns = new ArrayList<>();
                List<OrderColumn> ords = lop.getOrderColumns();
                for (OrderColumn ord : ords) {
                    Pair<LogicalVariable, Mutable<ILogicalExpression>> p = getGbyPairByRhsVar(gby, ord.getColumn());
                    if (p == null) {
                        p = getDecorPairByRhsVar(gby, ord.getColumn());
                        if (p == null) {
                            allOk = false;
                            break;
                        }
                    }
                    ILogicalExpression e = p.second.getValue();
                    if (e.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
                        throw new IllegalStateException("Right hand side of group-by assignment should have been normalized to a variable reference.");
                    }
                    LogicalVariable v = ((VariableReferenceExpression) e).getVariableReference();
                    orderColumns.add(new OrderColumn(v, ord.getOrder()));
                }
                props.add(new LocalOrderProperty(orderColumns));
            }
            List<FunctionalDependency> fdList = new ArrayList<>();
            for (Pair<LogicalVariable, Mutable<ILogicalExpression>> decorPair : gby.getDecorList()) {
                List<LogicalVariable> hd = gby.getGbyVarList();
                List<LogicalVariable> tl = new ArrayList<>();
                tl.add(((VariableReferenceExpression) decorPair.second.getValue()).getVariableReference());
                fdList.add(new FunctionalDependency(hd, tl));
            }
            if (allOk && PropertiesUtil.matchLocalProperties(localProps, props, new HashMap<>(), fdList)) {
                localProps = props;
            }
        }
    }
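    // When the group-by runs partitioned, additionally require the input to be hash-partitioned
    // on the grouping columns.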
    IPartitioningProperty pp = null;
    AbstractLogicalOperator aop = (AbstractLogicalOperator) op;
    if (aop.getExecutionMode() == ExecutionMode.PARTITIONED) {
        pp = new UnorderedPartitionedProperty(new ListSet<>(columnList), context.getComputationNodeDomain());
    }
    pv[0] = new StructuralPropertiesVector(pp, localProps);
    return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
Also used: HashMap (java.util.HashMap), LocalGroupingProperty (org.apache.hyracks.algebricks.core.algebra.properties.LocalGroupingProperty), ArrayList (java.util.ArrayList), IPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningProperty), ListSet (org.apache.hyracks.algebricks.common.utils.ListSet), FunctionalDependency (org.apache.hyracks.algebricks.core.algebra.properties.FunctionalDependency), IPhysicalOperator (org.apache.hyracks.algebricks.core.algebra.base.IPhysicalOperator), ILocalStructuralProperty (org.apache.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty), HashSet (java.util.HashSet), StructuralPropertiesVector (org.apache.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector), LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable), UnorderedPartitionedProperty (org.apache.hyracks.algebricks.core.algebra.properties.UnorderedPartitionedProperty), GroupByOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator), AbstractLogicalOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator), ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator), OrderColumn (org.apache.hyracks.algebricks.core.algebra.properties.OrderColumn), AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), PhysicalRequirements (org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements), Mutable (org.apache.commons.lang3.mutable.Mutable), ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression), VariableReferenceExpression (org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression), LocalOrderProperty (org.apache.hyracks.algebricks.core.algebra.properties.LocalOrderProperty), ILogicalPlan (org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan)

Example 3 with PhysicalRequirements

Use of org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements in project asterixdb by Apache.

Class AbstractHashJoinPOperator, method getRequiredPropertiesForChildren:

@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op, IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
    // In a cost-based optimizer, we would also try to propagate the
    // parent's partitioning requirements.
    IPartitioningProperty pp1;
    IPartitioningProperty pp2;
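    // PAIRWISE: hash-partition both inputs on their join keys so that matching tuples are co-located.
    // BROADCAST: replicate the second input to every partition; the first input may remain randomly partitioned.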
    switch(partitioningType) {
        case PAIRWISE:
            pp1 = new UnorderedPartitionedProperty(new ListSet<>(keysLeftBranch), context.getComputationNodeDomain());
            pp2 = new UnorderedPartitionedProperty(new ListSet<>(keysRightBranch), context.getComputationNodeDomain());
            break;
        case BROADCAST:
            pp1 = new RandomPartitioningProperty(context.getComputationNodeDomain());
            pp2 = new BroadcastPartitioningProperty(context.getComputationNodeDomain());
            break;
        default:
            throw new IllegalStateException();
    }
    StructuralPropertiesVector[] pv = new StructuralPropertiesVector[2];
    pv[0] = OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op, new StructuralPropertiesVector(pp1, null));
    pv[1] = OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op, new StructuralPropertiesVector(pp2, null));
    IPartitioningRequirementsCoordinator prc;
    switch(kind) {
        case INNER:
            {
                prc = IPartitioningRequirementsCoordinator.EQCLASS_PARTITIONING_COORDINATOR;
                break;
            }
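        // For a left-outer join, coordinate by rewriting the requirement of the branch optimized
        // second so that its hash-partitioning columns correspond, via equivalence classes, to the
        // columns of the partitioning already delivered by the branch optimized first.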
        case LEFT_OUTER:
            {
                prc = new IPartitioningRequirementsCoordinator() {

                    @Override
                    public Pair<Boolean, IPartitioningProperty> coordinateRequirements(IPartitioningProperty requirements, IPartitioningProperty firstDeliveredPartitioning, ILogicalOperator op, IOptimizationContext context) throws AlgebricksException {
                        if (firstDeliveredPartitioning != null && requirements != null && firstDeliveredPartitioning.getPartitioningType() == requirements.getPartitioningType()) {
                            switch(requirements.getPartitioningType()) {
                                case UNORDERED_PARTITIONED:
                                    {
                                        UnorderedPartitionedProperty upp1 = (UnorderedPartitionedProperty) firstDeliveredPartitioning;
                                        Set<LogicalVariable> set1 = upp1.getColumnSet();
                                        UnorderedPartitionedProperty uppreq = (UnorderedPartitionedProperty) requirements;
                                        Set<LogicalVariable> modifuppreq = new ListSet<LogicalVariable>();
                                        Map<LogicalVariable, EquivalenceClass> eqmap = context.getEquivalenceClassMap(op);
                                        Set<LogicalVariable> covered = new ListSet<LogicalVariable>();
                                        Set<LogicalVariable> keysCurrent = uppreq.getColumnSet();
                                        List<LogicalVariable> keysFirst = (keysRightBranch.containsAll(keysCurrent)) ? keysRightBranch : keysLeftBranch;
                                        List<LogicalVariable> keysSecond = keysFirst == keysRightBranch ? keysLeftBranch : keysRightBranch;
                                        for (LogicalVariable r : uppreq.getColumnSet()) {
                                            EquivalenceClass ecSnd = eqmap.get(r);
                                            boolean found = false;
                                            int j = 0;
                                            for (LogicalVariable rvar : keysFirst) {
                                                if (rvar == r || ecSnd != null && eqmap.get(rvar) == ecSnd) {
                                                    found = true;
                                                    break;
                                                }
                                                j++;
                                            }
                                            if (!found) {
                                                throw new IllegalStateException("Did not find a variable equivalent to " + r + " among " + keysFirst);
                                            }
                                            LogicalVariable v2 = keysSecond.get(j);
                                            EquivalenceClass ecFst = eqmap.get(v2);
                                            for (LogicalVariable vset1 : set1) {
                                                if (vset1 == v2 || ecFst != null && eqmap.get(vset1) == ecFst) {
                                                    covered.add(vset1);
                                                    modifuppreq.add(r);
                                                    break;
                                                }
                                            }
                                            if (covered.equals(set1)) {
                                                break;
                                            }
                                        }
                                        if (!covered.equals(set1)) {
                                            throw new AlgebricksException("Could not modify " + requirements + " to agree with partitioning property " + firstDeliveredPartitioning + " delivered by previous input operator.");
                                        }
                                        UnorderedPartitionedProperty upp2 = new UnorderedPartitionedProperty(modifuppreq, requirements.getNodeDomain());
                                        return new Pair<>(false, upp2);
                                    }
                                case ORDERED_PARTITIONED:
                                    {
                                        throw new NotImplementedException();
                                    }
                            }
                        }
                        return new Pair<>(true, requirements);
                    }
                };
                break;
            }
        default:
            {
                throw new IllegalStateException();
            }
    }
    return new PhysicalRequirements(pv, prc);
}
Also used: StructuralPropertiesVector (org.apache.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector), LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable), UnorderedPartitionedProperty (org.apache.hyracks.algebricks.core.algebra.properties.UnorderedPartitionedProperty), BroadcastPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty), Set (java.util.Set), ListSet (org.apache.hyracks.algebricks.common.utils.ListSet), IOptimizationContext (org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext), ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator), NotImplementedException (org.apache.hyracks.algebricks.common.exceptions.NotImplementedException), AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), IPartitioningRequirementsCoordinator (org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator), IPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningProperty), PhysicalRequirements (org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements), List (java.util.List), RandomPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.RandomPartitioningProperty), Map (java.util.Map), EquivalenceClass (org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass), Pair (org.apache.hyracks.algebricks.common.utils.Pair)
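
The coordinator contract itself is small; the following pass-through implementation is a minimal sketch (illustrative only, not code from asterixdb) that mirrors the fall-through path of the LEFT_OUTER coordinator above: it leaves the requirement unchanged and, via the Boolean in the returned Pair, indicates that the partitioning property may still be expanded.

IPartitioningRequirementsCoordinator passThrough = new IPartitioningRequirementsCoordinator() {

    @Override
    public Pair<Boolean, IPartitioningProperty> coordinateRequirements(IPartitioningProperty requirements, IPartitioningProperty firstDeliveredPartitioning, ILogicalOperator op, IOptimizationContext context) throws AlgebricksException {
        // No cross-input coordination: keep the requirement exactly as computed.
        return new Pair<>(true, requirements);
    }
};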

Example 4 with PhysicalRequirements

Use of org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements in project asterixdb by Apache.

Class RangePartitionMergeExchangePOperator, method getRequiredPropertiesForChildren:

@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op, IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
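    // Require each input partition to be locally ordered on the range-partitioning columns; no
    // particular partitioning is required (the partitioning component of the vector is left null).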
    List<ILocalStructuralProperty> orderProps = new LinkedList<ILocalStructuralProperty>();
    List<OrderColumn> columns = new ArrayList<OrderColumn>();
    for (OrderColumn oc : partitioningFields) {
        LogicalVariable var = oc.getColumn();
        columns.add(new OrderColumn(var, oc.getOrder()));
    }
    orderProps.add(new LocalOrderProperty(columns));
    StructuralPropertiesVector[] r = new StructuralPropertiesVector[] { new StructuralPropertiesVector(null, orderProps) };
    return new PhysicalRequirements(r, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
Also used: LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable), StructuralPropertiesVector (org.apache.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector), OrderColumn (org.apache.hyracks.algebricks.core.algebra.properties.OrderColumn), LocalOrderProperty (org.apache.hyracks.algebricks.core.algebra.properties.LocalOrderProperty), ArrayList (java.util.ArrayList), ILocalStructuralProperty (org.apache.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty), LinkedList (java.util.LinkedList), PhysicalRequirements (org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements)

Example 5 with PhysicalRequirements

Use of org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements in project asterixdb by Apache.

Class PreSortedDistinctByPOperator, method getRequiredPropertiesForChildren:

@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op, IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
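    // Require the input sorted ascending on the distinct-by columns and, when the operator runs
    // partitioned, hash-partitioned on those same columns.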
    StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
    List<ILocalStructuralProperty> localProps = new ArrayList<ILocalStructuralProperty>();
    List<OrderColumn> orderColumns = new ArrayList<OrderColumn>();
    for (LogicalVariable column : columnList) {
        orderColumns.add(new OrderColumn(column, OrderKind.ASC));
    }
    localProps.add(new LocalOrderProperty(orderColumns));
    IPartitioningProperty pp = null;
    AbstractLogicalOperator aop = (AbstractLogicalOperator) op;
    if (aop.getExecutionMode() == ExecutionMode.PARTITIONED) {
        pp = new UnorderedPartitionedProperty(new ListSet<LogicalVariable>(columnList), context.getComputationNodeDomain());
    }
    pv[0] = new StructuralPropertiesVector(pp, localProps);
    return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
Also used: StructuralPropertiesVector (org.apache.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector), LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable), UnorderedPartitionedProperty (org.apache.hyracks.algebricks.core.algebra.properties.UnorderedPartitionedProperty), AbstractLogicalOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator), OrderColumn (org.apache.hyracks.algebricks.core.algebra.properties.OrderColumn), ArrayList (java.util.ArrayList), IPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningProperty), PhysicalRequirements (org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements), ListSet (org.apache.hyracks.algebricks.common.utils.ListSet), LocalOrderProperty (org.apache.hyracks.algebricks.core.algebra.properties.LocalOrderProperty), ILocalStructuralProperty (org.apache.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty)

Aggregations

PhysicalRequirements (org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements): 24 usages
StructuralPropertiesVector (org.apache.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector): 21 usages
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable): 13 usages
ArrayList (java.util.ArrayList): 12 usages
ILocalStructuralProperty (org.apache.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty): 8 usages
LocalOrderProperty (org.apache.hyracks.algebricks.core.algebra.properties.LocalOrderProperty): 8 usages
IPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningProperty): 7 usages
OrderColumn (org.apache.hyracks.algebricks.core.algebra.properties.OrderColumn): 7 usages
BroadcastPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty): 6 usages
IPhysicalPropertiesVector (org.apache.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector): 6 usages
UnorderedPartitionedProperty (org.apache.hyracks.algebricks.core.algebra.properties.UnorderedPartitionedProperty): 6 usages
AbstractLogicalOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator): 5 usages
ListSet (org.apache.hyracks.algebricks.common.utils.ListSet): 4 usages
ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator): 3 usages
RandomPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.RandomPartitioningProperty): 3 usages
HashMap (java.util.HashMap): 2 usages
HashSet (java.util.HashSet): 2 usages
LinkedList (java.util.LinkedList): 2 usages
Mutable (org.apache.commons.lang3.mutable.Mutable): 2 usages
AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint): 2 usages