
Example 1 with EquivalenceClass

use of org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass in project asterixdb by apache.

the class AbstractHashJoinPOperator method getRequiredPropertiesForChildren.

@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op, IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
    // In a cost-based optimizer, we would also try to propagate the
    // parent's partitioning requirements.
    IPartitioningProperty pp1;
    IPartitioningProperty pp2;
    switch(partitioningType) {
        case PAIRWISE:
            pp1 = new UnorderedPartitionedProperty(new ListSet<>(keysLeftBranch), context.getComputationNodeDomain());
            pp2 = new UnorderedPartitionedProperty(new ListSet<>(keysRightBranch), context.getComputationNodeDomain());
            break;
        case BROADCAST:
            pp1 = new RandomPartitioningProperty(context.getComputationNodeDomain());
            pp2 = new BroadcastPartitioningProperty(context.getComputationNodeDomain());
            break;
        default:
            throw new IllegalStateException();
    }
    StructuralPropertiesVector[] pv = new StructuralPropertiesVector[2];
    pv[0] = OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op, new StructuralPropertiesVector(pp1, null));
    pv[1] = OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op, new StructuralPropertiesVector(pp2, null));
    IPartitioningRequirementsCoordinator prc;
    switch(kind) {
        case INNER:
            {
                prc = IPartitioningRequirementsCoordinator.EQCLASS_PARTITIONING_COORDINATOR;
                break;
            }
        case LEFT_OUTER:
            {
                prc = new IPartitioningRequirementsCoordinator() {

                    @Override
                    public Pair<Boolean, IPartitioningProperty> coordinateRequirements(IPartitioningProperty requirements, IPartitioningProperty firstDeliveredPartitioning, ILogicalOperator op, IOptimizationContext context) throws AlgebricksException {
                        if (firstDeliveredPartitioning != null && requirements != null && firstDeliveredPartitioning.getPartitioningType() == requirements.getPartitioningType()) {
                            switch(requirements.getPartitioningType()) {
                                case UNORDERED_PARTITIONED:
                                    {
                                        UnorderedPartitionedProperty upp1 = (UnorderedPartitionedProperty) firstDeliveredPartitioning;
                                        Set<LogicalVariable> set1 = upp1.getColumnSet();
                                        UnorderedPartitionedProperty uppreq = (UnorderedPartitionedProperty) requirements;
                                        Set<LogicalVariable> modifuppreq = new ListSet<LogicalVariable>();
                                        Map<LogicalVariable, EquivalenceClass> eqmap = context.getEquivalenceClassMap(op);
                                        Set<LogicalVariable> covered = new ListSet<LogicalVariable>();
                                        Set<LogicalVariable> keysCurrent = uppreq.getColumnSet();
                                        List<LogicalVariable> keysFirst = (keysRightBranch.containsAll(keysCurrent)) ? keysRightBranch : keysLeftBranch;
                                        List<LogicalVariable> keysSecond = keysFirst == keysRightBranch ? keysLeftBranch : keysRightBranch;
                                        for (LogicalVariable r : uppreq.getColumnSet()) {
                                            EquivalenceClass ecSnd = eqmap.get(r);
                                            boolean found = false;
                                            int j = 0;
                                            for (LogicalVariable rvar : keysFirst) {
                                                if (rvar == r || ecSnd != null && eqmap.get(rvar) == ecSnd) {
                                                    found = true;
                                                    break;
                                                }
                                                j++;
                                            }
                                            if (!found) {
                                                throw new IllegalStateException("Did not find a variable equivalent to " + r + " among " + keysFirst);
                                            }
                                            LogicalVariable v2 = keysSecond.get(j);
                                            EquivalenceClass ecFst = eqmap.get(v2);
                                            for (LogicalVariable vset1 : set1) {
                                                if (vset1 == v2 || ecFst != null && eqmap.get(vset1) == ecFst) {
                                                    covered.add(vset1);
                                                    modifuppreq.add(r);
                                                    break;
                                                }
                                            }
                                            if (covered.equals(set1)) {
                                                break;
                                            }
                                        }
                                        if (!covered.equals(set1)) {
                                            throw new AlgebricksException("Could not modify " + requirements + " to agree with partitioning property " + firstDeliveredPartitioning + " delivered by previous input operator.");
                                        }
                                        UnorderedPartitionedProperty upp2 = new UnorderedPartitionedProperty(modifuppreq, requirements.getNodeDomain());
                                        return new Pair<>(false, upp2);
                                    }
                                case ORDERED_PARTITIONED:
                                    {
                                        throw new NotImplementedException();
                                    }
                            }
                        }
                        return new Pair<>(true, requirements);
                    }
                };
                break;
            }
        default:
            {
                throw new IllegalStateException();
            }
    }
    return new PhysicalRequirements(pv, prc);
}
Also used : StructuralPropertiesVector(org.apache.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector) LogicalVariable(org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) UnorderedPartitionedProperty(org.apache.hyracks.algebricks.core.algebra.properties.UnorderedPartitionedProperty) BroadcastPartitioningProperty(org.apache.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty) Set(java.util.Set) ListSet(org.apache.hyracks.algebricks.common.utils.ListSet) IOptimizationContext(org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext) ILogicalOperator(org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator) NotImplementedException(org.apache.hyracks.algebricks.common.exceptions.NotImplementedException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) IPartitioningRequirementsCoordinator(org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator) IPartitioningProperty(org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningProperty) PhysicalRequirements(org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements) ListSet(org.apache.hyracks.algebricks.common.utils.ListSet) List(java.util.List) RandomPartitioningProperty(org.apache.hyracks.algebricks.core.algebra.properties.RandomPartitioningProperty) Map(java.util.Map) EquivalenceClass(org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass) Pair(org.apache.hyracks.algebricks.common.utils.Pair)
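
For orientation: the LEFT_OUTER coordinator above rewrites the partitioning requirement for the second input so that it keeps only the key columns whose positional counterparts among the other branch's join keys were actually delivered by the first input, using the operator's equivalence-class map to treat renamed variables as the same column. The following is a minimal, self-contained sketch of that idea in plain Java collections; KeyTranslationSketch, translateKeys, and canon are illustrative names and are not part of Algebricks.

import java.util.*;

// Standalone sketch (plain collections, not the Algebricks classes) of the
// key-translation idea: map every variable to a canonical equivalence-class
// representative, locate the required key among this branch's join keys, and
// keep it only if its positional counterpart in the other branch is a column
// the first input already delivers.
final class KeyTranslationSketch {

    static String canon(Map<String, String> eq, String var) {
        // eq maps a variable to its equivalence-class id; unknown variables form their own class.
        return eq.getOrDefault(var, var);
    }

    static Set<String> translateKeys(List<String> requiredKeys, List<String> keysThisBranch,
            List<String> keysOtherBranch, Set<String> deliveredByFirstInput, Map<String, String> eq) {
        Set<String> translated = new LinkedHashSet<>();
        for (String r : requiredKeys) {
            int j = -1;
            for (int i = 0; i < keysThisBranch.size(); i++) {
                if (canon(eq, keysThisBranch.get(i)).equals(canon(eq, r))) {
                    j = i;
                    break;
                }
            }
            if (j < 0) {
                throw new IllegalStateException("Did not find a join key equivalent to " + r);
            }
            String counterpart = keysOtherBranch.get(j);
            for (String d : deliveredByFirstInput) {
                if (canon(eq, d).equals(canon(eq, counterpart))) {
                    translated.add(r);
                    break;
                }
            }
        }
        return translated;
    }

    public static void main(String[] args) {
        // Join keys ($x, $y) on this branch correspond positionally to ($a, $b) on the other
        // branch; the first input was delivered hash-partitioned on $a; $x2 is a copy of $x.
        Map<String, String> eq = Map.of("$x", "e1", "$x2", "e1");
        Set<String> keys = translateKeys(List.of("$x2", "$y"), List.of("$x", "$y"),
                List.of("$a", "$b"), Set.of("$a"), eq);
        System.out.println(keys); // [$x2] -- partitioning the second input on $x2 matches $a
    }
}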

Example 2 with EquivalenceClass

use of org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass in project asterixdb by apache.

the class EquivalenceClassUtils method addEquivalenceClassesForPrimaryIndexAccess.

/**
     * Adds equivalence classes for primary index accesses, including unnest-map for
     * primary index access and data source scan through primary index ---
     * one equivalence class between a primary key variable and a record field-access expression.
     *
     * @param operator
     *            , the primary index access operator.
     * @param indexSearchVars
     *            , the returned variables from primary index access. The last variable
     *            is the record variable.
     * @param recordType
     *            , the record type of an index payload record.
     * @param metaRecordType
     *            , the type of a meta record associated with an index payload record.
     * @param dataset
     *            , the accessed dataset.
     * @param context
     *            , the optimization context.
     * @throws AlgebricksException
     */
@SuppressWarnings("unchecked")
public static void addEquivalenceClassesForPrimaryIndexAccess(ILogicalOperator operator, List<LogicalVariable> indexSearchVars, ARecordType recordType, ARecordType metaRecordType, Dataset dataset, IOptimizationContext context) throws AlgebricksException {
    if (dataset.getDatasetDetails().getDatasetType() != DatasetType.INTERNAL) {
        return;
    }
    InternalDatasetDetails datasetDetails = (InternalDatasetDetails) dataset.getDatasetDetails();
    List<List<String>> primaryKey = datasetDetails.getPrimaryKey();
    Map<String, Integer> fieldNameToIndexMap = new HashMap<String, Integer>();
    String[] fieldNames = recordType.getFieldNames();
    for (int fieldIndex = 0; fieldIndex < fieldNames.length; ++fieldIndex) {
        fieldNameToIndexMap.put(fieldNames[fieldIndex], fieldIndex);
    }
    boolean hasMeta = dataset.hasMetaPart();
    Map<String, Integer> metaFieldNameToIndexMap = new HashMap<>();
    if (hasMeta) {
        String[] metaFieldNames = metaRecordType.getFieldNames();
        for (int metaFieldIndex = 0; metaFieldIndex < metaFieldNames.length; ++metaFieldIndex) {
            metaFieldNameToIndexMap.put(metaFieldNames[metaFieldIndex], metaFieldIndex);
        }
    }
    List<Integer> keySourceIndicators = datasetDetails.getKeySourceIndicator();
    LogicalVariable recordVar = hasMeta ? indexSearchVars.get(indexSearchVars.size() - 2) : indexSearchVars.get(indexSearchVars.size() - 1);
    LogicalVariable metaRecordVar = hasMeta ? indexSearchVars.get(indexSearchVars.size() - 1) : null;
    for (int pkIndex = 0; pkIndex < primaryKey.size(); ++pkIndex) {
        LogicalVariable referredRecordVar = recordVar;
        String pkFieldName = primaryKey.get(pkIndex).get(0);
        int source = keySourceIndicators.get(pkIndex);
        Integer fieldIndexInRecord;
        if (source == 0) {
            // The field is from the main record.
            fieldIndexInRecord = fieldNameToIndexMap.get(pkFieldName);
        } else {
            // The field is from the auxiliary meta record.
            referredRecordVar = metaRecordVar;
            fieldIndexInRecord = metaFieldNameToIndexMap.get(pkFieldName);
        }
        LogicalVariable var = indexSearchVars.get(pkIndex);
        ILogicalExpression expr = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_INDEX), new MutableObject<ILogicalExpression>(new VariableReferenceExpression(referredRecordVar)), new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(fieldIndexInRecord)))));
        EquivalenceClass equivClass = new EquivalenceClass(Collections.singletonList(var), var, Collections.singletonList(expr));
        Map<LogicalVariable, EquivalenceClass> equivalenceMap = context.getEquivalenceClassMap(operator);
        if (equivalenceMap == null) {
            equivalenceMap = new HashMap<LogicalVariable, EquivalenceClass>();
            context.putEquivalenceClassMap(operator, equivalenceMap);
        }
        equivalenceMap.put(var, equivClass);
    }
}
Also used : LogicalVariable(org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) HashMap(java.util.HashMap) InternalDatasetDetails(org.apache.asterix.metadata.entities.InternalDatasetDetails) ConstantExpression(org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression) AInt32(org.apache.asterix.om.base.AInt32) ILogicalExpression(org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression) AsterixConstantValue(org.apache.asterix.om.constants.AsterixConstantValue) VariableReferenceExpression(org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression) ArrayList(java.util.ArrayList) List(java.util.List) EquivalenceClass(org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass) ScalarFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression)
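
The equivalence class built above records that the primary-key variable returned by the index search and the expression field-access-by-index over the payload record always yield the same value, so a later rule that consults the map via getEquivalenceClassMap can treat such a field access as the already-available key variable. A minimal, self-contained sketch of that substitution idea follows; expressions are modeled as strings, and PkSubstitutionSketch and substitute are illustrative names, not AsterixDB API.

import java.util.*;

// Standalone sketch: an equivalence-class map keyed by the representative variable,
// whose value lists the expressions known to be equal to that variable.
final class PkSubstitutionSketch {

    static String substitute(String expr, Map<String, List<String>> eqClasses) {
        for (Map.Entry<String, List<String>> e : eqClasses.entrySet()) {
            if (e.getValue().contains(expr)) {
                return e.getKey(); // the field access can be replaced by the key variable
            }
        }
        return expr; // no equivalent variable known; leave the expression alone
    }

    public static void main(String[] args) {
        Map<String, List<String>> eqClasses =
                Map.of("$pk", List.of("field-access-by-index($record, 0)"));
        System.out.println(substitute("field-access-by-index($record, 0)", eqClasses)); // $pk
        System.out.println(substitute("field-access-by-index($record, 3)", eqClasses)); // unchanged
    }
}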

Example 3 with EquivalenceClass

use of org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass in project asterixdb by apache.

the class EnforceStructuralPropertiesRule method newPropertiesDiff.

private IPhysicalPropertiesVector newPropertiesDiff(AbstractLogicalOperator newChild, IPhysicalPropertiesVector required, boolean mayExpandPartitioningProperties, IOptimizationContext context) throws AlgebricksException {
    IPhysicalPropertiesVector newDelivered = newChild.getDeliveredPhysicalProperties();
    Map<LogicalVariable, EquivalenceClass> newChildEqClasses = context.getEquivalenceClassMap(newChild);
    List<FunctionalDependency> newChildFDs = context.getFDList(newChild);
    if (newChildEqClasses == null || newChildFDs == null) {
        FDsAndEquivClassesVisitor fdsVisitor = new FDsAndEquivClassesVisitor();
        newChild.accept(fdsVisitor, context);
        newChildEqClasses = context.getEquivalenceClassMap(newChild);
        newChildFDs = context.getFDList(newChild);
    }
    AlgebricksConfig.ALGEBRICKS_LOGGER.finest(">>>> Required properties for new op. " + newChild.getPhysicalOperator() + ": " + required + "\n");
    return newDelivered.getUnsatisfiedPropertiesFrom(required, mayExpandPartitioningProperties, newChildEqClasses, newChildFDs);
}
Also used : LogicalVariable(org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) FunctionalDependency(org.apache.hyracks.algebricks.core.algebra.properties.FunctionalDependency) FDsAndEquivClassesVisitor(org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.FDsAndEquivClassesVisitor) IPhysicalPropertiesVector(org.apache.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector) EquivalenceClass(org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass)
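
Here the equivalence classes and functional dependencies computed by FDsAndEquivClassesVisitor let getUnsatisfiedPropertiesFrom recognize that a delivered property already satisfies a requirement stated over different but equivalent variables, so no extra exchange or sort needs to be enforced. Below is a minimal, self-contained sketch of that normalization step for a hash-partitioning requirement; PropertyDiffSketch, normalize, and unsatisfied are illustrative names, not the Algebricks API.

import java.util.*;

// Standalone sketch: a hash-partitioning requirement counts as satisfied when the
// delivered and required column sets coincide after mapping every variable to its
// equivalence-class representative.
final class PropertyDiffSketch {

    static Set<String> normalize(Set<String> cols, Map<String, String> eq) {
        Set<String> out = new HashSet<>();
        for (String c : cols) {
            out.add(eq.getOrDefault(c, c));
        }
        return out;
    }

    // Returns null when the delivered partitioning covers the requirement, otherwise the requirement.
    static Set<String> unsatisfied(Set<String> required, Set<String> delivered, Map<String, String> eq) {
        return normalize(delivered, eq).equals(normalize(required, eq)) ? null : required;
    }

    public static void main(String[] args) {
        Map<String, String> eq = Map.of("$x", "e1", "$y", "e1"); // $x and $y are equivalent
        System.out.println(unsatisfied(Set.of("$y"), Set.of("$x"), eq)); // null: nothing to enforce
        System.out.println(unsatisfied(Set.of("$z"), Set.of("$x"), eq)); // [$z]: an exchange is needed
    }
}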

Example 4 with EquivalenceClass

use of org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass in project asterixdb by apache.

the class FDsAndEquivClassesVisitor method visitDataScanOperator.

@Override
public Void visitDataScanOperator(DataSourceScanOperator op, IOptimizationContext ctx) throws AlgebricksException {
    ILogicalOperator inp1 = op.getInputs().get(0).getValue();
    Map<LogicalVariable, EquivalenceClass> eqClasses = getOrCreateEqClasses(op, ctx);
    Map<LogicalVariable, EquivalenceClass> propagatedEqClasses = getOrComputeEqClasses(inp1, ctx);
    eqClasses.putAll(propagatedEqClasses);
    ctx.putEquivalenceClassMap(op, eqClasses);
    List<FunctionalDependency> fds = new ArrayList<FunctionalDependency>(getOrComputeFDs(inp1, ctx));
    ctx.putFDList(op, fds);
    op.getDataSource().computeFDs(op.getVariables(), fds);
    return null;
}
Also used : LogicalVariable(org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) ILogicalOperator(org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator) FunctionalDependency(org.apache.hyracks.algebricks.core.algebra.properties.FunctionalDependency) ArrayList(java.util.ArrayList) EquivalenceClass(org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass)
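
After propagating the input's equivalence classes and functional dependencies, the visitor lets the data source append its own FDs through computeFDs; for an internal dataset this is typically the dependency of every scan variable on the primary-key variables. A minimal, self-contained sketch of that kind of FD follows; ScanFdSketch and primaryKeyFd are illustrative names, and the real logic lives in the data source's computeFDs implementation.

import java.util.*;

// Standalone sketch of the functional dependency a primary-key data source scan
// typically contributes: the leading primary-key variables determine every
// variable produced by the scan.
final class ScanFdSketch {

    record Fd(List<String> head, List<String> tail) { }

    static Fd primaryKeyFd(List<String> scanVars, int numPkVars) {
        List<String> pk = new ArrayList<>(scanVars.subList(0, numPkVars));
        return new Fd(pk, new ArrayList<>(scanVars));
    }

    public static void main(String[] args) {
        // The scan produces $pk and $record; $pk is the primary key.
        System.out.println(primaryKeyFd(List.of("$pk", "$record"), 1));
        // Fd[head=[$pk], tail=[$pk, $record]]
    }
}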

Example 5 with EquivalenceClass

use of org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass in project asterixdb by apache.

the class FDsAndEquivClassesVisitor method visitLeftOuterUnnestMapOperator.

@Override
public Void visitLeftOuterUnnestMapOperator(LeftOuterUnnestMapOperator op, IOptimizationContext ctx) throws AlgebricksException {
    // Unlike the unnest-map operator, we always propagate the input's properties
    // because propagateInput is always true.
    Map<LogicalVariable, EquivalenceClass> equivalenceClasses = new HashMap<LogicalVariable, EquivalenceClass>();
    List<FunctionalDependency> functionalDependencies = new ArrayList<FunctionalDependency>();
    ctx.putEquivalenceClassMap(op, equivalenceClasses);
    ctx.putFDList(op, functionalDependencies);
    ILogicalOperator childOp = op.getInputs().get(0).getValue();
    functionalDependencies.addAll(getOrComputeFDs(childOp, ctx));
    equivalenceClasses.putAll(getOrComputeEqClasses(childOp, ctx));
    // As in the left-outer join case, add a functional dependency from the used
    // (left-side) variables to the variables produced by this operator.
    List<LogicalVariable> leftSideVars = new ArrayList<LogicalVariable>();
    List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
    VariableUtilities.getUsedVariables(op, leftSideVars);
    VariableUtilities.getProducedVariables(op, producedVars);
    functionalDependencies.add(new FunctionalDependency(leftSideVars, producedVars));
    return null;
}
Also used : LogicalVariable(org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) HashMap(java.util.HashMap) FunctionalDependency(org.apache.hyracks.algebricks.core.algebra.properties.FunctionalDependency) ILogicalOperator(org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator) ArrayList(java.util.ArrayList) EquivalenceClass(org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass)

Aggregations

EquivalenceClass (org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass): 21 usages
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable): 21 usages
FunctionalDependency (org.apache.hyracks.algebricks.core.algebra.properties.FunctionalDependency): 16 usages
ArrayList (java.util.ArrayList): 14 usages
ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator): 14 usages
ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression): 11 usages
HashMap (java.util.HashMap): 7 usages
LinkedList (java.util.LinkedList): 5 usages
VariableReferenceExpression (org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression): 4 usages
HashSet (java.util.HashSet): 3 usages
Mutable (org.apache.commons.lang3.mutable.Mutable): 3 usages
ListSet (org.apache.hyracks.algebricks.common.utils.ListSet): 3 usages
List (java.util.List): 2 usages
AInt32 (org.apache.asterix.om.base.AInt32): 2 usages
AsterixConstantValue (org.apache.asterix.om.constants.AsterixConstantValue): 2 usages
Pair (org.apache.hyracks.algebricks.common.utils.Pair): 2 usages
ILogicalPlan (org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan): 2 usages
ConstantExpression (org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression): 2 usages
ScalarFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression): 2 usages
ILocalStructuralProperty (org.apache.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty): 2 usages