Use of org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass in project asterixdb by apache.
The class AbstractHashJoinPOperator, method getRequiredPropertiesForChildren.
@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
        IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
    // In a cost-based optimizer, we would also try to propagate the
    // parent's partitioning requirements.
    IPartitioningProperty pp1;
    IPartitioningProperty pp2;
    switch (partitioningType) {
        case PAIRWISE:
            pp1 = new UnorderedPartitionedProperty(new ListSet<>(keysLeftBranch),
                    context.getComputationNodeDomain());
            pp2 = new UnorderedPartitionedProperty(new ListSet<>(keysRightBranch),
                    context.getComputationNodeDomain());
            break;
        case BROADCAST:
            pp1 = new RandomPartitioningProperty(context.getComputationNodeDomain());
            pp2 = new BroadcastPartitioningProperty(context.getComputationNodeDomain());
            break;
        default:
            throw new IllegalStateException();
    }
    StructuralPropertiesVector[] pv = new StructuralPropertiesVector[2];
    pv[0] = OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op,
            new StructuralPropertiesVector(pp1, null));
    pv[1] = OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op,
            new StructuralPropertiesVector(pp2, null));
    IPartitioningRequirementsCoordinator prc;
    switch (kind) {
        case INNER: {
            prc = IPartitioningRequirementsCoordinator.EQCLASS_PARTITIONING_COORDINATOR;
            break;
        }
        case LEFT_OUTER: {
            prc = new IPartitioningRequirementsCoordinator() {

                @Override
                public Pair<Boolean, IPartitioningProperty> coordinateRequirements(
                        IPartitioningProperty requirements, IPartitioningProperty firstDeliveredPartitioning,
                        ILogicalOperator op, IOptimizationContext context) throws AlgebricksException {
                    if (firstDeliveredPartitioning != null && requirements != null
                            && firstDeliveredPartitioning.getPartitioningType() == requirements
                                    .getPartitioningType()) {
                        switch (requirements.getPartitioningType()) {
                            case UNORDERED_PARTITIONED: {
                                UnorderedPartitionedProperty upp1 =
                                        (UnorderedPartitionedProperty) firstDeliveredPartitioning;
                                Set<LogicalVariable> set1 = upp1.getColumnSet();
                                UnorderedPartitionedProperty uppreq = (UnorderedPartitionedProperty) requirements;
                                Set<LogicalVariable> modifuppreq = new ListSet<LogicalVariable>();
                                Map<LogicalVariable, EquivalenceClass> eqmap = context.getEquivalenceClassMap(op);
                                Set<LogicalVariable> covered = new ListSet<LogicalVariable>();
                                Set<LogicalVariable> keysCurrent = uppreq.getColumnSet();
                                List<LogicalVariable> keysFirst =
                                        keysRightBranch.containsAll(keysCurrent) ? keysRightBranch
                                                : keysLeftBranch;
                                List<LogicalVariable> keysSecond =
                                        keysFirst == keysRightBranch ? keysLeftBranch : keysRightBranch;
                                for (LogicalVariable r : uppreq.getColumnSet()) {
                                    EquivalenceClass ecSnd = eqmap.get(r);
                                    boolean found = false;
                                    int j = 0;
                                    for (LogicalVariable rvar : keysFirst) {
                                        if (rvar == r || (ecSnd != null && eqmap.get(rvar) == ecSnd)) {
                                            found = true;
                                            break;
                                        }
                                        j++;
                                    }
                                    if (!found) {
                                        throw new IllegalStateException("Did not find a variable equivalent to "
                                                + r + " among " + keysFirst);
                                    }
                                    LogicalVariable v2 = keysSecond.get(j);
                                    EquivalenceClass ecFst = eqmap.get(v2);
                                    for (LogicalVariable vset1 : set1) {
                                        if (vset1 == v2 || (ecFst != null && eqmap.get(vset1) == ecFst)) {
                                            covered.add(vset1);
                                            modifuppreq.add(r);
                                            break;
                                        }
                                    }
                                    if (covered.equals(set1)) {
                                        break;
                                    }
                                }
                                if (!covered.equals(set1)) {
                                    throw new AlgebricksException("Could not modify " + requirements
                                            + " to agree with partitioning property " + firstDeliveredPartitioning
                                            + " delivered by previous input operator.");
                                }
                                UnorderedPartitionedProperty upp2 =
                                        new UnorderedPartitionedProperty(modifuppreq, requirements.getNodeDomain());
                                return new Pair<>(false, upp2);
                            }
                            case ORDERED_PARTITIONED: {
                                throw new NotImplementedException();
                            }
                        }
                    }
                    return new Pair<>(true, requirements);
                }
            };
            break;
        }
        default: {
            throw new IllegalStateException();
        }
    }
    return new PhysicalRequirements(pv, prc);
}
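The coordinator built here is consulted later, once one branch's delivered partitioning is known, to decide whether the other branch's requirement must be rewritten. A minimal sketch of that handshake, assuming the coordinator prc and the key lists from above, plus illustrative names domain, joinOp, and optCtx, are in scope (this call site is a sketch, not code copied from the optimizer):

// Hypothetical call site: reconcile the right branch's requirement with the
// partitioning the left branch already delivers.
IPartitioningProperty requiredRight =
        new UnorderedPartitionedProperty(new ListSet<>(keysRightBranch), domain);
IPartitioningProperty deliveredLeft =
        new UnorderedPartitionedProperty(new ListSet<>(keysLeftBranch), domain);
Pair<Boolean, IPartitioningProperty> coordinated =
        prc.coordinateRequirements(requiredRight, deliveredLeft, joinOp, optCtx);
// coordinated.second is the (possibly rewritten) requirement for the second
// branch; coordinated.first presumably feeds the mayExpandPartitioningProperties
// flag seen in newPropertiesDiff further down this page.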
Use of org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass in project asterixdb by apache.
The class EquivalenceClassUtils, method addEquivalenceClassesForPrimaryIndexAccess.
/**
 * Adds equivalence classes for primary index accesses, including unnest-map for
 * primary index access and data source scan through primary index ---
 * one equivalence class between a primary key variable and a record field-access expression.
 *
 * @param operator
 *            the primary index access operator.
 * @param indexSearchVars
 *            the returned variables from primary index access. The last variable
 *            is the record variable.
 * @param recordType
 *            the record type of an index payload record.
 * @param metaRecordType
 *            the type of a meta record associated with an index payload record.
 * @param dataset
 *            the accessed dataset.
 * @param context
 *            the optimization context.
 * @throws AlgebricksException
 */
@SuppressWarnings("unchecked")
public static void addEquivalenceClassesForPrimaryIndexAccess(ILogicalOperator operator,
        List<LogicalVariable> indexSearchVars, ARecordType recordType, ARecordType metaRecordType,
        Dataset dataset, IOptimizationContext context) throws AlgebricksException {
    if (dataset.getDatasetDetails().getDatasetType() != DatasetType.INTERNAL) {
        return;
    }
    InternalDatasetDetails datasetDetails = (InternalDatasetDetails) dataset.getDatasetDetails();
    List<List<String>> primaryKey = datasetDetails.getPrimaryKey();
    Map<String, Integer> fieldNameToIndexMap = new HashMap<String, Integer>();
    String[] fieldNames = recordType.getFieldNames();
    for (int fieldIndex = 0; fieldIndex < fieldNames.length; ++fieldIndex) {
        fieldNameToIndexMap.put(fieldNames[fieldIndex], fieldIndex);
    }
    boolean hasMeta = dataset.hasMetaPart();
    Map<String, Integer> metaFieldNameToIndexMap = new HashMap<>();
    if (hasMeta) {
        String[] metaFieldNames = metaRecordType.getFieldNames();
        for (int metaFieldIndex = 0; metaFieldIndex < metaFieldNames.length; ++metaFieldIndex) {
            metaFieldNameToIndexMap.put(metaFieldNames[metaFieldIndex], metaFieldIndex);
        }
    }
    List<Integer> keySourceIndicators = datasetDetails.getKeySourceIndicator();
    LogicalVariable recordVar = hasMeta ? indexSearchVars.get(indexSearchVars.size() - 2)
            : indexSearchVars.get(indexSearchVars.size() - 1);
    LogicalVariable metaRecordVar = hasMeta ? indexSearchVars.get(indexSearchVars.size() - 1) : null;
    for (int pkIndex = 0; pkIndex < primaryKey.size(); ++pkIndex) {
        LogicalVariable referredRecordVar = recordVar;
        String pkFieldName = primaryKey.get(pkIndex).get(0);
        int source = keySourceIndicators.get(pkIndex);
        Integer fieldIndexInRecord;
        if (source == 0) {
            // The field is from the main record.
            fieldIndexInRecord = fieldNameToIndexMap.get(pkFieldName);
        } else {
            // The field is from the auxiliary meta record.
            referredRecordVar = metaRecordVar;
            fieldIndexInRecord = metaFieldNameToIndexMap.get(pkFieldName);
        }
        LogicalVariable var = indexSearchVars.get(pkIndex);
        ILogicalExpression expr = new ScalarFunctionCallExpression(
                FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_INDEX),
                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(referredRecordVar)),
                new MutableObject<ILogicalExpression>(
                        new ConstantExpression(new AsterixConstantValue(new AInt32(fieldIndexInRecord)))));
        EquivalenceClass equivClass =
                new EquivalenceClass(Collections.singletonList(var), var, Collections.singletonList(expr));
        Map<LogicalVariable, EquivalenceClass> equivalenceMap = context.getEquivalenceClassMap(operator);
        if (equivalenceMap == null) {
            equivalenceMap = new HashMap<LogicalVariable, EquivalenceClass>();
            context.putEquivalenceClassMap(operator, equivalenceMap);
        }
        equivalenceMap.put(var, equivClass);
    }
}
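The net effect is that each primary-key variable heads an equivalence class whose expression member is a field-access-by-index over the record (or meta record) variable, so later rewrites can treat an access to that field and the key variable as interchangeable. A hypothetical lookup of what the method registers (pkVar here stands for one of the returned key variables and is illustrative):

// Hypothetical lookup of a class registered above.
Map<LogicalVariable, EquivalenceClass> eqMap = context.getEquivalenceClassMap(operator);
EquivalenceClass pkClass = eqMap.get(pkVar);
// pkClass pairs pkVar with field-access-by-index(<record var>, <field index>),
// which is what lets the structural-properties machinery recognize the field
// access as equivalent to the primary key.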
Use of org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass in project asterixdb by apache.
The class EnforceStructuralPropertiesRule, method newPropertiesDiff.
private IPhysicalPropertiesVector newPropertiesDiff(AbstractLogicalOperator newChild,
        IPhysicalPropertiesVector required, boolean mayExpandPartitioningProperties,
        IOptimizationContext context) throws AlgebricksException {
    IPhysicalPropertiesVector newDelivered = newChild.getDeliveredPhysicalProperties();
    Map<LogicalVariable, EquivalenceClass> newChildEqClasses = context.getEquivalenceClassMap(newChild);
    List<FunctionalDependency> newChildFDs = context.getFDList(newChild);
    if (newChildEqClasses == null || newChildFDs == null) {
        FDsAndEquivClassesVisitor fdsVisitor = new FDsAndEquivClassesVisitor();
        newChild.accept(fdsVisitor, context);
        newChildEqClasses = context.getEquivalenceClassMap(newChild);
        newChildFDs = context.getFDList(newChild);
    }
    AlgebricksConfig.ALGEBRICKS_LOGGER.finest(">>>> Required properties for new op. "
            + newChild.getPhysicalOperator() + ": " + required + "\n");
    return newDelivered.getUnsatisfiedPropertiesFrom(required, mayExpandPartitioningProperties,
            newChildEqClasses, newChildFDs);
}
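A simplified sketch of how the caller typically consumes the diff (not the rule's exact control flow): a non-null vector means some required property is still unsatisfied by the child and an enforcer must be inserted between the two operators.

// Simplified sketch of the calling logic inside the physical-optimization pass.
IPhysicalPropertiesVector diff =
        newPropertiesDiff(newChild, required, mayExpandPartitioningProperties, context);
if (diff != null) {
    // e.g. insert a hash-partitioning exchange for an unmet UNORDERED_PARTITIONED
    // requirement, or a local sort for an unmet ordering property.
}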
Use of org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass in project asterixdb by apache.
The class FDsAndEquivClassesVisitor, method visitDataScanOperator.
@Override
public Void visitDataScanOperator(DataSourceScanOperator op, IOptimizationContext ctx)
        throws AlgebricksException {
    ILogicalOperator inp1 = op.getInputs().get(0).getValue();
    Map<LogicalVariable, EquivalenceClass> eqClasses = getOrCreateEqClasses(op, ctx);
    Map<LogicalVariable, EquivalenceClass> propagatedEqClasses = getOrComputeEqClasses(inp1, ctx);
    eqClasses.putAll(propagatedEqClasses);
    ctx.putEquivalenceClassMap(op, eqClasses);
    List<FunctionalDependency> fds = new ArrayList<FunctionalDependency>(getOrComputeFDs(inp1, ctx));
    ctx.putFDList(op, fds);
    op.getDataSource().computeFDs(op.getVariables(), fds);
    return null;
}
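The last line delegates to the data source, which contributes the dependencies the scan guarantees by construction. For an internal dataset, where the leading scan variables are the primary key and the last is the record, a plausible computeFDs implementation adds a dependency from the key variables to all scan variables, roughly along these lines (a sketch, not the exact AsterixDB code):

// Sketch of an IDataSource#computeFDs implementation: the primary key
// functionally determines every variable produced by the scan.
public void computeFDs(List<LogicalVariable> scanVariables, List<FunctionalDependency> fdList) {
    if (scanVariables.size() > 1) {
        List<LogicalVariable> head =
                new ArrayList<>(scanVariables.subList(0, scanVariables.size() - 1));
        List<LogicalVariable> tail = new ArrayList<>(scanVariables);
        fdList.add(new FunctionalDependency(head, tail));
    }
}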
Use of org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass in project asterixdb by apache.
The class FDsAndEquivClassesVisitor, method visitLeftOuterUnnestMapOperator.
@Override
public Void visitLeftOuterUnnestMapOperator(LeftOuterUnnestMapOperator op, IOptimizationContext ctx)
        throws AlgebricksException {
    // Unlike the unnest-map operator, we propagate all inputs since
    // propagateInput is always true.
    Map<LogicalVariable, EquivalenceClass> equivalenceClasses =
            new HashMap<LogicalVariable, EquivalenceClass>();
    List<FunctionalDependency> functionalDependencies = new ArrayList<FunctionalDependency>();
    ctx.putEquivalenceClassMap(op, equivalenceClasses);
    ctx.putFDList(op, functionalDependencies);
    ILogicalOperator childOp = op.getInputs().get(0).getValue();
    functionalDependencies.addAll(getOrComputeFDs(childOp, ctx));
    equivalenceClasses.putAll(getOrComputeEqClasses(childOp, ctx));
    // As in the left-outer-join case, we add a functional dependency from the
    // used (left-side) variables to the variables produced by this operator.
    List<LogicalVariable> leftSideVars = new ArrayList<LogicalVariable>();
    List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
    VariableUtilities.getUsedVariables(op, leftSideVars);
    VariableUtilities.getProducedVariables(op, producedVars);
    functionalDependencies.add(new FunctionalDependency(leftSideVars, producedVars));
    return null;
}
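Both visitor methods above rely on getOrComputeEqClasses and getOrComputeFDs to lazily derive a child's properties; those helpers are not shown on this page. They presumably follow the same compute-on-miss pattern that newPropertiesDiff uses, something like this (inferred, not copied from the visitor):

// Sketch of the lazy-computation helper: visit the operator only if its
// equivalence-class map has not been computed yet.
private Map<LogicalVariable, EquivalenceClass> getOrComputeEqClasses(ILogicalOperator op,
        IOptimizationContext ctx) throws AlgebricksException {
    Map<LogicalVariable, EquivalenceClass> eqClasses = ctx.getEquivalenceClassMap(op);
    if (eqClasses == null) {
        op.accept(this, ctx);
        eqClasses = ctx.getEquivalenceClassMap(op);
    }
    return eqClasses;
}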