Use of org.apache.hyracks.algebricks.core.algebra.base.IPhysicalOperator in project asterixdb by apache.
Class AbstractPreclusteredGroupByPOperator, method getRequiredPropertiesForChildren.
@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
        IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
    GroupByOperator gby = (GroupByOperator) op;
    StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
    if (gby.isGroupAll() && gby.isGlobal()) {
        if (op.getExecutionMode() == ExecutionMode.UNPARTITIONED) {
            pv[0] = new StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED, null);
            return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
        } else {
            return emptyUnaryRequirements();
        }
    }
    List<ILocalStructuralProperty> localProps = new ArrayList<>();
    Set<LogicalVariable> gbvars = new ListSet<>(columnList);
    LocalGroupingProperty groupProp = new LocalGroupingProperty(gbvars, new ArrayList<>(columnList));
    boolean goon = true;
    for (ILogicalPlan p : gby.getNestedPlans()) {
        // Scan the nested plans: if an inner pre-clustered group-by feeds the aggregate,
        // inherit its group keys as a secondary grouping/order preference.
        for (Mutable<ILogicalOperator> r : p.getRoots()) {
            AbstractLogicalOperator op1 = (AbstractLogicalOperator) r.getValue();
            if (op1.getOperatorTag() == LogicalOperatorTag.AGGREGATE) {
                AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
                IPhysicalOperator pop2 = op2.getPhysicalOperator();
                if (pop2 instanceof AbstractPreclusteredGroupByPOperator) {
                    List<LogicalVariable> gbyColumns =
                            ((AbstractPreclusteredGroupByPOperator) pop2).getGbyColumns();
                    List<LogicalVariable> sndOrder = new ArrayList<>();
                    sndOrder.addAll(gbyColumns);
                    Set<LogicalVariable> freeVars = new HashSet<>();
                    try {
                        OperatorPropertiesUtil.getFreeVariablesInSelfOrDesc(op2, freeVars);
                    } catch (AlgebricksException e) {
                        throw new IllegalStateException(e);
                    }
                    // Only consider group key variables defined outside the outer-most group-by operator.
                    sndOrder.retainAll(freeVars);
                    groupProp.getColumnSet().addAll(sndOrder);
                    groupProp.getPreferredOrderEnforcer().addAll(sndOrder);
                    goon = false;
                    break;
                }
            }
        }
        if (!goon) {
            break;
        }
    }
    localProps.add(groupProp);
    if (reqdByParent != null) {
        // Propagate parent requirements: if the parent's required orders, rewritten in terms of the
        // group-by input variables, already imply the grouping requirement, require them instead.
        List<ILocalStructuralProperty> lpPar = reqdByParent.getLocalProperties();
        if (lpPar != null) {
            boolean allOk = true;
            List<ILocalStructuralProperty> props = new ArrayList<>(lpPar.size());
            for (ILocalStructuralProperty prop : lpPar) {
                if (prop.getPropertyType() != PropertyType.LOCAL_ORDER_PROPERTY) {
                    allOk = false;
                    break;
                }
                LocalOrderProperty lop = (LocalOrderProperty) prop;
                List<OrderColumn> orderColumns = new ArrayList<>();
                List<OrderColumn> ords = lop.getOrderColumns();
                for (OrderColumn ord : ords) {
                    Pair<LogicalVariable, Mutable<ILogicalExpression>> p =
                            getGbyPairByRhsVar(gby, ord.getColumn());
                    if (p == null) {
                        p = getDecorPairByRhsVar(gby, ord.getColumn());
                        if (p == null) {
                            allOk = false;
                            break;
                        }
                    }
                    ILogicalExpression e = p.second.getValue();
                    if (e.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
                        throw new IllegalStateException(
                                "Right hand side of group-by assignment should have been normalized to a variable reference.");
                    }
                    LogicalVariable v = ((VariableReferenceExpression) e).getVariableReference();
                    orderColumns.add(new OrderColumn(v, ord.getOrder()));
                }
                props.add(new LocalOrderProperty(orderColumns));
            }
            List<FunctionalDependency> fdList = new ArrayList<>();
            for (Pair<LogicalVariable, Mutable<ILogicalExpression>> decorPair : gby.getDecorList()) {
                List<LogicalVariable> hd = gby.getGbyVarList();
                List<LogicalVariable> tl = new ArrayList<>();
                tl.add(((VariableReferenceExpression) decorPair.second.getValue()).getVariableReference());
                fdList.add(new FunctionalDependency(hd, tl));
            }
            if (allOk && PropertiesUtil.matchLocalProperties(localProps, props, new HashMap<>(), fdList)) {
                localProps = props;
            }
        }
    }
    // Require hash partitioning on the group-by columns when running partitioned.
    IPartitioningProperty pp = null;
    AbstractLogicalOperator aop = (AbstractLogicalOperator) op;
    if (aop.getExecutionMode() == ExecutionMode.PARTITIONED) {
        pp = new UnorderedPartitionedProperty(new ListSet<>(columnList), context.getComputationNodeDomain());
    }
    pv[0] = new StructuralPropertiesVector(pp, localProps);
    return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
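Stripped of the nested-plan scan and the parent-requirement propagation, the core of this method is a common Algebricks pattern: build the local and partitioning requirements for the single child and wrap them in a PhysicalRequirements. Below is a minimal sketch of that skeleton, not the actual implementation; it assumes the surrounding class fields (such as columnList) and only reuses calls that appear in the snippet above.

// Illustrative skeleton (hypothetical method): require the child to be locally grouped
// on the group-by columns, and hash-partitioned on them when running PARTITIONED.
public PhysicalRequirements requiredPropertiesSketch(ILogicalOperator op, IOptimizationContext context) {
    List<ILocalStructuralProperty> localProps = new ArrayList<>();
    localProps.add(new LocalGroupingProperty(new ListSet<>(columnList), new ArrayList<>(columnList)));
    IPartitioningProperty pp = null;
    if (((AbstractLogicalOperator) op).getExecutionMode() == ExecutionMode.PARTITIONED) {
        pp = new UnorderedPartitionedProperty(new ListSet<>(columnList), context.getComputationNodeDomain());
    }
    StructuralPropertiesVector[] pv = { new StructuralPropertiesVector(pp, localProps) };
    return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}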
Use of org.apache.hyracks.algebricks.core.algebra.base.IPhysicalOperator in project asterixdb by apache.
Class PlanPrettyPrinter, method printOperator.
public static void printOperator(AbstractLogicalOperator op, LogicalOperatorPrettyPrintVisitor pvisitor,
        int indent) throws AlgebricksException {
    final AlgebricksAppendable out = pvisitor.get();
    op.accept(pvisitor, indent);
    IPhysicalOperator pOp = op.getPhysicalOperator();
    if (pOp != null) {
        out.append("\n");
        pad(out, indent);
        appendln(out, "-- " + pOp.toString() + " |" + op.getExecutionMode() + "|");
    } else {
        appendln(out, " -- |" + op.getExecutionMode() + "|");
    }
    for (Mutable<ILogicalOperator> i : op.getInputs()) {
        printOperator((AbstractLogicalOperator) i.getValue(), pvisitor, indent + 2);
    }
}
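A hypothetical driver for the method above (not part of the snippet): printing a whole logical plan simply means invoking printOperator on each plan root at indent 0, with a visitor that has already been constructed elsewhere.

// Hypothetical helper; assumes a LogicalOperatorPrettyPrintVisitor is available.
static void printPlanSketch(ILogicalPlan plan, LogicalOperatorPrettyPrintVisitor pvisitor)
        throws AlgebricksException {
    for (Mutable<ILogicalOperator> root : plan.getRoots()) {
        // Each root is printed recursively; inputs are indented two spaces deeper per level.
        printOperator((AbstractLogicalOperator) root.getValue(), pvisitor, 0);
    }
}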
Use of org.apache.hyracks.algebricks.core.algebra.base.IPhysicalOperator in project asterixdb by apache.
Class PlanPrettyPrinter, method printPhysicalOperator.
private static void printPhysicalOperator(AbstractLogicalOperator op, int indent, AlgebricksAppendable out)
        throws AlgebricksException {
    IPhysicalOperator pOp = op.getPhysicalOperator();
    pad(out, indent);
    appendln(out, "-- " + pOp.toString() + " |" + op.getExecutionMode() + "|");
    if (op.hasNestedPlans()) {
        AbstractOperatorWithNestedPlans opNest = (AbstractOperatorWithNestedPlans) op;
        for (ILogicalPlan p : opNest.getNestedPlans()) {
            pad(out, indent + 8);
            appendln(out, "{");
            printPhysicalOps(p, out, indent + 10);
            pad(out, indent + 8);
            appendln(out, "}");
        }
    }
    for (Mutable<ILogicalOperator> i : op.getInputs()) {
        printPhysicalOperator((AbstractLogicalOperator) i.getValue(), indent + 2, out);
    }
}
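The printPhysicalOps helper called above for each nested plan is not shown in this listing. Its assumed shape, a sketch only and not the verbatim implementation, is just a loop over the nested plan's roots:

// Sketch of what a printPhysicalOps-style helper would do (assumed shape).
static void printPhysicalOpsSketch(ILogicalPlan plan, AlgebricksAppendable out, int indent)
        throws AlgebricksException {
    for (Mutable<ILogicalOperator> root : plan.getRoots()) {
        printPhysicalOperator((AbstractLogicalOperator) root.getValue(), indent, out);
    }
}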
Use of org.apache.hyracks.algebricks.core.algebra.base.IPhysicalOperator in project asterixdb by apache.
Class RemoveUnnecessarySortMergeExchange, method rewritePost.
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
        throws AlgebricksException {
    AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
    if (op1.getPhysicalOperator() == null
            || (op1.getPhysicalOperator().getOperatorTag() != PhysicalOperatorTag.HASH_PARTITION_EXCHANGE
                    && op1.getPhysicalOperator().getOperatorTag() != PhysicalOperatorTag.HASH_PARTITION_MERGE_EXCHANGE)) {
        return false;
    }
    Mutable<ILogicalOperator> currentOpRef = op1.getInputs().get(0);
    AbstractLogicalOperator currentOp = (AbstractLogicalOperator) currentOpRef.getValue();
    // Walk down the pipeline to find a qualified SortMergeExchange to eliminate.
    while (currentOp != null) {
        IPhysicalOperator physicalOp = currentOp.getPhysicalOperator();
        if (physicalOp == null) {
            return false;
        } else if (physicalOp.getOperatorTag() == PhysicalOperatorTag.SORT_MERGE_EXCHANGE) {
            break;
        } else if (!currentOp.isMap() || currentOp.getOperatorTag() == LogicalOperatorTag.UNNEST
                || currentOp.getOperatorTag() == LogicalOperatorTag.LIMIT) {
            // Stop at potentially order-sensitive operators (e.g., UNNEST, LIMIT); ideally,
            // order sensitivity would be exposed as a property of the logical operator itself.
            return false;
        } else if (currentOp.getInputs().size() == 1) {
            currentOpRef = currentOp.getInputs().get(0);
            currentOp = (AbstractLogicalOperator) currentOpRef.getValue();
        } else {
            currentOp = null;
        }
    }
    if (currentOp == null) {
        // There is no such qualified SortMergeExchange.
        return false;
    }
    if (op1.getPhysicalOperator().getOperatorTag() == PhysicalOperatorTag.HASH_PARTITION_MERGE_EXCHANGE) {
        // If op1 is a hash_partition_merge_exchange, the sort_merge_exchange can simply be removed.
        currentOpRef.setValue(currentOp.getInputs().get(0).getValue());
        op1.computeDeliveredPhysicalProperties(context);
        return true;
    }
    // Check whether the sort columns of the SortMergeExchange are still available at op1.
    // If yes, upgrade op1 to a HashPartitionMergeExchange; otherwise, keep the plain HashPartitionExchange.
    SortMergeExchangePOperator sme = (SortMergeExchangePOperator) currentOp.getPhysicalOperator();
    HashPartitionExchangePOperator hpe = (HashPartitionExchangePOperator) op1.getPhysicalOperator();
    Set<LogicalVariable> liveVars = new HashSet<LogicalVariable>();
    VariableUtilities.getLiveVariables(op1, liveVars);
    boolean usingHashMergeExchange = true;
    for (OrderColumn oc : sme.getSortColumns()) {
        if (!liveVars.contains(oc.getColumn())) {
            usingHashMergeExchange = false;
        }
    }
    if (usingHashMergeExchange) {
        // Carry the sort columns from the SortMergeExchange into a new HashPartitionMergeExchange.
        List<OrderColumn> ocList = new ArrayList<OrderColumn>();
        for (OrderColumn oc : sme.getSortColumns()) {
            ocList.add(oc);
        }
        HashPartitionMergeExchangePOperator hpme =
                new HashPartitionMergeExchangePOperator(ocList, hpe.getHashFields(), hpe.getDomain());
        op1.setPhysicalOperator(hpme);
    }
    // Remove the SortMergeExchange op.
    currentOpRef.setValue(currentOp.getInputs().get(0).getValue());
    // Re-compute delivered properties at op1.
    op1.computeDeliveredPhysicalProperties(context);
    return true;
}
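The live-variable check above can be read as a single predicate: the SortMergeExchange may be folded into a HashPartitionMergeExchange only if every sort column survives to op1. A restatement as a hypothetical helper, using only calls that appear in the rule:

// Hypothetical helper restating the check above.
private static boolean allSortColumnsLive(SortMergeExchangePOperator sme, Set<LogicalVariable> liveVars) {
    for (OrderColumn oc : sme.getSortColumns()) {
        if (!liveVars.contains(oc.getColumn())) {
            // A sort column was projected away, so merging by that order is no longer possible.
            return false;
        }
    }
    return true;
}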
Use of org.apache.hyracks.algebricks.core.algebra.base.IPhysicalOperator in project asterixdb by apache.
Class IsolateHyracksOperatorsRule, method rewritePost.
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
        throws AlgebricksException {
    AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
    IPhysicalOperator pt = op.getPhysicalOperator();
    if (pt == null || op.getOperatorTag() == LogicalOperatorTag.EXCHANGE) {
        return false;
    }
    if (!pt.isMicroOperator()) {
        return testIfExchangeBelow(opRef, context);
    } else {
        return testIfExchangeAbove(opRef, context);
    }
}
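The guard at the top of this rule is the usual pattern for physical-level rewrites: bail out while physical operators have not yet been assigned, then branch on the physical operator's nature. A minimal restatement as a hypothetical predicate, not part of the rule itself:

// Hypothetical predicate: a rewrite that inspects physical operators can only fire once
// op.getPhysicalOperator() is non-null; EXCHANGE operators are skipped here because this
// rule's purpose is deciding where exchanges go.
private static boolean isRewritableHere(AbstractLogicalOperator op) {
    IPhysicalOperator pt = op.getPhysicalOperator();
    return pt != null && op.getOperatorTag() != LogicalOperatorTag.EXCHANGE;
}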