Use of org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext in project asterixdb by apache.
From the class LoadRecordFieldsRule, the method findAndEliminateRedundantFieldAccess:
/**
 * Rewrite
 *   assign $x := field-access($y, "field")
 *   assign $y := record-constructor { "field": Expr, ... }
 * into
 *   assign $x := Expr
 *   assign $y := record-constructor { "field": Expr, ... }
 */
private static boolean findAndEliminateRedundantFieldAccess(AssignOperator assign, IOptimizationContext context)
        throws AlgebricksException {
    ILogicalExpression expr = getFirstExpr(assign);
    AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
    // The field access must be applied to a plain variable reference ...
    ILogicalExpression arg0 = f.getArguments().get(0).getValue();
    if (arg0.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
        return false;
    }
    VariableReferenceExpression vre = (VariableReferenceExpression) arg0;
    LogicalVariable recordVar = vre.getVariableReference();
    // ... and the field name (or index) must be a constant.
    ILogicalExpression arg1 = f.getArguments().get(1).getValue();
    if (arg1.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
        return false;
    }
    IVariableTypeEnvironment typeEnvironment = context.getOutputTypeEnvironment(assign);
    ConstantExpression ce = (ConstantExpression) arg1;
    ILogicalExpression fldExpr;
    if (f.getFunctionIdentifier().equals(BuiltinFunctions.FIELD_ACCESS_BY_NAME)) {
        String fldName = ((AString) ((AsterixConstantValue) ce.getValue()).getObject()).getStringValue();
        fldExpr = findFieldExpression(assign, recordVar, fldName, typeEnvironment,
                (name, expression, env) -> findFieldByNameFromRecordConstructor(name, expression));
    } else if (f.getFunctionIdentifier().equals(BuiltinFunctions.FIELD_ACCESS_BY_INDEX)) {
        Integer fldIdx = ((AInt32) ((AsterixConstantValue) ce.getValue()).getObject()).getIntegerValue();
        fldExpr = findFieldExpression(assign, recordVar, fldIdx, typeEnvironment,
                LoadRecordFieldsRule::findFieldByIndexFromRecordConstructor);
    } else if (f.getFunctionIdentifier().equals(BuiltinFunctions.FIELD_ACCESS_NESTED)) {
        return false;
    } else {
        throw new IllegalStateException();
    }
    if (fldExpr == null) {
        return false;
    }
    // Only substitute if every variable used by the replacement expression is
    // still live at this operator.
    List<LogicalVariable> usedVariables = new ArrayList<>();
    fldExpr.getUsedVariables(usedVariables);
    List<LogicalVariable> liveInputVars = new ArrayList<>();
    VariableUtilities.getLiveVariables(assign, liveInputVars);
    usedVariables.removeAll(liveInputVars);
    if (usedVariables.isEmpty()) {
        assign.getExpressions().get(0).setValue(fldExpr);
        return true;
    }
    return false;
}
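The helper findFieldByNameFromRecordConstructor referenced above is not shown. A minimal sketch of such a helper, assuming (as in AsterixDB record constructors) that the argument list alternates field-name constants and field-value expressions; this is an illustrative approximation, not the project's actual implementation:

// Sketch (assumption): scan a record-constructor's arguments, which alternate
// (field-name constant, field-value expression), and return the value
// expression for the requested field name, or null if the field is absent.
private static ILogicalExpression findFieldByNameFromRecordConstructor(Object fldName,
        AbstractFunctionCallExpression recordConstructor) {
    List<Mutable<ILogicalExpression>> args = recordConstructor.getArguments();
    for (int i = 0; i + 1 < args.size(); i += 2) {
        ILogicalExpression nameExpr = args.get(i).getValue();
        if (nameExpr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
            IAObject nameObj = ((AsterixConstantValue) ((ConstantExpression) nameExpr).getValue()).getObject();
            if (nameObj instanceof AString && ((AString) nameObj).getStringValue().equals(fldName)) {
                return args.get(i + 1).getValue(); // value expression of the matching field
            }
        }
    }
    return null;
}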
Use of org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext in project asterixdb by apache.
From the class AbstractHashJoinPOperator, the method getRequiredPropertiesForChildren:
@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
        IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
    // In a cost-based optimizer, we would also try to propagate the
    // parent's partitioning requirements.
    IPartitioningProperty pp1;
    IPartitioningProperty pp2;
    switch (partitioningType) {
        case PAIRWISE:
            // Hash-partition both inputs on their respective join keys.
            pp1 = new UnorderedPartitionedProperty(new ListSet<>(keysLeftBranch),
                    context.getComputationNodeDomain());
            pp2 = new UnorderedPartitionedProperty(new ListSet<>(keysRightBranch),
                    context.getComputationNodeDomain());
            break;
        case BROADCAST:
            // Randomly partition the first input; replicate the second to all nodes.
            pp1 = new RandomPartitioningProperty(context.getComputationNodeDomain());
            pp2 = new BroadcastPartitioningProperty(context.getComputationNodeDomain());
            break;
        default:
            throw new IllegalStateException();
    }
    StructuralPropertiesVector[] pv = new StructuralPropertiesVector[2];
    pv[0] = OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op,
            new StructuralPropertiesVector(pp1, null));
    pv[1] = OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op,
            new StructuralPropertiesVector(pp2, null));
    IPartitioningRequirementsCoordinator prc;
    switch (kind) {
        case INNER: {
            prc = IPartitioningRequirementsCoordinator.EQCLASS_PARTITIONING_COORDINATOR;
            break;
        }
        case LEFT_OUTER: {
            prc = new IPartitioningRequirementsCoordinator() {
                @Override
                public Pair<Boolean, IPartitioningProperty> coordinateRequirements(
                        IPartitioningProperty requirements, IPartitioningProperty firstDeliveredPartitioning,
                        ILogicalOperator op, IOptimizationContext context) throws AlgebricksException {
                    if (firstDeliveredPartitioning != null && requirements != null
                            && firstDeliveredPartitioning.getPartitioningType() == requirements
                                    .getPartitioningType()) {
                        switch (requirements.getPartitioningType()) {
                            case UNORDERED_PARTITIONED: {
                                UnorderedPartitionedProperty upp1 =
                                        (UnorderedPartitionedProperty) firstDeliveredPartitioning;
                                Set<LogicalVariable> set1 = upp1.getColumnSet();
                                UnorderedPartitionedProperty uppreq = (UnorderedPartitionedProperty) requirements;
                                Set<LogicalVariable> modifuppreq = new ListSet<LogicalVariable>();
                                Map<LogicalVariable, EquivalenceClass> eqmap = context.getEquivalenceClassMap(op);
                                Set<LogicalVariable> covered = new ListSet<LogicalVariable>();
                                Set<LogicalVariable> keysCurrent = uppreq.getColumnSet();
                                List<LogicalVariable> keysFirst = (keysRightBranch.containsAll(keysCurrent))
                                        ? keysRightBranch : keysLeftBranch;
                                List<LogicalVariable> keysSecond =
                                        keysFirst == keysRightBranch ? keysLeftBranch : keysRightBranch;
                                for (LogicalVariable r : uppreq.getColumnSet()) {
                                    // Locate r (or an equivalent variable) among the join keys
                                    // of the branch this requirement refers to ...
                                    EquivalenceClass ecSnd = eqmap.get(r);
                                    boolean found = false;
                                    int j = 0;
                                    for (LogicalVariable rvar : keysFirst) {
                                        if (rvar == r || (ecSnd != null && eqmap.get(rvar) == ecSnd)) {
                                            found = true;
                                            break;
                                        }
                                        j++;
                                    }
                                    if (!found) {
                                        throw new IllegalStateException(
                                                "Did not find a variable equivalent to " + r + " among " + keysFirst);
                                    }
                                    // ... then check that the positionally corresponding key of the
                                    // other branch (or an equivalent variable) appears in the
                                    // partitioning columns delivered by the first input.
                                    LogicalVariable v2 = keysSecond.get(j);
                                    EquivalenceClass ecFst = eqmap.get(v2);
                                    for (LogicalVariable vset1 : set1) {
                                        if (vset1 == v2 || (ecFst != null && eqmap.get(vset1) == ecFst)) {
                                            covered.add(vset1);
                                            modifuppreq.add(r);
                                            break;
                                        }
                                    }
                                    if (covered.equals(set1)) {
                                        break;
                                    }
                                }
                                if (!covered.equals(set1)) {
                                    throw new AlgebricksException("Could not modify " + requirements
                                            + " to agree with partitioning property " + firstDeliveredPartitioning
                                            + " delivered by previous input operator.");
                                }
                                UnorderedPartitionedProperty upp2 =
                                        new UnorderedPartitionedProperty(modifuppreq, requirements.getNodeDomain());
                                return new Pair<>(false, upp2);
                            }
                            case ORDERED_PARTITIONED: {
                                throw new NotImplementedException();
                            }
                        }
                    }
                    return new Pair<>(true, requirements);
                }
            };
            break;
        }
        default: {
            throw new IllegalStateException();
        }
    }
    return new PhysicalRequirements(pv, prc);
}
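The LEFT_OUTER coordinator above rewrites a required partitioning in terms of the delivered one by walking equivalence classes: each required key is located among the join keys of its own branch, then replaced by the positionally corresponding key of the other branch. A simplified, self-contained illustration of that remapping on plain Java collections (equivalence classes reduced to integer ids; all names invented for the sketch):

import java.util.*;

public class KeyRemapSketch {
    // Maps each required key to the positionally corresponding key of the other
    // join branch, treating two keys as equivalent when they share an
    // equivalence-class id, mirroring the eqmap lookups in the coordinator.
    static List<String> remap(List<String> required, List<String> firstKeys,
            List<String> secondKeys, Map<String, Integer> eqClassOf) {
        List<String> remapped = new ArrayList<>();
        for (String r : required) {
            Integer ec = eqClassOf.get(r);
            int j = -1;
            for (int i = 0; i < firstKeys.size(); i++) {
                String k = firstKeys.get(i);
                if (k.equals(r) || (ec != null && ec.equals(eqClassOf.get(k)))) {
                    j = i;
                    break;
                }
            }
            if (j < 0) {
                throw new IllegalStateException("No key equivalent to " + r + " among " + firstKeys);
            }
            remapped.add(secondKeys.get(j)); // positionally corresponding key
        }
        return remapped;
    }

    public static void main(String[] args) {
        // $a joins $x and $b joins $y, so {$a, $x} and {$b, $y} are equivalence classes.
        Map<String, Integer> eq = Map.of("$a", 1, "$x", 1, "$b", 2, "$y", 2);
        // A requirement on the left keys is restated in terms of the right keys.
        System.out.println(remap(List.of("$a", "$b"), List.of("$a", "$b"), List.of("$x", "$y"), eq));
        // prints [$x, $y]
    }
}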
Use of org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext in project asterixdb by apache.
From the class IntroduceSelectAccessMethodRule, the method intersectAllSecondaryIndexes:
/**
 * Constructs all applicable secondary-index access paths in the given selection plan and
 * intersects them with an INTERSECT operator that feeds a common primary-index search.
 * If only one index is applicable, a single path is constructed instead.
 */
private boolean intersectAllSecondaryIndexes(List<Pair<IAccessMethod, Index>> chosenIndexes,
        Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs, IOptimizationContext context)
        throws AlgebricksException {
    Pair<IAccessMethod, Index> chosenIndex = null;
    Optional<Pair<IAccessMethod, Index>> primaryIndex =
            chosenIndexes.stream().filter(pair -> pair.second.isPrimaryIndex()).findFirst();
    if (chosenIndexes.size() == 1) {
        chosenIndex = chosenIndexes.get(0);
    } else if (primaryIndex.isPresent()) {
        // One primary index among secondary indexes: choose the primary index directly.
        chosenIndex = primaryIndex.get();
    }
    if (chosenIndex != null) {
        AccessMethodAnalysisContext analysisCtx = analyzedAMs.get(chosenIndex.first);
        return chosenIndex.first.applySelectPlanTransformation(afterSelectRefs, selectRef, subTree,
                chosenIndex.second, analysisCtx, context);
    }
    // Intersect all secondary indexes and postpone the primary-index search.
    Mutable<ILogicalExpression> conditionRef = selectOp.getCondition();
    List<ILogicalOperator> subRoots = new ArrayList<>();
    for (Pair<IAccessMethod, Index> pair : chosenIndexes) {
        AccessMethodAnalysisContext analysisCtx = analyzedAMs.get(pair.first);
        subRoots.add(pair.first.createSecondaryToPrimaryPlan(conditionRef, subTree, null, pair.second, analysisCtx,
                AccessMethodUtils.retainInputs(subTree.getDataSourceVariables(),
                        subTree.getDataSourceRef().getValue(), afterSelectRefs),
                false, subTree.getDataSourceRef().getValue().getInputs().get(0).getValue()
                        .getExecutionMode() == ExecutionMode.UNPARTITIONED,
                context));
    }
    // Connect each secondary-index utilization plan to a common INTERSECT operator.
    ILogicalOperator primaryUnnestOp = connectAll2ndarySearchPlanWithIntersect(subRoots, context);
    subTree.getDataSourceRef().setValue(primaryUnnestOp);
    return primaryUnnestOp != null;
}
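Conceptually, the INTERSECT operator produced by connectAll2ndarySearchPlanWithIntersect keeps only those primary keys that every secondary-index search returns, so the postponed primary-index search touches each qualifying record once. A self-contained sketch of that intersection step on plain key lists (the real operator works on Algebricks operator streams; all names here are illustrative):

import java.util.*;

public class IntersectSketch {
    // Intersects several duplicate-free primary-key lists, as the INTERSECT
    // operator does for the outputs of the secondary-index search plans.
    static List<Integer> intersect(List<List<Integer>> keyLists) {
        List<Integer> result = new ArrayList<>(keyLists.get(0));
        for (int i = 1; i < keyLists.size(); i++) {
            result.retainAll(new HashSet<>(keyLists.get(i)));
        }
        return result; // keys present in every list feed the primary-index search
    }

    public static void main(String[] args) {
        // Two secondary indexes return candidate primary keys; only the common
        // keys are looked up in the primary index.
        System.out.println(intersect(List.of(List.of(3, 7, 9, 12), List.of(5, 7, 12, 20))));
        // prints [7, 12]
    }
}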
Use of org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext in project asterixdb by apache.
From the class HeuristicCompilerFactoryBuilder, the method create:
@Override
public ICompilerFactory create() {
    return new ICompilerFactory() {
        @Override
        public ICompiler createCompiler(final ILogicalPlan plan, final IMetadataProvider<?, ?> metadata,
                int varCounter) {
            final IOptimizationContext oc = optCtxFactory.createOptimizationContext(varCounter,
                    expressionEvalSizeComputer, mergeAggregationExpressionFactory, expressionTypeComputer,
                    missableTypeComputer, conflictingTypeResolver, physicalOptimizationConfig, clusterLocations);
            oc.setMetadataDeclarations(metadata);
            final HeuristicOptimizer opt = new HeuristicOptimizer(plan, logicalRewrites, physicalRewrites, oc);
            return new ICompiler() {
                @Override
                public void optimize() throws AlgebricksException {
                    opt.optimize();
                }

                @Override
                public JobSpecification createJob(Object appContext,
                        IJobletEventListenerFactory jobEventListenerFactory) throws AlgebricksException {
                    AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Starting Job Generation.\n");
                    JobGenContext context = new JobGenContext(null, metadata, appContext,
                            serializerDeserializerProvider, hashFunctionFactoryProvider, hashFunctionFamilyProvider,
                            comparatorFactoryProvider, typeTraitProvider, binaryBooleanInspectorFactory,
                            binaryIntegerInspectorFactory, printerProvider, missingWriterFactory,
                            normalizedKeyComputerFactoryProvider, expressionRuntimeProvider, expressionTypeComputer,
                            oc, expressionEvalSizeComputer, partialAggregationTypeComputer,
                            predEvaluatorFactoryProvider, physicalOptimizationConfig.getFrameSize(),
                            clusterLocations);
                    PlanCompiler pc = new PlanCompiler(context);
                    return pc.compilePlan(plan, null, jobEventListenerFactory);
                }
            };
        }
    };
}
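A typical call sequence against the factory returned by create(), shown as a hedged sketch: compilerFactoryBuilder, plan, metadataProvider, appCtx, and jobEventListenerFactory are placeholders for whatever the host application supplies, and the variable counter is assumed to start at 0.

// Usage sketch: optimize the logical plan, then generate a Hyracks job.
ICompilerFactory compilerFactory = compilerFactoryBuilder.create();
ICompiler compiler = compilerFactory.createCompiler(plan, metadataProvider, 0 /* varCounter */);
compiler.optimize(); // runs the heuristic logical and physical rewrites
JobSpecification jobSpec = compiler.createJob(appCtx, jobEventListenerFactory);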