Use of org.apache.commons.lang3.mutable.MutableObject in project asterixdb by apache.
The class ResolveVariableRule, method resolveAsFieldAccess.
// Resolves a "resolve" function call as a field access.
private void resolveAsFieldAccess(Mutable<ILogicalExpression> funcRef, Pair<ILogicalExpression, List<String>> varAndPath) {
// Rewrites the call into a chain of field-access-by-name expressions.
ILogicalExpression expr = varAndPath.first;
List<String> path = varAndPath.second;
Mutable<ILogicalExpression> firstArgRef = new MutableObject<>(expr);
ILogicalExpression newFunc = null;
for (String fieldName : path) {
List<Mutable<ILogicalExpression>> args = new ArrayList<>();
args.add(firstArgRef);
args.add(new MutableObject<>(new ConstantExpression(new AsterixConstantValue(new AString(fieldName)))));
newFunc = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_NAME), args);
firstArgRef = new MutableObject<>(newFunc);
}
funcRef.setValue(newFunc);
}
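To make the nesting above concrete, here is a minimal, self-contained sketch; the helper name and the string rendering are purely illustrative and not part of ResolveVariableRule (assumes java.util.List/Arrays imports). It mirrors the loop: each iteration wraps the previous expression as the first argument of the next field access.
// A minimal sketch, assuming a hypothetical helper that prints the nesting produced by the loop above.
private static String sketchNestedFieldAccess(String var, List<String> path) {
    String expr = var;
    for (String fieldName : path) {
        // Mirrors firstArgRef = new MutableObject<>(newFunc): the previous result becomes the first argument.
        expr = "field-access-by-name(" + expr + ", \"" + fieldName + "\")";
    }
    return expr;
}
// sketchNestedFieldAccess("$v", Arrays.asList("address", "city"))
// returns: field-access-by-name(field-access-by-name($v, "address"), "city")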
Use of org.apache.commons.lang3.mutable.MutableObject in project asterixdb by apache.
The class AccessMethodUtils, method addStringArg.
private static void addStringArg(String argument, List<Mutable<ILogicalExpression>> funcArgs) {
Mutable<ILogicalExpression> stringRef = new MutableObject<>(new ConstantExpression(new AsterixConstantValue(new AString(argument))));
funcArgs.add(stringRef);
}
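For context, a hedged usage sketch (the caller and the argument values below are hypothetical): this is how string-valued job-gen parameters, such as an index or dataset name, end up as ordinary constant arguments of an index-search function.
// A minimal sketch inside AccessMethodUtils, assuming hypothetical parameter values.
List<Mutable<ILogicalExpression>> funcArgs = new ArrayList<>();
addStringArg("myIndex", funcArgs);     // index name
addStringArg("myDataverse", funcArgs); // dataverse name
addStringArg("myDataset", funcArgs);   // dataset name
// funcArgs now holds three ConstantExpression(AString) arguments, each wrapped in a MutableObject.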
Use of org.apache.commons.lang3.mutable.MutableObject in project asterixdb by apache.
The class InvertedIndexAccessMethod, method copyAndReinitProbeSubTree.
/**
* Copies the probeSubTree (using new variables), and reinitializes the probeSubTree to it.
* Accordingly replaces the variables in the given joinCond, and the optFuncExpr.
* Returns a reference to the original plan root.
*/
private Mutable<ILogicalOperator> copyAndReinitProbeSubTree(OptimizableOperatorSubTree probeSubTree, ILogicalExpression joinCond, IOptimizableFuncExpr optFuncExpr, List<LogicalVariable> originalSubTreePKs, List<LogicalVariable> surrogateSubTreePKs, IOptimizationContext context) throws AlgebricksException {
probeSubTree.getPrimaryKeyVars(null, originalSubTreePKs);
// Create two copies of the original probe subtree.
// The first copy, which becomes the new probe subtree, will retain the primary-key and secondary-search key variables,
// but have all other variables replaced with new ones.
// The second copy, which will become an input to the top-level equi-join to resolve the surrogates,
// will have all primary-key and secondary-search keys replaced, but retains all other original variables.
// Variable replacement map for the first copy.
LinkedHashMap<LogicalVariable, LogicalVariable> newProbeSubTreeVarMap = new LinkedHashMap<>();
// Variable replacement map for the second copy.
LinkedHashMap<LogicalVariable, LogicalVariable> joinInputSubTreeVarMap = new LinkedHashMap<>();
// Init with all live vars.
List<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
VariableUtilities.getLiveVariables(probeSubTree.getRoot(), liveVars);
for (LogicalVariable var : liveVars) {
joinInputSubTreeVarMap.put(var, var);
}
// Fill variable replacement maps.
for (int i = 0; i < optFuncExpr.getNumLogicalVars(); i++) {
joinInputSubTreeVarMap.put(optFuncExpr.getLogicalVar(i), context.newVar());
newProbeSubTreeVarMap.put(optFuncExpr.getLogicalVar(i), optFuncExpr.getLogicalVar(i));
}
for (int i = 0; i < originalSubTreePKs.size(); i++) {
LogicalVariable newPKVar = context.newVar();
surrogateSubTreePKs.add(newPKVar);
joinInputSubTreeVarMap.put(originalSubTreePKs.get(i), newPKVar);
newProbeSubTreeVarMap.put(originalSubTreePKs.get(i), originalSubTreePKs.get(i));
}
// Create first copy.
LogicalOperatorDeepCopyWithNewVariablesVisitor firstDeepCopyVisitor = new LogicalOperatorDeepCopyWithNewVariablesVisitor(context, context, newProbeSubTreeVarMap);
ILogicalOperator newProbeSubTree = firstDeepCopyVisitor.deepCopy(probeSubTree.getRoot());
inferTypes(newProbeSubTree, context);
Mutable<ILogicalOperator> newProbeSubTreeRootRef = new MutableObject<ILogicalOperator>(newProbeSubTree);
// Create second copy.
LogicalOperatorDeepCopyWithNewVariablesVisitor secondDeepCopyVisitor = new LogicalOperatorDeepCopyWithNewVariablesVisitor(context, context, joinInputSubTreeVarMap);
ILogicalOperator joinInputSubTree = secondDeepCopyVisitor.deepCopy(probeSubTree.getRoot());
inferTypes(joinInputSubTree, context);
probeSubTree.getRootRef().setValue(joinInputSubTree);
// Remember the original probe subtree reference so we can return it.
Mutable<ILogicalOperator> originalProbeSubTreeRootRef = probeSubTree.getRootRef();
// Replace the original probe subtree with its copy.
Dataset origDataset = probeSubTree.getDataset();
ARecordType origRecordType = probeSubTree.getRecordType();
probeSubTree.initFromSubTree(newProbeSubTreeRootRef);
probeSubTree.setDataset(origDataset);
probeSubTree.setRecordType(origRecordType);
// Replace the variables in the join condition based on the mapping of variables
// in the new probe subtree.
Map<LogicalVariable, LogicalVariable> varMapping = firstDeepCopyVisitor.getInputToOutputVariableMapping();
for (Map.Entry<LogicalVariable, LogicalVariable> varMapEntry : varMapping.entrySet()) {
if (varMapEntry.getKey() != varMapEntry.getValue()) {
joinCond.substituteVar(varMapEntry.getKey(), varMapEntry.getValue());
}
}
return originalProbeSubTreeRootRef;
}
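For intuition, here is a minimal, self-contained sketch of how the two substitution maps are intended to behave (plain java.util types and invented variable names, not the AsterixDB deep-copy visitor): a variable mapped to itself survives a copy unchanged, while a variable mapped to a fresh name is replaced in that copy.
import java.util.LinkedHashMap;
import java.util.Map;

public class VarMapSketch {
    // Naive string substitution standing in for the deep-copy visitor's variable replacement.
    static String substitute(String expr, Map<String, String> varMap) {
        for (Map.Entry<String, String> e : varMap.entrySet()) {
            expr = expr.replace(e.getKey(), e.getValue());
        }
        return expr;
    }

    public static void main(String[] args) {
        // First copy (new probe subtree): primary keys and search keys map to themselves.
        Map<String, String> newProbeSubTreeVarMap = new LinkedHashMap<>();
        newProbeSubTreeVarMap.put("$pk", "$pk");
        // Second copy (join input): the same keys map to fresh surrogates; other live variables map to themselves.
        Map<String, String> joinInputSubTreeVarMap = new LinkedHashMap<>();
        joinInputSubTreeVarMap.put("$other", "$other");
        joinInputSubTreeVarMap.put("$pk", "$surrogatePk");

        System.out.println(substitute("eq($pk, $other)", newProbeSubTreeVarMap));  // eq($pk, $other)
        System.out.println(substitute("eq($pk, $other)", joinInputSubTreeVarMap)); // eq($surrogatePk, $other)
    }
}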
Use of org.apache.commons.lang3.mutable.MutableObject in project asterixdb by apache.
The class BTreeAccessMethod, method createSecondaryToPrimaryPlan.
@Override
public ILogicalOperator createSecondaryToPrimaryPlan(Mutable<ILogicalExpression> conditionRef, OptimizableOperatorSubTree indexSubTree, OptimizableOperatorSubTree probeSubTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx, boolean retainInput, boolean retainNull, boolean requiresBroadcast, IOptimizationContext context) throws AlgebricksException {
Dataset dataset = indexSubTree.getDataset();
ARecordType recordType = indexSubTree.getRecordType();
ARecordType metaRecordType = indexSubTree.getMetaRecordType();
// We made sure that indexSubTree has a datasource scan.
AbstractDataSourceOperator dataSourceOp = (AbstractDataSourceOperator) indexSubTree.getDataSourceRef().getValue();
List<Pair<Integer, Integer>> exprAndVarList = analysisCtx.getIndexExprsFromIndexExprsAndVars(chosenIndex);
int numSecondaryKeys = analysisCtx.getNumberOfMatchedKeys(chosenIndex);
// List of function expressions that will be replaced by the secondary-index search.
// These func exprs will be removed from the select condition at the very end of this method.
Set<ILogicalExpression> replacedFuncExprs = new HashSet<>();
// Info on high and low keys for the BTree search predicate.
ILogicalExpression[] lowKeyExprs = new ILogicalExpression[numSecondaryKeys];
ILogicalExpression[] highKeyExprs = new ILogicalExpression[numSecondaryKeys];
LimitType[] lowKeyLimits = new LimitType[numSecondaryKeys];
LimitType[] highKeyLimits = new LimitType[numSecondaryKeys];
boolean[] lowKeyInclusive = new boolean[numSecondaryKeys];
boolean[] highKeyInclusive = new boolean[numSecondaryKeys];
ILogicalExpression[] constantAtRuntimeExpressions = new ILogicalExpression[numSecondaryKeys];
LogicalVariable[] constAtRuntimeExprVars = new LogicalVariable[numSecondaryKeys];
/* TODO: For now we don't do any sophisticated analysis of the func exprs to come up with "the best" range
* predicate. If we can't figure out how to integrate a certain funcExpr into the current predicate,
* we just bail by setting this flag.*/
boolean couldntFigureOut = false;
boolean doneWithExprs = false;
boolean isEqCondition = false;
BitSet setLowKeys = new BitSet(numSecondaryKeys);
BitSet setHighKeys = new BitSet(numSecondaryKeys);
// Go through the func exprs listed as optimizable by the chosen index,
// and formulate a range predicate on the secondary-index keys.
// Checks whether a type cast happened from a real (FLOAT, DOUBLE) value to an INT value,
// since rounding issues arise when dealing with the LT (<) or GT (>) operators.
boolean realTypeConvertedToIntegerType;
for (Pair<Integer, Integer> exprIndex : exprAndVarList) {
// Position of the field of matchedFuncExprs.get(exprIndex) in the chosen index's indexed exprs.
IOptimizableFuncExpr optFuncExpr = analysisCtx.getMatchedFuncExpr(exprIndex.first);
int keyPos = indexOf(optFuncExpr.getFieldName(0), chosenIndex.getKeyFieldNames());
if (keyPos < 0 && optFuncExpr.getNumLogicalVars() > 1) {
// If we are optimizing a join, the matching field may be the second field name.
keyPos = indexOf(optFuncExpr.getFieldName(1), chosenIndex.getKeyFieldNames());
}
if (keyPos < 0) {
throw CompilationException.create(ErrorCode.NO_INDEX_FIELD_NAME_FOR_GIVEN_FUNC_EXPR);
}
Pair<ILogicalExpression, Boolean> returnedSearchKeyExpr = AccessMethodUtils.createSearchKeyExpr(optFuncExpr, indexSubTree, probeSubTree);
ILogicalExpression searchKeyExpr = returnedSearchKeyExpr.first;
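// Note: a function-call search key is not a compile-time constant, so it is detoured through a
// fresh variable below; the original expression is remembered in constantAtRuntimeExpressions[keyPos]
// and later handed to createKeyVarsAndExprs so it can be evaluated before the index search.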
if (searchKeyExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
constantAtRuntimeExpressions[keyPos] = searchKeyExpr;
constAtRuntimeExprVars[keyPos] = context.newVar();
searchKeyExpr = new VariableReferenceExpression(constAtRuntimeExprVars[keyPos]);
}
realTypeConvertedToIntegerType = returnedSearchKeyExpr.second;
LimitType limit = getLimitType(optFuncExpr, probeSubTree);
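// Example (illustrative values): if "intField < 3.7" has its search key cast down to the integer 3,
// keeping HIGH_EXCLUSIVE would wrongly drop intField == 3, so the exclusive bound is relaxed to an
// inclusive one; the symmetric adjustment is applied to LOW_EXCLUSIVE.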
if (realTypeConvertedToIntegerType) {
if (limit == LimitType.HIGH_EXCLUSIVE) {
limit = LimitType.HIGH_INCLUSIVE;
} else if (limit == LimitType.LOW_EXCLUSIVE) {
limit = LimitType.LOW_INCLUSIVE;
}
}
switch(limit) {
case EQUAL:
{
if (lowKeyLimits[keyPos] == null && highKeyLimits[keyPos] == null) {
lowKeyLimits[keyPos] = highKeyLimits[keyPos] = limit;
lowKeyInclusive[keyPos] = highKeyInclusive[keyPos] = true;
lowKeyExprs[keyPos] = highKeyExprs[keyPos] = searchKeyExpr;
setLowKeys.set(keyPos);
setHighKeys.set(keyPos);
isEqCondition = true;
} else {
// Both bounds were already set; this can happen when optimizing a join, where the same condition is analyzed twice (once from each side of the join). Accept it only if it is the identical equality condition.
if (lowKeyLimits[keyPos] == limit && lowKeyInclusive[keyPos] == true && lowKeyExprs[keyPos].equals(searchKeyExpr) && highKeyLimits[keyPos] == limit && highKeyInclusive[keyPos] == true && highKeyExprs[keyPos].equals(searchKeyExpr)) {
isEqCondition = true;
break;
}
couldntFigureOut = true;
}
// If high and low keys are set, we exit for now.
if (setLowKeys.cardinality() == numSecondaryKeys && setHighKeys.cardinality() == numSecondaryKeys) {
doneWithExprs = true;
}
break;
}
case HIGH_EXCLUSIVE:
{
if (highKeyLimits[keyPos] == null || (highKeyLimits[keyPos] != null && highKeyInclusive[keyPos])) {
highKeyLimits[keyPos] = limit;
highKeyExprs[keyPos] = searchKeyExpr;
highKeyInclusive[keyPos] = false;
} else {
// The high bound was already set; this can happen when optimizing a join, where the same condition is analyzed twice (once from each side of the join). Accept it only if it is identical.
if (highKeyLimits[keyPos] == limit && highKeyInclusive[keyPos] == false && highKeyExprs[keyPos].equals(searchKeyExpr)) {
break;
}
couldntFigureOut = true;
doneWithExprs = true;
}
break;
}
case HIGH_INCLUSIVE:
{
if (highKeyLimits[keyPos] == null) {
highKeyLimits[keyPos] = limit;
highKeyExprs[keyPos] = searchKeyExpr;
highKeyInclusive[keyPos] = true;
} else {
// The high bound was already set; this can happen when optimizing a join, where the same condition is analyzed twice (once from each side of the join). Accept it only if it is identical.
if (highKeyLimits[keyPos] == limit && highKeyInclusive[keyPos] == true && highKeyExprs[keyPos].equals(searchKeyExpr)) {
break;
}
couldntFigureOut = true;
doneWithExprs = true;
}
break;
}
case LOW_EXCLUSIVE:
{
if (lowKeyLimits[keyPos] == null || (lowKeyLimits[keyPos] != null && lowKeyInclusive[keyPos])) {
lowKeyLimits[keyPos] = limit;
lowKeyExprs[keyPos] = searchKeyExpr;
lowKeyInclusive[keyPos] = false;
} else {
// The low bound was already set; this can happen when optimizing a join, where the same condition is analyzed twice (once from each side of the join). Accept it only if it is identical.
if (lowKeyLimits[keyPos] == limit && lowKeyInclusive[keyPos] == false && lowKeyExprs[keyPos].equals(searchKeyExpr)) {
break;
}
couldntFigureOut = true;
doneWithExprs = true;
}
break;
}
case LOW_INCLUSIVE:
{
if (lowKeyLimits[keyPos] == null) {
lowKeyLimits[keyPos] = limit;
lowKeyExprs[keyPos] = searchKeyExpr;
lowKeyInclusive[keyPos] = true;
} else {
// The low bound was already set; this can happen when optimizing a join, where the same condition is analyzed twice (once from each side of the join). Accept it only if it is identical.
if (lowKeyLimits[keyPos] == limit && lowKeyInclusive[keyPos] == true && lowKeyExprs[keyPos].equals(searchKeyExpr)) {
break;
}
couldntFigureOut = true;
doneWithExprs = true;
}
break;
}
default:
{
throw new IllegalStateException();
}
}
if (!couldntFigureOut) {
// Remember to remove this funcExpr later.
replacedFuncExprs.add(analysisCtx.getMatchedFuncExpr(exprIndex.first).getFuncExpr());
}
if (doneWithExprs) {
break;
}
}
if (couldntFigureOut) {
return null;
}
// If the select condition contains mixed open/closed intervals on multiple keys, then we make all intervals
// closed to obtain a superset of answers and leave the original selection in place.
boolean primaryIndexPostProccessingIsNeeded = false;
for (int i = 1; i < numSecondaryKeys; ++i) {
if (lowKeyInclusive[i] != lowKeyInclusive[0]) {
Arrays.fill(lowKeyInclusive, true);
primaryIndexPostProccessingIsNeeded = true;
break;
}
}
for (int i = 1; i < numSecondaryKeys; ++i) {
if (highKeyInclusive[i] != highKeyInclusive[0]) {
Arrays.fill(highKeyInclusive, true);
primaryIndexPostProccessingIsNeeded = true;
break;
}
}
// determine cases when prefix search could be applied
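// Example (illustrative predicate): with a composite index on (a, b) and a condition like
// a > 5 AND b < 10, only "a" gets a low bound and only "b" gets a high bound; the bounds do not
// line up, so the search degrades to a prefix search on "a" alone and the original select
// condition is kept as a post-filter.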
for (int i = 1; i < lowKeyExprs.length; i++) {
if (lowKeyLimits[0] == null && lowKeyLimits[i] != null || lowKeyLimits[0] != null && lowKeyLimits[i] == null || highKeyLimits[0] == null && highKeyLimits[i] != null || highKeyLimits[0] != null && highKeyLimits[i] == null) {
numSecondaryKeys--;
primaryIndexPostProccessingIsNeeded = true;
}
}
if (lowKeyLimits[0] == null) {
lowKeyInclusive[0] = true;
}
if (highKeyLimits[0] == null) {
highKeyInclusive[0] = true;
}
// Here we generate vars and funcs for assigning the secondary-index keys to be fed into the secondary-index
// search.
// List of variables for the assign.
ArrayList<LogicalVariable> keyVarList = new ArrayList<>();
// List of variables and expressions for the assign.
ArrayList<LogicalVariable> assignKeyVarList = new ArrayList<>();
ArrayList<Mutable<ILogicalExpression>> assignKeyExprList = new ArrayList<>();
int numLowKeys = createKeyVarsAndExprs(numSecondaryKeys, lowKeyLimits, lowKeyExprs, assignKeyVarList, assignKeyExprList, keyVarList, context, constantAtRuntimeExpressions, constAtRuntimeExprVars);
int numHighKeys = createKeyVarsAndExprs(numSecondaryKeys, highKeyLimits, highKeyExprs, assignKeyVarList, assignKeyExprList, keyVarList, context, constantAtRuntimeExpressions, constAtRuntimeExprVars);
BTreeJobGenParams jobGenParams = new BTreeJobGenParams(chosenIndex.getIndexName(), IndexType.BTREE, dataset.getDataverseName(), dataset.getDatasetName(), retainInput, requiresBroadcast);
jobGenParams.setLowKeyInclusive(lowKeyInclusive[0]);
jobGenParams.setHighKeyInclusive(highKeyInclusive[0]);
jobGenParams.setIsEqCondition(isEqCondition);
jobGenParams.setLowKeyVarList(keyVarList, 0, numLowKeys);
jobGenParams.setHighKeyVarList(keyVarList, numLowKeys, numHighKeys);
ILogicalOperator inputOp = null;
if (!assignKeyVarList.isEmpty()) {
// Assign operator that sets the constant secondary-index search-key fields if necessary.
AssignOperator assignConstantSearchKeys = new AssignOperator(assignKeyVarList, assignKeyExprList);
// Input to this assign is the EmptyTupleSource (which the dataSourceScan also must have had as input).
assignConstantSearchKeys.getInputs().add(new MutableObject<>(OperatorManipulationUtil.deepCopy(dataSourceOp.getInputs().get(0).getValue())));
assignConstantSearchKeys.setExecutionMode(dataSourceOp.getExecutionMode());
inputOp = assignConstantSearchKeys;
} else if (probeSubTree == null) {
// Non-pure case: make sure that the non-pure function is evaluated unpartitioned.
ILogicalOperator checkOp = dataSourceOp.getInputs().get(0).getValue();
while (checkOp.getExecutionMode() != ExecutionMode.UNPARTITIONED) {
if (checkOp.getInputs().size() == 1) {
checkOp = checkOp.getInputs().get(0).getValue();
} else {
return null;
}
}
inputOp = dataSourceOp.getInputs().get(0).getValue();
} else {
// All index search keys are variables.
inputOp = probeSubTree.getRoot();
}
ILogicalOperator secondaryIndexUnnestOp = AccessMethodUtils.createSecondaryIndexUnnestMap(dataset, recordType, metaRecordType, chosenIndex, inputOp, jobGenParams, context, false, retainInput, retainNull);
// Generate the rest of the upstream plan which feeds the search results into the primary index.
AbstractUnnestMapOperator primaryIndexUnnestOp = null;
boolean isPrimaryIndex = chosenIndex.isPrimaryIndex();
if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
// External dataset
UnnestMapOperator externalDataAccessOp = AccessMethodUtils.createExternalDataLookupUnnestMap(dataSourceOp, dataset, recordType, secondaryIndexUnnestOp, context, retainInput, retainNull);
indexSubTree.getDataSourceRef().setValue(externalDataAccessOp);
return externalDataAccessOp;
} else if (!isPrimaryIndex) {
primaryIndexUnnestOp = AccessMethodUtils.createPrimaryIndexUnnestMap(dataSourceOp, dataset, recordType, metaRecordType, secondaryIndexUnnestOp, context, true, retainInput, retainNull, false);
// Adds equivalence classes: one equivalence class between a primary-key variable and a record field-access expression.
EquivalenceClassUtils.addEquivalenceClassesForPrimaryIndexAccess(primaryIndexUnnestOp, dataSourceOp.getVariables(), recordType, metaRecordType, dataset, context);
} else {
List<Object> primaryIndexOutputTypes = new ArrayList<>();
AccessMethodUtils.appendPrimaryIndexTypes(dataset, recordType, metaRecordType, primaryIndexOutputTypes);
List<LogicalVariable> scanVariables = dataSourceOp.getVariables();
// If post-processing is not needed, we create a new condition based on the remaining expressions.
if (!primaryIndexPostProccessingIsNeeded) {
List<Mutable<ILogicalExpression>> remainingFuncExprs = new ArrayList<>();
try {
getNewConditionExprs(conditionRef, replacedFuncExprs, remainingFuncExprs);
} catch (CompilationException e) {
return null;
}
// Generate new condition.
if (!remainingFuncExprs.isEmpty()) {
ILogicalExpression pulledCond = createSelectCondition(remainingFuncExprs);
conditionRef.setValue(pulledCond);
} else {
conditionRef.setValue(null);
}
}
// A LEFT_OUTER_UNNESTMAP operator is required only when both the input and nulls are retained.
boolean leftOuterUnnestMapRequired = retainNull && retainInput;
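// If any condition remains, build a fresh INDEX_SEARCH function for the primary unnest-map below;
// otherwise reuse the search expression of the unnest-map created above.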
if (conditionRef.getValue() != null) {
// The job gen parameters are transferred to the actual job gen
// via the UnnestMapOperator's function arguments.
List<Mutable<ILogicalExpression>> primaryIndexFuncArgs = new ArrayList<>();
jobGenParams.writeToFuncArgs(primaryIndexFuncArgs);
// An index search is expressed as an unnest-map over an
// index-search function.
IFunctionInfo primaryIndexSearch = FunctionUtil.getFunctionInfo(BuiltinFunctions.INDEX_SEARCH);
UnnestingFunctionCallExpression primaryIndexSearchFunc = new UnnestingFunctionCallExpression(primaryIndexSearch, primaryIndexFuncArgs);
primaryIndexSearchFunc.setReturnsUniqueValues(true);
if (!leftOuterUnnestMapRequired) {
primaryIndexUnnestOp = new UnnestMapOperator(scanVariables, new MutableObject<ILogicalExpression>(primaryIndexSearchFunc), primaryIndexOutputTypes, retainInput);
} else {
primaryIndexUnnestOp = new LeftOuterUnnestMapOperator(scanVariables, new MutableObject<ILogicalExpression>(primaryIndexSearchFunc), primaryIndexOutputTypes, true);
}
} else {
if (!leftOuterUnnestMapRequired) {
primaryIndexUnnestOp = new UnnestMapOperator(scanVariables, ((UnnestMapOperator) secondaryIndexUnnestOp).getExpressionRef(), primaryIndexOutputTypes, retainInput);
} else {
primaryIndexUnnestOp = new LeftOuterUnnestMapOperator(scanVariables, ((LeftOuterUnnestMapOperator) secondaryIndexUnnestOp).getExpressionRef(), primaryIndexOutputTypes, true);
}
}
primaryIndexUnnestOp.getInputs().add(new MutableObject<>(inputOp));
// Adds equivalence classes: one equivalence class between a primary-key variable and a record field-access expression.
EquivalenceClassUtils.addEquivalenceClassesForPrimaryIndexAccess(primaryIndexUnnestOp, scanVariables, recordType, metaRecordType, dataset, context);
}
return primaryIndexUnnestOp;
}
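As a hedged illustration of the bound-collection logic in the switch above, here is a minimal, self-contained sketch; the enum and all names are simplified stand-ins, not the AsterixDB types. Each comparison contributes a low bound, a high bound, or (for equality) both, at its key position. The real method additionally rejects conflicting bounds (couldntFigureOut), tolerates duplicates coming from both sides of a join, and relaxes exclusive bounds when a real-to-integer cast occurred.
// A minimal sketch, assuming simplified stand-in types.
class BoundCollectorSketch {
    enum Limit { EQUAL, LOW_INCLUSIVE, LOW_EXCLUSIVE, HIGH_INCLUSIVE, HIGH_EXCLUSIVE }

    static void collectBound(Limit limit, int keyPos, Object key,
                             Object[] lowKeys, boolean[] lowInclusive,
                             Object[] highKeys, boolean[] highInclusive) {
        switch (limit) {
            case EQUAL:          // x = c  becomes  c <= x <= c
                lowKeys[keyPos] = key;  lowInclusive[keyPos] = true;
                highKeys[keyPos] = key; highInclusive[keyPos] = true;
                break;
            case LOW_INCLUSIVE:  // x >= c
                lowKeys[keyPos] = key;  lowInclusive[keyPos] = true;
                break;
            case LOW_EXCLUSIVE:  // x > c
                lowKeys[keyPos] = key;  lowInclusive[keyPos] = false;
                break;
            case HIGH_INCLUSIVE: // x <= c
                highKeys[keyPos] = key; highInclusive[keyPos] = true;
                break;
            case HIGH_EXCLUSIVE: // x < c
                highKeys[keyPos] = key; highInclusive[keyPos] = false;
                break;
        }
    }
}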
Use of org.apache.commons.lang3.mutable.MutableObject in project asterixdb by apache.
The class InvertedIndexAccessMethod, method createPanicNestedLoopJoinPlan.
private Mutable<ILogicalOperator> createPanicNestedLoopJoinPlan(Mutable<ILogicalOperator> joinRef, OptimizableOperatorSubTree indexSubTree, OptimizableOperatorSubTree probeSubTree, IOptimizableFuncExpr optFuncExpr, Index chosenIndex, Map<LogicalVariable, LogicalVariable> panicVarMap, IOptimizationContext context) throws AlgebricksException {
LogicalVariable inputSearchVar = getInputSearchVar(optFuncExpr, indexSubTree);
// We split the plan into two "branches", and add selections on each side.
AbstractLogicalOperator replicateOp = new ReplicateOperator(2);
replicateOp.getInputs().add(new MutableObject<ILogicalOperator>(probeSubTree.getRoot()));
replicateOp.setExecutionMode(ExecutionMode.PARTITIONED);
context.computeAndSetTypeEnvironmentForOperator(replicateOp);
// Create select ops for removing tuples that are filterable and not filterable, respectively.
IVariableTypeEnvironment probeTypeEnv = context.getOutputTypeEnvironment(probeSubTree.getRoot());
IAType inputSearchVarType;
if (chosenIndex.isEnforcingKeyFileds()) {
inputSearchVarType = optFuncExpr.getFieldType(optFuncExpr.findLogicalVar(inputSearchVar));
} else {
inputSearchVarType = (IAType) probeTypeEnv.getVarType(inputSearchVar);
}
Mutable<ILogicalOperator> isFilterableSelectOpRef = new MutableObject<ILogicalOperator>();
Mutable<ILogicalOperator> isNotFilterableSelectOpRef = new MutableObject<ILogicalOperator>();
createIsFilterableSelectOps(replicateOp, inputSearchVar, inputSearchVarType, optFuncExpr, chosenIndex, context, isFilterableSelectOpRef, isNotFilterableSelectOpRef);
List<LogicalVariable> originalLiveVars = new ArrayList<LogicalVariable>();
VariableUtilities.getLiveVariables(indexSubTree.getRoot(), originalLiveVars);
// Copy the scan subtree in indexSubTree.
LogicalOperatorDeepCopyWithNewVariablesVisitor deepCopyVisitor = new LogicalOperatorDeepCopyWithNewVariablesVisitor(context, context);
ILogicalOperator scanSubTree = deepCopyVisitor.deepCopy(indexSubTree.getRoot());
Map<LogicalVariable, LogicalVariable> copyVarMap = deepCopyVisitor.getInputToOutputVariableMapping();
panicVarMap.putAll(copyVarMap);
List<LogicalVariable> copyLiveVars = new ArrayList<LogicalVariable>();
VariableUtilities.getLiveVariables(scanSubTree, copyLiveVars);
// Replace the inputs of the given join op, and replace variables in its
// condition since we deep-copied one of the scanner subtrees which
// changed variables.
AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) joinRef.getValue();
for (Map.Entry<LogicalVariable, LogicalVariable> entry : copyVarMap.entrySet()) {
joinOp.getCondition().getValue().substituteVar(entry.getKey(), entry.getValue());
}
joinOp.getInputs().clear();
joinOp.getInputs().add(new MutableObject<ILogicalOperator>(scanSubTree));
// Make sure that the build input (which may be materialized causing blocking) comes from
// the split+select, otherwise the plan will have a deadlock.
joinOp.getInputs().add(isNotFilterableSelectOpRef);
context.computeAndSetTypeEnvironmentForOperator(joinOp);
// Return the new root of the probeSubTree.
return isFilterableSelectOpRef;
}
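For orientation, a rough sketch of the plan shape this method builds (operator names abbreviated; the index-accelerated branch is wired up by the caller): the probe side is replicated, one select keeps the tuples the inverted index can pre-filter, the other keeps the ones it cannot, and only the latter feed the nested-loop "panic" join against a fresh copy of the index-side scan.
//                 probeSubTree root
//                        |
//                 ReplicateOperator (2 outputs)
//                 /                \
//    SELECT isFilterable       SELECT not isFilterable
//            |                         |
//  (returned as the new         NestedLoopJoin  <--  deep copy of indexSubTree scan
//   probe root for the          (joinRef, with its condition rewritten
//   index-accelerated branch)    to use the copy's variables)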