Use of org.apache.beam.repackaged.core.org.apache.commons.lang3.mutable.MutableObject in project asterixdb by apache.
In class LangExpressionToPlanTranslator, method constructSubplanOperatorForBranch.
/**
 * Constructs a subplan operator for a branch in an if-else (or case) expression.
 *
 * @param inputOp
 *            the input operator.
 * @param selectExpr
 *            the expression to select tuples that are processed by this branch.
 * @param branchExpression
 *            the expression to be evaluated in this branch.
 * @return a pair of the constructed subplan operator and the output variable for the branch.
 * @throws CompilationException
 */
protected Pair<ILogicalOperator, LogicalVariable> constructSubplanOperatorForBranch(ILogicalOperator inputOp,
        Mutable<ILogicalExpression> selectExpr, Expression branchExpression) throws CompilationException {
    context.enterSubplan();
    SubplanOperator subplanOp = new SubplanOperator();
    subplanOp.getInputs().add(new MutableObject<>(inputOp));
    Mutable<ILogicalOperator> nestedSource =
            new MutableObject<>(new NestedTupleSourceOperator(new MutableObject<>(subplanOp)));
    SelectOperator select = new SelectOperator(selectExpr, false, null);
    // The select operator cannot be moved up and down, otherwise it will cause typing issues (ASTERIXDB-1203).
    OperatorPropertiesUtil.markMovable(select, false);
    select.getInputs().add(nestedSource);
    Pair<ILogicalOperator, LogicalVariable> pBranch = branchExpression.accept(this, new MutableObject<>(select));
    LogicalVariable branchVar = context.newVar();
    AggregateOperator aggOp = new AggregateOperator(Collections.singletonList(branchVar),
            Collections.singletonList(new MutableObject<>(new AggregateFunctionCallExpression(
                    FunctionUtil.getFunctionInfo(BuiltinFunctions.LISTIFY), false,
                    Collections.singletonList(new MutableObject<>(new VariableReferenceExpression(pBranch.second)))))));
    aggOp.getInputs().add(new MutableObject<>(pBranch.first));
    ILogicalPlan planForBranch = new ALogicalPlanImpl(new MutableObject<>(aggOp));
    subplanOp.getNestedPlans().add(planForBranch);
    context.exitSubplan();
    return new Pair<>(subplanOp, branchVar);
}
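The recurring pattern in this translator is that every operator reference is wrapped in a MutableObject so the same handle can be shared between a parent operator and later rewrite rules. The following standalone sketch (plain commons-lang3 only, with toy String values standing in for operators; it is not AsterixDB code) illustrates the semantics this relies on: every holder of the Mutable reference observes a setValue made through any copy of that reference.

import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;

public class MutableObjectDemo {
    public static void main(String[] args) {
        // A "plan node reference": the value can be replaced without handing out a new handle.
        Mutable<String> opRef = new MutableObject<>("EmptyTupleSource");

        // A parent (or a rewrite rule) keeps the same reference, not a copy of the value.
        Mutable<String> parentInput = opRef;

        // Swapping the referenced value is visible to every holder of the reference,
        // which is what lets a rewrite replace a child operator "in place".
        opRef.setValue("DataSourceScan");

        System.out.println(parentInput.getValue()); // prints "DataSourceScan"
    }
}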
Use of org.apache.beam.repackaged.core.org.apache.commons.lang3.mutable.MutableObject in project asterixdb by apache.
In class LangExpressionToPlanTranslator, method translateSubscribeFeed.
private ILogicalOperator translateSubscribeFeed(CompiledSubscribeFeedStatement sfs,
        DatasetDataSource targetDatasource, LogicalVariable unnestVar, ILogicalOperator topOp,
        ArrayList<Mutable<ILogicalExpression>> exprs, LogicalVariable resVar,
        List<Mutable<ILogicalExpression>> varRefsForLoading, Mutable<ILogicalExpression> varRef,
        ILogicalOperator assign, List<String> additionalFilteringField, AssignOperator additionalFilteringAssign,
        List<Mutable<ILogicalExpression>> additionalFilteringExpressions) throws AlgebricksException {
    // If the feed is a change feed (i.e., it performs different operations), we need to project the op variable.
    InsertDeleteUpsertOperator feedModificationOp;
    AssignOperator metaAndKeysAssign;
    List<LogicalVariable> metaAndKeysVars = null;
    List<Mutable<ILogicalExpression>> metaAndKeysExprs = null;
    List<Mutable<ILogicalExpression>> metaExpSingletonList = null;
    Feed feed = metadataProvider.findFeed(sfs.getDataverseName(), sfs.getFeedName());
    boolean isChangeFeed = ExternalDataUtils.isChangeFeed(feed.getAdapterConfiguration());
    boolean isUpsertFeed = ExternalDataUtils.isUpsertFeed(feed.getAdapterConfiguration());
    ProjectOperator project = (ProjectOperator) topOp;
    if (targetDatasource.getDataset().hasMetaPart() || isChangeFeed) {
        metaAndKeysVars = new ArrayList<>();
        metaAndKeysExprs = new ArrayList<>();
        if (targetDatasource.getDataset().hasMetaPart()) {
            // Add the meta function.
            IFunctionInfo finfoMeta = FunctionUtil.getFunctionInfo(BuiltinFunctions.META);
            ScalarFunctionCallExpression metaFunction = new ScalarFunctionCallExpression(finfoMeta,
                    new MutableObject<>(new VariableReferenceExpression(unnestVar)));
            // Create an assign for the meta part.
            LogicalVariable metaVar = context.newVar();
            metaExpSingletonList = new ArrayList<>(1);
            metaExpSingletonList.add(new MutableObject<>(new VariableReferenceExpression(metaVar)));
            metaAndKeysVars.add(metaVar);
            metaAndKeysExprs.add(new MutableObject<>(metaFunction));
            project.getVariables().add(metaVar);
        }
    }
    if (isChangeFeed) {
        varRefsForLoading.clear();
        for (Mutable<ILogicalExpression> assignExpr : exprs) {
            if (assignExpr.getValue().getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
                AbstractFunctionCallExpression funcCall = (AbstractFunctionCallExpression) assignExpr.getValue();
                funcCall.substituteVar(resVar, unnestVar);
                LogicalVariable pkVar = context.newVar();
                metaAndKeysVars.add(pkVar);
                metaAndKeysExprs.add(new MutableObject<>(assignExpr.getValue()));
                project.getVariables().add(pkVar);
                varRefsForLoading.add(new MutableObject<>(new VariableReferenceExpression(pkVar)));
            }
        }
        // For a change feed, we don't need the assign to access the PKs.
        feedModificationOp = new InsertDeleteUpsertOperator(targetDatasource, varRef, varRefsForLoading,
                metaExpSingletonList, InsertDeleteUpsertOperator.Kind.UPSERT, false);
        // Create and add a new variable used for representing the original record.
        feedModificationOp.setPrevRecordVar(context.newVar());
        feedModificationOp.setPrevRecordType(targetDatasource.getItemType());
        if (targetDatasource.getDataset().hasMetaPart()) {
            List<LogicalVariable> metaVars = new ArrayList<>();
            metaVars.add(context.newVar());
            feedModificationOp.setPrevAdditionalNonFilteringVars(metaVars);
            List<Object> metaTypes = new ArrayList<>();
            metaTypes.add(targetDatasource.getMetaItemType());
            feedModificationOp.setPrevAdditionalNonFilteringTypes(metaTypes);
        }
        if (additionalFilteringField != null) {
            feedModificationOp.setPrevFilterVar(context.newVar());
            feedModificationOp.setPrevFilterType(
                    ((ARecordType) targetDatasource.getItemType()).getFieldType(additionalFilteringField.get(0)));
            additionalFilteringAssign.getInputs().clear();
            additionalFilteringAssign.getInputs().add(assign.getInputs().get(0));
            feedModificationOp.getInputs().add(new MutableObject<>(additionalFilteringAssign));
        } else {
            feedModificationOp.getInputs().add(assign.getInputs().get(0));
        }
    } else {
        final InsertDeleteUpsertOperator.Kind opKind =
                isUpsertFeed ? InsertDeleteUpsertOperator.Kind.UPSERT : InsertDeleteUpsertOperator.Kind.INSERT;
        feedModificationOp = new InsertDeleteUpsertOperator(targetDatasource, varRef, varRefsForLoading,
                metaExpSingletonList, opKind, false);
        if (isUpsertFeed) {
            feedModificationOp.setPrevRecordVar(context.newVar());
            feedModificationOp.setPrevRecordType(targetDatasource.getItemType());
        }
        feedModificationOp.getInputs().add(new MutableObject<>(assign));
    }
    if (targetDatasource.getDataset().hasMetaPart() || isChangeFeed) {
        metaAndKeysAssign = new AssignOperator(metaAndKeysVars, metaAndKeysExprs);
        metaAndKeysAssign.getInputs().add(topOp.getInputs().get(0));
        topOp.getInputs().set(0, new MutableObject<>(metaAndKeysAssign));
    }
    feedModificationOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
    ILogicalOperator leafOperator = new DelegateOperator(new CommitOperator(true));
    leafOperator.getInputs().add(new MutableObject<>(feedModificationOp));
    return leafOperator;
}
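Near the end of the method, metaAndKeysAssign is spliced between topOp and its former input by combining getInputs().get(0) with getInputs().set(0, new MutableObject<>(...)). A standalone sketch of that splice, with toy String values in place of ILogicalOperator (not AsterixDB code), may make the rewiring easier to follow.

import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;

public class SpliceDemo {
    public static void main(String[] args) {
        // topOp currently reads from "unnest".
        List<Mutable<String>> topOpInputs = new ArrayList<>();
        topOpInputs.add(new MutableObject<>("unnest"));

        // The new assign inherits topOp's old child ...
        List<Mutable<String>> metaAndKeysAssignInputs = new ArrayList<>();
        metaAndKeysAssignInputs.add(topOpInputs.get(0));

        // ... and topOp is rewired to read from the new assign instead.
        topOpInputs.set(0, new MutableObject<>("metaAndKeysAssign"));

        System.out.println(topOpInputs.get(0).getValue());             // metaAndKeysAssign
        System.out.println(metaAndKeysAssignInputs.get(0).getValue()); // unnest
    }
}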
Use of org.apache.beam.repackaged.core.org.apache.commons.lang3.mutable.MutableObject in project asterixdb by apache.
In class AqlExpressionToPlanTranslator, method visit.
@Override
public Pair<ILogicalOperator, LogicalVariable> visit(ForClause fc, Mutable<ILogicalOperator> tupSource)
        throws CompilationException {
    LogicalVariable v = context.newVarFromExpression(fc.getVarExpr());
    Expression inExpr = fc.getInExpr();
    Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = langExprToAlgExpression(inExpr, tupSource);
    ILogicalOperator returnedOp;
    if (fc.getPosVarExpr() == null) {
        returnedOp = new UnnestOperator(v, new MutableObject<ILogicalExpression>(makeUnnestExpression(eo.first)));
    } else {
        LogicalVariable pVar = context.newVarFromExpression(fc.getPosVarExpr());
        // We set the positional variable type as INT64 type.
        returnedOp = new UnnestOperator(v, new MutableObject<ILogicalExpression>(makeUnnestExpression(eo.first)),
                pVar, BuiltinType.AINT64, new PositionWriter());
    }
    returnedOp.getInputs().add(eo.second);
    return new Pair<>(returnedOp, v);
}
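For reference, the positional variable produced by the second branch simply enumerates the unnested items as INT64 values. The toy Java loop below (illustrative only, not AsterixDB code, and assuming 1-based positions as in AQL's "at" clause) shows the pair of values the operator binds for each item.

import java.util.List;

public class PositionalForDemo {
    public static void main(String[] args) {
        // Stand-ins for the items produced by the unnested collection expression.
        List<String> items = List.of("a", "b", "c");
        for (int i = 0; i < items.size(); i++) {
            long pos = i + 1L;          // value bound to the positional ("at") variable, as an INT64
            String item = items.get(i); // value bound to the for-variable
            System.out.println(pos + " -> " + item);
        }
    }
}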
Use of org.apache.beam.repackaged.core.org.apache.commons.lang3.mutable.MutableObject in project asterixdb by apache.
In class AqlPlusExpressionToPlanTranslator, method translate.
public ILogicalPlan translate(List<Clause> clauses) throws AlgebricksException, CompilationException {
    if (clauses == null) {
        return null;
    }
    Mutable<ILogicalOperator> opRef = new MutableObject<ILogicalOperator>(new EmptyTupleSourceOperator());
    Pair<ILogicalOperator, LogicalVariable> p = null;
    for (Clause c : clauses) {
        p = c.accept(this, opRef);
        opRef = new MutableObject<ILogicalOperator>(p.first);
    }
    ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<Mutable<ILogicalOperator>>();
    ILogicalOperator topOp = p.first;
    globalPlanRoots.add(new MutableObject<ILogicalOperator>(topOp));
    ILogicalPlan plan = new ALogicalPlanImpl(globalPlanRoots);
    return plan;
}
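The loop threads a single Mutable<ILogicalOperator> through the clause visitors: each clause consumes the current reference as its tuple source, and the freshly produced operator is re-wrapped to become the source for the next clause. Below is a standalone sketch of that threading pattern with toy String operators (not AsterixDB code).

import java.util.List;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;

public class ClauseThreadingDemo {
    public static void main(String[] args) {
        // Start from a dummy tuple source, as the translator does.
        Mutable<String> opRef = new MutableObject<>("EmptyTupleSource");
        for (String clause : List.of("for", "where", "return")) {
            // Each "clause" builds a new operator on top of the current reference ...
            String newOp = clause + "(" + opRef.getValue() + ")";
            // ... and the result becomes the source for the next clause.
            opRef = new MutableObject<>(newOp);
        }
        System.out.println(opRef.getValue()); // return(where(for(EmptyTupleSource)))
    }
}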
Use of org.apache.beam.repackaged.core.org.apache.commons.lang3.mutable.MutableObject in project asterixdb by apache.
In class LangExpressionToPlanTranslator, method translateLoad.
@Override
public ILogicalPlan translateLoad(ICompiledDmlStatement stmt) throws AlgebricksException {
    CompiledLoadFromFileStatement clffs = (CompiledLoadFromFileStatement) stmt;
    Dataset dataset = metadataProvider.findDataset(clffs.getDataverseName(), clffs.getDatasetName());
    if (dataset == null) {
        // This would never happen since we check for this in AqlTranslator.
        throw new AlgebricksException(
                "Unable to load dataset " + clffs.getDatasetName() + " since it does not exist");
    }
    IAType itemType = metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
    IAType metaItemType =
            metadataProvider.findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
    DatasetDataSource targetDatasource =
            validateDatasetInfo(metadataProvider, stmt.getDataverseName(), stmt.getDatasetName());
    List<List<String>> partitionKeys = targetDatasource.getDataset().getPrimaryKeys();
    if (dataset.hasMetaPart()) {
        throw new AlgebricksException(
                dataset.getDatasetName() + ": load dataset is not supported on Datasets with Meta records");
    }
    LoadableDataSource lds;
    try {
        lds = new LoadableDataSource(dataset, itemType, metaItemType, clffs.getAdapter(), clffs.getProperties());
    } catch (IOException e) {
        throw new AlgebricksException(e);
    }
    // etsOp is a dummy input operator used to keep the compiler happy. It could be removed, but that would
    // require fixing many rewrite rules that assume datasource-scan operators always have an input.
    ILogicalOperator etsOp = new EmptyTupleSourceOperator();
    // Add a logical variable for the record.
    List<LogicalVariable> payloadVars = new ArrayList<>();
    payloadVars.add(context.newVar());
    // Create a scan operator and make the empty tuple source its input.
    DataSourceScanOperator dssOp = new DataSourceScanOperator(payloadVars, lds);
    dssOp.getInputs().add(new MutableObject<>(etsOp));
    ILogicalExpression payloadExpr = new VariableReferenceExpression(payloadVars.get(0));
    Mutable<ILogicalExpression> payloadRef = new MutableObject<>(payloadExpr);
    // Create the assign that extracts the PKs out of the record.
    ArrayList<LogicalVariable> pkVars = new ArrayList<>();
    ArrayList<Mutable<ILogicalExpression>> pkExprs = new ArrayList<>();
    List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<>();
    LogicalVariable payloadVar = payloadVars.get(0);
    for (List<String> keyFieldName : partitionKeys) {
        PlanTranslationUtil.prepareVarAndExpression(keyFieldName, payloadVar, pkVars, pkExprs, varRefsForLoading,
                context);
    }
    AssignOperator assign = new AssignOperator(pkVars, pkExprs);
    assign.getInputs().add(new MutableObject<>(dssOp));
    // If the input is pre-sorted, we set the ordering property explicitly in the assign.
    if (clffs.alreadySorted()) {
        List<OrderColumn> orderColumns = new ArrayList<>();
        for (int i = 0; i < pkVars.size(); ++i) {
            orderColumns.add(new OrderColumn(pkVars.get(i), OrderKind.ASC));
        }
        assign.setExplicitOrderingProperty(new LocalOrderProperty(orderColumns));
    }
    List<String> additionalFilteringField = DatasetUtil.getFilterField(targetDatasource.getDataset());
    List<LogicalVariable> additionalFilteringVars;
    List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions;
    List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
    AssignOperator additionalFilteringAssign = null;
    if (additionalFilteringField != null) {
        additionalFilteringVars = new ArrayList<>();
        additionalFilteringAssignExpressions = new ArrayList<>();
        additionalFilteringExpressions = new ArrayList<>();
        PlanTranslationUtil.prepareVarAndExpression(additionalFilteringField, payloadVar, additionalFilteringVars,
                additionalFilteringAssignExpressions, additionalFilteringExpressions, context);
        additionalFilteringAssign =
                new AssignOperator(additionalFilteringVars, additionalFilteringAssignExpressions);
    }
    InsertDeleteUpsertOperator insertOp = new InsertDeleteUpsertOperator(targetDatasource, payloadRef,
            varRefsForLoading, InsertDeleteUpsertOperator.Kind.INSERT, true);
    insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
    if (additionalFilteringAssign != null) {
        additionalFilteringAssign.getInputs().add(new MutableObject<>(assign));
        insertOp.getInputs().add(new MutableObject<>(additionalFilteringAssign));
    } else {
        insertOp.getInputs().add(new MutableObject<>(assign));
    }
    ILogicalOperator leafOperator = new SinkOperator();
    leafOperator.getInputs().add(new MutableObject<>(insertOp));
    return new ALogicalPlanImpl(new MutableObject<>(leafOperator));
}
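The method returns a single-rooted plan in which every edge is a Mutable-wrapped input reference, so the load pipeline can be inspected by walking getInputs() from the root. Below is a hypothetical helper (not part of AsterixDB; it assumes the standard Algebricks packages used above and that ILogicalPlan.getRoots() exposes the root references) that prints the chain built here.

import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan;

// Hypothetical helper, not part of AsterixDB: walk the single-input chain of a plan.
public final class PlanChainPrinter {
    public static void printOperatorChain(ILogicalPlan plan) {
        ILogicalOperator op = plan.getRoots().get(0).getValue();
        while (true) {
            System.out.println(op.getClass().getSimpleName());
            if (op.getInputs().isEmpty()) {
                break;
            }
            op = op.getInputs().get(0).getValue();
        }
    }
}

For the plan built by translateLoad (ignoring the optional filtering assign), this should print SinkOperator, InsertDeleteUpsertOperator, AssignOperator, DataSourceScanOperator, and EmptyTupleSourceOperator, from the root down.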