Use of org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl in project asterixdb by apache.
The class LangExpressionToPlanTranslator, method translate.
public ILogicalPlan translate(Query expr, String outputDatasetName, ICompiledDmlStatement stmt, ILogicalOperator baseOp) throws AlgebricksException {
MutableObject<ILogicalOperator> base = new MutableObject<>(new EmptyTupleSourceOperator());
if (baseOp != null) {
base = new MutableObject<>(baseOp);
}
Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, base);
ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<>();
ILogicalOperator topOp = p.first;
List<LogicalVariable> liveVars = new ArrayList<>();
VariableUtilities.getLiveVariables(topOp, liveVars);
LogicalVariable unnestVar = liveVars.get(0);
LogicalVariable resVar = unnestVar;
if (outputDatasetName == null) {
FileSplit outputFileSplit = metadataProvider.getOutputFile();
if (outputFileSplit == null) {
outputFileSplit = getDefaultOutputFileLocation(metadataProvider.getApplicationContext());
}
metadataProvider.setOutputFile(outputFileSplit);
List<Mutable<ILogicalExpression>> writeExprList = new ArrayList<>(1);
writeExprList.add(new MutableObject<>(new VariableReferenceExpression(resVar)));
ResultSetSinkId rssId = new ResultSetSinkId(metadataProvider.getResultSetId());
ResultSetDataSink sink = new ResultSetDataSink(rssId, null);
DistributeResultOperator newTop = new DistributeResultOperator(writeExprList, sink);
newTop.getInputs().add(new MutableObject<>(topOp));
topOp = newTop;
// Retrieve the Output RecordType (if any) and store it on
// the DistributeResultOperator
IAType outputRecordType = metadataProvider.findOutputRecordType();
if (outputRecordType != null) {
topOp.getAnnotations().put("output-record-type", outputRecordType);
}
} else {
/**
* Add the collection-to-sequence right before the project,
* because a dataset only accepts non-collection records.
*/
LogicalVariable seqVar = context.newVar();
/**
* This assign adds a marker function collection-to-sequence: if the input is a singleton collection, unnest
* it; otherwise do nothing.
*/
AssignOperator assignCollectionToSequence = new AssignOperator(seqVar, new MutableObject<>(new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.COLLECTION_TO_SEQUENCE), new MutableObject<>(new VariableReferenceExpression(resVar)))));
assignCollectionToSequence.getInputs().add(new MutableObject<>(topOp.getInputs().get(0).getValue()));
topOp.getInputs().get(0).setValue(assignCollectionToSequence);
ProjectOperator projectOperator = (ProjectOperator) topOp;
projectOperator.getVariables().set(0, seqVar);
resVar = seqVar;
DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(), stmt.getDatasetName());
List<Integer> keySourceIndicator = ((InternalDatasetDetails) targetDatasource.getDataset().getDatasetDetails()).getKeySourceIndicator();
ArrayList<LogicalVariable> vars = new ArrayList<>();
ArrayList<Mutable<ILogicalExpression>> exprs = new ArrayList<>();
List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<>();
List<List<String>> partitionKeys = targetDatasource.getDataset().getPrimaryKeys();
int numOfPrimaryKeys = partitionKeys.size();
for (int i = 0; i < numOfPrimaryKeys; i++) {
if (keySourceIndicator == null || keySourceIndicator.get(i).intValue() == 0) {
// record part
PlanTranslationUtil.prepareVarAndExpression(partitionKeys.get(i), resVar, vars, exprs, varRefsForLoading, context);
} else {
// meta part
PlanTranslationUtil.prepareMetaKeyAccessExpression(partitionKeys.get(i), unnestVar, exprs, vars, varRefsForLoading, context);
}
}
AssignOperator assign = new AssignOperator(vars, exprs);
List<String> additionalFilteringField = DatasetUtil.getFilterField(targetDatasource.getDataset());
List<LogicalVariable> additionalFilteringVars;
List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions;
List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
AssignOperator additionalFilteringAssign = null;
if (additionalFilteringField != null) {
additionalFilteringVars = new ArrayList<>();
additionalFilteringAssignExpressions = new ArrayList<>();
additionalFilteringExpressions = new ArrayList<>();
PlanTranslationUtil.prepareVarAndExpression(additionalFilteringField, resVar, additionalFilteringVars, additionalFilteringAssignExpressions, additionalFilteringExpressions, context);
additionalFilteringAssign = new AssignOperator(additionalFilteringVars, additionalFilteringAssignExpressions);
additionalFilteringAssign.getInputs().add(new MutableObject<>(topOp));
assign.getInputs().add(new MutableObject<>(additionalFilteringAssign));
} else {
assign.getInputs().add(new MutableObject<>(topOp));
}
Mutable<ILogicalExpression> varRef = new MutableObject<>(new VariableReferenceExpression(resVar));
ILogicalOperator leafOperator;
switch(stmt.getKind()) {
case Statement.Kind.INSERT:
leafOperator = translateInsert(targetDatasource, varRef, varRefsForLoading, additionalFilteringExpressions, assign, stmt);
break;
case Statement.Kind.UPSERT:
leafOperator = translateUpsert(targetDatasource, varRef, varRefsForLoading, additionalFilteringExpressions, assign, additionalFilteringField, unnestVar, topOp, exprs, resVar, additionalFilteringAssign, stmt);
break;
case Statement.Kind.DELETE:
leafOperator = translateDelete(targetDatasource, varRef, varRefsForLoading, additionalFilteringExpressions, assign);
break;
case Statement.Kind.CONNECT_FEED:
leafOperator = translateConnectFeed(targetDatasource, varRef, varRefsForLoading, additionalFilteringExpressions, assign);
break;
case Statement.Kind.SUBSCRIBE_FEED:
leafOperator = translateSubscribeFeed((CompiledSubscribeFeedStatement) stmt, targetDatasource, unnestVar, topOp, exprs, resVar, varRefsForLoading, varRef, assign, additionalFilteringField, additionalFilteringAssign, additionalFilteringExpressions);
break;
default:
throw new AlgebricksException("Unsupported statement kind " + stmt.getKind());
}
topOp = leafOperator;
}
globalPlanRoots.add(new MutableObject<>(topOp));
ILogicalPlan plan = new ALogicalPlanImpl(globalPlanRoots);
eliminateSharedOperatorReferenceForPlan(plan);
return plan;
}
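For reference, a minimal sketch (not part of the AsterixDB source; package paths assumed from the usual Algebricks layout) of the pattern this method ends with: wrap each root operator in a Mutable reference, collect the references in a list, and pass the list to ALogicalPlanImpl.

import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;

public class PlanWrappingSketch {
    // Wraps an already-built operator pipeline into a single-rooted logical plan.
    static ILogicalPlan wrapAsPlan(ILogicalOperator topOp) {
        List<Mutable<ILogicalOperator>> roots = new ArrayList<>();
        roots.add(new MutableObject<>(topOp));
        return new ALogicalPlanImpl(roots);
    }

    public static void main(String[] args) {
        // An EmptyTupleSourceOperator stands in for a real operator chain here.
        ILogicalPlan plan = wrapAsPlan(new EmptyTupleSourceOperator());
        System.out.println("plan roots: " + plan.getRoots().size());
    }
}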
Use of org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl in project asterixdb by apache.
The class LangExpressionToPlanTranslator, method constructSubplanOperatorForBranch.
/**
* Constructs a subplan operator for a branch in an if-else (or case) expression.
*
* @param inputOp,
* the input operator.
* @param selectExpr,
* the expression to select tuples that are processed by this branch.
* @param branchExpression,
* the expression to be evaluated in this branch.
* @return a pair of the constructed subplan operator and the output variable for the branch.
* @throws CompilationException
*/
protected Pair<ILogicalOperator, LogicalVariable> constructSubplanOperatorForBranch(ILogicalOperator inputOp, Mutable<ILogicalExpression> selectExpr, Expression branchExpression) throws CompilationException {
context.enterSubplan();
SubplanOperator subplanOp = new SubplanOperator();
subplanOp.getInputs().add(new MutableObject<>(inputOp));
Mutable<ILogicalOperator> nestedSource = new MutableObject<>(new NestedTupleSourceOperator(new MutableObject<>(subplanOp)));
SelectOperator select = new SelectOperator(selectExpr, false, null);
// The select operator cannot be moved up and down, otherwise it will cause typing issues (ASTERIXDB-1203).
OperatorPropertiesUtil.markMovable(select, false);
select.getInputs().add(nestedSource);
Pair<ILogicalOperator, LogicalVariable> pBranch = branchExpression.accept(this, new MutableObject<>(select));
LogicalVariable branchVar = context.newVar();
AggregateOperator aggOp = new AggregateOperator(Collections.singletonList(branchVar), Collections.singletonList(new MutableObject<>(new AggregateFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.LISTIFY), false, Collections.singletonList(new MutableObject<>(new VariableReferenceExpression(pBranch.second)))))));
aggOp.getInputs().add(new MutableObject<>(pBranch.first));
ILogicalPlan planForBranch = new ALogicalPlanImpl(new MutableObject<>(aggOp));
subplanOp.getNestedPlans().add(planForBranch);
context.exitSubplan();
return new Pair<>(subplanOp, branchVar);
}
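As a point of reference, a minimal, hedged sketch (not the translator itself; imports as in the sketch above, plus SubplanOperator, NestedTupleSourceOperator, SelectOperator, ILogicalExpression, and ConstantExpression from the corresponding Algebricks operator and expression packages) of how a plan built with ALogicalPlanImpl is attached as a nested plan of a SubplanOperator. An always-true SelectOperator over the nested tuple source stands in for the select/branch/LISTIFY pipeline constructed above.

SubplanOperator subplanOp = new SubplanOperator();
subplanOp.getInputs().add(new MutableObject<>(new EmptyTupleSourceOperator()));

// The nested pipeline starts from a NestedTupleSourceOperator that points back
// at the enclosing subplan operator.
Mutable<ILogicalOperator> nestedSource =
        new MutableObject<>(new NestedTupleSourceOperator(new MutableObject<>(subplanOp)));
Mutable<ILogicalExpression> condition = new MutableObject<>(ConstantExpression.TRUE);
SelectOperator select = new SelectOperator(condition, false, null);
select.getInputs().add(nestedSource);

// Single-root constructor of ALogicalPlanImpl; the result is registered as a
// nested plan of the subplan operator rather than as a top-level plan.
subplanOp.getNestedPlans().add(new ALogicalPlanImpl(new MutableObject<>(select)));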
Use of org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl in project asterixdb by apache.
The class AqlPlusExpressionToPlanTranslator, method translate.
public ILogicalPlan translate(List<Clause> clauses) throws AlgebricksException, CompilationException {
if (clauses == null) {
return null;
}
Mutable<ILogicalOperator> opRef = new MutableObject<ILogicalOperator>(new EmptyTupleSourceOperator());
Pair<ILogicalOperator, LogicalVariable> p = null;
for (Clause c : clauses) {
p = c.accept(this, opRef);
opRef = new MutableObject<ILogicalOperator>(p.first);
}
ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<Mutable<ILogicalOperator>>();
ILogicalOperator topOp = p.first;
globalPlanRoots.add(new MutableObject<ILogicalOperator>(topOp));
ILogicalPlan plan = new ALogicalPlanImpl(globalPlanRoots);
return plan;
}
Use of org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl in project asterixdb by apache.
The class LangExpressionToPlanTranslator, method translateLoad.
@Override
public ILogicalPlan translateLoad(ICompiledDmlStatement stmt) throws AlgebricksException {
CompiledLoadFromFileStatement clffs = (CompiledLoadFromFileStatement) stmt;
Dataset dataset = metadataProvider.findDataset(clffs.getDataverseName(), clffs.getDatasetName());
if (dataset == null) {
// This would never happen since we check for this in AqlTranslator
throw new AlgebricksException("Unable to load dataset " + clffs.getDatasetName() + " since it does not exist");
}
IAType itemType = metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
IAType metaItemType = metadataProvider.findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(), stmt.getDatasetName());
List<List<String>> partitionKeys = targetDatasource.getDataset().getPrimaryKeys();
if (dataset.hasMetaPart()) {
throw new AlgebricksException(dataset.getDatasetName() + ": load dataset is not supported on Datasets with Meta records");
}
LoadableDataSource lds;
try {
lds = new LoadableDataSource(dataset, itemType, metaItemType, clffs.getAdapter(), clffs.getProperties());
} catch (IOException e) {
throw new AlgebricksException(e);
}
// etsOp is a dummy input operator used to keep the compiler happy. It could be
// removed, but that would require fixing many rewrite rules that assume
// datasource-scan operators always have an input.
ILogicalOperator etsOp = new EmptyTupleSourceOperator();
// Add a logical variable for the record.
List<LogicalVariable> payloadVars = new ArrayList<>();
payloadVars.add(context.newVar());
// Create a scan operator and make the empty tuple source its input
DataSourceScanOperator dssOp = new DataSourceScanOperator(payloadVars, lds);
dssOp.getInputs().add(new MutableObject<>(etsOp));
ILogicalExpression payloadExpr = new VariableReferenceExpression(payloadVars.get(0));
Mutable<ILogicalExpression> payloadRef = new MutableObject<>(payloadExpr);
// Creating the assign to extract the PK out of the record
ArrayList<LogicalVariable> pkVars = new ArrayList<>();
ArrayList<Mutable<ILogicalExpression>> pkExprs = new ArrayList<>();
List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<>();
LogicalVariable payloadVar = payloadVars.get(0);
for (List<String> keyFieldName : partitionKeys) {
PlanTranslationUtil.prepareVarAndExpression(keyFieldName, payloadVar, pkVars, pkExprs, varRefsForLoading, context);
}
AssignOperator assign = new AssignOperator(pkVars, pkExprs);
assign.getInputs().add(new MutableObject<>(dssOp));
// If the input is pre-sorted, we set the ordering property explicitly in the assign
if (clffs.alreadySorted()) {
List<OrderColumn> orderColumns = new ArrayList<>();
for (int i = 0; i < pkVars.size(); ++i) {
orderColumns.add(new OrderColumn(pkVars.get(i), OrderKind.ASC));
}
assign.setExplicitOrderingProperty(new LocalOrderProperty(orderColumns));
}
List<String> additionalFilteringField = DatasetUtil.getFilterField(targetDatasource.getDataset());
List<LogicalVariable> additionalFilteringVars;
List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions;
List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
AssignOperator additionalFilteringAssign = null;
if (additionalFilteringField != null) {
additionalFilteringVars = new ArrayList<>();
additionalFilteringAssignExpressions = new ArrayList<>();
additionalFilteringExpressions = new ArrayList<>();
PlanTranslationUtil.prepareVarAndExpression(additionalFilteringField, payloadVar, additionalFilteringVars, additionalFilteringAssignExpressions, additionalFilteringExpressions, context);
additionalFilteringAssign = new AssignOperator(additionalFilteringVars, additionalFilteringAssignExpressions);
}
InsertDeleteUpsertOperator insertOp = new InsertDeleteUpsertOperator(targetDatasource, payloadRef, varRefsForLoading, InsertDeleteUpsertOperator.Kind.INSERT, true);
insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
if (additionalFilteringAssign != null) {
additionalFilteringAssign.getInputs().add(new MutableObject<>(assign));
insertOp.getInputs().add(new MutableObject<>(additionalFilteringAssign));
} else {
insertOp.getInputs().add(new MutableObject<>(assign));
}
ILogicalOperator leafOperator = new SinkOperator();
leafOperator.getInputs().add(new MutableObject<>(insertOp));
return new ALogicalPlanImpl(new MutableObject<>(leafOperator));
}
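A small, hedged sketch of the wiring convention used throughout this method (imports as in the first sketch, plus SinkOperator from the same logical-operator package): operators hold Mutable references to their inputs, so the pipeline is assembled bottom-up, and the last operator created, the sink, is what gets passed to the single-root ALogicalPlanImpl constructor.

// Placeholder pipeline: EmptyTupleSource -> Sink. In translateLoad the chain is
// EmptyTupleSource -> DataSourceScan -> Assign(PKs) -> [Assign(filter)] ->
// InsertDeleteUpsert -> Sink.
ILogicalOperator ets = new EmptyTupleSourceOperator();
ILogicalOperator sink = new SinkOperator();
sink.getInputs().add(new MutableObject<>(ets)); // input (child) reference, not parent
ILogicalPlan loadPlan = new ALogicalPlanImpl(new MutableObject<>(sink));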
Use of org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl in project asterixdb by apache.
The class InlineAllNtsInSubplanVisitor, method wrapLimitInGroupBy.
private Pair<ILogicalOperator, LogicalVariable> wrapLimitInGroupBy(ILogicalOperator op, LogicalVariable recordVar, Set<LogicalVariable> inputLiveVars) throws AlgebricksException {
GroupByOperator gbyOp = new GroupByOperator();
List<Pair<LogicalVariable, LogicalVariable>> keyVarNewVarPairs = new ArrayList<>();
for (LogicalVariable keyVar : correlatedKeyVars) {
// This visitor can only be applied to a nested logical plan inside a Subplan
// operator, where the correlated key variables form a candidate key that
// uniquely identifies a tuple coming out of the nested-tuple-source.
LogicalVariable newVar = context.newVar();
gbyOp.getGroupByList().add(new Pair<>(newVar, new MutableObject<>(new VariableReferenceExpression(keyVar))));
keyVarNewVarPairs.add(new Pair<>(keyVar, newVar));
}
// Creates an aggregate operator doing LISTIFY, as the root of the
// nested plan of the added group-by operator.
List<LogicalVariable> aggVarList = new ArrayList<LogicalVariable>();
List<Mutable<ILogicalExpression>> aggExprList = new ArrayList<Mutable<ILogicalExpression>>();
LogicalVariable aggVar = context.newVar();
List<Mutable<ILogicalExpression>> aggArgList = new ArrayList<>();
aggVarList.add(aggVar);
// Creates an aggregation function expression.
aggArgList.add(new MutableObject<>(new VariableReferenceExpression(recordVar)));
ILogicalExpression aggExpr = new AggregateFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.LISTIFY), false, aggArgList);
aggExprList.add(new MutableObject<>(aggExpr));
AggregateOperator aggOp = new AggregateOperator(aggVarList, aggExprList);
// Adds the original limit operator as the input operator to the added
// aggregate operator.
aggOp.getInputs().add(new MutableObject<>(op));
op.getInputs().clear();
ILogicalOperator currentOp = op;
if (!orderingExprs.isEmpty()) {
OrderOperator orderOp = new OrderOperator(cloneOrderingExpression(orderingExprs));
op.getInputs().add(new MutableObject<>(orderOp));
currentOp = orderOp;
}
// Adds a nested tuple source operator as the input operator to the
// limit operator.
NestedTupleSourceOperator nts = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(gbyOp));
currentOp.getInputs().add(new MutableObject<>(nts));
// Sets the root of the added nested plan to the aggregate operator.
ILogicalPlan nestedPlan = new ALogicalPlanImpl();
nestedPlan.getRoots().add(new MutableObject<>(aggOp));
// Sets the nested plan for the added group-by operator.
gbyOp.getNestedPlans().add(nestedPlan);
// Updates variable mapping for ancestor operators.
for (Pair<LogicalVariable, LogicalVariable> keyVarNewVar : keyVarNewVarPairs) {
updateInputToOutputVarMapping(keyVarNewVar.first, keyVarNewVar.second, false);
}
return new Pair<>(gbyOp, aggVar);
}
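Finally, a brief sketch contrasting the construction style used here, the no-argument ALogicalPlanImpl constructor followed by getRoots().add(...), with the single-root constructor used elsewhere on this page (imports as in the first sketch; an EmptyTupleSourceOperator stands in for the aggregate root).

ILogicalOperator root = new EmptyTupleSourceOperator();

// Style used in wrapLimitInGroupBy: create an empty plan, then add the root.
ILogicalPlan nestedPlan = new ALogicalPlanImpl();
nestedPlan.getRoots().add(new MutableObject<>(root));

// Equivalent single-root style used in the other methods above.
ILogicalPlan samePlan = new ALogicalPlanImpl(new MutableObject<>(root));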