Use of org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator in project asterixdb by apache.
The class LangExpressionToPlanTranslator, method translate.
public ILogicalPlan translate(Query expr, String outputDatasetName, ICompiledDmlStatement stmt,
        ILogicalOperator baseOp) throws AlgebricksException {
    MutableObject<ILogicalOperator> base = new MutableObject<>(new EmptyTupleSourceOperator());
    if (baseOp != null) {
        base = new MutableObject<>(baseOp);
    }
    Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, base);
    ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<>();
    ILogicalOperator topOp = p.first;
    List<LogicalVariable> liveVars = new ArrayList<>();
    VariableUtilities.getLiveVariables(topOp, liveVars);
    LogicalVariable unnestVar = liveVars.get(0);
    LogicalVariable resVar = unnestVar;
    if (outputDatasetName == null) {
        FileSplit outputFileSplit = metadataProvider.getOutputFile();
        if (outputFileSplit == null) {
            outputFileSplit = getDefaultOutputFileLocation(metadataProvider.getApplicationContext());
        }
        metadataProvider.setOutputFile(outputFileSplit);
        List<Mutable<ILogicalExpression>> writeExprList = new ArrayList<>(1);
        writeExprList.add(new MutableObject<>(new VariableReferenceExpression(resVar)));
        ResultSetSinkId rssId = new ResultSetSinkId(metadataProvider.getResultSetId());
        ResultSetDataSink sink = new ResultSetDataSink(rssId, null);
        DistributeResultOperator newTop = new DistributeResultOperator(writeExprList, sink);
        newTop.getInputs().add(new MutableObject<>(topOp));
        topOp = newTop;
        // Retrieve the output record type (if any) and store it on
        // the DistributeResultOperator.
        IAType outputRecordType = metadataProvider.findOutputRecordType();
        if (outputRecordType != null) {
            topOp.getAnnotations().put("output-record-type", outputRecordType);
        }
    } else {
        /**
         * Add the collection-to-sequence right before the project,
         * because a dataset only accepts non-collection records.
         */
        LogicalVariable seqVar = context.newVar();
        /**
         * This assign adds a marker function collection-to-sequence: if the input is a singleton collection,
         * unnest it; otherwise do nothing.
         */
        AssignOperator assignCollectionToSequence = new AssignOperator(seqVar,
                new MutableObject<>(new ScalarFunctionCallExpression(
                        FunctionUtil.getFunctionInfo(BuiltinFunctions.COLLECTION_TO_SEQUENCE),
                        new MutableObject<>(new VariableReferenceExpression(resVar)))));
        assignCollectionToSequence.getInputs().add(new MutableObject<>(topOp.getInputs().get(0).getValue()));
        topOp.getInputs().get(0).setValue(assignCollectionToSequence);
        ProjectOperator projectOperator = (ProjectOperator) topOp;
        projectOperator.getVariables().set(0, seqVar);
        resVar = seqVar;
        DatasetDataSource targetDatasource =
                validateDatasetInfo(metadataProvider, stmt.getDataverseName(), stmt.getDatasetName());
        List<Integer> keySourceIndicator = ((InternalDatasetDetails) targetDatasource.getDataset()
                .getDatasetDetails()).getKeySourceIndicator();
        ArrayList<LogicalVariable> vars = new ArrayList<>();
        ArrayList<Mutable<ILogicalExpression>> exprs = new ArrayList<>();
        List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<>();
        List<List<String>> partitionKeys = targetDatasource.getDataset().getPrimaryKeys();
        int numOfPrimaryKeys = partitionKeys.size();
        for (int i = 0; i < numOfPrimaryKeys; i++) {
            if (keySourceIndicator == null || keySourceIndicator.get(i).intValue() == 0) {
                // record part
                PlanTranslationUtil.prepareVarAndExpression(partitionKeys.get(i), resVar, vars, exprs,
                        varRefsForLoading, context);
            } else {
                // meta part
                PlanTranslationUtil.prepareMetaKeyAccessExpression(partitionKeys.get(i), unnestVar, exprs, vars,
                        varRefsForLoading, context);
            }
        }
        AssignOperator assign = new AssignOperator(vars, exprs);
        List<String> additionalFilteringField = DatasetUtil.getFilterField(targetDatasource.getDataset());
        List<LogicalVariable> additionalFilteringVars;
        List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions;
        List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
        AssignOperator additionalFilteringAssign = null;
        if (additionalFilteringField != null) {
            additionalFilteringVars = new ArrayList<>();
            additionalFilteringAssignExpressions = new ArrayList<>();
            additionalFilteringExpressions = new ArrayList<>();
            PlanTranslationUtil.prepareVarAndExpression(additionalFilteringField, resVar, additionalFilteringVars,
                    additionalFilteringAssignExpressions, additionalFilteringExpressions, context);
            additionalFilteringAssign =
                    new AssignOperator(additionalFilteringVars, additionalFilteringAssignExpressions);
            additionalFilteringAssign.getInputs().add(new MutableObject<>(topOp));
            assign.getInputs().add(new MutableObject<>(additionalFilteringAssign));
        } else {
            assign.getInputs().add(new MutableObject<>(topOp));
        }
        Mutable<ILogicalExpression> varRef = new MutableObject<>(new VariableReferenceExpression(resVar));
        ILogicalOperator leafOperator;
        switch (stmt.getKind()) {
            case Statement.Kind.INSERT:
                leafOperator = translateInsert(targetDatasource, varRef, varRefsForLoading,
                        additionalFilteringExpressions, assign, stmt);
                break;
            case Statement.Kind.UPSERT:
                leafOperator = translateUpsert(targetDatasource, varRef, varRefsForLoading,
                        additionalFilteringExpressions, assign, additionalFilteringField, unnestVar, topOp, exprs,
                        resVar, additionalFilteringAssign, stmt);
                break;
            case Statement.Kind.DELETE:
                leafOperator = translateDelete(targetDatasource, varRef, varRefsForLoading,
                        additionalFilteringExpressions, assign);
                break;
            case Statement.Kind.CONNECT_FEED:
                leafOperator = translateConnectFeed(targetDatasource, varRef, varRefsForLoading,
                        additionalFilteringExpressions, assign);
                break;
            case Statement.Kind.SUBSCRIBE_FEED:
                leafOperator = translateSubscribeFeed((CompiledSubscribeFeedStatement) stmt, targetDatasource,
                        unnestVar, topOp, exprs, resVar, varRefsForLoading, varRef, assign,
                        additionalFilteringField, additionalFilteringAssign, additionalFilteringExpressions);
                break;
            default:
                throw new AlgebricksException("Unsupported statement kind " + stmt.getKind());
        }
        topOp = leafOperator;
    }
    globalPlanRoots.add(new MutableObject<>(topOp));
    ILogicalPlan plan = new ALogicalPlanImpl(globalPlanRoots);
    eliminateSharedOperatorReferenceForPlan(plan);
    return plan;
}
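In the outputDatasetName == null branch above, the DistributeResultOperator is what routes the query's result variable to the result set sink. The following is a minimal sketch, not part of the translator, that isolates just that wiring; the helper name is hypothetical, it reuses only classes that appear in the snippet, and it assumes that metadataProvider.getResultSetId() yields a Hyracks ResultSetId.

// Hypothetical helper (illustration only): place a DistributeResultOperator that
// ships a single variable to a result set sink on top of an existing plan root.
private static DistributeResultOperator distributeVariable(LogicalVariable resVar, ILogicalOperator planRoot,
        ResultSetId resultSetId) {
    List<Mutable<ILogicalExpression>> writeExprList = new ArrayList<>(1);
    writeExprList.add(new MutableObject<>(new VariableReferenceExpression(resVar)));
    ResultSetDataSink sink = new ResultSetDataSink(new ResultSetSinkId(resultSetId), null);
    DistributeResultOperator distributeResult = new DistributeResultOperator(writeExprList, sink);
    distributeResult.getInputs().add(new MutableObject<>(planRoot));
    return distributeResult;
}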
Use of org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator in project asterixdb by apache.
The class LangExpressionToPlanTranslator, method processReturningExpression.
// Stitches the translated operators for the returning expression into the query plan.
private ILogicalOperator processReturningExpression(ILogicalOperator inputOperator,
        InsertDeleteUpsertOperator insertOp, CompiledInsertStatement compiledInsert) throws AlgebricksException {
    Expression returnExpression = compiledInsert.getReturnExpression();
    if (returnExpression == null) {
        return inputOperator;
    }
    ILogicalOperator rootOperator = inputOperator;
    // Makes the id of the insert var point to the record variable.
    context.newVarFromExpression(compiledInsert.getVar());
    context.setVar(compiledInsert.getVar(),
            ((VariableReferenceExpression) insertOp.getPayloadExpression().getValue()).getVariableReference());
    Pair<ILogicalExpression, Mutable<ILogicalOperator>> p =
            langExprToAlgExpression(returnExpression, new MutableObject<>(rootOperator));
    // Adds an assign operator for the returning expression.
    LogicalVariable resultVar = context.newVar();
    AssignOperator assignOperator = new AssignOperator(resultVar, new MutableObject<>(p.first));
    assignOperator.getInputs().add(p.second);
    // Adds a distribute result operator.
    List<Mutable<ILogicalExpression>> expressions = new ArrayList<>();
    expressions.add(new MutableObject<>(new VariableReferenceExpression(resultVar)));
    ResultSetSinkId rssId = new ResultSetSinkId(metadataProvider.getResultSetId());
    ResultSetDataSink sink = new ResultSetDataSink(rssId, null);
    rootOperator = new DistributeResultOperator(expressions, sink);
    rootOperator.getInputs().add(new MutableObject<>(assignOperator));
    return rootOperator;
}
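When a RETURNING expression is present, the method above leaves a DISTRIBUTE_RESULT operator sitting on an ASSIGN that evaluates the returning expression. A hedged shape-check sketch follows; the variables inputOp, insertOp, and compiledInsert are assumed to come from the surrounding insert translation and are not defined on this page.

// Assumed calling context from the insert/upsert translation path.
ILogicalOperator root = processReturningExpression(inputOp, insertOp, compiledInsert);
// Expected plan shape: DISTRIBUTE_RESULT -> ASSIGN -> <insert pipeline>.
assert ((AbstractLogicalOperator) root).getOperatorTag() == LogicalOperatorTag.DISTRIBUTE_RESULT;
ILogicalOperator below = root.getInputs().get(0).getValue();
assert ((AbstractLogicalOperator) below).getOperatorTag() == LogicalOperatorTag.ASSIGN;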
Use of org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator in project asterixdb by apache.
The class DistributeResultPOperator, method getRequiredPropertiesForChildren.
@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
        IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
    DistributeResultOperator write = (DistributeResultOperator) op;
    IDataSink sink = write.getDataSink();
    IPartitioningProperty pp = sink.getPartitioningProperty();
    StructuralPropertiesVector[] r =
            new StructuralPropertiesVector[] { new StructuralPropertiesVector(pp, null) };
    return new PhysicalRequirements(r, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
Use of org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator in project asterixdb by apache.
The class IsomorphismOperatorVisitor, method visitDistributeResultOperator.
@Override
public Boolean visitDistributeResultOperator(DistributeResultOperator op, ILogicalOperator arg)
        throws AlgebricksException {
    AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
    if (aop.getOperatorTag() != LogicalOperatorTag.DISTRIBUTE_RESULT) {
        return Boolean.FALSE;
    }
    DistributeResultOperator writeOpArg = (DistributeResultOperator) copyAndSubstituteVar(op, arg);
    boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getSchema(), writeOpArg.getSchema());
    return isomorphic;
}
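For usage context, a hedged sketch: this visitor is normally reached through the IsomorphismUtilities helper in the same module (an assumption, since that class is not shown on this page), and per the unordered schema comparison above, two DISTRIBUTE_RESULT roots that distribute the same variables in any order compare as isomorphic.

// Assumption: IsomorphismUtilities.isOperatorIsomorphic dispatches to the visitor above.
// opA and opB are plan roots built elsewhere that both end in DISTRIBUTE_RESULT
// over the same set of variables, possibly listed in a different order.
boolean same = IsomorphismUtilities.isOperatorIsomorphic(opA, opB);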
Use of org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator in project asterixdb by apache.
The class DistributeResultPOperator, method contributeRuntimeOperator.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    DistributeResultOperator resultOp = (DistributeResultOperator) op;
    IMetadataProvider mp = context.getMetadataProvider();
    JobSpecification spec = builder.getJobSpec();
    int[] columns = new int[resultOp.getExpressions().size()];
    int i = 0;
    for (Mutable<ILogicalExpression> exprRef : resultOp.getExpressions()) {
        ILogicalExpression expr = exprRef.getValue();
        if (expr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
            throw new NotImplementedException("Only writing variable expressions is supported.");
        }
        VariableReferenceExpression varRef = (VariableReferenceExpression) expr;
        LogicalVariable v = varRef.getVariableReference();
        columns[i++] = inputSchemas[0].findVariable(v);
    }
    RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(
            context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
    IPrinterFactory[] pf =
            JobGenHelper.mkPrinterFactories(inputSchemas[0], context.getTypeEnvironment(op), context, columns);
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints =
            mp.getResultHandleRuntime(resultOp.getDataSink(), columns, pf, inputDesc, true, spec);
    builder.contributeHyracksOperator(resultOp, runtimeAndConstraints.first);
    ILogicalOperator src = resultOp.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, resultOp, 0);
}
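The loop above is the only logical-to-physical mapping step in this method: each distributed expression must be a variable that already appears as a column of the single input schema, and the rest of the job generation is delegated to the metadata provider. A hypothetical extraction of that step (not a method of DistributeResultPOperator, shown only to make the contract explicit) could look like this:

// Hypothetical helper: map each distributed variable to its column position in
// the input schema; non-variable expressions are rejected, as in the snippet.
private static int[] variableColumns(List<Mutable<ILogicalExpression>> exprs, IOperatorSchema inputSchema) {
    int[] columns = new int[exprs.size()];
    int i = 0;
    for (Mutable<ILogicalExpression> exprRef : exprs) {
        ILogicalExpression expr = exprRef.getValue();
        if (expr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
            throw new NotImplementedException("Only writing variable expressions is supported.");
        }
        LogicalVariable v = ((VariableReferenceExpression) expr).getVariableReference();
        columns[i++] = inputSchema.findVariable(v);
    }
    return columns;
}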