Search in sources:

Example 1 with EmptyTupleSourceOperator

Use of org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator in project asterixdb by apache.

In class OperatorManipulationUtil, the method ntsToEts:

// Transforms all NestedTupleSource operators to EmptyTupleSource operators
public static void ntsToEts(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
    AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
    if (op.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
        EmptyTupleSourceOperator ets = new EmptyTupleSourceOperator();
        context.computeAndSetTypeEnvironmentForOperator(ets);
        opRef.setValue(ets);
    } else {
        for (Mutable<ILogicalOperator> i : opRef.getValue().getInputs()) {
            ntsToEts(i, context);
        }
    }
}
Also used:
AbstractLogicalOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator)
ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator)
EmptyTupleSourceOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator)
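
A hedged sketch, not part of OperatorManipulationUtil: the same NestedTupleSource-to-EmptyTupleSource rewrite written iteratively over an explicit worklist (java.util.Deque/ArrayDeque), which avoids deep recursion on very tall plans. It keeps the behavior above, including not visiting the inputs of an operator once it has been replaced.

public static void ntsToEtsIterative(Mutable<ILogicalOperator> rootRef, IOptimizationContext context) throws AlgebricksException {
    Deque<Mutable<ILogicalOperator>> worklist = new ArrayDeque<>();
    worklist.push(rootRef);
    while (!worklist.isEmpty()) {
        Mutable<ILogicalOperator> opRef = worklist.pop();
        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
        if (op.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
            // Replace the NestedTupleSource with a fresh EmptyTupleSource and compute its type environment.
            EmptyTupleSourceOperator ets = new EmptyTupleSourceOperator();
            context.computeAndSetTypeEnvironmentForOperator(ets);
            opRef.setValue(ets);
        } else {
            // Only operators that were not replaced contribute their inputs to the worklist.
            for (Mutable<ILogicalOperator> inputRef : op.getInputs()) {
                worklist.push(inputRef);
            }
        }
    }
}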

Example 2 with EmptyTupleSourceOperator

Use of org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator in project asterixdb by apache.

In class RequiredCapacityVisitorTest, the method testParallelJoin:

@Test
public void testParallelJoin() throws AlgebricksException {
    IClusterCapacity clusterCapacity = new ClusterCapacity();
    RequiredCapacityVisitor visitor = makeComputationCapacityVisitor(PARALLELISM, clusterCapacity);
    // Constructs a join query plan.
    InnerJoinOperator join = makeJoinOperator(AbstractLogicalOperator.ExecutionMode.PARTITIONED);
    // Left child plan of the join.
    ExchangeOperator leftChildExchange = new ExchangeOperator();
    leftChildExchange.setExecutionMode(AbstractLogicalOperator.ExecutionMode.PARTITIONED);
    leftChildExchange.setPhysicalOperator(new HashPartitionExchangePOperator(Collections.emptyList(), null));
    InnerJoinOperator leftChild = makeJoinOperator(AbstractLogicalOperator.ExecutionMode.PARTITIONED);
    join.getInputs().add(new MutableObject<>(leftChildExchange));
    leftChildExchange.getInputs().add(new MutableObject<>(leftChild));
    EmptyTupleSourceOperator ets = new EmptyTupleSourceOperator();
    ets.setExecutionMode(AbstractLogicalOperator.ExecutionMode.PARTITIONED);
    leftChild.getInputs().add(new MutableObject<>(ets));
    leftChild.getInputs().add(new MutableObject<>(ets));
    // Right child plan of the join.
    ExchangeOperator rightChildExchange = new ExchangeOperator();
    rightChildExchange.setExecutionMode(AbstractLogicalOperator.ExecutionMode.PARTITIONED);
    rightChildExchange.setPhysicalOperator(new HashPartitionExchangePOperator(Collections.emptyList(), null));
    GroupByOperator rightChild = makeGroupByOperator(AbstractLogicalOperator.ExecutionMode.LOCAL);
    join.getInputs().add(new MutableObject<>(rightChildExchange));
    rightChildExchange.getInputs().add(new MutableObject<>(rightChild));
    rightChild.getInputs().add(new MutableObject<>(ets));
    // Verifies the calculated cluster capacity requirement for the test query plan.
    join.accept(visitor, null);
    Assert.assertTrue(clusterCapacity.getAggregatedCores() == PARALLELISM);
    Assert.assertTrue(clusterCapacity.getAggregatedMemoryByteSize() == 3 * MEMORY_BUDGET * PARALLELISM + 2 * 2L * PARALLELISM * PARALLELISM * FRAME_SIZE + 3 * FRAME_SIZE * PARALLELISM);
}
Also used:
IClusterCapacity (org.apache.hyracks.api.job.resource.IClusterCapacity)
GroupByOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator)
ClusterCapacity (org.apache.hyracks.api.job.resource.ClusterCapacity)
HashPartitionExchangePOperator (org.apache.hyracks.algebricks.core.algebra.operators.physical.HashPartitionExchangePOperator)
ExchangeOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator)
InnerJoinOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator)
EmptyTupleSourceOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator)
Test (org.junit.Test)
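
The test builds the plan by wiring each parent to its child through an exchange by hand. A hedged helper sketch, not part of RequiredCapacityVisitorTest, that factors out the recurring pattern using only the constructors already shown above; with it, the left subtree becomes connectThroughHashPartitionExchange(join, leftChild) and the right one connectThroughHashPartitionExchange(join, rightChild).

private static ExchangeOperator connectThroughHashPartitionExchange(AbstractLogicalOperator parent, AbstractLogicalOperator child) {
    ExchangeOperator exchange = new ExchangeOperator();
    exchange.setExecutionMode(AbstractLogicalOperator.ExecutionMode.PARTITIONED);
    // Empty partitioning columns and a null node domain, exactly as in the test above.
    exchange.setPhysicalOperator(new HashPartitionExchangePOperator(Collections.emptyList(), null));
    parent.getInputs().add(new MutableObject<>(exchange));
    exchange.getInputs().add(new MutableObject<>(child));
    return exchange;
}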

Example 3 with EmptyTupleSourceOperator

Use of org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator in project asterixdb by apache.

In class RequiredCapacityVisitorTest, the method testUnPartitionedJoin:

@Test
public void testUnPartitionedJoin() throws AlgebricksException {
    IClusterCapacity clusterCapacity = new ClusterCapacity();
    RequiredCapacityVisitor visitor = makeComputationCapacityVisitor(PARALLELISM, clusterCapacity);
    // Constructs a join query plan.
    InnerJoinOperator join = makeJoinOperator(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
    // Left child plan of the join.
    ExchangeOperator leftChildExchange = new ExchangeOperator();
    leftChildExchange.setExecutionMode(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
    leftChildExchange.setPhysicalOperator(new OneToOneExchangePOperator());
    InnerJoinOperator leftChild = makeJoinOperator(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
    join.getInputs().add(new MutableObject<>(leftChildExchange));
    leftChildExchange.getInputs().add(new MutableObject<>(leftChild));
    EmptyTupleSourceOperator ets = new EmptyTupleSourceOperator();
    ets.setExecutionMode(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
    leftChild.getInputs().add(new MutableObject<>(ets));
    leftChild.getInputs().add(new MutableObject<>(ets));
    // Right child plan of the join.
    ExchangeOperator rightChildExchange = new ExchangeOperator();
    rightChildExchange.setExecutionMode(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
    rightChildExchange.setPhysicalOperator(new OneToOneExchangePOperator());
    GroupByOperator rightChild = makeGroupByOperator(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
    join.getInputs().add(new MutableObject<>(rightChildExchange));
    rightChildExchange.getInputs().add(new MutableObject<>(rightChild));
    rightChild.getInputs().add(new MutableObject<>(ets));
    // Verifies the calculated cluster capacity requirement for the test query plan.
    join.accept(visitor, null);
    Assert.assertTrue(clusterCapacity.getAggregatedCores() == 1);
    Assert.assertTrue(clusterCapacity.getAggregatedMemoryByteSize() == 3 * MEMORY_BUDGET + 5L * FRAME_SIZE);
}
Also used:
IClusterCapacity (org.apache.hyracks.api.job.resource.IClusterCapacity)
GroupByOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator)
ClusterCapacity (org.apache.hyracks.api.job.resource.ClusterCapacity)
OneToOneExchangePOperator (org.apache.hyracks.algebricks.core.algebra.operators.physical.OneToOneExchangePOperator)
ExchangeOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator)
InnerJoinOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator)
EmptyTupleSourceOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator)
Test (org.junit.Test)
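
A hedged sketch of one more case in the same style, assuming access to the test's existing helpers makeComputationCapacityVisitor and makeGroupByOperator and its PARALLELISM constant: a single unpartitioned group-by over an empty tuple source should be satisfiable with one core. Since the exact memory figure depends on the visitor's per-operator budgets, the sketch only checks that some memory is accounted for.

@Test
public void testUnPartitionedGroupBy() throws AlgebricksException {
    IClusterCapacity clusterCapacity = new ClusterCapacity();
    RequiredCapacityVisitor visitor = makeComputationCapacityVisitor(PARALLELISM, clusterCapacity);
    // A trivial plan: group-by over an EmptyTupleSource, everything unpartitioned.
    GroupByOperator groupBy = makeGroupByOperator(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
    EmptyTupleSourceOperator ets = new EmptyTupleSourceOperator();
    ets.setExecutionMode(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
    groupBy.getInputs().add(new MutableObject<>(ets));
    groupBy.accept(visitor, null);
    Assert.assertTrue(clusterCapacity.getAggregatedCores() == 1);
    Assert.assertTrue(clusterCapacity.getAggregatedMemoryByteSize() > 0);
}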

Example 4 with EmptyTupleSourceOperator

Use of org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator in project asterixdb by apache.

In class LangExpressionToPlanTranslator, the method translate:

public ILogicalPlan translate(Query expr, String outputDatasetName, ICompiledDmlStatement stmt, ILogicalOperator baseOp) throws AlgebricksException {
    MutableObject<ILogicalOperator> base = new MutableObject<>(new EmptyTupleSourceOperator());
    if (baseOp != null) {
        base = new MutableObject<>(baseOp);
    }
    Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, base);
    ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<>();
    ILogicalOperator topOp = p.first;
    List<LogicalVariable> liveVars = new ArrayList<>();
    VariableUtilities.getLiveVariables(topOp, liveVars);
    LogicalVariable unnestVar = liveVars.get(0);
    LogicalVariable resVar = unnestVar;
    if (outputDatasetName == null) {
        FileSplit outputFileSplit = metadataProvider.getOutputFile();
        if (outputFileSplit == null) {
            outputFileSplit = getDefaultOutputFileLocation(metadataProvider.getApplicationContext());
        }
        metadataProvider.setOutputFile(outputFileSplit);
        List<Mutable<ILogicalExpression>> writeExprList = new ArrayList<>(1);
        writeExprList.add(new MutableObject<>(new VariableReferenceExpression(resVar)));
        ResultSetSinkId rssId = new ResultSetSinkId(metadataProvider.getResultSetId());
        ResultSetDataSink sink = new ResultSetDataSink(rssId, null);
        DistributeResultOperator newTop = new DistributeResultOperator(writeExprList, sink);
        newTop.getInputs().add(new MutableObject<>(topOp));
        topOp = newTop;
        // Retrieve the Output RecordType (if any) and store it on
        // the DistributeResultOperator
        IAType outputRecordType = metadataProvider.findOutputRecordType();
        if (outputRecordType != null) {
            topOp.getAnnotations().put("output-record-type", outputRecordType);
        }
    } else {
        /**
         * Add the collection-to-sequence right before the project,
         * because datasets only accept non-collection records.
         */
        LogicalVariable seqVar = context.newVar();
        /**
         * This assign adds a marker function collection-to-sequence: if the input is a singleton collection, unnest
         * it; otherwise do nothing.
         */
        AssignOperator assignCollectionToSequence = new AssignOperator(seqVar, new MutableObject<>(new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.COLLECTION_TO_SEQUENCE), new MutableObject<>(new VariableReferenceExpression(resVar)))));
        assignCollectionToSequence.getInputs().add(new MutableObject<>(topOp.getInputs().get(0).getValue()));
        topOp.getInputs().get(0).setValue(assignCollectionToSequence);
        ProjectOperator projectOperator = (ProjectOperator) topOp;
        projectOperator.getVariables().set(0, seqVar);
        resVar = seqVar;
        DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(), stmt.getDatasetName());
        List<Integer> keySourceIndicator = ((InternalDatasetDetails) targetDatasource.getDataset().getDatasetDetails()).getKeySourceIndicator();
        ArrayList<LogicalVariable> vars = new ArrayList<>();
        ArrayList<Mutable<ILogicalExpression>> exprs = new ArrayList<>();
        List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<>();
        List<List<String>> partitionKeys = targetDatasource.getDataset().getPrimaryKeys();
        int numOfPrimaryKeys = partitionKeys.size();
        for (int i = 0; i < numOfPrimaryKeys; i++) {
            if (keySourceIndicator == null || keySourceIndicator.get(i).intValue() == 0) {
                // record part
                PlanTranslationUtil.prepareVarAndExpression(partitionKeys.get(i), resVar, vars, exprs, varRefsForLoading, context);
            } else {
                // meta part
                PlanTranslationUtil.prepareMetaKeyAccessExpression(partitionKeys.get(i), unnestVar, exprs, vars, varRefsForLoading, context);
            }
        }
        AssignOperator assign = new AssignOperator(vars, exprs);
        List<String> additionalFilteringField = DatasetUtil.getFilterField(targetDatasource.getDataset());
        List<LogicalVariable> additionalFilteringVars;
        List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions;
        List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
        AssignOperator additionalFilteringAssign = null;
        if (additionalFilteringField != null) {
            additionalFilteringVars = new ArrayList<>();
            additionalFilteringAssignExpressions = new ArrayList<>();
            additionalFilteringExpressions = new ArrayList<>();
            PlanTranslationUtil.prepareVarAndExpression(additionalFilteringField, resVar, additionalFilteringVars, additionalFilteringAssignExpressions, additionalFilteringExpressions, context);
            additionalFilteringAssign = new AssignOperator(additionalFilteringVars, additionalFilteringAssignExpressions);
            additionalFilteringAssign.getInputs().add(new MutableObject<>(topOp));
            assign.getInputs().add(new MutableObject<>(additionalFilteringAssign));
        } else {
            assign.getInputs().add(new MutableObject<>(topOp));
        }
        Mutable<ILogicalExpression> varRef = new MutableObject<>(new VariableReferenceExpression(resVar));
        ILogicalOperator leafOperator;
        switch(stmt.getKind()) {
            case Statement.Kind.INSERT:
                leafOperator = translateInsert(targetDatasource, varRef, varRefsForLoading, additionalFilteringExpressions, assign, stmt);
                break;
            case Statement.Kind.UPSERT:
                leafOperator = translateUpsert(targetDatasource, varRef, varRefsForLoading, additionalFilteringExpressions, assign, additionalFilteringField, unnestVar, topOp, exprs, resVar, additionalFilteringAssign, stmt);
                break;
            case Statement.Kind.DELETE:
                leafOperator = translateDelete(targetDatasource, varRef, varRefsForLoading, additionalFilteringExpressions, assign);
                break;
            case Statement.Kind.CONNECT_FEED:
                leafOperator = translateConnectFeed(targetDatasource, varRef, varRefsForLoading, additionalFilteringExpressions, assign);
                break;
            case Statement.Kind.SUBSCRIBE_FEED:
                leafOperator = translateSubscribeFeed((CompiledSubscribeFeedStatement) stmt, targetDatasource, unnestVar, topOp, exprs, resVar, varRefsForLoading, varRef, assign, additionalFilteringField, additionalFilteringAssign, additionalFilteringExpressions);
                break;
            default:
                throw new AlgebricksException("Unsupported statement kind " + stmt.getKind());
        }
        topOp = leafOperator;
    }
    globalPlanRoots.add(new MutableObject<>(topOp));
    ILogicalPlan plan = new ALogicalPlanImpl(globalPlanRoots);
    eliminateSharedOperatorReferenceForPlan(plan);
    return plan;
}
Also used:
ArrayList (java.util.ArrayList)
DatasetDataSource (org.apache.asterix.metadata.declared.DatasetDataSource)
AString (org.apache.asterix.om.base.AString)
FileSplit (org.apache.hyracks.api.io.FileSplit)
ManagedFileSplit (org.apache.hyracks.api.io.ManagedFileSplit)
DistributeResultOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator)
ALogicalPlanImpl (org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl)
List (java.util.List)
ResultSetSinkId (org.apache.asterix.metadata.declared.ResultSetSinkId)
EmptyTupleSourceOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator)
MutableObject (org.apache.commons.lang3.mutable.MutableObject)
ScalarFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression)
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable)
ResultSetDataSink (org.apache.asterix.metadata.declared.ResultSetDataSink)
ProjectOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator)
ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator)
InternalDatasetDetails (org.apache.asterix.metadata.entities.InternalDatasetDetails)
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)
AssignOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator)
Mutable (org.apache.commons.lang3.mutable.Mutable)
ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression)
VariableReferenceExpression (org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression)
CompiledSubscribeFeedStatement (org.apache.asterix.translator.CompiledStatements.CompiledSubscribeFeedStatement)
ILogicalPlan (org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan)
IAType (org.apache.asterix.om.types.IAType)
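
When outputDatasetName is null, the method above always finishes the plan with a DistributeResultOperator writing to a ResultSetDataSink. A hedged sketch, not a method of LangExpressionToPlanTranslator, that isolates that wiring using only the constructors shown in the method:

private static DistributeResultOperator makeDistributeResult(LogicalVariable resVar, ResultSetSinkId rssId, ILogicalOperator input) {
    // A single write expression: a reference to the variable carrying the query result.
    List<Mutable<ILogicalExpression>> writeExprList = new ArrayList<>(1);
    writeExprList.add(new MutableObject<>(new VariableReferenceExpression(resVar)));
    // The sink routes the result to the result set identified by rssId; the null second
    // argument mirrors the call in the translator above.
    ResultSetDataSink sink = new ResultSetDataSink(rssId, null);
    DistributeResultOperator distribute = new DistributeResultOperator(writeExprList, sink);
    distribute.getInputs().add(new MutableObject<>(input));
    return distribute;
}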

Example 5 with EmptyTupleSourceOperator

Use of org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator in project asterixdb by apache.

In class AqlPlusExpressionToPlanTranslator, the method translate:

public ILogicalPlan translate(List<Clause> clauses) throws AlgebricksException, CompilationException {
    if (clauses == null) {
        return null;
    }
    Mutable<ILogicalOperator> opRef = new MutableObject<ILogicalOperator>(new EmptyTupleSourceOperator());
    Pair<ILogicalOperator, LogicalVariable> p = null;
    for (Clause c : clauses) {
        p = c.accept(this, opRef);
        opRef = new MutableObject<ILogicalOperator>(p.first);
    }
    ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<Mutable<ILogicalOperator>>();
    ILogicalOperator topOp = p.first;
    globalPlanRoots.add(new MutableObject<ILogicalOperator>(topOp));
    ILogicalPlan plan = new ALogicalPlanImpl(globalPlanRoots);
    return plan;
}
Also used:
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable)
ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator)
ArrayList (java.util.ArrayList)
Mutable (org.apache.commons.lang3.mutable.Mutable)
ALogicalPlanImpl (org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl)
ILogicalPlan (org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan)
MetaVariableClause (org.apache.asterix.lang.aql.clause.MetaVariableClause)
JoinClause (org.apache.asterix.lang.aql.clause.JoinClause)
Clause (org.apache.asterix.lang.common.base.Clause)
EmptyTupleSourceOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator)
MutableObject (org.apache.commons.lang3.mutable.MutableObject)
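
The loop threads a single Mutable<ILogicalOperator> through the clauses: the operator produced by one clause becomes the input reference handed to the next, and p.first is dereferenced after the loop, so the method assumes a non-empty clause list. A hedged variant, not the project's implementation, that makes the assumption explicit by falling back to the initial EmptyTupleSourceOperator root:

public ILogicalPlan translateGuarded(List<Clause> clauses) throws AlgebricksException, CompilationException {
    if (clauses == null) {
        return null;
    }
    Mutable<ILogicalOperator> opRef = new MutableObject<>(new EmptyTupleSourceOperator());
    // Falls back to the empty tuple source when the clause list is empty.
    ILogicalOperator topOp = opRef.getValue();
    for (Clause c : clauses) {
        Pair<ILogicalOperator, LogicalVariable> p = c.accept(this, opRef);
        topOp = p.first;
        opRef = new MutableObject<>(topOp);
    }
    ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<>();
    globalPlanRoots.add(new MutableObject<>(topOp));
    return new ALogicalPlanImpl(globalPlanRoots);
}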

Aggregations

EmptyTupleSourceOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator) 12
ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator) 8
Mutable (org.apache.commons.lang3.mutable.Mutable) 6
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) 6
ArrayList (java.util.ArrayList) 5
InnerJoinOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator) 5
MutableObject (org.apache.commons.lang3.mutable.MutableObject) 4
ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression) 4
IAType (org.apache.asterix.om.types.IAType) 3
ILogicalPlan (org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan) 3
VariableReferenceExpression (org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression) 3
AbstractLogicalOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator) 3
ALogicalPlanImpl (org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl) 3
HashSet (java.util.HashSet) 2
List (java.util.List) 2
DatasetDataSource (org.apache.asterix.metadata.declared.DatasetDataSource) 2
AString (org.apache.asterix.om.base.AString) 2
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) 2
ScalarFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression) 2
AssignOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator) 2