
Example 1 with InternalDatasetDetails

Use of org.apache.asterix.metadata.entities.InternalDatasetDetails in project asterixdb by apache.

From class LangExpressionToPlanTranslator, method translate:

public ILogicalPlan translate(Query expr, String outputDatasetName, ICompiledDmlStatement stmt, ILogicalOperator baseOp) throws AlgebricksException {
    MutableObject<ILogicalOperator> base = new MutableObject<>(new EmptyTupleSourceOperator());
    if (baseOp != null) {
        base = new MutableObject<>(baseOp);
    }
    Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, base);
    ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<>();
    ILogicalOperator topOp = p.first;
    List<LogicalVariable> liveVars = new ArrayList<>();
    VariableUtilities.getLiveVariables(topOp, liveVars);
    LogicalVariable unnestVar = liveVars.get(0);
    LogicalVariable resVar = unnestVar;
    if (outputDatasetName == null) {
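        // No target dataset was given: this is a plain query, so route the result to a
        // result-set sink via a DistributeResultOperator.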
        FileSplit outputFileSplit = metadataProvider.getOutputFile();
        if (outputFileSplit == null) {
            outputFileSplit = getDefaultOutputFileLocation(metadataProvider.getApplicationContext());
        }
        metadataProvider.setOutputFile(outputFileSplit);
        List<Mutable<ILogicalExpression>> writeExprList = new ArrayList<>(1);
        writeExprList.add(new MutableObject<>(new VariableReferenceExpression(resVar)));
        ResultSetSinkId rssId = new ResultSetSinkId(metadataProvider.getResultSetId());
        ResultSetDataSink sink = new ResultSetDataSink(rssId, null);
        DistributeResultOperator newTop = new DistributeResultOperator(writeExprList, sink);
        newTop.getInputs().add(new MutableObject<>(topOp));
        topOp = newTop;
        // Retrieve the Output RecordType (if any) and store it on
        // the DistributeResultOperator
        IAType outputRecordType = metadataProvider.findOutputRecordType();
        if (outputRecordType != null) {
            topOp.getAnnotations().put("output-record-type", outputRecordType);
        }
    } else {
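        // A target dataset was given: this is a DML statement (insert/upsert/delete or a
        // feed statement), so the plan ends in a dataset write instead of a result sink.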
        /*
         * Add the collection-to-sequence function right before the project,
         * because a dataset only accepts non-collection records.
         */
        LogicalVariable seqVar = context.newVar();
        /*
         * This assign adds a marker function collection-to-sequence: if the input is a
         * singleton collection, unnest it; otherwise do nothing.
         */
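        // Plan shape before the splice: PROJECT[resVar] <- child
        // Plan shape after the splice:  PROJECT[seqVar] <- ASSIGN[seqVar := collection-to-sequence(resVar)] <- child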
        AssignOperator assignCollectionToSequence = new AssignOperator(seqVar,
                new MutableObject<>(new ScalarFunctionCallExpression(
                        FunctionUtil.getFunctionInfo(BuiltinFunctions.COLLECTION_TO_SEQUENCE),
                        new MutableObject<>(new VariableReferenceExpression(resVar)))));
        assignCollectionToSequence.getInputs().add(new MutableObject<>(topOp.getInputs().get(0).getValue()));
        topOp.getInputs().get(0).setValue(assignCollectionToSequence);
        ProjectOperator projectOperator = (ProjectOperator) topOp;
        projectOperator.getVariables().set(0, seqVar);
        resVar = seqVar;
        DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(), stmt.getDatasetName());
        List<Integer> keySourceIndicator = ((InternalDatasetDetails) targetDatasource.getDataset().getDatasetDetails()).getKeySourceIndicator();
        ArrayList<LogicalVariable> vars = new ArrayList<>();
        ArrayList<Mutable<ILogicalExpression>> exprs = new ArrayList<>();
        List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<>();
        List<List<String>> partitionKeys = targetDatasource.getDataset().getPrimaryKeys();
        int numOfPrimaryKeys = partitionKeys.size();
        for (int i = 0; i < numOfPrimaryKeys; i++) {
            if (keySourceIndicator == null || keySourceIndicator.get(i).intValue() == 0) {
                // record part
                PlanTranslationUtil.prepareVarAndExpression(partitionKeys.get(i), resVar, vars, exprs, varRefsForLoading, context);
            } else {
                // meta part
                PlanTranslationUtil.prepareMetaKeyAccessExpression(partitionKeys.get(i), unnestVar, exprs, vars, varRefsForLoading, context);
            }
        }
        AssignOperator assign = new AssignOperator(vars, exprs);
        List<String> additionalFilteringField = DatasetUtil.getFilterField(targetDatasource.getDataset());
        List<LogicalVariable> additionalFilteringVars;
        List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions;
        List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
        AssignOperator additionalFilteringAssign = null;
        if (additionalFilteringField != null) {
            additionalFilteringVars = new ArrayList<>();
            additionalFilteringAssignExpressions = new ArrayList<>();
            additionalFilteringExpressions = new ArrayList<>();
            PlanTranslationUtil.prepareVarAndExpression(additionalFilteringField, resVar, additionalFilteringVars, additionalFilteringAssignExpressions, additionalFilteringExpressions, context);
            additionalFilteringAssign = new AssignOperator(additionalFilteringVars, additionalFilteringAssignExpressions);
            additionalFilteringAssign.getInputs().add(new MutableObject<>(topOp));
            assign.getInputs().add(new MutableObject<>(additionalFilteringAssign));
        } else {
            assign.getInputs().add(new MutableObject<>(topOp));
        }
        Mutable<ILogicalExpression> varRef = new MutableObject<>(new VariableReferenceExpression(resVar));
        ILogicalOperator leafOperator;
        switch(stmt.getKind()) {
            case Statement.Kind.INSERT:
                leafOperator = translateInsert(targetDatasource, varRef, varRefsForLoading, additionalFilteringExpressions, assign, stmt);
                break;
            case Statement.Kind.UPSERT:
                leafOperator = translateUpsert(targetDatasource, varRef, varRefsForLoading, additionalFilteringExpressions, assign, additionalFilteringField, unnestVar, topOp, exprs, resVar, additionalFilteringAssign, stmt);
                break;
            case Statement.Kind.DELETE:
                leafOperator = translateDelete(targetDatasource, varRef, varRefsForLoading, additionalFilteringExpressions, assign);
                break;
            case Statement.Kind.CONNECT_FEED:
                leafOperator = translateConnectFeed(targetDatasource, varRef, varRefsForLoading, additionalFilteringExpressions, assign);
                break;
            case Statement.Kind.SUBSCRIBE_FEED:
                leafOperator = translateSubscribeFeed((CompiledSubscribeFeedStatement) stmt, targetDatasource, unnestVar, topOp, exprs, resVar, varRefsForLoading, varRef, assign, additionalFilteringField, additionalFilteringAssign, additionalFilteringExpressions);
                break;
            default:
                throw new AlgebricksException("Unsupported statement kind " + stmt.getKind());
        }
        topOp = leafOperator;
    }
    globalPlanRoots.add(new MutableObject<>(topOp));
    ILogicalPlan plan = new ALogicalPlanImpl(globalPlanRoots);
    eliminateSharedOperatorReferenceForPlan(plan);
    return plan;
}
Also used: ArrayList (java.util.ArrayList), List (java.util.List), DatasetDataSource (org.apache.asterix.metadata.declared.DatasetDataSource), AString (org.apache.asterix.om.base.AString), FileSplit (org.apache.hyracks.api.io.FileSplit), ManagedFileSplit (org.apache.hyracks.api.io.ManagedFileSplit), DistributeResultOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator), ALogicalPlanImpl (org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl), ResultSetSinkId (org.apache.asterix.metadata.declared.ResultSetSinkId), EmptyTupleSourceOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator), MutableObject (org.apache.commons.lang3.mutable.MutableObject), ScalarFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression), LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable), ResultSetDataSink (org.apache.asterix.metadata.declared.ResultSetDataSink), ProjectOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator), ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator), InternalDatasetDetails (org.apache.asterix.metadata.entities.InternalDatasetDetails), AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), AssignOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator), Mutable (org.apache.commons.lang3.mutable.Mutable), ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression), VariableReferenceExpression (org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression), CompiledSubscribeFeedStatement (org.apache.asterix.translator.CompiledStatements.CompiledSubscribeFeedStatement), ILogicalPlan (org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan), IAType (org.apache.asterix.om.types.IAType)

Example 2 with InternalDatasetDetails

Use of org.apache.asterix.metadata.entities.InternalDatasetDetails in project asterixdb by apache.

From class EquivalenceClassUtils, method addEquivalenceClassesForPrimaryIndexAccess:

/**
 * Adds equivalence classes for primary index accesses, including unnest-map for
 * primary index access and data source scan through primary index:
 * one equivalence class between a primary key variable and a record field-access expression.
 *
 * @param operator
 *            the primary index access operator.
 * @param indexSearchVars
 *            the returned variables from primary index access. The last variable
 *            is the record variable.
 * @param recordType
 *            the record type of an index payload record.
 * @param metaRecordType
 *            the type of a meta record associated with an index payload record.
 * @param dataset
 *            the accessed dataset.
 * @param context
 *            the optimization context.
 * @throws AlgebricksException
 */
@SuppressWarnings("unchecked")
public static void addEquivalenceClassesForPrimaryIndexAccess(ILogicalOperator operator, List<LogicalVariable> indexSearchVars, ARecordType recordType, ARecordType metaRecordType, Dataset dataset, IOptimizationContext context) throws AlgebricksException {
    if (dataset.getDatasetDetails().getDatasetType() != DatasetType.INTERNAL) {
        return;
    }
    InternalDatasetDetails datasetDetails = (InternalDatasetDetails) dataset.getDatasetDetails();
    List<List<String>> primaryKey = datasetDetails.getPrimaryKey();
    Map<String, Integer> fieldNameToIndexMap = new HashMap<String, Integer>();
    String[] fieldNames = recordType.getFieldNames();
    for (int fieldIndex = 0; fieldIndex < fieldNames.length; ++fieldIndex) {
        fieldNameToIndexMap.put(fieldNames[fieldIndex], fieldIndex);
    }
    boolean hasMeta = dataset.hasMetaPart();
    Map<String, Integer> metaFieldNameToIndexMap = new HashMap<>();
    if (hasMeta) {
        String[] metaFieldNames = metaRecordType.getFieldNames();
        for (int metaFieldIndex = 0; metaFieldIndex < metaFieldNames.length; ++metaFieldIndex) {
            metaFieldNameToIndexMap.put(metaFieldNames[metaFieldIndex], metaFieldIndex);
        }
    }
    List<Integer> keySourceIndicators = datasetDetails.getKeySourceIndicator();
    LogicalVariable recordVar = hasMeta ? indexSearchVars.get(indexSearchVars.size() - 2) : indexSearchVars.get(indexSearchVars.size() - 1);
    LogicalVariable metaRecordVar = hasMeta ? indexSearchVars.get(indexSearchVars.size() - 1) : null;
    for (int pkIndex = 0; pkIndex < primaryKey.size(); ++pkIndex) {
        LogicalVariable referredRecordVar = recordVar;
        String pkFieldName = primaryKey.get(pkIndex).get(0);
        int source = keySourceIndicators.get(pkIndex);
        Integer fieldIndexInRecord;
        if (source == 0) {
            // The field is from the main record.
            fieldIndexInRecord = fieldNameToIndexMap.get(pkFieldName);
        } else {
            // The field is from the auxiliary meta record.
            referredRecordVar = metaRecordVar;
            fieldIndexInRecord = metaFieldNameToIndexMap.get(pkFieldName);
        }
        LogicalVariable var = indexSearchVars.get(pkIndex);
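        // Build field-access-by-index(<record variable>, <position of the key field in its record type>);
        // registering it in an equivalence class with the primary key variable lets the
        // optimizer substitute one for the other.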
        ILogicalExpression expr = new ScalarFunctionCallExpression(
                FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_INDEX),
                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(referredRecordVar)),
                new MutableObject<ILogicalExpression>(new ConstantExpression(
                        new AsterixConstantValue(new AInt32(fieldIndexInRecord)))));
        EquivalenceClass equivClass = new EquivalenceClass(Collections.singletonList(var), var, Collections.singletonList(expr));
        Map<LogicalVariable, EquivalenceClass> equivalenceMap = context.getEquivalenceClassMap(operator);
        if (equivalenceMap == null) {
            equivalenceMap = new HashMap<LogicalVariable, EquivalenceClass>();
            context.putEquivalenceClassMap(operator, equivalenceMap);
        }
        equivalenceMap.put(var, equivClass);
    }
}
Also used: LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable), HashMap (java.util.HashMap), InternalDatasetDetails (org.apache.asterix.metadata.entities.InternalDatasetDetails), ConstantExpression (org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression), AInt32 (org.apache.asterix.om.base.AInt32), ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression), AsterixConstantValue (org.apache.asterix.om.constants.AsterixConstantValue), VariableReferenceExpression (org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression), ArrayList (java.util.ArrayList), List (java.util.List), EquivalenceClass (org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass), ScalarFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression)
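Examples 1 and 2 both branch on getKeySourceIndicator() to decide where each primary key field lives. The following sketch isolates that convention; the helper class is hypothetical and uses only getters that appear in the snippets on this page:

// Hypothetical helper (not part of asterixdb) illustrating the keySourceIndicator
// convention: indicator 0, or a null indicator list, means the key field lives in the
// main record; indicator 1 means it lives in the auxiliary meta record.
final class KeySourceHelper {
    static boolean isMetaKey(org.apache.asterix.metadata.entities.InternalDatasetDetails details, int pkIndex) {
        java.util.List<Integer> indicators = details.getKeySourceIndicator();
        return indicators != null && indicators.get(pkIndex).intValue() == 1;
    }
}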

Example 3 with InternalDatasetDetails

Use of org.apache.asterix.metadata.entities.InternalDatasetDetails in project asterixdb by apache.

From class IndexTupleTranslatorTest, method test:

@Test
public void test() throws MetadataException, IOException {
    Integer[] indicators = { 0, 1, null };
    for (Integer indicator : indicators) {
        Map<String, String> compactionPolicyProperties = new HashMap<>();
        compactionPolicyProperties.put("max-mergable-component-size", "1073741824");
        compactionPolicyProperties.put("max-tolerance-component-count", "3");
        InternalDatasetDetails details = new InternalDatasetDetails(FileStructure.BTREE, PartitioningStrategy.HASH, Collections.singletonList(Collections.singletonList("row_id")), Collections.singletonList(Collections.singletonList("row_id")), indicator == null ? null : Collections.singletonList(indicator), Collections.singletonList(BuiltinType.AINT64), false, Collections.emptyList(), false);
        Dataset dataset = new Dataset("test", "d1", "foo", "LogType", "CB", "MetaType", "DEFAULT_NG_ALL_NODES", "prefix", compactionPolicyProperties, details, Collections.emptyMap(), DatasetType.INTERNAL, 115, 0);
        Index index = new Index("test", "d1", "i1", IndexType.BTREE, Collections.singletonList(Collections.singletonList("row_id")), indicator == null ? null : Collections.singletonList(indicator), Collections.singletonList(BuiltinType.AINT64), -1, false, false, 0);
        MetadataNode mockMetadataNode = mock(MetadataNode.class);
        when(mockMetadataNode.getDatatype(any(), anyString(), anyString())).thenReturn(new Datatype("test", "d1", new ARecordType("", new String[] { "row_id" }, new IAType[] { BuiltinType.AINT64 }, true), true));
        when(mockMetadataNode.getDataset(any(), anyString(), anyString())).thenReturn(dataset);
        IndexTupleTranslator idxTranslator = new IndexTupleTranslator(null, mockMetadataNode, true);
        ITupleReference tuple = idxTranslator.getTupleFromMetadataEntity(index);
        Index deserializedIndex = idxTranslator.getMetadataEntityFromTuple(tuple);
        if (indicator == null) {
            Assert.assertEquals(Collections.singletonList(new Integer(0)), deserializedIndex.getKeyFieldSourceIndicators());
        } else {
            Assert.assertEquals(index.getKeyFieldSourceIndicators(), deserializedIndex.getKeyFieldSourceIndicators());
        }
    }
}
Also used: HashMap (java.util.HashMap), Dataset (org.apache.asterix.metadata.entities.Dataset), InternalDatasetDetails (org.apache.asterix.metadata.entities.InternalDatasetDetails), Index (org.apache.asterix.metadata.entities.Index), Matchers.anyString (org.mockito.Matchers.anyString), MetadataNode (org.apache.asterix.metadata.MetadataNode), Datatype (org.apache.asterix.metadata.entities.Datatype), ITupleReference (org.apache.hyracks.dataflow.common.data.accessors.ITupleReference), ARecordType (org.apache.asterix.om.types.ARecordType), Test (org.junit.Test)
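The nine-argument InternalDatasetDetails constructor above is easy to misread. Here is the same call with each positional argument labeled; the role names are inferred from the getters used elsewhere on this page (getPrimaryKey, getKeySourceIndicator, getPrimaryKeyType), so treat them as assumptions rather than authoritative parameter names:

// Annotated sketch of the constructor call from the test; argument roles are inferred.
InternalDatasetDetails details = new InternalDatasetDetails(
        FileStructure.BTREE,                                              // file structure
        PartitioningStrategy.HASH,                                        // partitioning strategy
        Collections.singletonList(Collections.singletonList("row_id")),   // partitioning key
        Collections.singletonList(Collections.singletonList("row_id")),   // primary key
        indicator == null ? null : Collections.singletonList(indicator),  // key source indicators (0 = record, 1 = meta)
        Collections.singletonList(BuiltinType.AINT64),                    // primary key types
        false,                                                            // autogenerated key?
        Collections.emptyList(),                                          // filter field(s)
        false);                                                           // temporary dataset?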

Example 4 with InternalDatasetDetails

Use of org.apache.asterix.metadata.entities.InternalDatasetDetails in project asterixdb by apache.

From class MetadataNode, method addDataset:

@Override
public void addDataset(JobId jobId, Dataset dataset) throws MetadataException, RemoteException {
    try {
        // Insert into the 'dataset' dataset.
        DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(true);
        ITupleReference datasetTuple = tupleReaderWriter.getTupleFromMetadataEntity(dataset);
        insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
            // Add the primary index for the dataset.
            InternalDatasetDetails id = (InternalDatasetDetails) dataset.getDatasetDetails();
            Index primaryIndex = new Index(dataset.getDataverseName(), dataset.getDatasetName(), dataset.getDatasetName(), IndexType.BTREE, id.getPrimaryKey(), id.getKeySourceIndicator(), id.getPrimaryKeyType(), false, true, dataset.getPendingOp());
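            // Note: the primary index reuses the dataset's own name, and its key fields,
            // key source indicators, and key types come straight from InternalDatasetDetails.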
            addIndex(jobId, primaryIndex);
        }
    } catch (HyracksDataException e) {
        if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
            throw new MetadataException("A dataset with this name " + dataset.getDatasetName() + " already exists in dataverse '" + dataset.getDataverseName() + "'.", e);
        } else {
            throw new MetadataException(e);
        }
    } catch (ACIDException e) {
        throw new MetadataException(e);
    }
}
Also used: DatasetTupleTranslator (org.apache.asterix.metadata.entitytupletranslators.DatasetTupleTranslator), InternalDatasetDetails (org.apache.asterix.metadata.entities.InternalDatasetDetails), ITupleReference (org.apache.hyracks.dataflow.common.data.accessors.ITupleReference), IMetadataIndex (org.apache.asterix.metadata.api.IMetadataIndex), Index (org.apache.asterix.metadata.entities.Index), AbstractLSMIndex (org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndex), ILSMIndex (org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex), IIndex (org.apache.hyracks.storage.common.IIndex), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), ACIDException (org.apache.asterix.common.exceptions.ACIDException)

Example 5 with InternalDatasetDetails

Use of org.apache.asterix.metadata.entities.InternalDatasetDetails in project asterixdb by apache.

From class LogMarkerTest, method testInsertWithSnapshot:

@Test
public void testInsertWithSnapshot() {
    try {
        TestNodeController nc = new TestNodeController(null, false);
        nc.init();
        StorageComponentProvider storageManager = new StorageComponentProvider();
        List<List<String>> partitioningKeys = new ArrayList<>();
        partitioningKeys.add(Collections.singletonList("key"));
        Dataset dataset = new Dataset(DATAVERSE_NAME, DATASET_NAME, DATAVERSE_NAME, DATA_TYPE_NAME, NODE_GROUP_NAME, null, null, new InternalDatasetDetails(null, PartitioningStrategy.HASH, partitioningKeys, null, null, null, false, null, false), null, DatasetType.INTERNAL, DATASET_ID, 0);
        try {
            nc.createPrimaryIndex(dataset, KEY_TYPES, RECORD_TYPE, META_TYPE, new NoMergePolicyFactory(), null, null, storageManager, KEY_INDEXES, KEY_INDICATORS_LIST);
            IHyracksTaskContext ctx = nc.createTestContext(true);
            nc.newJobId();
            ITransactionContext txnCtx = nc.getTransactionManager().getTransactionContext(nc.getTxnJobId(), true);
            LSMInsertDeleteOperatorNodePushable insertOp = nc.getInsertPipeline(ctx, dataset, KEY_TYPES, RECORD_TYPE, META_TYPE, new NoMergePolicyFactory(), null, null, KEY_INDEXES, KEY_INDICATORS_LIST, storageManager).getLeft();
            insertOp.open();
            TupleGenerator tupleGenerator = new TupleGenerator(RECORD_TYPE, META_TYPE, KEY_INDEXES, KEY_INDICATORS, RECORD_GEN_FUNCTION, UNIQUE_RECORD_FIELDS, META_GEN_FUNCTION, UNIQUE_META_FIELDS);
            VSizeFrame frame = new VSizeFrame(ctx);
            VSizeFrame marker = new VSizeFrame(ctx);
            FrameTupleAppender tupleAppender = new FrameTupleAppender(frame);
            long markerId = 0L;
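            // Every SNAPSHOT_SIZE records, publish a marker frame through the task's shared
            // map and flush the appender so the marker is logged alongside the records that
            // precede it.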
            for (int j = 0; j < NUM_OF_RECORDS; j++) {
                if (j % SNAPSHOT_SIZE == 0) {
                    marker.reset();
                    marker.getBuffer().put(MessagingFrameTupleAppender.MARKER_MESSAGE);
                    marker.getBuffer().putLong(markerId);
                    marker.getBuffer().flip();
                    markerId++;
                    TaskUtil.putInSharedMap(HyracksConstants.KEY_MESSAGE, marker, ctx);
                    tupleAppender.flush(insertOp);
                }
                ITupleReference tuple = tupleGenerator.next();
                DataflowUtils.addTupleToFrame(tupleAppender, tuple, insertOp);
            }
            if (tupleAppender.getTupleCount() > 0) {
                tupleAppender.write(insertOp, true);
            }
            insertOp.close();
            nc.getTransactionManager().completedTransaction(txnCtx, DatasetId.NULL, -1, true);
            IIndexDataflowHelper dataflowHelper = nc.getPrimaryIndexDataflowHelper(dataset, KEY_TYPES, RECORD_TYPE, META_TYPE, new NoMergePolicyFactory(), null, null, storageManager, KEY_INDEXES, KEY_INDICATORS_LIST);
            dataflowHelper.open();
            LSMBTree btree = (LSMBTree) dataflowHelper.getIndexInstance();
            LongPointable longPointable = LongPointable.FACTORY.createPointable();
            ComponentMetadataUtil.get(btree, ComponentMetadataUtil.MARKER_LSN_KEY, longPointable);
            long lsn = longPointable.getLong();
            int numOfMarkers = 0;
            LogReader logReader = (LogReader) nc.getTransactionSubsystem().getLogManager().getLogReader(false);
            long expectedMarkerId = markerId - 1;
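            // Walk the marker chain backwards: each marker log record stores the LSN of the
            // previous marker, so marker ids must come back in descending order.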
            while (lsn >= 0) {
                numOfMarkers++;
                ILogRecord logRecord = logReader.read(lsn);
                lsn = logRecord.getPreviousMarkerLSN();
                long logMarkerId = logRecord.getMarker().getLong();
                Assert.assertEquals(expectedMarkerId, logMarkerId);
                expectedMarkerId--;
            }
            logReader.close();
            dataflowHelper.close();
            Assert.assertEquals(markerId, numOfMarkers);
            nc.newJobId();
            TestTupleCounterFrameWriter countOp = create(nc.getSearchOutputDesc(KEY_TYPES, RECORD_TYPE, META_TYPE), Collections.emptyList(), Collections.emptyList(), false);
            IPushRuntime emptyTupleOp = nc.getFullScanPipeline(countOp, ctx, dataset, KEY_TYPES, RECORD_TYPE, META_TYPE, new NoMergePolicyFactory(), null, null, KEY_INDEXES, KEY_INDICATORS_LIST, storageManager);
            emptyTupleOp.open();
            emptyTupleOp.close();
            Assert.assertEquals(NUM_OF_RECORDS, countOp.getCount());
        } finally {
            nc.deInit();
        }
    } catch (Throwable e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Also used: LSMInsertDeleteOperatorNodePushable (org.apache.asterix.common.dataflow.LSMInsertDeleteOperatorNodePushable), IIndexDataflowHelper (org.apache.hyracks.storage.am.common.api.IIndexDataflowHelper), IPushRuntime (org.apache.hyracks.algebricks.runtime.base.IPushRuntime), ArrayList (java.util.ArrayList), TestTupleCounterFrameWriter (org.apache.asterix.app.data.gen.TestTupleCounterFrameWriter), NoMergePolicyFactory (org.apache.hyracks.storage.am.lsm.common.impls.NoMergePolicyFactory), MessagingFrameTupleAppender (org.apache.hyracks.dataflow.common.io.MessagingFrameTupleAppender), FrameTupleAppender (org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender), LongPointable (org.apache.hyracks.data.std.primitive.LongPointable), List (java.util.List), ILogRecord (org.apache.asterix.common.transactions.ILogRecord), Dataset (org.apache.asterix.metadata.entities.Dataset), InternalDatasetDetails (org.apache.asterix.metadata.entities.InternalDatasetDetails), ITransactionContext (org.apache.asterix.common.transactions.ITransactionContext), TupleGenerator (org.apache.asterix.app.data.gen.TupleGenerator), StorageComponentProvider (org.apache.asterix.file.StorageComponentProvider), VSizeFrame (org.apache.hyracks.api.comm.VSizeFrame), IHyracksTaskContext (org.apache.hyracks.api.context.IHyracksTaskContext), LSMBTree (org.apache.hyracks.storage.am.lsm.btree.impls.LSMBTree), TestNodeController (org.apache.asterix.app.bootstrap.TestNodeController), ITupleReference (org.apache.hyracks.dataflow.common.data.accessors.ITupleReference), LogReader (org.apache.asterix.transaction.management.service.logging.LogReader), Test (org.junit.Test)

Aggregations

InternalDatasetDetails (org.apache.asterix.metadata.entities.InternalDatasetDetails): 14 uses
Dataset (org.apache.asterix.metadata.entities.Dataset): 8 uses
ArrayList (java.util.ArrayList): 7 uses
List (java.util.List): 7 uses
HashMap (java.util.HashMap): 6 uses
ITupleReference (org.apache.hyracks.dataflow.common.data.accessors.ITupleReference): 5 uses
AString (org.apache.asterix.om.base.AString): 4 uses
ARecordType (org.apache.asterix.om.types.ARecordType): 4 uses
IAType (org.apache.asterix.om.types.IAType): 4 uses
Test (org.junit.Test): 4 uses
IDatasetDetails (org.apache.asterix.metadata.IDatasetDetails): 3 uses
DatasetDataSource (org.apache.asterix.metadata.declared.DatasetDataSource): 3 uses
Index (org.apache.asterix.metadata.entities.Index): 3 uses
AInt32 (org.apache.asterix.om.base.AInt32): 3 uses
ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression): 3 uses
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable): 3 uses
ScalarFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression): 3 uses
VariableReferenceExpression (org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression): 3 uses
Date (java.util.Date): 2 uses
CommitOperator (org.apache.asterix.algebra.operators.CommitOperator): 2 uses