Example 6 with ITupleReference

Use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.

Class LSMBTreeTuplesTest, method testLSMBTreeTuple:

private void testLSMBTreeTuple(ISerializerDeserializer[] maxFieldSerdes) throws HyracksDataException {
    // Create a tuple with the maximum number of fields; it is used later to check setFieldCount() on tuple references.
    ITypeTraits[] maxTypeTraits = SerdeUtils.serdesToTypeTraits(maxFieldSerdes);
    IFieldValueGenerator[] maxFieldGens = DataGenUtils.getFieldGensFromSerdes(maxFieldSerdes, rnd, false);
    // Generate a tuple with random field values.
    Object[] maxFields = new Object[maxFieldSerdes.length];
    for (int j = 0; j < maxFieldSerdes.length; j++) {
        maxFields[j] = maxFieldGens[j].next();
    }
    // Run test for varying number of fields and keys.
    for (int numKeyFields = 1; numKeyFields < maxFieldSerdes.length; numKeyFields++) {
        // Create tuples with varying number of fields, and try to interpret their bytes with the lsmBTreeTuple.
        for (int numFields = numKeyFields; numFields <= maxFieldSerdes.length; numFields++) {
            // Create and write tuple to bytes using an LSMBTreeTupleWriter.
            LSMBTreeTupleWriter maxMatterTupleWriter = new LSMBTreeTupleWriter(maxTypeTraits, numKeyFields, false);
            ITupleReference maxTuple = TupleUtils.createTuple(maxFieldSerdes, (Object[]) maxFields);
            ByteBuffer maxMatterBuf = writeTuple(maxTuple, maxMatterTupleWriter);
            // Tuple reference should work for both matter and antimatter tuples (doesn't matter which factory creates it).
            LSMBTreeTupleReference maxLsmBTreeTuple = (LSMBTreeTupleReference) maxMatterTupleWriter.createTupleReference();
            ISerializerDeserializer[] fieldSerdes = Arrays.copyOfRange(maxFieldSerdes, 0, numFields);
            ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
            IFieldValueGenerator[] fieldGens = DataGenUtils.getFieldGensFromSerdes(fieldSerdes, rnd, false);
            // Generate a tuple with random field values.
            Object[] fields = new Object[numFields];
            for (int j = 0; j < numFields; j++) {
                fields[j] = fieldGens[j].next();
            }
            // Create and write tuple to bytes using an LSMBTreeTupleWriter.
            ITupleReference tuple = TupleUtils.createTuple(fieldSerdes, (Object[]) fields);
            LSMBTreeTupleWriter matterTupleWriter = new LSMBTreeTupleWriter(typeTraits, numKeyFields, false);
            LSMBTreeTupleWriter antimatterTupleWriter = new LSMBTreeTupleWriter(typeTraits, numKeyFields, true);
            LSMBTreeCopyTupleWriter copyTupleWriter = new LSMBTreeCopyTupleWriter(typeTraits, numKeyFields);
            ByteBuffer matterBuf = writeTuple(tuple, matterTupleWriter);
            ByteBuffer antimatterBuf = writeTuple(tuple, antimatterTupleWriter);
            // The antimatter buf should only contain keys, sanity check the size.
            if (numFields != numKeyFields) {
                assertTrue(antimatterBuf.array().length < matterBuf.array().length);
            }
            // Tuple reference should work for both matter and antimatter tuples (doesn't matter which factory creates it).
            LSMBTreeTupleReference lsmBTreeTuple = (LSMBTreeTupleReference) matterTupleWriter.createTupleReference();
            // Repeat the block inside to test that repeated resetting to matter/antimatter tuples works.
            for (int r = 0; r < 4; r++) {
                // Check matter tuple with lsmBTreeTuple.
                lsmBTreeTuple.resetByTupleOffset(matterBuf.array(), 0);
                checkTuple(lsmBTreeTuple, numFields, false, fieldSerdes, fields);
                // Create a copy using copyTupleWriter, and verify again.
                ByteBuffer copyMatterBuf = writeTuple(lsmBTreeTuple, copyTupleWriter);
                lsmBTreeTuple.resetByTupleOffset(copyMatterBuf.array(), 0);
                checkTuple(lsmBTreeTuple, numFields, false, fieldSerdes, fields);
                // Check antimatter tuple with lsmBTreeTuple.
                lsmBTreeTuple.resetByTupleOffset(antimatterBuf.array(), 0);
                // Should only contain keys.
                checkTuple(lsmBTreeTuple, numKeyFields, true, fieldSerdes, fields);
                // Create a copy using copyTupleWriter, and verify again.
                ByteBuffer copyAntimatterBuf = writeTuple(lsmBTreeTuple, copyTupleWriter);
                lsmBTreeTuple.resetByTupleOffset(copyAntimatterBuf.array(), 0);
                // Should only contain keys.
                checkTuple(lsmBTreeTuple, numKeyFields, true, fieldSerdes, fields);
                // Check matter tuple with maxLsmBTreeTuple.
                // We should be able to manually set a prefix of the fields
                // (the passed type traits in the tuple factory's constructor).
                maxLsmBTreeTuple.setFieldCount(numFields);
                maxLsmBTreeTuple.resetByTupleOffset(matterBuf.array(), 0);
                checkTuple(maxLsmBTreeTuple, numFields, false, fieldSerdes, fields);
                // Check antimatter tuple with maxLsmBTreeTuple.
                maxLsmBTreeTuple.resetByTupleOffset(antimatterBuf.array(), 0);
                // Should only contain keys.
                checkTuple(maxLsmBTreeTuple, numKeyFields, true, fieldSerdes, fields);
                // Resetting maxLsmBTreeTuple should set its field count to
                // maxFieldSerdes.length, based on its type traits.
                maxLsmBTreeTuple.resetByTupleOffset(maxMatterBuf.array(), 0);
                checkTuple(maxLsmBTreeTuple, maxFieldSerdes.length, false, maxFieldSerdes, maxFields);
            }
        }
    }
}
Also used: ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits), IFieldValueGenerator (org.apache.hyracks.storage.am.common.datagen.IFieldValueGenerator), ByteBuffer (java.nio.ByteBuffer), ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer), ITupleReference (org.apache.hyracks.dataflow.common.data.accessors.ITupleReference)
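The test above exercises writeTuple and checkTuple helpers that are not shown in this excerpt. As a rough sketch of the read-back side, the hypothetical helper below (readFields is illustrative, not the test's actual code) deserializes the fields of any ITupleReference using only the accessor contract (getFieldData, getFieldStart, getFieldLength) together with the per-field serdes:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;

import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;

public class TupleFieldReader {

    // Deserialize every field of a tuple reference back into Java objects by
    // wrapping each field's byte region in a DataInput and handing it to the
    // matching serde.
    public static Object[] readFields(ITupleReference tuple, ISerializerDeserializer[] serdes)
            throws HyracksDataException {
        Object[] values = new Object[tuple.getFieldCount()];
        for (int i = 0; i < tuple.getFieldCount(); i++) {
            ByteArrayInputStream bais = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
                    tuple.getFieldLength(i));
            values[i] = serdes[i].deserialize(new DataInputStream(bais));
        }
        return values;
    }
}

A checkTuple-style assertion can then compare the returned objects against the expected field values.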

Example 7 with ITupleReference

Use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.

Class MetadataNode, method getExternalFiles:

@Override
public List<ExternalFile> getExternalFiles(JobId jobId, Dataset dataset) throws MetadataException, RemoteException {
    try {
        ITupleReference searchKey = createTuple(dataset.getDataverseName(), dataset.getDatasetName());
        ExternalFileTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getExternalFileTupleTranslator(false);
        IValueExtractor<ExternalFile> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
        List<ExternalFile> results = new ArrayList<>();
        searchIndex(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, searchKey, valueExtractor, results);
        return results;
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
}
Also used: MetadataEntityValueExtractor (org.apache.asterix.metadata.valueextractors.MetadataEntityValueExtractor), ExternalFileTupleTranslator (org.apache.asterix.metadata.entitytupletranslators.ExternalFileTupleTranslator), ITupleReference (org.apache.hyracks.dataflow.common.data.accessors.ITupleReference), ArrayList (java.util.ArrayList), ExternalFile (org.apache.asterix.external.indexing.ExternalFile), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)
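Examples 7 through 10 all follow the same lookup shape: build a search-key tuple, obtain the entity's tuple translator, wrap it in a MetadataEntityValueExtractor, and scan the corresponding metadata index; the examples below repeat it verbatim for other entities. A hypothetical refactoring that captures the shared shape could look like the sketch below. searchEntities is not an actual MetadataNode method, and the IMetadataIndex and IMetadataEntityTupleTranslator parameter types are assumptions inferred from the classes these methods already use.

// Hypothetical helper, not part of MetadataNode: a single generic lookup that
// getExternalFiles, getDataverseDatatypes, getDataset, and getDataversePolicies
// could delegate to. It is meant to live inside MetadataNode next to searchIndex.
private <T> List<T> searchEntities(JobId jobId, IMetadataIndex index, ITupleReference searchKey,
        IMetadataEntityTupleTranslator<T> tupleReaderWriter) throws MetadataException, RemoteException {
    try {
        IValueExtractor<T> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
        List<T> results = new ArrayList<>();
        searchIndex(jobId, index, searchKey, valueExtractor, results);
        return results;
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
}

With such a helper, getExternalFiles would reduce to building the key, fetching the translator, and delegating.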

Example 8 with ITupleReference

Use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.

Class MetadataNode, method getDataverseDatatypes:

private List<Datatype> getDataverseDatatypes(JobId jobId, String dataverseName) throws MetadataException, RemoteException {
    try {
        ITupleReference searchKey = createTuple(dataverseName);
        DatatypeTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this, false);
        IValueExtractor<Datatype> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
        List<Datatype> results = new ArrayList<>();
        searchIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, searchKey, valueExtractor, results);
        return results;
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
}
Also used: MetadataEntityValueExtractor (org.apache.asterix.metadata.valueextractors.MetadataEntityValueExtractor), DatatypeTupleTranslator (org.apache.asterix.metadata.entitytupletranslators.DatatypeTupleTranslator), ITupleReference (org.apache.hyracks.dataflow.common.data.accessors.ITupleReference), ArrayList (java.util.ArrayList), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), Datatype (org.apache.asterix.metadata.entities.Datatype)
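getDataverseDatatypes passes a single-field key, so the scan returns every datatype whose dataverseName matches, i.e. a prefix search. In Hyracks B-trees a prefix search is typically expressed as a RangePredicate whose low and high keys are the same partial tuple. A minimal sketch, assuming the caller already holds a MultiComparator built over just the prefix fields (how searchIndex actually constructs its predicate is not shown in this excerpt):

import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.btree.impls.RangePredicate;
import org.apache.hyracks.storage.common.MultiComparator;

public class PrefixSearchSketch {

    // Use the same partial key as both the inclusive low and high bound; the
    // comparator only covers the prefix fields, so every entry sharing that
    // prefix falls inside the range.
    public static RangePredicate prefixPredicate(ITupleReference prefixKey, MultiComparator prefixCmp) {
        return new RangePredicate(prefixKey, prefixKey, true, true, prefixCmp, prefixCmp);
    }
}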

Example 9 with ITupleReference

Use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.

Class MetadataNode, method getDataset:

@Override
public Dataset getDataset(JobId jobId, String dataverseName, String datasetName) throws MetadataException, RemoteException {
    try {
        ITupleReference searchKey = createTuple(dataverseName, datasetName);
        DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(false);
        List<Dataset> results = new ArrayList<>();
        IValueExtractor<Dataset> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
        searchIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, searchKey, valueExtractor, results);
        if (results.isEmpty()) {
            return null;
        }
        return results.get(0);
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
}
Also used: MetadataEntityValueExtractor (org.apache.asterix.metadata.valueextractors.MetadataEntityValueExtractor), DatasetTupleTranslator (org.apache.asterix.metadata.entitytupletranslators.DatasetTupleTranslator), ExtensionMetadataDataset (org.apache.asterix.metadata.api.ExtensionMetadataDataset), Dataset (org.apache.asterix.metadata.entities.Dataset), ITupleReference (org.apache.hyracks.dataflow.common.data.accessors.ITupleReference), ArrayList (java.util.ArrayList), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)
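The searchKey here is a two-field (dataverseName, datasetName) tuple produced by createTuple, whose implementation is not part of this excerpt. For illustration only, an equivalent string key can be assembled from plain Hyracks building blocks; the serde choice (UTF8StringSerializerDeserializer) is an assumption and may differ from what the metadata indexes actually use:

import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;

public class SearchKeySketch {

    // Serialize each string into the builder's buffer, closing every field
    // with addFieldEndOffset(), then point an ArrayTupleReference at the
    // builder's backing array and field-end offsets.
    public static ITupleReference createStringKey(String... keyFields) throws HyracksDataException {
        ISerializerDeserializer<String> serde = new UTF8StringSerializerDeserializer();
        ArrayTupleBuilder builder = new ArrayTupleBuilder(keyFields.length);
        builder.reset();
        for (String field : keyFields) {
            serde.serialize(field, builder.getDataOutput());
            builder.addFieldEndOffset();
        }
        ArrayTupleReference tuple = new ArrayTupleReference();
        tuple.reset(builder.getFieldEndOffsets(), builder.getByteArray());
        return tuple;
    }
}

For example, createStringKey("MyDataverse", "MyDataset") (hypothetical names) yields a two-field tuple that any ITupleReference-based search can consume.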

Example 10 with ITupleReference

Use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.

Class MetadataNode, method getDataversePolicies:

@Override
public List<FeedPolicyEntity> getDataversePolicies(JobId jobId, String dataverse) throws MetadataException, RemoteException {
    try {
        ITupleReference searchKey = createTuple(dataverse);
        FeedPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedPolicyTupleTranslator(false);
        IValueExtractor<FeedPolicyEntity> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
        List<FeedPolicyEntity> results = new ArrayList<>();
        searchIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, searchKey, valueExtractor, results);
        return results;
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
}
Also used: MetadataEntityValueExtractor (org.apache.asterix.metadata.valueextractors.MetadataEntityValueExtractor), FeedPolicyEntity (org.apache.asterix.metadata.entities.FeedPolicyEntity), ITupleReference (org.apache.hyracks.dataflow.common.data.accessors.ITupleReference), ArrayList (java.util.ArrayList), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), FeedPolicyTupleTranslator (org.apache.asterix.metadata.entitytupletranslators.FeedPolicyTupleTranslator)

Aggregations

ITupleReference (org.apache.hyracks.dataflow.common.data.accessors.ITupleReference): 149
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 80
ArrayList (java.util.ArrayList): 40
ACIDException (org.apache.asterix.common.exceptions.ACIDException): 31
MetadataEntityValueExtractor (org.apache.asterix.metadata.valueextractors.MetadataEntityValueExtractor): 26
ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer): 20
Test (org.junit.Test): 20
RangePredicate (org.apache.hyracks.storage.am.btree.impls.RangePredicate): 18
IIndexCursor (org.apache.hyracks.storage.common.IIndexCursor): 18
Dataset (org.apache.asterix.metadata.entities.Dataset): 10
MultiComparator (org.apache.hyracks.storage.common.MultiComparator): 10
CheckTuple (org.apache.hyracks.storage.am.common.CheckTuple): 8
ITreeIndexAccessor (org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor): 8
ITreeIndexCursor (org.apache.hyracks.storage.am.common.api.ITreeIndexCursor): 8
ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder): 7
ILSMDiskComponentBulkLoader (org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponentBulkLoader): 7
SearchPredicate (org.apache.hyracks.storage.am.rtree.impls.SearchPredicate): 7
ExtensionMetadataDataset (org.apache.asterix.metadata.api.ExtensionMetadataDataset): 6
Datatype (org.apache.asterix.metadata.entities.Datatype): 6
ArrayTupleReference (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference): 6