Use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.
The class LSMBTreeTuplesTest, method testLSMBTreeTuple.
private void testLSMBTreeTuple(ISerializerDeserializer[] maxFieldSerdes) throws HyracksDataException {
    // Create a tuple with the max number of fields for checking setFieldCount() of tuple references later.
    ITypeTraits[] maxTypeTraits = SerdeUtils.serdesToTypeTraits(maxFieldSerdes);
    IFieldValueGenerator[] maxFieldGens = DataGenUtils.getFieldGensFromSerdes(maxFieldSerdes, rnd, false);
    // Generate a tuple with random field values.
    Object[] maxFields = new Object[maxFieldSerdes.length];
    for (int j = 0; j < maxFieldSerdes.length; j++) {
        maxFields[j] = maxFieldGens[j].next();
    }
    // Run the test for a varying number of fields and keys.
    for (int numKeyFields = 1; numKeyFields < maxFieldSerdes.length; numKeyFields++) {
        // Create tuples with a varying number of fields, and try to interpret their bytes with the lsmBTreeTuple.
        for (int numFields = numKeyFields; numFields <= maxFieldSerdes.length; numFields++) {
            // Create and write the max-field tuple to bytes using an LSMBTreeTupleWriter.
            LSMBTreeTupleWriter maxMatterTupleWriter = new LSMBTreeTupleWriter(maxTypeTraits, numKeyFields, false);
            ITupleReference maxTuple = TupleUtils.createTuple(maxFieldSerdes, (Object[]) maxFields);
            ByteBuffer maxMatterBuf = writeTuple(maxTuple, maxMatterTupleWriter);
            // The tuple reference should work for both matter and antimatter tuples (it doesn't matter which factory creates it).
            LSMBTreeTupleReference maxLsmBTreeTuple = (LSMBTreeTupleReference) maxMatterTupleWriter.createTupleReference();
            ISerializerDeserializer[] fieldSerdes = Arrays.copyOfRange(maxFieldSerdes, 0, numFields);
            ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
            IFieldValueGenerator[] fieldGens = DataGenUtils.getFieldGensFromSerdes(fieldSerdes, rnd, false);
            // Generate a tuple with random field values.
            Object[] fields = new Object[numFields];
            for (int j = 0; j < numFields; j++) {
                fields[j] = fieldGens[j].next();
            }
            // Create and write the tuple to bytes using matter and antimatter LSMBTreeTupleWriters.
            ITupleReference tuple = TupleUtils.createTuple(fieldSerdes, (Object[]) fields);
            LSMBTreeTupleWriter matterTupleWriter = new LSMBTreeTupleWriter(typeTraits, numKeyFields, false);
            LSMBTreeTupleWriter antimatterTupleWriter = new LSMBTreeTupleWriter(typeTraits, numKeyFields, true);
            LSMBTreeCopyTupleWriter copyTupleWriter = new LSMBTreeCopyTupleWriter(typeTraits, numKeyFields);
            ByteBuffer matterBuf = writeTuple(tuple, matterTupleWriter);
            ByteBuffer antimatterBuf = writeTuple(tuple, antimatterTupleWriter);
            // The antimatter buf should only contain keys; sanity-check the size.
            if (numFields != numKeyFields) {
                assertTrue(antimatterBuf.array().length < matterBuf.array().length);
            }
            // The tuple reference should work for both matter and antimatter tuples (it doesn't matter which factory creates it).
            LSMBTreeTupleReference lsmBTreeTuple = (LSMBTreeTupleReference) matterTupleWriter.createTupleReference();
            // Repeat the block below to test that repeatedly resetting between matter and antimatter tuples works.
            for (int r = 0; r < 4; r++) {
                // Check the matter tuple with lsmBTreeTuple.
                lsmBTreeTuple.resetByTupleOffset(matterBuf.array(), 0);
                checkTuple(lsmBTreeTuple, numFields, false, fieldSerdes, fields);
                // Create a copy using copyTupleWriter, and verify again.
                ByteBuffer copyMatterBuf = writeTuple(lsmBTreeTuple, copyTupleWriter);
                lsmBTreeTuple.resetByTupleOffset(copyMatterBuf.array(), 0);
                checkTuple(lsmBTreeTuple, numFields, false, fieldSerdes, fields);
                // Check the antimatter tuple with lsmBTreeTuple.
                lsmBTreeTuple.resetByTupleOffset(antimatterBuf.array(), 0);
                // It should only contain keys.
                checkTuple(lsmBTreeTuple, numKeyFields, true, fieldSerdes, fields);
                // Create a copy using copyTupleWriter, and verify again.
                ByteBuffer copyAntimatterBuf = writeTuple(lsmBTreeTuple, copyTupleWriter);
                lsmBTreeTuple.resetByTupleOffset(copyAntimatterBuf.array(), 0);
                // It should only contain keys.
                checkTuple(lsmBTreeTuple, numKeyFields, true, fieldSerdes, fields);
                // Check the matter tuple with maxLsmBTreeTuple.
                // We should be able to manually set the field count to a prefix of the fields
                // (i.e., a prefix of the type traits passed to the tuple factory's constructor).
                maxLsmBTreeTuple.setFieldCount(numFields);
                maxLsmBTreeTuple.resetByTupleOffset(matterBuf.array(), 0);
                checkTuple(maxLsmBTreeTuple, numFields, false, fieldSerdes, fields);
                // Check the antimatter tuple with maxLsmBTreeTuple.
                maxLsmBTreeTuple.resetByTupleOffset(antimatterBuf.array(), 0);
                // It should only contain keys.
                checkTuple(maxLsmBTreeTuple, numKeyFields, true, fieldSerdes, fields);
                // Resetting maxLsmBTreeTuple should set its field count to
                // maxFieldSerdes.length, based on its type traits.
                maxLsmBTreeTuple.resetByTupleOffset(maxMatterBuf.array(), 0);
                checkTuple(maxLsmBTreeTuple, maxFieldSerdes.length, false, maxFieldSerdes, maxFields);
            }
        }
    }
}
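The test relies on a writeTuple helper that is not shown in this excerpt. A minimal sketch, assuming the standard ITreeIndexTupleWriter API (bytesRequired and writeTuple are part of that interface); the exact buffer-sizing logic is an assumption about the helper, not the project's verbatim code:

    // Hypothetical sketch of the writeTuple helper used above: serialize a tuple
    // into a fresh buffer sized by the writer's own bytesRequired() estimate.
    private ByteBuffer writeTuple(ITupleReference tuple, ITreeIndexTupleWriter tupleWriter) {
        int bytesRequired = tupleWriter.bytesRequired(tuple);
        byte[] bytes = new byte[bytesRequired];
        ByteBuffer buf = ByteBuffer.wrap(bytes);
        tupleWriter.writeTuple(tuple, bytes, 0);
        return buf;
    }

Because both LSMBTreeTupleWriter and LSMBTreeCopyTupleWriter are tree-index tuple writers, one helper with this signature serves all five call sites in the test.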
Use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.
The class MetadataNode, method getExternalFiles.
@Override
public List<ExternalFile> getExternalFiles(JobId jobId, Dataset dataset) throws MetadataException, RemoteException {
    try {
        ITupleReference searchKey = createTuple(dataset.getDataverseName(), dataset.getDatasetName());
        ExternalFileTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getExternalFileTupleTranslator(false);
        IValueExtractor<ExternalFile> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
        List<ExternalFile> results = new ArrayList<>();
        searchIndex(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, searchKey, valueExtractor, results);
        return results;
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
}
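Each of these metadata lookups builds its search key with a createTuple helper that the excerpts do not show. A plausible sketch, assuming the key fields are strings assembled with Hyracks' ArrayTupleBuilder and ArrayTupleReference; the serializer choice and field layout here are assumptions, not the class's verbatim code:

    // Hypothetical sketch of createTuple: serialize each string key field and
    // wrap the builder's bytes in a reusable ArrayTupleReference.
    public static ITupleReference createTuple(String... fields) throws HyracksDataException {
        ISerializerDeserializer<String> stringSerde = new UTF8StringSerializerDeserializer();
        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fields.length);
        for (String field : fields) {
            stringSerde.serialize(field, tupleBuilder.getDataOutput());
            tupleBuilder.addFieldEndOffset();
        }
        ArrayTupleReference tuple = new ArrayTupleReference();
        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
        return tuple;
    }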
Use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.
The class MetadataNode, method getDataverseDatatypes.
private List<Datatype> getDataverseDatatypes(JobId jobId, String dataverseName) throws MetadataException, RemoteException {
    try {
        ITupleReference searchKey = createTuple(dataverseName);
        DatatypeTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this, false);
        IValueExtractor<Datatype> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
        List<Datatype> results = new ArrayList<>();
        searchIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, searchKey, valueExtractor, results);
        return results;
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
}
Use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.
The class MetadataNode, method getDataset.
@Override
public Dataset getDataset(JobId jobId, String dataverseName, String datasetName) throws MetadataException, RemoteException {
    try {
        ITupleReference searchKey = createTuple(dataverseName, datasetName);
        DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(false);
        List<Dataset> results = new ArrayList<>();
        IValueExtractor<Dataset> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
        searchIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, searchKey, valueExtractor, results);
        if (results.isEmpty()) {
            return null;
        }
        return results.get(0);
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
}
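Unlike the list-returning lookups, getDataset is a point lookup: an empty result set maps to null rather than an empty list. A hypothetical caller (the variable names and message are illustrative):

    // Hypothetical usage: a null return means the dataset does not exist,
    // so callers must check before dereferencing.
    Dataset dataset = metadataNode.getDataset(jobId, "CommerceDataverse", "Customers");
    if (dataset == null) {
        throw new MetadataException("Dataset Customers not found in dataverse CommerceDataverse");
    }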
Use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.
The class MetadataNode, method getDataversePolicies.
@Override
public List<FeedPolicyEntity> getDataversePolicies(JobId jobId, String dataverse) throws MetadataException, RemoteException {
    try {
        ITupleReference searchKey = createTuple(dataverse);
        FeedPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedPolicyTupleTranslator(false);
        IValueExtractor<FeedPolicyEntity> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
        List<FeedPolicyEntity> results = new ArrayList<>();
        searchIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, searchKey, valueExtractor, results);
        return results;
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
}
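All four MetadataNode methods above follow the same shape: build a search-key tuple, obtain the entity's tuple translator, wrap it in a MetadataEntityValueExtractor, run searchIndex against the appropriate metadata index, and return the accumulated results. A hedged sketch of a generic helper that would factor out this pattern; the generic signature and the IMetadataEntityTupleTranslator bound are assumptions about how the surrounding class could be refactored, not existing code:

    // Hypothetical generic lookup capturing the pattern shared by the methods above.
    private <T> List<T> searchMetadataIndex(JobId jobId, IMetadataIndex index, ITupleReference searchKey,
            IMetadataEntityTupleTranslator<T> translator) throws MetadataException, RemoteException {
        try {
            IValueExtractor<T> valueExtractor = new MetadataEntityValueExtractor<>(translator);
            List<T> results = new ArrayList<>();
            searchIndex(jobId, index, searchKey, valueExtractor, results);
            return results;
        } catch (HyracksDataException e) {
            throw new MetadataException(e);
        }
    }

With such a helper, getDataversePolicies would reduce to building the key, fetching the FeedPolicyTupleTranslator, and delegating; point lookups like getDataset would additionally map an empty result list to null.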