Use of org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory in project asterixdb by apache.
Class OrderedIndexMultiThreadTest, method runTest:
protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, int numThreads, TestWorkloadConf conf,
        String dataMsg) throws InterruptedException, HyracksDataException {
    setUp();
    if (LOGGER.isLoggable(Level.INFO)) {
        String indexTypeName = getIndexTypeName();
        LOGGER.info(indexTypeName + " MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
                + "; Workload: " + conf.toString() + ".");
    }
    ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
    IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeys);
    // This is only used for the LSM-BTree.
    int[] bloomFilterKeyFields = new int[numKeys];
    for (int i = 0; i < numKeys; ++i) {
        bloomFilterKeyFields[i] = i;
    }
    IIndex index = createIndex(typeTraits, cmpFactories, bloomFilterKeyFields);
    IIndexTestWorkerFactory workerFactory = getWorkerFactory();
    // 4 batches per thread.
    int batchSize = (NUM_OPERATIONS / numThreads) / 4;
    IndexMultiThreadTestDriver driver =
            new IndexMultiThreadTestDriver(index, workerFactory, fieldSerdes, conf.ops, conf.opProbs);
    driver.init();
    long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
    index.validate();
    driver.deinit();
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("BTree MultiThread Test Time: " + times[0] + "ms");
    }
    tearDown();
}
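As context for the factories built above: each IBinaryComparatorFactory in cmpFactories produces an IBinaryComparator that compares keys directly in their serialized byte form, which is what the index and the test workers rely on. The helper below is a hypothetical sketch, not part of the test, and assumes the standard IBinaryComparator.compare(byte[], int, int, byte[], int, int) contract.

// Hypothetical illustration of how a comparator factory array is consumed (not part of the original test).
static int compareFirstKeyField(IBinaryComparatorFactory[] cmpFactories, byte[] key1, int off1, int len1,
        byte[] key2, int off2, int len2) throws HyracksDataException {
    IBinaryComparator cmp = cmpFactories[0].createBinaryComparator();
    // Returns negative, zero, or positive, like java.util.Comparator, but over serialized bytes.
    return cmp.compare(key1, off1, len1, key2, off2, len2);
}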
Use of org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory in project asterixdb by apache.
Class SortMergeTest, method sortMergeTest02:
@Test
public void sortMergeTest02() throws Exception {
    JobSpecification spec = new JobSpecification();
    FileSplit[] ordersSplits = new FileSplit[] {
            new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
            new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
    IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
    RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
            new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
            new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
            new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
            new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
            new UTF8StringSerializerDeserializer() });
    FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
            new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
                    UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
                    UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
                    UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
                    UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
                    UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
    ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 4, new int[] { 1, 0 },
            new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
                    PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, ordersDesc);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
    ResultSetId rsId = new ResultSetId(1);
    spec.addResultSetId(rsId);
    IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false, false,
            ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
    spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
    spec.connect(
            new MToNPartitioningMergingConnectorDescriptor(spec,
                    new FieldHashPartitionComputerFactory(new int[] { 1, 0 },
                            new IBinaryHashFunctionFactory[] {
                                    PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
                                    PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
                    new int[] { 1, 0 },
                    new IBinaryComparatorFactory[] {
                            PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
                            PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                    new UTF8StringNormalizedKeyComputerFactory()),
            sorter, 0, printer, 0);
    runTest(spec);
}
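Note that the sorter and the merging connector must agree on both the sort key, fields { 1, 0 }, and the comparator factories for those fields. As an illustrative refactoring only, not how the original test is written, the duplicated factory array could be built once and reused:

// Illustrative only: shared key fields and comparator factories for both the sort and the merge.
int[] sortFields = new int[] { 1, 0 };
IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[] {
        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };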
Use of org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory in project asterixdb by apache.
Class RTreeTestContext, method create:
public static RTreeTestContext create(IBufferCache bufferCache, IFileMapProvider fileMapProvider, FileReference file,
        ISerializerDeserializer[] fieldSerdes, IPrimitiveValueProviderFactory[] valueProviderFactories,
        int numKeyFields, RTreePolicyType rtreePolicyType, IPageManagerFactory pageManagerFactory) throws Exception {
    ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
    IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeyFields);
    RTree rtree = RTreeUtils.createRTree(bufferCache, fileMapProvider, typeTraits, valueProviderFactories,
            cmpFactories, rtreePolicyType, file, false, pageManagerFactory);
    RTreeTestContext testCtx = new RTreeTestContext(fieldSerdes, rtree);
    return testCtx;
}
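SerdeUtils derives one comparator factory per key field, so only the first numKeyFields serdes contribute to cmpFactories. Below is a minimal, hypothetical sketch of a caller's field layout, assuming a 2-D R-tree with four double key fields and an integer payload and assuming the standard Hyracks marshalling serdes (DoubleSerializerDeserializer, IntegerSerializerDeserializer); the layout itself is not taken from the project.

// Hypothetical field layout: four double key fields plus one integer payload.
ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] {
        DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
        DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
        IntegerSerializerDeserializer.INSTANCE };
int numKeyFields = 4;
// Only the four key fields get comparator factories.
IBinaryComparatorFactory[] keyCmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeyFields);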
Use of org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory in project asterixdb by apache.
Class OnDiskInvertedIndexLifecycleTest, method setup:
@Override
public void setup() throws Exception {
    harness.setUp();
    ITypeTraits[] tokenTypeTraits = new ITypeTraits[] { UTF8StringPointable.TYPE_TRAITS };
    IBinaryComparatorFactory[] tokenCmpFactories =
            new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
    ITypeTraits[] invListTypeTraits = new ITypeTraits[] { IntegerPointable.TYPE_TRAITS };
    IBinaryComparatorFactory[] invListCmpFactories =
            new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
    IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
    FileReference btreeFile = harness.getIOManager()
            .resolveAbsolutePath(harness.getInvListsFileRef().getFile().getAbsolutePath() + "_btree");
    index = new OnDiskInvertedIndex(harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), invListBuilder,
            invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, harness.getInvListsFileRef(),
            btreeFile, harness.getMetadataPageManagerFactory());
}
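Here tokens are compared as UTF-8 strings and inverted-list elements as integers. The fragment below, which would have to live inside setup() or another method that may throw, is a hedged sketch of exercising the integer comparator directly; it assumes the elements are 4-byte big-endian integers, the layout that IntegerPointable reads and that ByteBuffer.putInt writes.

// Illustrative fragment only (not part of the original setup).
IBinaryComparator intCmp = invListCmpFactories[0].createBinaryComparator();
byte[] a = java.nio.ByteBuffer.allocate(4).putInt(7).array();
byte[] b = java.nio.ByteBuffer.allocate(4).putInt(42).array();
int result = intCmp.compare(a, 0, 4, b, 0, 4); // negative: 7 sorts before 42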
Use of org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory in project asterixdb by apache.
Class LSMInvertedIndexTestContext, method getComparatorFactories:
@Override
public IBinaryComparatorFactory[] getComparatorFactories() {
    if (allCmpFactories == null) {
        // Concatenate token and inv-list comparators.
        IInvertedIndex invIndex = (IInvertedIndex) index;
        IBinaryComparatorFactory[] tokenCmpFactories = invIndex.getTokenCmpFactories();
        IBinaryComparatorFactory[] invListCmpFactories = invIndex.getInvListCmpFactories();
        int totalCmpCount = tokenCmpFactories.length + invListCmpFactories.length;
        allCmpFactories = new IBinaryComparatorFactory[totalCmpCount];
        for (int i = 0; i < tokenCmpFactories.length; i++) {
            allCmpFactories[i] = tokenCmpFactories[i];
        }
        for (int i = 0; i < invListCmpFactories.length; i++) {
            allCmpFactories[i + tokenCmpFactories.length] = invListCmpFactories[i];
        }
    }
    return allCmpFactories;
}
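The two copy loops amount to an array concatenation; an equivalent alternative, shown only as a sketch and not the project's code, uses System.arraycopy:

// Equivalent concatenation of the two factory arrays using System.arraycopy (alternative sketch).
static IBinaryComparatorFactory[] concat(IBinaryComparatorFactory[] tokenCmps, IBinaryComparatorFactory[] invListCmps) {
    IBinaryComparatorFactory[] all = new IBinaryComparatorFactory[tokenCmps.length + invListCmps.length];
    System.arraycopy(tokenCmps, 0, all, 0, tokenCmps.length);
    System.arraycopy(invListCmps, 0, all, tokenCmps.length, invListCmps.length);
    return all;
}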