Usage of org.apache.hyracks.storage.am.common.datagen.DataGenThread in project asterixdb (Apache): the IndexMultiThreadTestDriver class, method run.
/**
 * Runs the multi-threaded index experiment {@code numRepeats} times and returns the
 * wall-clock duration (in milliseconds) of each repetition.
 *
 * @param numThreads requested number of worker threads (capped at the number of batches)
 * @param numRepeats number of times the whole experiment is repeated
 * @param numOps     total number of operations per repetition
 * @param batchSize  operations per batch; values < 1 mean one operation per batch
 * @return per-repetition elapsed times in milliseconds
 * @throws InterruptedException if interrupted while sleeping or joining workers
 * @throws HyracksDataException if a worker cannot be created
 */
public long[] run(int numThreads, int numRepeats, int numOps, int batchSize) throws InterruptedException, HyracksDataException {
    int numBatches = (batchSize < 1 ? numOps : numOps / batchSize);
    // Never start more threads than there are batches to consume.
    if (numBatches < numThreads) {
        numThreads = numBatches;
    }
    int threadNumBatches = numBatches / numThreads;
    // Integer division drops the remainder; spread the leftover batches over the
    // first workers so every generated batch is consumed (otherwise up to
    // numThreads - 1 batches would be produced by the data generator but never
    // processed, skewing the measured times).
    int remainderBatches = numBatches % numThreads;
    long[] times = new long[numRepeats];
    for (int i = 0; i < numRepeats; i++) {
        DataGenThread dataGen = createDatagenThread(numThreads, numBatches, batchSize);
        dataGen.start();
        // Wait until the tupleBatchQueue is filled to capacity (or holds all
        // batches, whichever comes first) so workers start against a warm queue.
        while (dataGen.tupleBatchQueue.remainingCapacity() != 0 && dataGen.tupleBatchQueue.size() != numBatches) {
            Thread.sleep(10);
        }
        // Start worker threads.
        AbstractIndexTestWorker[] workers = new AbstractIndexTestWorker[numThreads];
        long start = System.currentTimeMillis();
        for (int j = 0; j < numThreads; j++) {
            // The first 'remainderBatches' workers each take one extra batch.
            int workerBatches = threadNumBatches + (j < remainderBatches ? 1 : 0);
            workers[j] = workerFactory.create(dataGen, opSelector, index, workerBatches);
            workers[j].start();
        }
        // Join worker threads.
        for (int j = 0; j < numThreads; j++) {
            workers[j].join();
        }
        long end = System.currentTimeMillis();
        times[i] = end - start;
    }
    return times;
}
Usage of org.apache.hyracks.storage.am.common.datagen.DataGenThread in project asterixdb (Apache): the PerfExperiment class, method main.
/**
 * Entry point for the LSM-tree insert performance experiment.
 * <p>
 * Usage: {@code PerfExperiment <sorted:boolean> <numThreads:int>}
 * <p>
 * Generates {@code numTuples} integer-keyed tuples (with a fixed-size payload) via a
 * {@link DataGenThread}, inserts them into an LSM tree through an {@code LSMTreeRunner},
 * and prints the elapsed time of each repetition plus the average.
 */
public static void main(String[] args) throws Exception {
    // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
    if (args.length < 2) {
        System.err.println("Usage: PerfExperiment <sorted:boolean> <numThreads:int>");
        System.exit(1);
    }
    // Disable logging so we can better see the output times.
    Enumeration<String> loggers = LogManager.getLogManager().getLoggerNames();
    while (loggers.hasMoreElements()) {
        String loggerName = loggers.nextElement();
        Logger logger = LogManager.getLogManager().getLogger(loggerName);
        // getLogger may return null if the named logger has been garbage-collected
        // since getLoggerNames() was called; skip it rather than NPE.
        if (logger != null) {
            logger.setLevel(Level.OFF);
        }
    }
    boolean sorted = Boolean.parseBoolean(args[0]);
    int numThreads = Integer.parseInt(args[1]);
    // 20M tuples; adjust here to scale the experiment up or down.
    int numTuples = 20000000;
    int batchSize = 10000;
    int numBatches = numTuples / batchSize;
    int payLoadSize = 240;
    // Single integer key field; the payload is appended via the type traits.
    ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
    ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes, payLoadSize);
    IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, fieldSerdes.length);
    // All key fields participate in the bloom filter.
    int[] bloomFilterKeyFields = new int[cmpFactories.length];
    for (int i = 0; i < bloomFilterKeyFields.length; i++) {
        bloomFilterKeyFields[i] = i;
    }
    double bloomFilterFalsePositiveRate = 0.01;
    int repeats = 1;
    long[] times = new long[repeats];
    for (int i = 0; i < repeats; i++) {
        // 128 KB pages; 8192 in-memory pages (~1 GB), 16384 on-disk pages (~2 GB).
        int inMemPageSize = 131072;
        int onDiskPageSize = inMemPageSize;
        int inMemNumPages = 8192;
        int onDiskNumPages = 16384;
        LSMTreeRunner runner = new LSMTreeRunner(numBatches, inMemPageSize, inMemNumPages, onDiskPageSize, onDiskNumPages, typeTraits, cmpFactories, bloomFilterKeyFields, bloomFilterFalsePositiveRate);
        DataGenThread dataGen = new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, payLoadSize, 50, 10, sorted);
        dataGen.start();
        runner.reset();
        times[i] = runner.runExperiment(dataGen, numThreads);
        System.out.println("TIME " + i + ": " + times[i] + "ms");
        runner.deinit();
    }
    // Report the mean over all repetitions.
    long avgTime = 0;
    for (int i = 0; i < repeats; i++) {
        avgTime += times[i];
    }
    avgTime /= repeats;
    System.out.println("AVG TIME: " + avgTime + "ms");
}
Usage of org.apache.hyracks.storage.am.common.datagen.DataGenThread in project asterixdb (Apache): the BTreePageSizePerf class, method runExperiment.
/**
 * Runs the in-memory B-tree insert experiment for one (pageSize, numPages)
 * configuration, printing per-repetition times and the average.
 *
 * @param numBatches   number of tuple batches generated per repetition
 * @param batchSize    tuples per batch
 * @param pageSize     buffer-cache page size in bytes
 * @param numPages     number of buffer-cache pages
 * @param fieldSerdes  serializers for the tuple fields
 * @param cmpFactories comparator factories for the key fields
 * @param typeTraits   type traits for the tuple fields
 * @throws Exception if the runner fails to initialize or an experiment run fails
 */
private static void runExperiment(int numBatches, int batchSize, int pageSize, int numPages, ISerializerDeserializer[] fieldSerdes, IBinaryComparatorFactory[] cmpFactories, ITypeTraits[] typeTraits) throws Exception {
    System.out.println("PAGE SIZE: " + pageSize);
    System.out.println("NUM PAGES: " + numPages);
    System.out.println("MEMORY: " + (pageSize * numPages));
    int repeats = 5;
    long[] times = new long[repeats];
    InMemoryBTreeRunner runner = new InMemoryBTreeRunner(numBatches, pageSize, numPages, typeTraits, cmpFactories);
    runner.init();
    int numThreads = 1;
    try {
        for (int i = 0; i < repeats; i++) {
            DataGenThread dataGen = new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, 30, 50, 10, false);
            dataGen.start();
            times[i] = runner.runExperiment(dataGen, numThreads);
            System.out.println("TIME " + i + ": " + times[i] + "ms");
        }
    } finally {
        // Release runner resources even if an experiment repetition throws,
        // so a failure in one configuration does not leak into the next.
        runner.deinit();
    }
    // Report the mean over all repetitions.
    long avgTime = 0;
    for (int i = 0; i < repeats; i++) {
        avgTime += times[i];
    }
    avgTime /= repeats;
    System.out.println("AVG TIME: " + avgTime + "ms");
    System.out.println("-------------------------------");
}
Aggregations