
Example 31 with BenchmarkOptions

Use of com.carrotsearch.junitbenchmarks.BenchmarkOptions in the apache/parquet-mr project.

From the class RandomWritingBenchmarkTest, method writeDeltaPackingTest.

@BenchmarkOptions(benchmarkRounds = 10, warmupRounds = 2)
@Test
public void writeDeltaPackingTest() {
    DeltaBinaryPackingValuesWriter writer = new DeltaBinaryPackingValuesWriterForInteger(blockSize, miniBlockNum, 100, 20000, new DirectByteBufferAllocator());
    runWriteTest(writer);
}
Also used : DirectByteBufferAllocator(org.apache.parquet.bytes.DirectByteBufferAllocator) DeltaBinaryPackingValuesWriterForInteger(org.apache.parquet.column.values.delta.DeltaBinaryPackingValuesWriterForInteger) DeltaBinaryPackingValuesWriter(org.apache.parquet.column.values.delta.DeltaBinaryPackingValuesWriter) Test(org.junit.Test) BenchmarkOptions(com.carrotsearch.junitbenchmarks.BenchmarkOptions)
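
In all of these examples the @BenchmarkOptions annotation only takes effect because the test class hooks JUnitBenchmarks into the JUnit lifecycle, either by declaring a BenchmarkRule or by extending AbstractBenchmark. A minimal, self-contained sketch of that wiring (the class name and sleep body are illustrative, not taken from parquet-mr; it assumes a junitbenchmarks version whose BenchmarkRule implements org.junit.rules.TestRule):

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import com.carrotsearch.junitbenchmarks.BenchmarkOptions;
import com.carrotsearch.junitbenchmarks.BenchmarkRule;

public class SleepBenchmarkTest {

    // JUnitBenchmarks measures rounds through this rule; extending
    // com.carrotsearch.junitbenchmarks.AbstractBenchmark has the same effect.
    @Rule
    public TestRule benchmarkRun = new BenchmarkRule();

    // 2 unmeasured warmup rounds, then 10 measured rounds; per-round timings
    // go to the rule's configured consumers (the console by default).
    @BenchmarkOptions(benchmarkRounds = 10, warmupRounds = 2)
    @Test
    public void sleepTwentyMillis() throws Exception {
        Thread.sleep(20);
    }
}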

Example 32 with BenchmarkOptions

Use of com.carrotsearch.junitbenchmarks.BenchmarkOptions in the apache/parquet-mr project.

From the class BenchmarkDeltaByteArray, method benchmarkSortedStringsWithPlainValuesWriter.

@BenchmarkOptions(benchmarkRounds = 20, warmupRounds = 4)
@Test
public void benchmarkSortedStringsWithPlainValuesWriter() throws IOException {
    PlainValuesWriter writer = new PlainValuesWriter(64 * 1024, 64 * 1024, new DirectByteBufferAllocator());
    BinaryPlainValuesReader reader = new BinaryPlainValuesReader();
    Utils.writeData(writer, sortedVals);
    ByteBufferInputStream data = writer.getBytes().toInputStream();
    Binary[] bin = Utils.readData(reader, data, values.length);
    System.out.println("size " + data.position());
}
Also used : PlainValuesWriter(org.apache.parquet.column.values.plain.PlainValuesWriter) BinaryPlainValuesReader(org.apache.parquet.column.values.plain.BinaryPlainValuesReader) DirectByteBufferAllocator(org.apache.parquet.bytes.DirectByteBufferAllocator) ByteBufferInputStream(org.apache.parquet.bytes.ByteBufferInputStream) Binary(org.apache.parquet.io.api.Binary) Test(org.junit.Test) BenchmarkOptions(com.carrotsearch.junitbenchmarks.BenchmarkOptions)

Example 33 with BenchmarkOptions

Use of com.carrotsearch.junitbenchmarks.BenchmarkOptions in the druid-io/druid project.

From the class OnheapIncrementalIndexBenchmark, method testConcurrentAddRead.

@Ignore
@Test
@BenchmarkOptions(callgc = true, clock = Clock.REAL_TIME, warmupRounds = 10, benchmarkRounds = 20)
public void testConcurrentAddRead() throws InterruptedException, ExecutionException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
    final int taskCount = 30;
    final int concurrentThreads = 3;
    final int elementsPerThread = 1 << 15;
    final IncrementalIndex incrementalIndex = this.incrementalIndex
        .getConstructor(IncrementalIndexSchema.class, boolean.class, boolean.class, boolean.class, boolean.class, int.class)
        .newInstance(new IncrementalIndexSchema.Builder().withMetrics(factories).build(), true, true, false, true, elementsPerThread * taskCount);
    final ArrayList<AggregatorFactory> queryAggregatorFactories = new ArrayList<>(DIMENSION_COUNT + 1);
    queryAggregatorFactories.add(new CountAggregatorFactory("rows"));
    for (int i = 0; i < DIMENSION_COUNT; ++i) {
        queryAggregatorFactories.add(new LongSumAggregatorFactory(StringUtils.format("sumResult%s", i), StringUtils.format("sumResult%s", i)));
        queryAggregatorFactories.add(new DoubleSumAggregatorFactory(StringUtils.format("doubleSumResult%s", i), StringUtils.format("doubleSumResult%s", i)));
    }
    final ListeningExecutorService indexExecutor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(
        concurrentThreads,
        new ThreadFactoryBuilder().setDaemon(false).setNameFormat("index-executor-%d").setPriority(Thread.MIN_PRIORITY).build()));
    final ListeningExecutorService queryExecutor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(
        concurrentThreads,
        new ThreadFactoryBuilder().setDaemon(false).setNameFormat("query-executor-%d").build()));
    final long timestamp = System.currentTimeMillis();
    final Interval queryInterval = Intervals.of("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z");
    final List<ListenableFuture<?>> indexFutures = new ArrayList<>();
    final List<ListenableFuture<?>> queryFutures = new ArrayList<>();
    final Segment incrementalIndexSegment = new IncrementalIndexSegment(incrementalIndex, null);
    final QueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(), new TimeseriesQueryEngine(), QueryRunnerTestHelper.NOOP_QUERYWATCHER);
    final AtomicInteger currentlyRunning = new AtomicInteger(0);
    final AtomicBoolean concurrentlyRan = new AtomicBoolean(false);
    final AtomicBoolean someoneRan = new AtomicBoolean(false);
    for (int j = 0; j < taskCount; j++) {
        indexFutures.add(indexExecutor.submit(new Runnable() {

            @Override
            public void run() {
                currentlyRunning.incrementAndGet();
                try {
                    for (int i = 0; i < elementsPerThread; i++) {
                        incrementalIndex.add(getLongRow(timestamp + i, 1, DIMENSION_COUNT));
                    }
                } catch (IndexSizeExceededException e) {
                    throw new RuntimeException(e);
                }
                currentlyRunning.decrementAndGet();
                someoneRan.set(true);
            }
        }));
        queryFutures.add(queryExecutor.submit(new Runnable() {

            @Override
            public void run() {
                QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(
                    factory.createRunner(incrementalIndexSegment), factory.getToolchest());
                TimeseriesQuery query = Druids.newTimeseriesQueryBuilder().dataSource("xxx").granularity(Granularities.ALL)
                    .intervals(ImmutableList.of(queryInterval)).aggregators(queryAggregatorFactories).build();
                List<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query)).toList();
                for (Result<TimeseriesResultValue> result : results) {
                    if (someoneRan.get()) {
                        Assert.assertTrue(result.getValue().getDoubleMetric("doubleSumResult0") > 0);
                    }
                }
                if (currentlyRunning.get() > 0) {
                    concurrentlyRan.set(true);
                }
            }
        }));
    }
    List<ListenableFuture<?>> allFutures = new ArrayList<>(queryFutures.size() + indexFutures.size());
    allFutures.addAll(queryFutures);
    allFutures.addAll(indexFutures);
    Futures.allAsList(allFutures).get();
    // Assert.assertTrue("Did not hit concurrency, please try again", concurrentlyRan.get());
    queryExecutor.shutdown();
    indexExecutor.shutdown();
    QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(
        factory.createRunner(incrementalIndexSegment), factory.getToolchest());
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder().dataSource("xxx").granularity(Granularities.ALL)
        .intervals(ImmutableList.of(queryInterval)).aggregators(queryAggregatorFactories).build();
    List<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query)).toList();
    final int expectedVal = elementsPerThread * taskCount;
    for (Result<TimeseriesResultValue> result : results) {
        Assert.assertEquals(elementsPerThread, result.getValue().getLongMetric("rows").intValue());
        for (int i = 0; i < DIMENSION_COUNT; ++i) {
            Assert.assertEquals(StringUtils.format("Failed long sum on dimension %d", i), expectedVal,
                result.getValue().getLongMetric(StringUtils.format("sumResult%s", i)).intValue());
            Assert.assertEquals(StringUtils.format("Failed double sum on dimension %d", i), expectedVal,
                result.getValue().getDoubleMetric(StringUtils.format("doubleSumResult%s", i)).intValue());
        }
    }
}
Also used : TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) IncrementalIndexSegment(org.apache.druid.segment.IncrementalIndexSegment) ArrayList(java.util.ArrayList) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) IncrementalIndexSegment(org.apache.druid.segment.IncrementalIndexSegment) Segment(org.apache.druid.segment.Segment) Result(org.apache.druid.query.Result) TimeseriesQueryEngine(org.apache.druid.query.timeseries.TimeseriesQueryEngine) ThreadFactoryBuilder(com.google.common.util.concurrent.ThreadFactoryBuilder) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) TimeseriesQueryRunnerFactory(org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) QueryRunnerFactory(org.apache.druid.query.QueryRunnerFactory) TimeseriesQueryRunnerFactory(org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) ListeningExecutorService(com.google.common.util.concurrent.ListeningExecutorService) Interval(org.joda.time.Interval) Ignore(org.junit.Ignore) Test(org.junit.Test) BenchmarkOptions(com.carrotsearch.junitbenchmarks.BenchmarkOptions)
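
Beyond round counts, the Druid example above uses two less common @BenchmarkOptions parameters: callgc, which asks JUnitBenchmarks to trigger garbage collection around each round (reducing allocation noise at the cost of slower runs), and clock, which picks the time source from the com.carrotsearch.junitbenchmarks.Clock enum (wall-clock REAL_TIME here). A small sketch isolating just those options, with an illustrative workload rather than Druid's indexing code:

import java.util.concurrent.ConcurrentHashMap;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import com.carrotsearch.junitbenchmarks.BenchmarkOptions;
import com.carrotsearch.junitbenchmarks.BenchmarkRule;
import com.carrotsearch.junitbenchmarks.Clock;

public class MapInsertBenchmarkTest {

    @Rule
    public TestRule benchmarkRun = new BenchmarkRule();

    // GC around each round and wall-clock timing, mirroring the options
    // used by OnheapIncrementalIndexBenchmark above.
    @BenchmarkOptions(callgc = true, clock = Clock.REAL_TIME, warmupRounds = 10, benchmarkRounds = 20)
    @Test
    public void insertOneMillionEntries() {
        ConcurrentHashMap<Integer, Integer> map = new ConcurrentHashMap<>();
        for (int i = 0; i < 1_000_000; i++) {
            map.put(i, i);
        }
    }
}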

Example 34 with BenchmarkOptions

Use of com.carrotsearch.junitbenchmarks.BenchmarkOptions in the druid-io/druid project.

From the class BitmapCreationBenchmark, method testLinearAdditionDescending.

@BenchmarkOptions(warmupRounds = 10, benchmarkRounds = 1000)
@Test
public void testLinearAdditionDescending() {
    MutableBitmap mutableBitmap = factory.makeEmptyMutableBitmap();
    for (int i = NUM_BITS - 1; i >= 0; --i) {
        mutableBitmap.add(i);
    }
    Assert.assertEquals(NUM_BITS, mutableBitmap.size());
}
Also used : MutableBitmap(org.apache.druid.collections.bitmap.MutableBitmap) Test(org.junit.Test) BenchmarkOptions(com.carrotsearch.junitbenchmarks.BenchmarkOptions)

Example 35 with BenchmarkOptions

Use of com.carrotsearch.junitbenchmarks.BenchmarkOptions in the druid-io/druid project.

From the class BitmapCreationBenchmark, method testRandomAddition.

@BenchmarkOptions(warmupRounds = 10, benchmarkRounds = 10)
@Test
public void testRandomAddition() {
    MutableBitmap mutableBitmap = factory.makeEmptyMutableBitmap();
    for (int i : randIndex) {
        mutableBitmap.add(i);
    }
    Assert.assertEquals(NUM_BITS, mutableBitmap.size());
}
Also used : MutableBitmap(org.apache.druid.collections.bitmap.MutableBitmap) Test(org.junit.Test) BenchmarkOptions(com.carrotsearch.junitbenchmarks.BenchmarkOptions)
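
The round counts hard-coded in these annotations can be complemented, and the reporting destination changed, through junitbenchmarks' jub.* system properties, which are normally passed as -D options to the test JVM. A hedged sketch of setting them programmatically before the tests run (property values are illustrative; the H2 consumer additionally needs the H2 driver on the classpath):

public final class BenchmarkRunConfig {

    private BenchmarkRunConfig() {
    }

    // Call once before JUnit starts, e.g. from a test bootstrap; passing the
    // same keys as -D JVM options to the test runner has the same effect.
    public static void apply() {
        // Report to the console and persist run history into an H2 database file.
        System.setProperty("jub.consumers", "CONSOLE,H2");
        System.setProperty("jub.db.file", ".benchmarks"); // H2 file prefix (illustrative)
        // Run-wide default round counts; per-method @BenchmarkOptions values
        // still take precedence unless jub.ignore.defaults is set to true.
        System.setProperty("jub.rounds.warmup", "5");
        System.setProperty("jub.rounds.benchmark", "50");
    }
}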

Aggregations

BenchmarkOptions (com.carrotsearch.junitbenchmarks.BenchmarkOptions) 42
Test (org.junit.Test) 42
DirectByteBufferAllocator (org.apache.parquet.bytes.DirectByteBufferAllocator) 10
ListenableFuture (com.google.common.util.concurrent.ListenableFuture) 6
ListeningExecutorService (com.google.common.util.concurrent.ListeningExecutorService) 6
ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder) 6
AtomicInteger (java.util.concurrent.atomic.AtomicInteger) 6
ByteBufferInputStream (org.apache.parquet.bytes.ByteBufferInputStream) 6
Binary (org.apache.parquet.io.api.Binary) 6
Ignore (org.junit.Ignore) 6
ArrayList (java.util.ArrayList) 4
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean) 4
Pointer (org.directmemory.memory.Pointer) 4
BasicDBObject (com.mongodb.BasicDBObject) 3
MutableBitmap (io.druid.collections.bitmap.MutableBitmap) 3
LinkedList (java.util.LinkedList) 3
MutableBitmap (org.apache.druid.collections.bitmap.MutableBitmap) 3
DeltaBinaryPackingValuesWriter (org.apache.parquet.column.values.delta.DeltaBinaryPackingValuesWriter) 3
BinaryPlainValuesReader (org.apache.parquet.column.values.plain.BinaryPlainValuesReader) 3
PlainValuesWriter (org.apache.parquet.column.values.plain.PlainValuesWriter) 3