Search in sources :

Example 36 with Setup

use of org.openjdk.jmh.annotations.Setup in project gradle by gradle.

The following snippet shows the method setupIteration from the class ChmodBenchmark.

@Setup(Level.Iteration)
public void setupIteration() throws IOException {
    // Reset per-iteration state: a fresh counter and a fresh scratch
    // directory, so file names created in this iteration cannot collide
    // with leftovers from a previous one.
    this.counter = new AtomicInteger(0);
    this.tempDirPath = Files.createTempDirectory(tempRootDir, "iteration");
    this.tempDirFile = tempDirPath.toFile();
}
Also used : AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Setup(org.openjdk.jmh.annotations.Setup)

Example 37 with Setup

use of org.openjdk.jmh.annotations.Setup in project spring-data-mongodb by spring-projects.

The following snippet shows the method setUp from the class DbRefMappingBenchmark.

@Setup
public void setUp() throws Exception {
    // Connect to a local MongoDB instance and seed the fixture documents
    // that the DBRef-resolution benchmarks read back.
    client = new MongoClient(new ServerAddress());
    template = new MongoTemplate(client, DB_NAME);

    // NOTE(review): the loop bound is 1, so exactly one RefObject is
    // persisted — presumably intentional for this benchmark; confirm.
    List<RefObject> refObjects = new ArrayList<>();
    for (int created = 0; created < 1; created++) {
        RefObject refObject = new RefObject();
        template.save(refObject);
        refObjects.add(refObject);
    }

    // A document holding a single DBRef to the first saved object.
    ObjectWithDBRef withOneRef = new ObjectWithDBRef();
    withOneRef.ref = refObjects.iterator().next();
    template.save(withOneRef);

    // A document holding a list of DBRefs to all saved objects.
    ObjectWithDBRef withManyRefs = new ObjectWithDBRef();
    withManyRefs.refList = refObjects;
    template.save(withManyRefs);

    // Pre-build the lookup queries reused by the benchmark methods.
    queryObjectWithDBRef = query(where("id").is(withOneRef.id));
    queryObjectWithDBRefList = query(where("id").is(withManyRefs.id));
}
Also used : MongoClient(com.mongodb.MongoClient) ServerAddress(com.mongodb.ServerAddress) ArrayList(java.util.ArrayList) MongoTemplate(org.springframework.data.mongodb.core.MongoTemplate) Setup(org.openjdk.jmh.annotations.Setup)

Example 38 with Setup

use of org.openjdk.jmh.annotations.Setup in project hive by apache.

The following snippet shows the method prepareBenchmark from the class ColumnarStorageBench.

/**
 * Initializes resources that will be needed for each of the benchmark tests.
 *
 * @throws SerDeException If it cannot initialize the desired test format.
 * @throws IOException If it cannot write data to temporary files.
 */
@Setup(Level.Trial)
public void prepareBenchmark() throws SerDeException, IOException {
    // Select the storage format under test from the benchmark parameter.
    if (format.equalsIgnoreCase("parquet") || format.equalsIgnoreCase("parquet-vec")) {
        storageFormatTest = new ParquetStorageFormatTest();
    } else if (format.equalsIgnoreCase("orc")) {
        storageFormatTest = new OrcStorageFormatTest();
    } else {
        throw new IllegalArgumentException("Invalid file format argument: " + format);
    }
    // Pre-serialize every generated row once, so the write benchmark
    // measures only the writer, not serialization cost.
    for (int i = 0; i < rows.length; i++) {
        recordWritable[i] = storageFormatTest.serialize(rows[i], oi);
    }
    fs = FileSystem.getLocal(new Configuration());
    writeFile = createTempFile();
    writePath = new Path(writeFile.getPath());
    readFile = createTempFile();
    readPath = new Path(readFile.getPath());
    /*
     * Write a bunch of random rows that will be used for read benchmark.
     * The writer is closed in a finally block so its file handle is not
     * leaked when writeRecords() throws (the original leaked it on failure).
     */
    RecordWriter writer = storageFormatTest.getRecordWriter(readPath);
    try {
        storageFormatTest.writeRecords(writer, recordWritable);
    } finally {
        writer.close(false);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) RecordWriter(org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter) Configuration(org.apache.hadoop.conf.Configuration) Setup(org.openjdk.jmh.annotations.Setup)

Example 39 with Setup

use of org.openjdk.jmh.annotations.Setup in project hive by apache.

The following snippet shows the method setup from the class VectorSelectOperatorBench.

@Setup
public void setup(Blackhole bh) throws HiveException {
    HiveConf conf = new HiveConf();

    // Input schema: three columns a, b, c (all long).
    List<String> inputColumns = new ArrayList<String>();
    inputColumns.add("a");
    inputColumns.add("b");
    inputColumns.add("c");
    VectorizationContext vectorizationContext = new VectorizationContext("name", inputColumns);

    // Build the projection expressions for SELECT (a + b), c.
    ExprNodeColumnDesc columnA = new ExprNodeColumnDesc(Long.class, "a", "table", false);
    ExprNodeColumnDesc columnB = new ExprNodeColumnDesc(Long.class, "b", "table", false);
    ExprNodeColumnDesc columnC = new ExprNodeColumnDesc(Long.class, "c", "table", false);

    ExprNodeGenericFuncDesc plusDesc = new ExprNodeGenericFuncDesc();
    GenericUDF plusUdf = new GenericUDFOPPlus();
    plusDesc.setGenericUDF(plusUdf);
    List<ExprNodeDesc> plusChildren = new ArrayList<ExprNodeDesc>();
    plusChildren.add(columnA);
    plusChildren.add(columnB);
    plusDesc.setChildren(plusChildren);
    plusDesc.setTypeInfo(TypeInfoFactory.longTypeInfo);

    // Assemble the (non-vectorized) select descriptor.
    selDesc = new SelectDesc(false);
    List<ExprNodeDesc> projectionList = new ArrayList<ExprNodeDesc>();
    projectionList.add(plusDesc);
    projectionList.add(columnC);
    selDesc.setColList(projectionList);
    List<String> outputNames = new ArrayList<String>();
    outputNames.add("_col0");
    outputNames.add("_col1");
    selDesc.setOutputColumnNames(outputNames);

    // Compile the projection into vectorized expressions.
    VectorSelectDesc vectorSelectDesc = new VectorSelectDesc();
    selDesc.setVectorDesc(vectorSelectDesc);
    List<ExprNodeDesc> selectColList = selDesc.getColList();
    VectorExpression[] selectExpressions = new VectorExpression[selectColList.size()];
    for (int i = 0; i < selectColList.size(); i++) {
        selectExpressions[i] = vectorizationContext.getVectorExpression(selectColList.get(i));
    }
    vectorSelectDesc.setSelectExpressions(selectExpressions);
    vectorSelectDesc.setProjectedOutputColumns(new int[] { 3, 2 });

    CompilationOpContext opContext = new CompilationOpContext();
    vso = new VectorSelectOperator(opContext, selDesc, vectorizationContext, vectorSelectDesc);
    // Two blackhole children, to trigger vectorForward while preventing
    // the JIT from eliding the operator's output.
    child = new ArrayList<>();
    child.add(new BlackholeOperator(opContext, bh));
    child.add(new BlackholeOperator(opContext, bh));
    vso.initialize(conf, null);
    vrg = VectorizedRowGroupGenUtil.getVectorizedRowBatch(VectorizedRowBatch.DEFAULT_SIZE, 4, 17);
}
Also used : VectorSelectOperator(org.apache.hadoop.hive.ql.exec.vector.VectorSelectOperator) ArrayList(java.util.ArrayList) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) VectorizationContext(org.apache.hadoop.hive.ql.exec.vector.VectorizationContext) GenericUDF(org.apache.hadoop.hive.ql.udf.generic.GenericUDF) CompilationOpContext(org.apache.hadoop.hive.ql.CompilationOpContext) BlackholeOperator(org.apache.hive.benchmark.vectorization.BlackholeOperator) GenericUDFOPPlus(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) HiveConf(org.apache.hadoop.hive.conf.HiveConf) VectorSelectDesc(org.apache.hadoop.hive.ql.plan.VectorSelectDesc) VectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression) SelectDesc(org.apache.hadoop.hive.ql.plan.SelectDesc) VectorSelectDesc(org.apache.hadoop.hive.ql.plan.VectorSelectDesc) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) Setup(org.openjdk.jmh.annotations.Setup)

Example 40 with Setup

use of org.openjdk.jmh.annotations.Setup in project grakn by graknlabs.

The following snippet shows the method setup from the class AddWithCommitBenchmark.

@Setup
public void setup() throws Throwable {
    session = sessionContext.newSession();
    // Build the schema once up front: two roles, an entity type that can
    // play both, and a relationship type relating them. The transaction
    // auto-closes via try-with-resources after the commit.
    try (GraknTx graknTx = session.open(GraknTxType.WRITE)) {
        role1 = graknTx.putRole("benchmark_role1");
        role2 = graknTx.putRole("benchmark_role2");
        entityType = graknTx.putEntityType("benchmark_Entitytype")
                .plays(role1)
                .plays(role2);
        relationshipType = graknTx.putRelationshipType("benchmark_relationshipType")
                .relates(role1)
                .relates(role2);
        graknTx.commit();
    }
}
Also used : GraknTx(ai.grakn.GraknTx) Setup(org.openjdk.jmh.annotations.Setup)

Aggregations

Setup (org.openjdk.jmh.annotations.Setup)88 File (java.io.File)19 InputRow (io.druid.data.input.InputRow)15 BenchmarkDataGenerator (io.druid.benchmark.datagen.BenchmarkDataGenerator)14 HyperUniquesSerde (io.druid.query.aggregation.hyperloglog.HyperUniquesSerde)14 Random (java.util.Random)11 IndexSpec (io.druid.segment.IndexSpec)10 IncrementalIndex (io.druid.segment.incremental.IncrementalIndex)8 OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex)8 QueryableIndex (io.druid.segment.QueryableIndex)7 ByteBuffer (java.nio.ByteBuffer)7 StupidPool (io.druid.collections.StupidPool)4 OffheapBufferGenerator (io.druid.offheap.OffheapBufferGenerator)4 PooledByteBufAllocator (io.netty.buffer.PooledByteBufAllocator)4 AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean)4 AtomicLong (java.util.concurrent.atomic.AtomicLong)4 Function (com.google.common.base.Function)3 BitmapFactory (io.druid.collections.bitmap.BitmapFactory)3 ImmutableBitmap (io.druid.collections.bitmap.ImmutableBitmap)3 MutableBitmap (io.druid.collections.bitmap.MutableBitmap)3