Use of org.apache.druid.segment.data.ColumnarLongs in the druid project by druid-io.
From class ColumnarLongsSelectRowsFromSegmentBenchmark, method setupFromFile.
/**
 * Loads the pre-encoded column file for {@code encoding}, records its on-disk
 * size in {@code encodedSize}, and registers a {@link ColumnarLongs} decoder
 * over the memory-mapped bytes in {@code decoders}.
 *
 * @param encoding name of the encoding whose file should be loaded
 * @throws IOException if the encoded file cannot be memory-mapped
 */
private void setupFromFile(String encoding) throws IOException {
  File tmpDir = getTmpDir();
  File encodedFile = new File(tmpDir, getColumnEncodedFileName(encoding, segmentName, columnName));
  // Map the whole file; the decoder reads directly from the mapped buffer.
  ByteBuffer mapped = Files.map(encodedFile);
  encodedSize.put(encoding, (int) encodedFile.length());
  decoders.put(encoding, BaseColumnarLongsBenchmark.createColumnarLongs(encoding, mapped));
}
Use of org.apache.druid.segment.data.ColumnarLongs in the druid project by druid-io.
From class BaseColumnarLongsBenchmark, method scanVectorized.
/**
 * Scans the whole column in vectors: for each vector position, fills a batch
 * of longs (contiguous or offset-indexed, depending on the offset iterator)
 * and feeds every value to the JMH blackhole so the reads are not elided.
 * Resets the offset iterator and closes the decoder when the scan completes.
 *
 * @param blackhole JMH sink that prevents dead-code elimination
 */
void scanVectorized(Blackhole blackhole) {
  EncodingSizeProfiler.encodedSize = encodedSize.get(encoding);
  ColumnarLongs decoder = decoders.get(encoding);
  long[] batch = new long[VECTOR_SIZE];
  for (; !vectorOffset.isDone(); vectorOffset.advance()) {
    // Hoist the vector size: it is used for both the read and the consume loop.
    int vectorSize = vectorOffset.getCurrentVectorSize();
    if (vectorOffset.isContiguous()) {
      decoder.get(batch, vectorOffset.getStartOffset(), vectorSize);
    } else {
      decoder.get(batch, vectorOffset.getOffsets(), vectorSize);
    }
    for (int row = 0; row < vectorSize; row++) {
      blackhole.consume(batch[row]);
    }
  }
  blackhole.consume(batch);
  blackhole.consume(vectorOffset);
  vectorOffset.reset();
  decoder.close();
}
Use of org.apache.druid.segment.data.ColumnarLongs in the druid project by druid-io.
From class LongCompressionBenchmark, method readSkipping.
/**
 * Benchmarks skip-reads: walks the column with a random stride, consuming one
 * value at each landing index, then closes the decoder.
 *
 * @param bh JMH sink that prevents dead-code elimination
 */
@Benchmark
public void readSkipping(Blackhole bh) {
  ColumnarLongs columnarLongs = supplier.get();
  int count = columnarLongs.size();
  // nextInt(1, 2000) guarantees a positive stride. The previous nextInt(2000)
  // could return 0, leaving the loop at the same index for that iteration and
  // re-reading the same value — wasted, non-deterministic extra iterations.
  for (int i = 0; i < count; i += ThreadLocalRandom.current().nextInt(1, 2000)) {
    bh.consume(columnarLongs.get(i));
  }
  columnarLongs.close();
}
Aggregations