Example usage of org.apache.druid.segment.data.ColumnarLongs in the druid project by druid-io:
the readContinuous method of the LongCompressionBenchmark class.
@Benchmark
public void readContinuous(Blackhole bh) {
// Try-with-resources guarantees the column is closed even if size()/get()
// throws; the original only closed on the success path, leaking the mapped
// resource across benchmark iterations on failure. (ColumnarLongs.close()
// is declared without a checked exception in Druid — confirm if backporting.)
try (ColumnarLongs columnarLongs = supplier.get()) {
  final int count = columnarLongs.size();
  // Sequentially read every value; Blackhole.consume defeats dead-code elimination.
  for (int i = 0; i < count; i++) {
    bh.consume(columnarLongs.get(i));
  }
}
}
Example usage of org.apache.druid.segment.data.ColumnarLongs in the druid project by druid-io:
the setupFromFile method of the ColumnarLongsSelectRowsFromGeneratorBenchmark class.
// Loads a previously generated encoded column file for the given encoding:
// memory-maps it, records its on-disk size, and registers a decoder for it.
private void setupFromFile(String encoding) throws IOException {
final File encodedFile = new File(
    getTmpDir(),
    getGeneratorEncodedFilename(encoding, distribution, rows, zeroProbability)
);
// Record the compressed on-disk size so benchmarks can report it per encoding.
encodedSize.put(encoding, (int) encodedFile.length());
// Memory-map the file and wrap it in a ColumnarLongs decoder, keyed by encoding.
final ByteBuffer mapped = FileUtils.map(encodedFile).get();
decoders.put(encoding, createColumnarLongs(encoding, mapped));
}
Example usage of org.apache.druid.segment.data.ColumnarLongs in the druid project by druid-io:
the scan method of the BaseColumnarLongsBenchmark class.
// Scans every row reachable through the offset cursor with the decoder for the
// current encoding, consuming each value into the Blackhole, then resets the
// cursor so the next invocation starts from the beginning.
void scan(Blackhole blackhole) {
// Publish the encoded size for the size-profiling report.
EncodingSizeProfiler.encodedSize = encodedSize.get(encoding);
final ColumnarLongs decoder = decoders.get(encoding);
for (; offset.withinBounds(); offset.increment()) {
  blackhole.consume(decoder.get(offset.getOffset()));
}
offset.reset();
// Consume the cursor itself so the loop above cannot be optimized away.
blackhole.consume(offset);
}
Example usage of org.apache.druid.segment.data.ColumnarLongs in the druid project by druid-io:
the readVectorizedContinuous method of the LongCompressionBenchmark class.
@Benchmark
public void readVectorizedContinuous(Blackhole bh) {
// Reads the whole column in batches via the vectorized get(long[], int, int)
// API, refilling the buffer every vector.length rows and consuming one value
// per iteration so per-row cost is comparable to readContinuous.
long[] vector = new long[QueryableIndexStorageAdapter.DEFAULT_VECTOR_SIZE];
// Try-with-resources guarantees the column is closed even if a read throws;
// the original only closed on the success path, leaking the resource on
// failure. (ColumnarLongs.close() is declared without a checked exception
// in Druid — confirm if backporting.)
try (ColumnarLongs columnarLongs = supplier.get()) {
  final int count = columnarLongs.size();
  for (int i = 0; i < count; i++) {
    if (i % vector.length == 0) {
      // Refill the batch; the final batch may be shorter than vector.length.
      columnarLongs.get(vector, i, Math.min(vector.length, count - i));
    }
    bh.consume(vector[i % vector.length]);
  }
}
}
Example usage of org.apache.druid.segment.data.ColumnarLongs in the druid project by druid-io:
the checkRowSanity method of the BaseColumnarLongsBenchmark class.
// Verifies that every encoding decodes the same value for the given row.
// Comparing each adjacent pair is sufficient: if all neighbors agree, all
// encodings agree. Throws RE on the first mismatch, naming both encodings.
static void checkRowSanity(Map<String, ColumnarLongs> encoders, List<String> encodings, int row) {
// Starting at i = 1 also covers the empty/single-encoding case (no iterations).
for (int i = 1; i < encodings.size(); i++) {
  final String prevKey = encodings.get(i - 1);
  final String curKey = encodings.get(i);
  final long prevValue = encoders.get(prevKey).get(row);
  final long curValue = encoders.get(curKey).get(row);
  if (prevValue != curValue) {
    throw new RE("values do not match at row %s - %s:%s %s:%s", row, prevKey, prevValue, curKey, curValue);
  }
}
}
Aggregations