Use of org.apache.druid.java.util.common.io.smoosh.SmooshedFileMapper in project druid by druid-io.
Example: the checkV2SerializedSizeAndData method of the CompressedColumnarIntsSerializerTest class.
private void checkV2SerializedSizeAndData(int chunkFactor) throws Exception {
  File tmpDirectory = FileUtils.createTempDir(StringUtils.format("CompressedIntsIndexedWriterTest_%d", chunkFactor));
  FileSmoosher smoosher = new FileSmoosher(tmpDirectory);

  // Serialize the test values through a compressed writer backed by a GenericIndexedWriter.
  CompressedColumnarIntsSerializer writer = new CompressedColumnarIntsSerializer(
      "test",
      segmentWriteOutMedium,
      chunkFactor,
      byteOrder,
      compressionStrategy,
      GenericIndexedWriter.ofCompressedByteBuffers(segmentWriteOutMedium, "test", compressionStrategy, Long.BYTES * 10000)
  );
  writer.open();
  for (int val : vals) {
    writer.addValue(val);
  }

  // Write the serialized column into the smoosh file under the name "test".
  final SmooshedWriter channel = smoosher.addWithSmooshedWriter("test", writer.getSerializedSize());
  writer.writeTo(channel, smoosher);
  channel.close();
  smoosher.close();

  // Map the smooshed directory back and check that the values round-trip.
  SmooshedFileMapper mapper = Smoosh.map(tmpDirectory);
  CompressedColumnarIntsSupplier supplierFromByteBuffer = CompressedColumnarIntsSupplier.fromByteBuffer(mapper.mapFile("test"), byteOrder);
  ColumnarInts columnarInts = supplierFromByteBuffer.get();
  Assert.assertEquals(vals.length, columnarInts.size());
  for (int i = 0; i < vals.length; ++i) {
    Assert.assertEquals(vals[i], columnarInts.get(i));
  }
  CloseableUtils.closeAndWrapExceptions(columnarInts);
  mapper.close();
}
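For reference, a minimal sketch (not part of the original test) of the same smoosh write/read round-trip in isolation. The directory prefix and the entry name "example" are illustrative; imports from org.apache.druid.java.util.common.io.smoosh, org.apache.druid.java.util.common, and java.nio are assumed.

// Write one named blob into a smoosh directory, then map it back out.
File dir = FileUtils.createTempDir("smoosh-sketch");        // scratch directory (illustrative prefix)
FileSmoosher smoosher = new FileSmoosher(dir);
smoosher.add("example", ByteBuffer.wrap(new byte[]{1, 2, 3})); // store a small named entry
smoosher.close();                                           // finalize meta.smoosh and the data file

SmooshedFileMapper mapper = Smoosh.map(dir);                // memory-map the smooshed directory
ByteBuffer mapped = mapper.mapFile("example");              // look up the entry by name
// mapped now exposes the three bytes written above
mapper.close();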