Example use of org.apache.druid.segment.writeout.WriteOutBytes in the project druid by druid-io.
From the class CompressedVSizeColumnarIntsSerializerTest, method checkSerializedSizeAndData:
/**
 * Serializes {@code vals} through a {@link CompressedVSizeColumnarIntsSerializer}, then verifies
 * that the written size matches the reference supplier and that a round-trip through a
 * {@link ByteBuffer} reproduces every value.
 *
 * @param chunkSize number of values per compressed chunk
 * @throws Exception on any serialization or I/O failure
 */
private void checkSerializedSizeAndData(int chunkSize) throws Exception
{
  FileSmoosher smoosher = new FileSmoosher(temporaryFolder.newFolder());
  final String columnName = "test";
  // Compute the max once: Ints.max scans the whole array, and the serializer and the
  // reference supplier must be built with the identical value.
  final int maxValue = vals.length > 0 ? Ints.max(vals) : 0;
  CompressedVSizeColumnarIntsSerializer writer = new CompressedVSizeColumnarIntsSerializer(
      columnName,
      segmentWriteOutMedium,
      "test",
      maxValue,
      chunkSize,
      byteOrder,
      compressionStrategy
  );
  CompressedVSizeColumnarIntsSupplier supplierFromList = CompressedVSizeColumnarIntsSupplier.fromList(
      IntArrayList.wrap(vals),
      maxValue,
      chunkSize,
      byteOrder,
      compressionStrategy,
      segmentWriteOutMedium.getCloser()
  );
  writer.open();
  for (int val : vals) {
    writer.addValue(val);
  }
  long writtenLength = writer.getSerializedSize();
  final WriteOutBytes writeOutBytes = segmentWriteOutMedium.makeWriteOutBytes();
  writer.writeTo(writeOutBytes, smoosher);
  smoosher.close();
  Assert.assertEquals(writtenLength, supplierFromList.getSerializedSize());
  // read from ByteBuffer and check values
  CompressedVSizeColumnarIntsSupplier supplierFromByteBuffer = CompressedVSizeColumnarIntsSupplier.fromByteBuffer(
      ByteBuffer.wrap(IOUtils.toByteArray(writeOutBytes.asInputStream())),
      byteOrder
  );
  ColumnarInts columnarInts = supplierFromByteBuffer.get();
  // Check the element count too, matching the sibling CompressedColumnarIntsSerializerTest.
  Assert.assertEquals(vals.length, columnarInts.size());
  for (int i = 0; i < vals.length; ++i) {
    Assert.assertEquals(vals[i], columnarInts.get(i));
  }
  CloseableUtils.closeAndWrapExceptions(columnarInts);
}
Example use of org.apache.druid.segment.writeout.WriteOutBytes in the project druid by druid-io.
From the class CompressedColumnarIntsSerializerTest, method checkSerializedSizeAndData:
/**
 * Writes {@code vals} through a {@link CompressedColumnarIntsSerializer} and asserts that the
 * serialized size agrees with the list-based reference supplier, and that deserializing the
 * written bytes yields the original values.
 *
 * @param chunkFactor number of values per compressed chunk
 * @throws Exception on any serialization or I/O failure
 */
private void checkSerializedSizeAndData(int chunkFactor) throws Exception
{
  final FileSmoosher fileSmoosher = new FileSmoosher(temporaryFolder.newFolder());
  final CompressedColumnarIntsSerializer serializer = new CompressedColumnarIntsSerializer(
      "test",
      segmentWriteOutMedium,
      "test",
      chunkFactor,
      byteOrder,
      compressionStrategy
  );
  final CompressedColumnarIntsSupplier expectedSupplier = CompressedColumnarIntsSupplier.fromList(
      IntArrayList.wrap(vals),
      chunkFactor,
      byteOrder,
      compressionStrategy,
      segmentWriteOutMedium.getCloser()
  );
  serializer.open();
  for (int i = 0; i < vals.length; i++) {
    serializer.addValue(vals[i]);
  }
  final long serializedSize = serializer.getSerializedSize();
  final WriteOutBytes out = segmentWriteOutMedium.makeWriteOutBytes();
  serializer.writeTo(out, fileSmoosher);
  fileSmoosher.close();
  Assert.assertEquals(serializedSize, expectedSupplier.getSerializedSize());
  // Round-trip: rebuild the supplier from the written bytes and verify every element.
  final ByteBuffer serialized = ByteBuffer.wrap(IOUtils.toByteArray(out.asInputStream()));
  final CompressedColumnarIntsSupplier actualSupplier =
      CompressedColumnarIntsSupplier.fromByteBuffer(serialized, byteOrder);
  final ColumnarInts actual = actualSupplier.get();
  Assert.assertEquals(vals.length, actual.size());
  for (int i = 0; i < vals.length; i++) {
    Assert.assertEquals(vals[i], actual.get(i));
  }
  CloseableUtils.closeAndWrapExceptions(actual);
}
Example use of org.apache.druid.segment.writeout.WriteOutBytes in the project druid by druid-io.
From the class V3CompressedVSizeColumnarMultiIntsSerializerTest, method checkSerializedSizeAndData:
/**
 * Serializes the multi-int rows in {@code vals} through a
 * {@link V3CompressedVSizeColumnarMultiIntsSerializer} (offsets + values sub-writers), then
 * verifies the serialized size against the iterable-based reference supplier and checks a full
 * ByteBuffer round-trip, row by row and element by element.
 *
 * @param offsetChunkFactor chunk size for the offset column
 * @param valueChunkFactor  chunk size for the value column
 * @throws Exception on any serialization or I/O failure
 */
private void checkSerializedSizeAndData(int offsetChunkFactor, int valueChunkFactor) throws Exception
{
  FileSmoosher smoosher = new FileSmoosher(temporaryFolder.newFolder());
  try (SegmentWriteOutMedium segmentWriteOutMedium = new OffHeapMemorySegmentWriteOutMedium()) {
    int maxValue = vals.size() > 0 ? getMaxValue(vals) : 0;
    CompressedColumnarIntsSerializer offsetWriter = new CompressedColumnarIntsSerializer(
        TEST_COLUMN_NAME,
        segmentWriteOutMedium,
        "offset",
        offsetChunkFactor,
        byteOrder,
        compressionStrategy
    );
    CompressedVSizeColumnarIntsSerializer valueWriter = new CompressedVSizeColumnarIntsSerializer(
        TEST_COLUMN_NAME,
        segmentWriteOutMedium,
        "value",
        maxValue,
        valueChunkFactor,
        byteOrder,
        compressionStrategy
    );
    V3CompressedVSizeColumnarMultiIntsSerializer writer =
        new V3CompressedVSizeColumnarMultiIntsSerializer(TEST_COLUMN_NAME, offsetWriter, valueWriter);
    V3CompressedVSizeColumnarMultiIntsSupplier supplierFromIterable =
        V3CompressedVSizeColumnarMultiIntsSupplier.fromIterable(
            Iterables.transform(vals, ArrayBasedIndexedInts::new),
            offsetChunkFactor,
            maxValue,
            byteOrder,
            compressionStrategy,
            segmentWriteOutMedium.getCloser()
        );
    writer.open();
    for (int[] val : vals) {
      writer.addValues(new ArrayBasedIndexedInts(val));
    }
    long writtenLength = writer.getSerializedSize();
    final WriteOutBytes writeOutBytes = segmentWriteOutMedium.makeWriteOutBytes();
    writer.writeTo(writeOutBytes, smoosher);
    smoosher.close();
    Assert.assertEquals(writtenLength, supplierFromIterable.getSerializedSize());
    // read from ByteBuffer and check values
    V3CompressedVSizeColumnarMultiIntsSupplier supplierFromByteBuffer =
        V3CompressedVSizeColumnarMultiIntsSupplier.fromByteBuffer(
            ByteBuffer.wrap(IOUtils.toByteArray(writeOutBytes.asInputStream())),
            byteOrder
        );
    try (final ColumnarMultiInts columnarMultiInts = supplierFromByteBuffer.get()) {
      // JUnit convention: expected value first, actual second — the original had these
      // reversed, which produces misleading failure messages.
      Assert.assertEquals(vals.size(), columnarMultiInts.size());
      for (int i = 0; i < vals.size(); ++i) {
        IndexedInts subVals = columnarMultiInts.get(i);
        Assert.assertEquals(vals.get(i).length, subVals.size());
        for (int j = 0, size = subVals.size(); j < size; ++j) {
          Assert.assertEquals(vals.get(i)[j], subVals.get(j));
        }
      }
    }
  }
}
Example use of org.apache.druid.segment.writeout.WriteOutBytes in the project druid by druid-io.
From the class VSizeColumnarIntsSerializerTest, method checkSerializedSizeAndData:
/**
 * Writes {@code vals} through a {@link VSizeColumnarIntsSerializer} and asserts that the
 * serialized size matches the {@link VSizeColumnarInts} built directly from the same values,
 * then round-trips the written bytes and verifies size and every element.
 *
 * @throws Exception on any serialization or I/O failure
 */
private void checkSerializedSizeAndData() throws Exception
{
  final int maxValue = vals.length == 0 ? 0 : Ints.max(vals);
  final VSizeColumnarIntsSerializer serializer = new VSizeColumnarIntsSerializer(segmentWriteOutMedium, maxValue);
  final VSizeColumnarInts expected = VSizeColumnarInts.fromIndexedInts(new ArrayBasedIndexedInts(vals), maxValue);
  serializer.open();
  for (int i = 0; i < vals.length; i++) {
    serializer.addValue(vals[i]);
  }
  final long serializedSize = serializer.getSerializedSize();
  final WriteOutBytes out = segmentWriteOutMedium.makeWriteOutBytes();
  // No smoosher is needed for the VSize format, hence the null argument.
  serializer.writeTo(out, null);
  Assert.assertEquals(serializedSize, expected.getSerializedSize());
  // Round-trip: read the written bytes back and verify every element.
  final ByteBuffer serialized = ByteBuffer.wrap(IOUtils.toByteArray(out.asInputStream()));
  final VSizeColumnarInts actual = VSizeColumnarInts.readFromByteBuffer(serialized);
  Assert.assertEquals(vals.length, actual.size());
  for (int i = 0; i < vals.length; i++) {
    Assert.assertEquals(vals[i], actual.get(i));
  }
}
Aggregations