Use of com.linkedin.pinot.core.io.compression.ChunkCompressor in project pinot by linkedin.
From the class FixedByteChunkSingleValueReaderWriteTest, method testInt:
@Test
public void testInt() throws Exception {
  int[] expected = new int[NUM_VALUES];
  for (int i = 0; i < NUM_VALUES; i++) {
    expected[i] = _random.nextInt();
  }

  File outFile = new File(TEST_FILE);
  FileUtils.deleteQuietly(outFile);

  // Write the values through a snappy-compressed fixed-byte chunk writer.
  ChunkCompressor compressor = ChunkCompressorFactory.getCompressor("snappy");
  FixedByteChunkSingleValueWriter writer =
      new FixedByteChunkSingleValueWriter(outFile, compressor, NUM_VALUES, NUM_DOCS_PER_CHUNK,
          V1Constants.Numbers.INTEGER_SIZE);
  for (int i = 0; i < NUM_VALUES; i++) {
    writer.setInt(i, expected[i]);
  }
  writer.close();

  // Memory-map the written file and read the values back through the matching decompressor.
  PinotDataBuffer pinotDataBuffer =
      PinotDataBuffer.fromFile(outFile, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, getClass().getName());
  ChunkDecompressor uncompressor = ChunkCompressorFactory.getDecompressor("snappy");
  FixedByteChunkSingleValueReader reader = new FixedByteChunkSingleValueReader(pinotDataBuffer, uncompressor);
  ChunkReaderContext context = reader.createContext();

  for (int i = 0; i < NUM_VALUES; i++) {
    int actual = reader.getInt(i, context);
    Assert.assertEquals(actual, expected[i]);
  }

  reader.close();
  FileUtils.deleteQuietly(outFile);
}
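The constructor arguments suggest a layout in which NUM_DOCS_PER_CHUNK fixed-width values form one chunk and each chunk is compressed independently; the exact on-disk format is not shown in the snippet. The following is an illustration only, not Pinot source: a minimal sketch of the row-addressing arithmetic such a fixed-byte, chunked layout implies once a chunk has been located and decompressed (the real format also keeps a header with per-chunk offsets, which the sketch ignores, and the 1000 docs per chunk used below is just an example value).

// Illustration only (not Pinot source): the row-addressing arithmetic a fixed-byte,
// chunked layout implies once a chunk has been located and decompressed.
public class ChunkAddressingSketch {

  // Returns {chunkId, byteOffsetInChunk} for a given row.
  static int[] locateRow(int row, int numDocsPerChunk, int valueSizeInBytes) {
    int chunkId = row / numDocsPerChunk;                                 // which chunk holds the row
    int byteOffsetInChunk = (row % numDocsPerChunk) * valueSizeInBytes;  // fixed-width offset inside the chunk
    return new int[]{chunkId, byteOffsetInChunk};
  }

  public static void main(String[] args) {
    // e.g. with 1000 docs per chunk and 4-byte ints (the INTEGER_SIZE case above):
    int[] location = locateRow(12345, 1000, 4);
    System.out.println("chunk " + location[0] + ", byte offset " + location[1]);  // chunk 12, byte offset 1380
  }
}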
Use of com.linkedin.pinot.core.io.compression.ChunkCompressor in project pinot by linkedin.
From the class FixedByteChunkSingleValueReaderWriteTest, method testLong:
@Test
public void testLong() throws Exception {
  long[] expected = new long[NUM_VALUES];
  for (int i = 0; i < NUM_VALUES; i++) {
    expected[i] = _random.nextLong();
  }

  File outFile = new File(TEST_FILE);
  FileUtils.deleteQuietly(outFile);

  ChunkCompressor compressor = ChunkCompressorFactory.getCompressor("snappy");
  FixedByteChunkSingleValueWriter writer =
      new FixedByteChunkSingleValueWriter(outFile, compressor, NUM_VALUES, NUM_DOCS_PER_CHUNK,
          V1Constants.Numbers.LONG_SIZE);
  for (int i = 0; i < NUM_VALUES; i++) {
    writer.setLong(i, expected[i]);
  }
  writer.close();

  PinotDataBuffer pinotDataBuffer =
      PinotDataBuffer.fromFile(outFile, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, getClass().getName());
  ChunkDecompressor uncompressor = ChunkCompressorFactory.getDecompressor("snappy");
  FixedByteChunkSingleValueReader reader = new FixedByteChunkSingleValueReader(pinotDataBuffer, uncompressor);
  ChunkReaderContext context = reader.createContext();

  for (int i = 0; i < NUM_VALUES; i++) {
    long actual = reader.getLong(i, context);
    Assert.assertEquals(actual, expected[i]);
  }

  reader.close();
  FileUtils.deleteQuietly(outFile);
}
Use of com.linkedin.pinot.core.io.compression.ChunkCompressor in project pinot by linkedin.
From the class FixedByteChunkSingleValueReaderWriteTest, method testFloat:
@Test
public void testFloat() throws Exception {
  float[] expected = new float[NUM_VALUES];
  for (int i = 0; i < NUM_VALUES; i++) {
    expected[i] = _random.nextFloat();
  }

  File outFile = new File(TEST_FILE);
  FileUtils.deleteQuietly(outFile);

  ChunkCompressor compressor = ChunkCompressorFactory.getCompressor("snappy");
  FixedByteChunkSingleValueWriter writer =
      new FixedByteChunkSingleValueWriter(outFile, compressor, NUM_VALUES, NUM_DOCS_PER_CHUNK,
          V1Constants.Numbers.FLOAT_SIZE);
  for (int i = 0; i < NUM_VALUES; i++) {
    writer.setFloat(i, expected[i]);
  }
  writer.close();

  PinotDataBuffer pinotDataBuffer =
      PinotDataBuffer.fromFile(outFile, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, getClass().getName());
  ChunkDecompressor uncompressor = ChunkCompressorFactory.getDecompressor("snappy");
  FixedByteChunkSingleValueReader reader = new FixedByteChunkSingleValueReader(pinotDataBuffer, uncompressor);
  ChunkReaderContext context = reader.createContext();

  for (int i = 0; i < NUM_VALUES; i++) {
    float actual = reader.getFloat(i, context);
    Assert.assertEquals(actual, expected[i]);
  }

  reader.close();
  FileUtils.deleteQuietly(outFile);
}
Use of com.linkedin.pinot.core.io.compression.ChunkCompressor in project pinot by linkedin.
From the class FixedByteChunkSingleValueReaderWriteTest, method testDouble:
@Test
public void testDouble() throws Exception {
  double[] expected = new double[NUM_VALUES];
  for (int i = 0; i < NUM_VALUES; i++) {
    expected[i] = _random.nextDouble();
  }

  File outFile = new File(TEST_FILE);
  FileUtils.deleteQuietly(outFile);

  ChunkCompressor compressor = ChunkCompressorFactory.getCompressor("snappy");
  FixedByteChunkSingleValueWriter writer =
      new FixedByteChunkSingleValueWriter(outFile, compressor, NUM_VALUES, NUM_DOCS_PER_CHUNK,
          V1Constants.Numbers.DOUBLE_SIZE);
  for (int i = 0; i < NUM_VALUES; i++) {
    writer.setDouble(i, expected[i]);
  }
  writer.close();

  PinotDataBuffer pinotDataBuffer =
      PinotDataBuffer.fromFile(outFile, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, getClass().getName());
  ChunkDecompressor uncompressor = ChunkCompressorFactory.getDecompressor("snappy");
  FixedByteChunkSingleValueReader reader = new FixedByteChunkSingleValueReader(pinotDataBuffer, uncompressor);
  ChunkReaderContext context = reader.createContext();

  for (int i = 0; i < NUM_VALUES; i++) {
    double actual = reader.getDouble(i, context);
    Assert.assertEquals(actual, expected[i]);
  }

  reader.close();
  FileUtils.deleteQuietly(outFile);
}
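The four fixed-byte tests above differ only in the value size constant and the typed set/get calls, so they could be folded into one shared round-trip helper. The sketch below is a hypothetical refactoring, not Pinot source: it assumes it lives inside the same FixedByteChunkSingleValueReaderWriteTest class so that NUM_VALUES, NUM_DOCS_PER_CHUNK, TEST_FILE, _random and the existing imports are in scope, and the two functional interfaces are invented for the sketch. Only the writer and reader calls already exercised above are used.

// Hypothetical functional hooks, not part of the Pinot API.
private interface ValueWriter {
  void write(FixedByteChunkSingleValueWriter writer, int row) throws Exception;
}

private interface ValueChecker {
  void check(FixedByteChunkSingleValueReader reader, ChunkReaderContext context, int row) throws Exception;
}

// Shared write-then-read round trip covering all four fixed-byte value types.
private void testFixedByteRoundTrip(int valueSizeInBytes, ValueWriter valueWriter, ValueChecker valueChecker)
    throws Exception {
  File outFile = new File(TEST_FILE);
  FileUtils.deleteQuietly(outFile);

  ChunkCompressor compressor = ChunkCompressorFactory.getCompressor("snappy");
  FixedByteChunkSingleValueWriter writer =
      new FixedByteChunkSingleValueWriter(outFile, compressor, NUM_VALUES, NUM_DOCS_PER_CHUNK, valueSizeInBytes);
  for (int i = 0; i < NUM_VALUES; i++) {
    valueWriter.write(writer, i);
  }
  writer.close();

  PinotDataBuffer pinotDataBuffer =
      PinotDataBuffer.fromFile(outFile, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, getClass().getName());
  ChunkDecompressor uncompressor = ChunkCompressorFactory.getDecompressor("snappy");
  FixedByteChunkSingleValueReader reader = new FixedByteChunkSingleValueReader(pinotDataBuffer, uncompressor);
  ChunkReaderContext context = reader.createContext();
  for (int i = 0; i < NUM_VALUES; i++) {
    valueChecker.check(reader, context, i);
  }
  reader.close();
  FileUtils.deleteQuietly(outFile);
}

// The int case expressed through the helper; the other types follow the same pattern.
@Test
public void testIntViaHelper() throws Exception {
  int[] expected = new int[NUM_VALUES];
  for (int i = 0; i < NUM_VALUES; i++) {
    expected[i] = _random.nextInt();
  }
  testFixedByteRoundTrip(V1Constants.Numbers.INTEGER_SIZE,
      (writer, row) -> writer.setInt(row, expected[row]),
      (reader, context, row) -> Assert.assertEquals(reader.getInt(row, context), expected[row]));
}

Swapping in setLong/getLong, setFloat/getFloat or setDouble/getDouble with the corresponding V1Constants size would cover the other three cases.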
Use of com.linkedin.pinot.core.io.compression.ChunkCompressor in project pinot by linkedin.
From the class VarByteChunkSingleValueReaderWriteTest, method test:
/**
 * This test writes {@link #NUM_STRINGS} randomly generated strings using {@link VarByteChunkSingleValueWriter}.
 * It then reads the strings back using {@link VarByteChunkSingleValueReader} and asserts that what was written
 * is the same as what was read.
 *
 * The number of docs and the number of docs per chunk are chosen so that both complete and partial chunks are
 * generated.
 *
 * @throws Exception
 */
@Test
public void test() throws Exception {
  String[] expected = new String[NUM_STRINGS];
  Random random = new Random();

  File outFile = new File(TEST_FILE);
  FileUtils.deleteQuietly(outFile);

  int maxStringLengthInBytes = 0;
  for (int i = 0; i < NUM_STRINGS; i++) {
    expected[i] = RandomStringUtils.random(random.nextInt(MAX_STRING_LENGTH));
    // Track the longest value in UTF-8 bytes (not characters), since the writer sizes entries in bytes.
    maxStringLengthInBytes = Math.max(maxStringLengthInBytes, expected[i].getBytes(UTF_8).length);
  }

  ChunkCompressor compressor = ChunkCompressorFactory.getCompressor("snappy");
  VarByteChunkSingleValueWriter writer =
      new VarByteChunkSingleValueWriter(outFile, compressor, NUM_STRINGS, NUM_DOCS_PER_CHUNK, maxStringLengthInBytes);
  for (int i = 0; i < NUM_STRINGS; i++) {
    writer.setString(i, expected[i]);
  }
  writer.close();

  PinotDataBuffer pinotDataBuffer =
      PinotDataBuffer.fromFile(outFile, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, getClass().getName());
  ChunkDecompressor uncompressor = ChunkCompressorFactory.getDecompressor("snappy");
  VarByteChunkSingleValueReader reader = new VarByteChunkSingleValueReader(pinotDataBuffer, uncompressor);
  ChunkReaderContext context = reader.createContext();

  for (int i = 0; i < NUM_STRINGS; i++) {
    String actual = reader.getString(i, context);
    Assert.assertEquals(actual, expected[i]);
  }

  reader.close();
  FileUtils.deleteQuietly(outFile);
}
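Note that this test sizes the writer by the longest value's UTF-8 byte length rather than its character count. RandomStringUtils.random can emit non-ASCII characters, and those take more than one byte in UTF-8, so String.length() would understate the space a value needs. A plain JDK illustration (the class name and strings below are made up for the example):

import java.nio.charset.StandardCharsets;

public class Utf8LengthDemo {
  public static void main(String[] args) {
    String ascii = "pinot";     // ASCII only
    String accented = "piñot";  // 'ñ' needs two bytes in UTF-8

    System.out.println(ascii.length());                                    // 5 characters
    System.out.println(ascii.getBytes(StandardCharsets.UTF_8).length);     // 5 bytes
    System.out.println(accented.length());                                 // 5 characters
    System.out.println(accented.getBytes(StandardCharsets.UTF_8).length);  // 6 bytes
  }
}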