Use of com.linkedin.pinot.core.io.reader.impl.v1.FixedByteChunkSingleValueReader in project pinot by linkedin.
The class ColumnIndexContainer, method getRawIndexReader:
public static SingleColumnSingleValueReader getRawIndexReader(PinotDataBuffer fwdIndexBuffer, FieldSpec.DataType dataType)
    throws IOException {
  SingleColumnSingleValueReader reader;

  // TODO: Make compression/decompression configurable.
  ChunkDecompressor decompressor = ChunkCompressorFactory.getDecompressor("snappy");

  switch (dataType) {
    case INT:
    case LONG:
    case FLOAT:
    case DOUBLE:
      reader = new FixedByteChunkSingleValueReader(fwdIndexBuffer, decompressor);
      break;

    case STRING:
      reader = new VarByteChunkSingleValueReader(fwdIndexBuffer, decompressor);
      break;

    default:
      throw new IllegalArgumentException("Illegal data type for raw index reader: " + dataType);
  }
  return reader;
}
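For orientation, here is a minimal usage sketch (not taken from the Pinot source) of the factory above. The wrapping method, the file argument, and the description string passed to PinotDataBuffer.fromFile are illustrative assumptions; the reader calls mirror the tests shown below.

// Illustrative sketch only: the method name, file argument and buffer description are assumptions, not Pinot code.
private static long readFirstLongValue(File fwdIndexFile) throws IOException {
  PinotDataBuffer fwdIndexBuffer =
      PinotDataBuffer.fromFile(fwdIndexFile, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, "example-reader");

  // For LONG (as for INT, FLOAT and DOUBLE) the factory returns a FixedByteChunkSingleValueReader.
  FixedByteChunkSingleValueReader reader =
      (FixedByteChunkSingleValueReader) ColumnIndexContainer.getRawIndexReader(fwdIndexBuffer, FieldSpec.DataType.LONG);

  // Read the value for docId 0 using a reader context, as in the tests below.
  ChunkReaderContext context = reader.createContext();
  long value = reader.getLong(0, context);
  reader.close();
  return value;
}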
Use of com.linkedin.pinot.core.io.reader.impl.v1.FixedByteChunkSingleValueReader in project pinot by linkedin.
The class FixedByteChunkSingleValueReaderWriteTest, method testInt:
@Test
public void testInt() throws Exception {
  int[] expected = new int[NUM_VALUES];
  for (int i = 0; i < NUM_VALUES; i++) {
    expected[i] = _random.nextInt();
  }

  File outFile = new File(TEST_FILE);
  FileUtils.deleteQuietly(outFile);

  ChunkCompressor compressor = ChunkCompressorFactory.getCompressor("snappy");
  FixedByteChunkSingleValueWriter writer = new FixedByteChunkSingleValueWriter(outFile, compressor, NUM_VALUES,
      NUM_DOCS_PER_CHUNK, V1Constants.Numbers.INTEGER_SIZE);
  for (int i = 0; i < NUM_VALUES; i++) {
    writer.setInt(i, expected[i]);
  }
  writer.close();

  PinotDataBuffer pinotDataBuffer =
      PinotDataBuffer.fromFile(outFile, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, getClass().getName());
  ChunkDecompressor uncompressor = ChunkCompressorFactory.getDecompressor("snappy");
  FixedByteChunkSingleValueReader reader = new FixedByteChunkSingleValueReader(pinotDataBuffer, uncompressor);
  ChunkReaderContext context = reader.createContext();
  for (int i = 0; i < NUM_VALUES; i++) {
    int actual = reader.getInt(i, context);
    Assert.assertEquals(actual, expected[i]);
  }
  reader.close();
  FileUtils.deleteQuietly(outFile);
}
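The test exercises the full write/read round trip: random values are written through a FixedByteChunkSingleValueWriter configured with a snappy ChunkCompressor, the file is closed and memory-mapped as a PinotDataBuffer, and the values are read back through a FixedByteChunkSingleValueReader whose ChunkDecompressor must match the compression used at write time. The long and float tests below follow the same shape.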
Use of com.linkedin.pinot.core.io.reader.impl.v1.FixedByteChunkSingleValueReader in project pinot by linkedin.
The class RawIndexCreatorTest, method testFixedLengthRawIndexCreator:
/**
 * Helper method to perform actual tests for a given column.
 *
 * @param column Column for which to perform the test
 * @param dataType Data type of the column
 * @throws Exception
 */
private void testFixedLengthRawIndexCreator(String column, FieldSpec.DataType dataType) throws Exception {
  PinotDataBuffer indexBuffer = getIndexBufferForColumn(column);
  FixedByteChunkSingleValueReader rawIndexReader =
      new FixedByteChunkSingleValueReader(indexBuffer, ChunkCompressorFactory.getDecompressor("snappy"));

  _recordReader.rewind();
  for (int row = 0; row < NUM_ROWS; row++) {
    GenericRow expectedRow = _recordReader.next();
    Object expected = expectedRow.getValue(column);
    Object actual = readValueFromIndex(rawIndexReader, dataType, row);
    Assert.assertEquals(actual, expected);
  }
}
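The readValueFromIndex helper is not included in this excerpt. Below is a hedged sketch of what it might look like, assuming it simply dispatches on the data type to the reader's typed getters; the actual implementation in RawIndexCreatorTest may differ (for example, it may reuse a single ChunkReaderContext across rows).

// Hedged sketch of the helper used above; the real RawIndexCreatorTest implementation may differ.
private Object readValueFromIndex(FixedByteChunkSingleValueReader rawIndexReader, FieldSpec.DataType dataType, int row) {
  ChunkReaderContext context = rawIndexReader.createContext();
  switch (dataType) {
    case INT:
      return rawIndexReader.getInt(row, context);
    case LONG:
      return rawIndexReader.getLong(row, context);
    case FLOAT:
      return rawIndexReader.getFloat(row, context);
    case DOUBLE:
      return rawIndexReader.getDouble(row, context);
    default:
      throw new IllegalArgumentException("Illegal data type for fixed-width raw index: " + dataType);
  }
}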
Use of com.linkedin.pinot.core.io.reader.impl.v1.FixedByteChunkSingleValueReader in project pinot by linkedin.
The class FixedByteChunkSingleValueReaderWriteTest, method testLong:
@Test
public void testLong() throws Exception {
  long[] expected = new long[NUM_VALUES];
  for (int i = 0; i < NUM_VALUES; i++) {
    expected[i] = _random.nextLong();
  }

  File outFile = new File(TEST_FILE);
  FileUtils.deleteQuietly(outFile);

  ChunkCompressor compressor = ChunkCompressorFactory.getCompressor("snappy");
  FixedByteChunkSingleValueWriter writer = new FixedByteChunkSingleValueWriter(outFile, compressor, NUM_VALUES,
      NUM_DOCS_PER_CHUNK, V1Constants.Numbers.LONG_SIZE);
  for (int i = 0; i < NUM_VALUES; i++) {
    writer.setLong(i, expected[i]);
  }
  writer.close();

  PinotDataBuffer pinotDataBuffer =
      PinotDataBuffer.fromFile(outFile, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, getClass().getName());
  ChunkDecompressor uncompressor = ChunkCompressorFactory.getDecompressor("snappy");
  FixedByteChunkSingleValueReader reader = new FixedByteChunkSingleValueReader(pinotDataBuffer, uncompressor);
  ChunkReaderContext context = reader.createContext();
  for (int i = 0; i < NUM_VALUES; i++) {
    long actual = reader.getLong(i, context);
    Assert.assertEquals(actual, expected[i]);
  }
  reader.close();
  FileUtils.deleteQuietly(outFile);
}
Use of com.linkedin.pinot.core.io.reader.impl.v1.FixedByteChunkSingleValueReader in project pinot by linkedin.
The class FixedByteChunkSingleValueReaderWriteTest, method testFloat:
@Test
public void testFloat() throws Exception {
  float[] expected = new float[NUM_VALUES];
  for (int i = 0; i < NUM_VALUES; i++) {
    expected[i] = _random.nextFloat();
  }

  File outFile = new File(TEST_FILE);
  FileUtils.deleteQuietly(outFile);

  ChunkCompressor compressor = ChunkCompressorFactory.getCompressor("snappy");
  FixedByteChunkSingleValueWriter writer = new FixedByteChunkSingleValueWriter(outFile, compressor, NUM_VALUES,
      NUM_DOCS_PER_CHUNK, V1Constants.Numbers.FLOAT_SIZE);
  for (int i = 0; i < NUM_VALUES; i++) {
    writer.setFloat(i, expected[i]);
  }
  writer.close();

  PinotDataBuffer pinotDataBuffer =
      PinotDataBuffer.fromFile(outFile, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, getClass().getName());
  ChunkDecompressor uncompressor = ChunkCompressorFactory.getDecompressor("snappy");
  FixedByteChunkSingleValueReader reader = new FixedByteChunkSingleValueReader(pinotDataBuffer, uncompressor);
  ChunkReaderContext context = reader.createContext();
  for (int i = 0; i < NUM_VALUES; i++) {
    float actual = reader.getFloat(i, context);
    Assert.assertEquals(actual, expected[i]);
  }
  reader.close();
  FileUtils.deleteQuietly(outFile);
}
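The factory's switch statement also routes DOUBLE columns to FixedByteChunkSingleValueReader, but no double test appears in this excerpt. The sketch below fills that gap by analogy with the int, long and float tests above; it assumes a V1Constants.Numbers.DOUBLE_SIZE constant and symmetric setDouble/getDouble methods, and may differ from whatever the actual test class contains.

// Hedged sketch modeled on the tests above; assumes DOUBLE_SIZE and setDouble/getDouble exist.
@Test
public void testDouble() throws Exception {
  double[] expected = new double[NUM_VALUES];
  for (int i = 0; i < NUM_VALUES; i++) {
    expected[i] = _random.nextDouble();
  }

  File outFile = new File(TEST_FILE);
  FileUtils.deleteQuietly(outFile);

  ChunkCompressor compressor = ChunkCompressorFactory.getCompressor("snappy");
  FixedByteChunkSingleValueWriter writer = new FixedByteChunkSingleValueWriter(outFile, compressor, NUM_VALUES,
      NUM_DOCS_PER_CHUNK, V1Constants.Numbers.DOUBLE_SIZE);
  for (int i = 0; i < NUM_VALUES; i++) {
    writer.setDouble(i, expected[i]);
  }
  writer.close();

  PinotDataBuffer pinotDataBuffer =
      PinotDataBuffer.fromFile(outFile, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, getClass().getName());
  ChunkDecompressor uncompressor = ChunkCompressorFactory.getDecompressor("snappy");
  FixedByteChunkSingleValueReader reader = new FixedByteChunkSingleValueReader(pinotDataBuffer, uncompressor);
  ChunkReaderContext context = reader.createContext();
  for (int i = 0; i < NUM_VALUES; i++) {
    Assert.assertEquals(reader.getDouble(i, context), expected[i]);
  }
  reader.close();
  FileUtils.deleteQuietly(outFile);
}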