Usage of com.linkedin.pinot.core.segment.memory.PinotDataBuffer in the pinot project by LinkedIn.
From class SegmentV1V2ToV3FormatConverter, method readCopyBuffers.
/**
 * Copies one column index from the source segment reader into the destination
 * writer. The source bytes are exposed as a direct {@code ByteBuffer} view, so
 * no intermediate copy of the data is made before the write.
 *
 * @param reader    source segment directory reader
 * @param writer    destination segment directory writer
 * @param column    column whose index is being copied
 * @param indexType which index of the column to copy
 * @throws IOException on read/write failure
 */
private void readCopyBuffers(SegmentDirectory.Reader reader, SegmentDirectory.Writer writer, String column, ColumnIndexType indexType) throws IOException {
  PinotDataBuffer sourceBuffer = reader.getIndexFor(column, indexType);
  final long sourceSize = sourceBuffer.size();
  // single-buffer copies are capped at 2GB because sizes below are narrowed to int
  Preconditions.checkState(sourceSize >= 0 && sourceSize < Integer.MAX_VALUE,
      "Buffer sizes of greater than 2GB is not supported. segment: " + reader.toString() + ", column: " + column);
  PinotDataBuffer destinationBuffer = writer.newIndexFor(column, indexType, (int) sourceSize);
  // this shouldn't copy data
  ByteBuffer sourceView = sourceBuffer.toDirectByteBuffer(0, (int) sourceSize);
  destinationBuffer.readFrom(sourceView, 0, 0, sourceView.limit());
}
Usage of com.linkedin.pinot.core.segment.memory.PinotDataBuffer in the pinot project by LinkedIn.
From class FilePerIndexDirectory, method getWriteBufferFor.
/**
 * Returns a writable buffer for the given index key, creating and caching the
 * backing file mapping on first use. Callers always receive a duplicate so the
 * cached buffer's position/limit state is never shared.
 *
 * @param key       identifies the (column, index type) pair
 * @param sizeBytes size of the mapping to create if none is cached yet
 * @throws IOException if mapping the backing file fails
 */
private PinotDataBuffer getWriteBufferFor(IndexKey key, int sizeBytes) throws IOException {
  // single lookup instead of containsKey() followed by get()
  PinotDataBuffer cached = indexBuffers.get(key);
  if (cached != null) {
    return cached.duplicate();
  }
  // not cached yet: map the per-index file for writes and remember the mapping
  File indexFile = getFileFor(key.name, key.type);
  PinotDataBuffer buffer = mapForWrites(indexFile, sizeBytes, key.type.toString() + ".writer");
  indexBuffers.put(key, buffer);
  return buffer.duplicate();
}
Usage of com.linkedin.pinot.core.segment.memory.PinotDataBuffer in the pinot project by LinkedIn.
From class SingleFileIndexDirectory, method close.
/**
 * Releases every buffer this directory allocated, then clears the
 * bookkeeping collections so the instance holds no further references.
 */
@Override
public void close() {
  // close each backing buffer; entry views share these mappings
  allocBuffers.forEach(PinotDataBuffer::close);
  columnEntries.clear();
  allocBuffers.clear();
}
Usage of com.linkedin.pinot.core.segment.memory.PinotDataBuffer in the pinot project by LinkedIn.
From class SingleFileIndexDirectory, method mapAndSliceFile.
// Maps one contiguous region of the single index file read-write, then slices
// it into per-index view buffers — one per entry keyed in startOffsets.
//
// @param startOffsets map from absolute file offset to the IndexEntry stored there
// @param offsetAccum  absolute file offsets of the entries in this region; the
//                     first element marks the start of the region
// @param endOffset    absolute file offset at which the region ends (exclusive)
private void mapAndSliceFile(SortedMap<Long, IndexEntry> startOffsets, List<Long> offsetAccum, long endOffset) throws IOException {
Preconditions.checkNotNull(startOffsets);
Preconditions.checkNotNull(offsetAccum);
Preconditions.checkArgument(offsetAccum.size() >= 1);
long fromFilePos = offsetAccum.get(0);
// NOTE(review): despite the name, this is the region LENGTH (end minus start),
// not an absolute "to" position — confirm against PinotDataBuffer.fromFile's
// third parameter, which appears to take a size here.
long toFilePos = endOffset - fromFilePos;
String context = allocationContext(indexFile, "single_file_index.rw." + "." + String.valueOf(fromFilePos) + "." + String.valueOf(toFilePos));
PinotDataBuffer buffer = PinotDataBuffer.fromFile(indexFile, fromFilePos, toFilePos, readMode, FileChannel.MapMode.READ_WRITE, context);
// track the backing mapping so close() can release it; views below share it
allocBuffers.add(buffer);
// slice points are relative to the start of the mapped region
int prevSlicePoint = 0;
for (Long fileOffset : offsetAccum) {
IndexEntry entry = startOffsets.get(fileOffset);
// NOTE(review): entry.size is narrowed to int — assumes each individual entry
// fits in 2GB; confirm an upstream check enforces this.
int endSlicePoint = prevSlicePoint + (int) entry.size;
// every entry begins with a magic marker; verify it before slicing
validateMagicMarker(buffer, prevSlicePoint);
// the view handed to the entry excludes the magic marker itself
PinotDataBuffer viewBuffer = buffer.view(prevSlicePoint + MAGIC_MARKER_SIZE_BYTES, endSlicePoint);
entry.buffer = viewBuffer;
prevSlicePoint = endSlicePoint;
}
}
Usage of com.linkedin.pinot.core.segment.memory.PinotDataBuffer in the pinot project by LinkedIn.
From class FixedBitMultiValueTest, method testSingleColMultiValue.
// Round-trips random multi-value int data through the given writer/reader pair
// for every bit width from 1 to 31, verifying that values read back in both
// heap and mmap modes match what was written.
//
// @param writerClazz writer implementation under test; must expose a
//                    (File, int numDocs, int totalNumValues, int maxBits) ctor
// @param readerClazz reader implementation under test; must expose a
//                    (PinotDataBuffer, int, int, int, boolean) ctor
public void testSingleColMultiValue(Class<? extends SingleColumnMultiValueWriter> writerClazz, Class<? extends SingleColumnMultiValueReader> readerClazz) throws Exception {
LOGGER.info("Testing for writerClazz:{} readerClass:{}", writerClazz.getName(), readerClazz.getName());
// reflective constructors let the same routine exercise multiple implementations
Constructor<? extends SingleColumnMultiValueWriter> writerClazzConstructor = writerClazz.getConstructor(new Class[] { File.class, int.class, int.class, int.class });
// NOTE(review): the trailing boolean ctor arg is always passed false below —
// presumably a signed/has-null flag; confirm against the reader implementations.
Constructor<? extends SingleColumnMultiValueReader> readerClazzConstructor = readerClazz.getConstructor(new Class[] { PinotDataBuffer.class, int.class, int.class, int.class, boolean.class });
int maxBits = 1;
while (maxBits < 32) {
final String fileName = getClass().getName() + "_test_single_col_mv_fixed_bit.dat";
final File f = new File(fileName);
// remove any leftover file from a previous iteration/run
f.delete();
int numDocs = 10;
int maxNumValues = 100;
final int[][] data = new int[numDocs][];
final Random r = new Random();
// values are drawn in [0, 2^maxBits) so each fits in maxBits bits
final int maxValue = (int) Math.pow(2, maxBits);
int totalNumValues = 0;
int[] startOffsets = new int[numDocs];
int[] lengths = new int[numDocs];
// generate a random, non-empty value array per doc and record offsets/lengths
for (int i = 0; i < data.length; i++) {
final int numValues = r.nextInt(maxNumValues) + 1;
data[i] = new int[numValues];
for (int j = 0; j < numValues; j++) {
data[i][j] = r.nextInt(maxValue);
}
startOffsets[i] = totalNumValues;
lengths[i] = numValues;
totalNumValues = totalNumValues + numValues;
}
// write all docs and close so the file is fully flushed before reading
SingleColumnMultiValueWriter writer = writerClazzConstructor.newInstance(new Object[] { f, numDocs, totalNumValues, maxBits });
for (int i = 0; i < data.length; i++) {
writer.setIntArray(i, data[i]);
}
writer.close();
// NOTE(review): opened and immediately closed with no reads or writes —
// appears to be a leftover from a file-handle-count check (see the
// commented-out getNumOpenFiles assertions below); candidate for removal.
final RandomAccessFile raf = new RandomAccessFile(f, "rw");
raf.close();
// Test heap mode
PinotDataBuffer heapBuffer = PinotDataBuffer.fromFile(f, ReadMode.heap, FileChannel.MapMode.READ_ONLY, "testing");
SingleColumnMultiValueReader<? extends ReaderContext> heapReader = readerClazzConstructor.newInstance(new Object[] { heapBuffer, numDocs, totalNumValues, maxBits, false });
final int[] readValues = new int[maxNumValues];
for (int i = 0; i < data.length; i++) {
final int numValues = heapReader.getIntArray(i, readValues);
Assert.assertEquals(numValues, data[i].length);
for (int j = 0; j < numValues; j++) {
Assert.assertEquals(readValues[j], data[i][j]);
}
}
heapReader.close();
heapBuffer.close();
// Test mmap mode
PinotDataBuffer mmapBuffer = PinotDataBuffer.fromFile(f, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, "testing");
SingleColumnMultiValueReader<? extends ReaderContext> mmapReader = readerClazzConstructor.newInstance(new Object[] { mmapBuffer, numDocs, totalNumValues, maxBits, false });
for (int i = 0; i < data.length; i++) {
final int numValues = mmapReader.getIntArray(i, readValues);
Assert.assertEquals(numValues, data[i].length);
for (int j = 0; j < numValues; j++) {
Assert.assertEquals(readValues[j], data[i][j]);
}
}
// Assert.assertEquals(FileReaderTestUtils.getNumOpenFiles(f), 2);
mmapReader.close();
mmapBuffer.close();
// Assert.assertEquals(FileReaderTestUtils.getNumOpenFiles(f), 0);
f.delete();
maxBits = maxBits + 1;
}
LOGGER.info("DONE: Testing for writerClazz:{} readerClass:{}", writerClazz.getName(), readerClazz.getName());
}
Aggregations