Use of org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk in the Apache CarbonData project:
the readRawMeasureChunk method of the class CompressedMeasureChunkFileBasedReaderV1.
/**
 * Reads the raw (still-compressed, undecoded) measure column chunk for the
 * given block index from the carbon data file.
 *
 * @param fileReader reader used to fetch bytes from the data file
 * @param blockIndex index of the measure column block to read
 * @return raw measure column chunk wrapping the bytes read from disk
 * @throws IOException if reading from the underlying file fails
 */
@Override
public MeasureRawColumnChunk readRawMeasureChunk(FileHolder fileReader, int blockIndex) throws IOException {
  final DataChunk chunkMetadata = measureColumnChunks.get(blockIndex);
  final int pageLength = chunkMetadata.getDataPageLength();
  // Fetch the whole data page for this block in a single read.
  final ByteBuffer rawData =
      fileReader.readByteBuffer(filePath, chunkMetadata.getDataPageOffset(), pageLength);
  final MeasureRawColumnChunk chunk =
      new MeasureRawColumnChunk(blockIndex, rawData, 0, pageLength, this);
  chunk.setFileReader(fileReader);
  // V1 layout carries exactly one page per blocklet.
  chunk.setPagesCount(1);
  chunk.setRowCount(new int[] { numberOfRows });
  return chunk;
}
Use of org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk in the Apache CarbonData project:
the readRawMeasureChunk method of the class CompressedMeasureChunkFileBasedReaderV2.
/**
 * Reads the raw measure column chunk for the given block index (V2 layout).
 * The chunk length is either looked up directly (for the final block) or
 * derived from the distance to the next chunk's offset.
 *
 * @param fileReader reader used to fetch bytes from the data file; access is
 *                   synchronized because the reader may be shared
 * @param blockIndex index of the measure column block to read
 * @return raw measure column chunk wrapping the bytes read from disk
 * @throws IOException if reading from the underlying file fails
 */
@Override
public MeasureRawColumnChunk readRawMeasureChunk(FileHolder fileReader, int blockIndex) throws IOException {
  final long chunkOffset = measureColumnChunkOffsets.get(blockIndex);
  final int dataLength;
  if (blockIndex == measureColumnChunkOffsets.size() - 1) {
    // Last block: no following offset exists, so use the stored length.
    dataLength = measureColumnChunkLength.get(blockIndex);
  } else {
    // Otherwise the length is the gap to the next chunk's start offset.
    dataLength = (int) (measureColumnChunkOffsets.get(blockIndex + 1) - chunkOffset);
  }
  final ByteBuffer rawData;
  // Serialize reads on the shared file reader.
  synchronized (fileReader) {
    rawData = fileReader.readByteBuffer(filePath, chunkOffset, dataLength);
  }
  final MeasureRawColumnChunk chunk =
      new MeasureRawColumnChunk(blockIndex, rawData, 0, dataLength, this);
  chunk.setFileReader(fileReader);
  chunk.setPagesCount(1);
  chunk.setRowCount(new int[] { numberOfRows });
  return chunk;
}
Use of org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk in the Apache CarbonData project:
the readRawMeasureChunksInGroup method of the class CompressedMeasureChunkFileBasedReaderV2.
/**
 * Reads a contiguous group of measure column chunks with a single file read,
 * avoiding one I/O call per chunk. The bytes for the whole group are fetched
 * at once and each chunk is then sliced out of the shared buffer by offset.
 *
 * @param fileReader reader used to fetch bytes from the data file; access is
 *                   synchronized because the reader may be shared
 * @param startColumnBlockletIndex first column blocklet index to read (inclusive)
 * @param endColumnBlockletIndex last column blocklet index to read (inclusive)
 * @return one raw measure column chunk per blocklet in the requested range
 * @throws IOException if reading from the underlying file fails
 */
protected MeasureRawColumnChunk[] readRawMeasureChunksInGroup(FileHolder fileReader, int startColumnBlockletIndex, int endColumnBlockletIndex) throws IOException {
  final long groupStartOffset = measureColumnChunkOffsets.get(startColumnBlockletIndex);
  final long groupEndOffset = measureColumnChunkOffsets.get(endColumnBlockletIndex + 1);
  final ByteBuffer groupBuffer;
  // One synchronized read covers every chunk in the requested range.
  synchronized (fileReader) {
    groupBuffer = fileReader.readByteBuffer(filePath, groupStartOffset,
        (int) (groupEndOffset - groupStartOffset));
  }
  final int chunkCount = endColumnBlockletIndex - startColumnBlockletIndex + 1;
  final MeasureRawColumnChunk[] chunks = new MeasureRawColumnChunk[chunkCount];
  int offsetWithinBuffer = 0;
  for (int pos = 0; pos < chunkCount; pos++) {
    final int blockletIndex = startColumnBlockletIndex + pos;
    // Each chunk's length is the gap between consecutive chunk offsets.
    final int chunkLength = (int) (measureColumnChunkOffsets.get(blockletIndex + 1)
        - measureColumnChunkOffsets.get(blockletIndex));
    final MeasureRawColumnChunk chunk =
        new MeasureRawColumnChunk(blockletIndex, groupBuffer, offsetWithinBuffer, chunkLength, this);
    chunk.setFileReader(fileReader);
    chunk.setRowCount(new int[] { numberOfRows });
    chunk.setPagesCount(1);
    chunks[pos] = chunk;
    offsetWithinBuffer += chunkLength;
  }
  return chunks;
}
Aggregations