Search in sources :

Example 6 with MeasureRawColumnChunk

Example usage of org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk in the Apache CarbonData project.

From the class CompressedMeasureChunkFileBasedReaderV1, method readRawMeasureChunk.

/**
   * Reads the raw (still-compressed) measure column chunk for the given block index.
   *
   * @param fileReader reader used to fetch the page bytes from the carbon data file
   * @param blockIndex index of the measure column block to be read
   * @return raw measure column chunk wrapping the page bytes (one page, row count attached)
   */
@Override
public MeasureRawColumnChunk readRawMeasureChunk(FileHolder fileReader, int blockIndex) throws IOException {
    // Metadata describing where this block's data page lives in the file.
    DataChunk chunkMetadata = measureColumnChunks.get(blockIndex);
    int pageLength = chunkMetadata.getDataPageLength();
    ByteBuffer pageBuffer = fileReader.readByteBuffer(filePath, chunkMetadata.getDataPageOffset(), pageLength);
    MeasureRawColumnChunk rawChunk = new MeasureRawColumnChunk(blockIndex, pageBuffer, 0, pageLength, this);
    rawChunk.setFileReader(fileReader);
    // V1 format stores exactly one page per blocklet.
    rawChunk.setPagesCount(1);
    rawChunk.setRowCount(new int[] { numberOfRows });
    return rawChunk;
}
Also used : MeasureColumnDataChunk(org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk) DataChunk(org.apache.carbondata.core.metadata.blocklet.datachunk.DataChunk) MeasureRawColumnChunk(org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk) ByteBuffer(java.nio.ByteBuffer)

Example 7 with MeasureRawColumnChunk

Example usage of org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk in the Apache CarbonData project.

From the class CompressedMeasureChunkFileBasedReaderV2, method readRawMeasureChunk.

/**
 * Reads the raw measure column chunk for the given block index (V2 format).
 * The chunk length is derived from the offset table: for the last chunk the
 * explicit length list is used, otherwise it is the distance to the next offset.
 *
 * @param fileReader reader used to fetch the chunk bytes from the carbon data file
 * @param blockIndex index of the measure column block to be read
 * @return raw measure column chunk wrapping the chunk bytes (one page, row count attached)
 */
@Override
public MeasureRawColumnChunk readRawMeasureChunk(FileHolder fileReader, int blockIndex) throws IOException {
    long chunkOffset = measureColumnChunkOffsets.get(blockIndex);
    boolean isLastChunk = measureColumnChunkOffsets.size() - 1 == blockIndex;
    // Last chunk has no successor offset to diff against, so use the stored length.
    int chunkLength = isLastChunk
        ? measureColumnChunkLength.get(blockIndex)
        : (int) (measureColumnChunkOffsets.get(blockIndex + 1) - chunkOffset);
    ByteBuffer chunkBuffer;
    // The shared file reader is not thread-safe; serialize the actual read.
    synchronized (fileReader) {
        chunkBuffer = fileReader.readByteBuffer(filePath, chunkOffset, chunkLength);
    }
    MeasureRawColumnChunk rawChunk = new MeasureRawColumnChunk(blockIndex, chunkBuffer, 0, chunkLength, this);
    rawChunk.setFileReader(fileReader);
    rawChunk.setPagesCount(1);
    rawChunk.setRowCount(new int[] { numberOfRows });
    return rawChunk;
}
Also used : MeasureRawColumnChunk(org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk) ByteBuffer(java.nio.ByteBuffer)

Example 8 with MeasureRawColumnChunk

Example usage of org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk in the Apache CarbonData project.

From the class CompressedMeasureChunkFileBasedReaderV2, method readRawMeasureChunksInGroup.

/**
   * Reads a contiguous range of measure column chunks with a single file read.
   * Useful to avoid multiple I/O operations while reading the data from the file:
   * the whole byte range is fetched once, and each returned chunk is a view
   * (offset + length) into that shared buffer.
   *
   * @param fileReader               file reader to read the data
   * @param startColumnBlockletIndex first column blocklet index to be read
   * @param endColumnBlockletIndex   end column blocklet index to be read (inclusive)
   * @return array of raw measure column chunks, one per blocklet index in the range
   * @throws IOException if the underlying read fails
   */
protected MeasureRawColumnChunk[] readRawMeasureChunksInGroup(FileHolder fileReader, int startColumnBlockletIndex, int endColumnBlockletIndex) throws IOException {
    long groupStartOffset = measureColumnChunkOffsets.get(startColumnBlockletIndex);
    int groupLength = (int) (measureColumnChunkOffsets.get(endColumnBlockletIndex + 1) - groupStartOffset);
    ByteBuffer groupBuffer;
    // The shared file reader is not thread-safe; serialize the actual read.
    synchronized (fileReader) {
        groupBuffer = fileReader.readByteBuffer(filePath, groupStartOffset, groupLength);
    }
    int chunkCount = endColumnBlockletIndex - startColumnBlockletIndex + 1;
    MeasureRawColumnChunk[] rawChunks = new MeasureRawColumnChunk[chunkCount];
    int offsetWithinBuffer = 0;
    for (int position = 0; position < chunkCount; position++) {
        int blockletIndex = startColumnBlockletIndex + position;
        // Length of this chunk is the distance to the next chunk's offset.
        int chunkLength = (int) (measureColumnChunkOffsets.get(blockletIndex + 1) - measureColumnChunkOffsets.get(blockletIndex));
        MeasureRawColumnChunk rawChunk = new MeasureRawColumnChunk(blockletIndex, groupBuffer, offsetWithinBuffer, chunkLength, this);
        rawChunk.setFileReader(fileReader);
        rawChunk.setRowCount(new int[] { numberOfRows });
        rawChunk.setPagesCount(1);
        rawChunks[position] = rawChunk;
        offsetWithinBuffer += chunkLength;
    }
    return rawChunks;
}
Also used : MeasureRawColumnChunk(org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk) ByteBuffer(java.nio.ByteBuffer)

Aggregations

MeasureRawColumnChunk (org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk)8 ByteBuffer (java.nio.ByteBuffer)5 MeasureColumnDataChunk (org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk)3 DimensionRawColumnChunk (org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk)3 QueryStatistic (org.apache.carbondata.core.stats.QueryStatistic)3 DimensionColumnDataChunk (org.apache.carbondata.core.datastore.chunk.DimensionColumnDataChunk)2 BlockletDeleteDeltaCacheLoader (org.apache.carbondata.core.mutate.data.BlockletDeleteDeltaCacheLoader)2 DeleteDeltaCacheLoaderIntf (org.apache.carbondata.core.mutate.data.DeleteDeltaCacheLoaderIntf)2 AbstractScannedResult (org.apache.carbondata.core.scan.result.AbstractScannedResult)2 DataChunk3 (org.apache.carbondata.format.DataChunk3)2 BitSet (java.util.BitSet)1 FileHolder (org.apache.carbondata.core.datastore.FileHolder)1 DataChunk (org.apache.carbondata.core.metadata.blocklet.datachunk.DataChunk)1 FilterQueryScannedResult (org.apache.carbondata.core.scan.result.impl.FilterQueryScannedResult)1 NonFilterQueryScannedResult (org.apache.carbondata.core.scan.result.impl.NonFilterQueryScannedResult)1 BitSetGroup (org.apache.carbondata.core.util.BitSetGroup)1