
Example 11 with MeasureRawColumnChunk

Use of org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk in the Apache CarbonData project.

From the class BlockletDataRefNode, method readMeasureChunks:

@Override
public MeasureRawColumnChunk[] readMeasureChunks(FileReader fileReader, int[][] columnIndexRange) throws IOException {
    MeasureColumnChunkReader measureColumnChunkReader = getMeasureColumnChunkReader(fileReader);
    MeasureRawColumnChunk[] measureRawColumnChunks = measureColumnChunkReader.readRawMeasureChunks(fileReader, columnIndexRange);
    updateMeasureRawColumnChunkMinMaxValues(measureRawColumnChunks);
    return measureRawColumnChunks;
}
Also used: MeasureColumnChunkReader (org.apache.carbondata.core.datastore.chunk.reader.MeasureColumnChunkReader), MeasureRawColumnChunk (org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk)
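
For context, here is a hypothetical caller-side sketch, not project source. It assumes dataRefNode and fileReader come from the surrounding query execution flow, and that columnIndexRange holds inclusive [start, end] column ranges, matching how the selected-block index pairs are iterated in Example 15 below.

// Hypothetical usage sketch (not project source): read a range of measure
// columns through the data ref node and walk the resulting raw chunks.
static void readMeasureColumns(BlockletDataRefNode dataRefNode, FileReader fileReader)
    throws IOException {
    // inclusive [start, end] range: measure columns 0..1 (illustrative values)
    int[][] columnIndexRange = { { 0, 1 } };
    MeasureRawColumnChunk[] rawChunks =
        dataRefNode.readMeasureChunks(fileReader, columnIndexRange);
    for (MeasureRawColumnChunk rawChunk : rawChunks) {
        if (rawChunk == null) {
            continue; // columns outside the requested ranges are not populated
        }
        // decode pages here; the decode call differs across CarbonData versions
        // (Example 15 uses convertToMeasureColDataChunk per page)
    }
}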

Example 12 with MeasureRawColumnChunk

Use of org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk in the Apache CarbonData project.

From the class CompressedMeasureChunkFileBasedReaderV1, method readRawMeasureChunk:

/**
   * Method to read a block's measure data based on the block index.
   *
   * @param fileReader file reader to read the blocks
   * @param blockIndex index of the block to be read
   * @return measure raw column chunk
   */
@Override
public MeasureRawColumnChunk readRawMeasureChunk(FileHolder fileReader, int blockIndex) throws IOException {
    DataChunk dataChunk = measureColumnChunks.get(blockIndex);
    ByteBuffer buffer = fileReader.readByteBuffer(filePath, dataChunk.getDataPageOffset(), dataChunk.getDataPageLength());
    MeasureRawColumnChunk rawColumnChunk = new MeasureRawColumnChunk(blockIndex, buffer, 0, dataChunk.getDataPageLength(), this);
    rawColumnChunk.setFileReader(fileReader);
    rawColumnChunk.setPagesCount(1);
    rawColumnChunk.setRowCount(new int[] { numberOfRows });
    return rawColumnChunk;
}
Also used: MeasureColumnDataChunk (org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk), DataChunk (org.apache.carbondata.core.metadata.blocklet.datachunk.DataChunk), MeasureRawColumnChunk (org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk), ByteBuffer (java.nio.ByteBuffer)
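
A hedged usage sketch for this V1 reader follows; the reader and file holder parameters are assumptions, supplied in the real flow by CarbonData's chunk-reader factory and the query executor.

// Hypothetical usage sketch (not project source): read one raw measure chunk
// with the V1 reader and decode its single page.
static MeasureColumnDataChunk readAndDecodeV1(
    CompressedMeasureChunkFileBasedReaderV1 reader,
    FileHolder fileHolder,
    int blockIndex) throws IOException {
    MeasureRawColumnChunk rawChunk = reader.readRawMeasureChunk(fileHolder, blockIndex);
    // V1 raw chunks always carry exactly one page (setPagesCount(1) above),
    // so page 0 is the only page to decode
    return rawChunk.convertToMeasureColDataChunk(0);
}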

Example 13 with MeasureRawColumnChunk

Use of org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk in the Apache CarbonData project.

From the class CompressedMeasureChunkFileBasedReaderV2, method readRawMeasureChunk:

@Override
public MeasureRawColumnChunk readRawMeasureChunk(FileHolder fileReader, int blockIndex) throws IOException {
    int dataLength = 0;
    if (measureColumnChunkOffsets.size() - 1 == blockIndex) {
        dataLength = measureColumnChunkLength.get(blockIndex);
    } else {
        long currentMeasureOffset = measureColumnChunkOffsets.get(blockIndex);
        dataLength = (int) (measureColumnChunkOffsets.get(blockIndex + 1) - currentMeasureOffset);
    }
    ByteBuffer buffer = null;
    synchronized (fileReader) {
        buffer = fileReader.readByteBuffer(filePath, measureColumnChunkOffsets.get(blockIndex), dataLength);
    }
    MeasureRawColumnChunk rawColumnChunk = new MeasureRawColumnChunk(blockIndex, buffer, 0, dataLength, this);
    rawColumnChunk.setFileReader(fileReader);
    rawColumnChunk.setPagesCount(1);
    rawColumnChunk.setRowCount(new int[] { numberOfRows });
    return rawColumnChunk;
}
Also used: MeasureRawColumnChunk (org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk), ByteBuffer (java.nio.ByteBuffer)
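
The length computation above exploits the fact that V2 measure chunks are laid out contiguously in the file: a chunk's length is the gap to the next chunk's offset, except for the last chunk, whose length must come from stored metadata. The standalone sketch below illustrates just that arithmetic; the names are illustrative, not CarbonData API.

import java.util.Arrays;
import java.util.List;

public class ChunkLengthDemo {

    // Mirrors the V2 reader logic: the last chunk uses an explicitly stored
    // length, every other chunk uses the distance to the next chunk's offset.
    static int chunkLength(List<Long> offsets, List<Integer> storedLengths, int blockIndex) {
        if (blockIndex == offsets.size() - 1) {
            return storedLengths.get(blockIndex);
        }
        return (int) (offsets.get(blockIndex + 1) - offsets.get(blockIndex));
    }

    public static void main(String[] args) {
        List<Long> offsets = Arrays.asList(0L, 4096L, 12288L);
        List<Integer> storedLengths = Arrays.asList(4096, 8192, 2048);
        System.out.println(chunkLength(offsets, storedLengths, 0)); // 4096 (offset diff)
        System.out.println(chunkLength(offsets, storedLengths, 2)); // 2048 (stored length)
    }
}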

Example 14 with MeasureRawColumnChunk

Use of org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk in the Apache CarbonData project.

From the class CompressedMeasureChunkFileBasedReaderV2, method readRawMeasureChunksInGroup:

/**
   * Below method will be used to read measure chunk data in a group.
   * Reading the chunks together avoids multiple I/O calls when reading
   * the data from the file.
   *
   * @param fileReader               file reader to read the data
   * @param startColumnBlockletIndex first column blocklet index to be read
   * @param endColumnBlockletIndex   end column blocklet index to be read
   * @return measure raw chunk array
   * @throws IOException
   */
protected MeasureRawColumnChunk[] readRawMeasureChunksInGroup(FileHolder fileReader, int startColumnBlockletIndex, int endColumnBlockletIndex) throws IOException {
    long currentMeasureOffset = measureColumnChunkOffsets.get(startColumnBlockletIndex);
    ByteBuffer buffer = null;
    synchronized (fileReader) {
        buffer = fileReader.readByteBuffer(filePath, currentMeasureOffset, (int) (measureColumnChunkOffsets.get(endColumnBlockletIndex + 1) - currentMeasureOffset));
    }
    MeasureRawColumnChunk[] dataChunks = new MeasureRawColumnChunk[endColumnBlockletIndex - startColumnBlockletIndex + 1];
    int runningLength = 0;
    int index = 0;
    for (int i = startColumnBlockletIndex; i <= endColumnBlockletIndex; i++) {
        int currentLength = (int) (measureColumnChunkOffsets.get(i + 1) - measureColumnChunkOffsets.get(i));
        MeasureRawColumnChunk measureRawColumnChunk = new MeasureRawColumnChunk(i, buffer, runningLength, currentLength, this);
        measureRawColumnChunk.setFileReader(fileReader);
        measureRawColumnChunk.setRowCount(new int[] { numberOfRows });
        measureRawColumnChunk.setPagesCount(1);
        dataChunks[index] = measureRawColumnChunk;
        runningLength += currentLength;
        index++;
    }
    return dataChunks;
}
Also used: MeasureRawColumnChunk (org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk), ByteBuffer (java.nio.ByteBuffer)
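
The group read above issues a single I/O for the whole [start, end] range and then hands each chunk a (runningLength, currentLength) window into the shared buffer. The standalone sketch below demonstrates the same slicing pattern with a plain ByteBuffer; it is an illustration, not CarbonData code.

import java.nio.ByteBuffer;

public class GroupSliceDemo {

    public static void main(String[] args) {
        // one contiguous buffer standing in for the single group read
        ByteBuffer shared = ByteBuffer.allocate(24);
        for (byte b = 0; b < 24; b++) {
            shared.put(b);
        }
        // per-chunk lengths, as would be derived from consecutive offsets
        int[] chunkLengths = { 8, 10, 6 };

        int runningLength = 0;
        for (int length : chunkLengths) {
            // each "chunk" is only a window (offset, length) into the shared
            // buffer, exactly how MeasureRawColumnChunk records its slice above
            ByteBuffer slice = shared.duplicate();
            slice.position(runningLength);
            slice.limit(runningLength + length);
            System.out.println("chunk at offset " + runningLength
                + ", length " + slice.remaining());
            runningLength += length;
        }
    }
}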

Example 15 with MeasureRawColumnChunk

Use of org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk in the Apache CarbonData project.

From the class FilterScanner, method fillScannedResult:

/**
   * This method processes the data in the following order:
   * 1. First apply min/max pruning on the filter tree and check whether any
   * filter falls within the min/max range; if not, return an empty result.
   * 2. If a filter falls within the min/max range, apply the filter on the
   * actual data and get the filtered row indexes.
   * 3. If the row indexes are empty, return an empty result.
   * 4. If the row indexes are not empty, read only those blocks (measure or
   * dimension) which are present in the query but not in the filter; while
   * applying the filter some blocks were already read into the chunk holder,
   * so they need not be read again.
   * 5. Set the blocks and filter indexes on the result.
   *
   * @param blocksChunkHolder
   * @throws FilterUnsupportedException
   */
private AbstractScannedResult fillScannedResult(BlocksChunkHolder blocksChunkHolder) throws FilterUnsupportedException, IOException {
    long startTime = System.currentTimeMillis();
    QueryStatistic totalBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap().get(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM);
    totalBlockletStatistic.addCountStatistic(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM, totalBlockletStatistic.getCount() + 1);
    // apply filter on actual data
    BitSetGroup bitSetGroup = this.filterExecuter.applyFilter(blocksChunkHolder);
    // if the index group is empty then return an empty result
    if (bitSetGroup.isEmpty()) {
        CarbonUtil.freeMemory(blocksChunkHolder.getDimensionRawDataChunk(), blocksChunkHolder.getMeasureRawDataChunk());
        QueryStatistic scanTime = queryStatisticsModel.getStatisticsTypeAndObjMap().get(QueryStatisticsConstants.SCAN_BLOCKlET_TIME);
        scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME, scanTime.getCount() + (System.currentTimeMillis() - startTime));
        QueryStatistic scannedPages = queryStatisticsModel.getStatisticsTypeAndObjMap().get(QueryStatisticsConstants.PAGE_SCANNED);
        scannedPages.addCountStatistic(QueryStatisticsConstants.PAGE_SCANNED, scannedPages.getCount() + bitSetGroup.getScannedPages());
        return createEmptyResult();
    }
    AbstractScannedResult scannedResult = new FilterQueryScannedResult(blockExecutionInfo);
    scannedResult.setBlockletId(blockExecutionInfo.getBlockId() + CarbonCommonConstants.FILE_SEPARATOR + blocksChunkHolder.getDataBlock().nodeNumber());
    // adding statistics for valid scanned blocklets
    QueryStatistic validScannedBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap().get(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM);
    validScannedBlockletStatistic.addCountStatistic(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM, validScannedBlockletStatistic.getCount() + 1);
    // adding statistics for valid number of pages
    QueryStatistic validPages = queryStatisticsModel.getStatisticsTypeAndObjMap().get(QueryStatisticsConstants.VALID_PAGE_SCANNED);
    validPages.addCountStatistic(QueryStatisticsConstants.VALID_PAGE_SCANNED, validPages.getCount() + bitSetGroup.getValidPages());
    QueryStatistic scannedPages = queryStatisticsModel.getStatisticsTypeAndObjMap().get(QueryStatisticsConstants.PAGE_SCANNED);
    scannedPages.addCountStatistic(QueryStatisticsConstants.PAGE_SCANNED, scannedPages.getCount() + bitSetGroup.getScannedPages());
    int[] rowCount = new int[bitSetGroup.getNumberOfPages()];
    // get the row indexes from the bit set group
    int[][] indexesGroup = new int[bitSetGroup.getNumberOfPages()][];
    for (int k = 0; k < indexesGroup.length; k++) {
        BitSet bitSet = bitSetGroup.getBitSet(k);
        if (bitSet != null && !bitSet.isEmpty()) {
            int[] indexes = new int[bitSet.cardinality()];
            int index = 0;
            for (int i = bitSet.nextSetBit(0); i >= 0; i = bitSet.nextSetBit(i + 1)) {
                indexes[index++] = i;
            }
            rowCount[k] = indexes.length;
            indexesGroup[k] = indexes;
        }
    }
    // load the delete delta data cache for this blocklet
    DeleteDeltaCacheLoaderIntf deleteCacheLoader = new BlockletDeleteDeltaCacheLoader(scannedResult.getBlockletId(), blocksChunkHolder.getDataBlock(), blockExecutionInfo.getAbsoluteTableIdentifier());
    deleteCacheLoader.loadDeleteDeltaFileDataToCache();
    scannedResult.setBlockletDeleteDeltaCache(blocksChunkHolder.getDataBlock().getDeleteDeltaDataCache());
    FileHolder fileReader = blocksChunkHolder.getFileReader();
    int[][] allSelectedDimensionBlocksIndexes = blockExecutionInfo.getAllSelectedDimensionBlocksIndexes();
    long dimensionReadTime = System.currentTimeMillis();
    DimensionRawColumnChunk[] projectionListDimensionChunk = blocksChunkHolder.getDataBlock().getDimensionChunks(fileReader, allSelectedDimensionBlocksIndexes);
    dimensionReadTime = System.currentTimeMillis() - dimensionReadTime;
    DimensionRawColumnChunk[] dimensionRawColumnChunks = new DimensionRawColumnChunk[blockExecutionInfo.getTotalNumberDimensionBlock()];
    // reuse the dimension chunks that were already read while applying the filter
    for (int i = 0; i < dimensionRawColumnChunks.length; i++) {
        if (null != blocksChunkHolder.getDimensionRawDataChunk()[i]) {
            dimensionRawColumnChunks[i] = blocksChunkHolder.getDimensionRawDataChunk()[i];
        }
    }
    for (int i = 0; i < allSelectedDimensionBlocksIndexes.length; i++) {
        for (int j = allSelectedDimensionBlocksIndexes[i][0]; j <= allSelectedDimensionBlocksIndexes[i][1]; j++) {
            dimensionRawColumnChunks[j] = projectionListDimensionChunk[j];
        }
    }
    long dimensionReadTime1 = System.currentTimeMillis();
    /**
     * for projection queries: if a projected dimension was not already loaded
     * into dimensionRawColumnChunks, load it now
     */
    int[] projectionListDimensionIndexes = blockExecutionInfo.getProjectionListDimensionIndexes();
    int projectionListDimensionIndexesLength = projectionListDimensionIndexes.length;
    for (int i = 0; i < projectionListDimensionIndexesLength; i++) {
        if (null == dimensionRawColumnChunks[projectionListDimensionIndexes[i]]) {
            dimensionRawColumnChunks[projectionListDimensionIndexes[i]] = blocksChunkHolder.getDataBlock().getDimensionChunk(fileReader, projectionListDimensionIndexes[i]);
        }
    }
    dimensionReadTime += (System.currentTimeMillis() - dimensionReadTime1);
    dimensionReadTime1 = System.currentTimeMillis();
    MeasureRawColumnChunk[] measureRawColumnChunks = new MeasureRawColumnChunk[blockExecutionInfo.getTotalNumberOfMeasureBlock()];
    int[][] allSelectedMeasureBlocksIndexes = blockExecutionInfo.getAllSelectedMeasureBlocksIndexes();
    MeasureRawColumnChunk[] projectionListMeasureChunk = blocksChunkHolder.getDataBlock().getMeasureChunks(fileReader, allSelectedMeasureBlocksIndexes);
    dimensionReadTime += System.currentTimeMillis() - dimensionReadTime1;
    // reuse the measure chunks that were already read while applying the filter
    for (int i = 0; i < measureRawColumnChunks.length; i++) {
        if (null != blocksChunkHolder.getMeasureRawDataChunk()[i]) {
            measureRawColumnChunks[i] = blocksChunkHolder.getMeasureRawDataChunk()[i];
        }
    }
    for (int i = 0; i < allSelectedMeasureBlocksIndexes.length; i++) {
        for (int j = allSelectedMeasureBlocksIndexes[i][0]; j <= allSelectedMeasureBlocksIndexes[i][1]; j++) {
            measureRawColumnChunks[j] = projectionListMeasureChunk[j];
        }
    }
    dimensionReadTime1 = System.currentTimeMillis();
    /**
     * for projection queries: if a projected measure was not already loaded
     * into measureRawColumnChunks, load it now
     */
    int[] projectionListMeasureIndexes = blockExecutionInfo.getProjectionListMeasureIndexes();
    int projectionListMeasureIndexesLength = projectionListMeasureIndexes.length;
    for (int i = 0; i < projectionListMeasureIndexesLength; i++) {
        if (null == measureRawColumnChunks[projectionListMeasureIndexes[i]]) {
            measureRawColumnChunks[projectionListMeasureIndexes[i]] = blocksChunkHolder.getDataBlock().getMeasureChunk(fileReader, projectionListMeasureIndexes[i]);
        }
    }
    dimensionReadTime += System.currentTimeMillis() - dimensionReadTime1;
    DimensionColumnDataChunk[][] dimensionColumnDataChunks = new DimensionColumnDataChunk[dimensionRawColumnChunks.length][indexesGroup.length];
    MeasureColumnDataChunk[][] measureColumnDataChunks = new MeasureColumnDataChunk[measureRawColumnChunks.length][indexesGroup.length];
    for (int i = 0; i < dimensionRawColumnChunks.length; i++) {
        for (int j = 0; j < indexesGroup.length; j++) {
            if (dimensionRawColumnChunks[i] != null) {
                dimensionColumnDataChunks[i][j] = dimensionRawColumnChunks[i].convertToDimColDataChunk(j);
            }
        }
    }
    for (int i = 0; i < measureRawColumnChunks.length; i++) {
        for (int j = 0; j < indexesGroup.length; j++) {
            if (measureRawColumnChunks[i] != null) {
                measureColumnDataChunks[i][j] = measureRawColumnChunks[i].convertToMeasureColDataChunk(j);
            }
        }
    }
    scannedResult.setDimensionChunks(dimensionColumnDataChunks);
    scannedResult.setIndexes(indexesGroup);
    scannedResult.setMeasureChunks(measureColumnDataChunks);
    scannedResult.setRawColumnChunks(dimensionRawColumnChunks);
    scannedResult.setNumberOfRows(rowCount);
    // adding statistics for carbon scan time
    QueryStatistic scanTime = queryStatisticsModel.getStatisticsTypeAndObjMap().get(QueryStatisticsConstants.SCAN_BLOCKlET_TIME);
    scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME, scanTime.getCount() + (System.currentTimeMillis() - startTime - dimensionReadTime));
    QueryStatistic readTime = queryStatisticsModel.getStatisticsTypeAndObjMap().get(QueryStatisticsConstants.READ_BLOCKlET_TIME);
    readTime.addCountStatistic(QueryStatisticsConstants.READ_BLOCKlET_TIME, readTime.getCount() + dimensionReadTime);
    return scannedResult;
}
Also used: FilterQueryScannedResult (org.apache.carbondata.core.scan.result.impl.FilterQueryScannedResult), DimensionColumnDataChunk (org.apache.carbondata.core.datastore.chunk.DimensionColumnDataChunk), BitSet (java.util.BitSet), DeleteDeltaCacheLoaderIntf (org.apache.carbondata.core.mutate.data.DeleteDeltaCacheLoaderIntf), BlockletDeleteDeltaCacheLoader (org.apache.carbondata.core.mutate.data.BlockletDeleteDeltaCacheLoader), MeasureColumnDataChunk (org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk), FileHolder (org.apache.carbondata.core.datastore.FileHolder), BitSetGroup (org.apache.carbondata.core.util.BitSetGroup), MeasureRawColumnChunk (org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk), DimensionRawColumnChunk (org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk), AbstractScannedResult (org.apache.carbondata.core.scan.result.AbstractScannedResult), QueryStatistic (org.apache.carbondata.core.stats.QueryStatistic)
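
The per-page row-index extraction in the middle of fillScannedResult uses the standard BitSet iteration idiom (nextSetBit in a for loop). A minimal standalone version of that loop, for reference:

import java.util.BitSet;

public class BitSetIndexDemo {

    // collect the positions of all set bits, as fillScannedResult does per page
    static int[] setBitIndexes(BitSet bitSet) {
        int[] indexes = new int[bitSet.cardinality()];
        int index = 0;
        for (int i = bitSet.nextSetBit(0); i >= 0; i = bitSet.nextSetBit(i + 1)) {
            indexes[index++] = i;
        }
        return indexes;
    }

    public static void main(String[] args) {
        BitSet bits = new BitSet();
        bits.set(2);
        bits.set(5);
        bits.set(7);
        // prints: 2 5 7
        for (int i : setBitIndexes(bits)) {
            System.out.print(i + " ");
        }
    }
}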

Aggregations

MeasureRawColumnChunk (org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk) - 24 uses
DimensionRawColumnChunk (org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk) - 12 uses
ByteBuffer (java.nio.ByteBuffer) - 9 uses
BitSet (java.util.BitSet) - 8 uses
BitSetGroup (org.apache.carbondata.core.util.BitSetGroup) - 8 uses
QueryStatistic (org.apache.carbondata.core.stats.QueryStatistic) - 6 uses
DimensionColumnPage (org.apache.carbondata.core.datastore.chunk.DimensionColumnPage) - 5 uses
ColumnPage (org.apache.carbondata.core.datastore.page.ColumnPage) - 4 uses
MeasureColumnDataChunk (org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk) - 3 uses
DataChunk3 (org.apache.carbondata.format.DataChunk3) - 3 uses
DimensionColumnDataChunk (org.apache.carbondata.core.datastore.chunk.DimensionColumnDataChunk) - 2 uses
MeasureColumnChunkReader (org.apache.carbondata.core.datastore.chunk.reader.MeasureColumnChunkReader) - 2 uses
DataChunk (org.apache.carbondata.core.metadata.blocklet.datachunk.DataChunk) - 2 uses
DataType (org.apache.carbondata.core.metadata.datatype.DataType) - 2 uses
BlockletDeleteDeltaCacheLoader (org.apache.carbondata.core.mutate.data.BlockletDeleteDeltaCacheLoader) - 2 uses
DeleteDeltaCacheLoaderIntf (org.apache.carbondata.core.mutate.data.DeleteDeltaCacheLoaderIntf) - 2 uses
AbstractScannedResult (org.apache.carbondata.core.scan.result.AbstractScannedResult) - 2 uses
BlockletScannedResult (org.apache.carbondata.core.scan.result.BlockletScannedResult) - 2 uses
FilterQueryScannedResult (org.apache.carbondata.core.scan.result.impl.FilterQueryScannedResult) - 2 uses
NonFilterQueryScannedResult (org.apache.carbondata.core.scan.result.impl.NonFilterQueryScannedResult) - 2 uses