Search in sources:

Example 1 with ReusableDataBuffer

Use of org.apache.carbondata.core.datastore.ReusableDataBuffer in project carbondata by apache.

The class AbstractQueryExecutor, method getBlockExecutionInfos.

protected List<BlockExecutionInfo> getBlockExecutionInfos(QueryModel queryModel) throws IOException {
    initQuery(queryModel);
    List<BlockExecutionInfo> blockExecutionInfoList = new ArrayList<BlockExecutionInfo>();
    // Fill the block execution info for every block selected by the query;
    // the query will then be executed based on these infos.
    ReusableDataBuffer[] dimensionReusableDataBuffers = null;
    ReusableDataBuffer[] measureReusableDataBuffers = null;
    for (int i = 0; i < queryProperties.dataBlocks.size(); i++) {
        AbstractIndex abstractIndex = queryProperties.dataBlocks.get(i);
        BlockletDataRefNode dataRefNode = (BlockletDataRefNode) abstractIndex.getDataRefNode();
        final BlockExecutionInfo blockExecutionInfoForBlock = getBlockExecutionInfoForBlock(queryModel, abstractIndex, dataRefNode.numberOfNodes(), dataRefNode.getTableBlockInfo().getFilePath(), dataRefNode.getTableBlockInfo().getDeletedDeltaFilePath(), dataRefNode.getTableBlockInfo().getSegment());
        if (null == dimensionReusableDataBuffers || null == measureReusableDataBuffers) {
            dimensionReusableDataBuffers = blockExecutionInfoForBlock.getDimensionReusableDataBuffer();
            measureReusableDataBuffers = blockExecutionInfoForBlock.getMeasureReusableDataBuffer();
        } else {
            if (dimensionReusableDataBuffers.length == blockExecutionInfoForBlock.getDimensionReusableDataBuffer().length) {
                blockExecutionInfoForBlock.setDimensionReusableDataBuffer(dimensionReusableDataBuffers);
            }
            if (measureReusableDataBuffers.length == blockExecutionInfoForBlock.getMeasureReusableDataBuffer().length) {
                blockExecutionInfoForBlock.setMeasureReusableDataBuffer(measureReusableDataBuffers);
            }
        }
        blockExecutionInfoList.add(blockExecutionInfoForBlock);
    }
    if (null != queryModel.getStatisticsRecorder()) {
        QueryStatistic queryStatistic = new QueryStatistic();
        queryStatistic.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKS_NUM, blockExecutionInfoList.size());
        queryModel.getStatisticsRecorder().recordStatistics(queryStatistic);
    }
    return blockExecutionInfoList;
}
Also used : ArrayList(java.util.ArrayList) ReusableDataBuffer(org.apache.carbondata.core.datastore.ReusableDataBuffer) AbstractIndex(org.apache.carbondata.core.datastore.block.AbstractIndex) BlockExecutionInfo(org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo) BlockletDataRefNode(org.apache.carbondata.core.indexstore.blockletindex.BlockletDataRefNode) QueryStatistic(org.apache.carbondata.core.stats.QueryStatistic)
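
The part worth studying in Example 1 is how the per-column buffer arrays are shared across blocks: the ReusableDataBuffer[] arrays created for the first block are handed to every later block whose column count matches, so a query allocates one set of scratch buffers instead of one set per block. Below is a minimal sketch of that sharing rule under simplified, hypothetical types (ScratchBuffer and Block are illustrative stand-ins, not CarbonData classes).

import java.util.ArrayList;
import java.util.List;

// Stripped-down sketch of the sharing rule in getBlockExecutionInfos.
// Hypothetical types: the real code shares ReusableDataBuffer[] arrays
// stored on BlockExecutionInfo.
final class BufferSharingSketch {

    // Stand-in for a per-column scratch buffer.
    static final class ScratchBuffer {
    }

    // Stand-in for a block's execution info; it starts out with its own
    // freshly created buffer array, one entry per column.
    static final class Block {
        ScratchBuffer[] buffers;

        Block(int columnCount) {
            buffers = new ScratchBuffer[columnCount];
            for (int i = 0; i < columnCount; i++) {
                buffers[i] = new ScratchBuffer();
            }
        }
    }

    static List<Block> planBlocks(int[] columnCountPerBlock) {
        List<Block> blocks = new ArrayList<>();
        ScratchBuffer[] shared = null;
        for (int columnCount : columnCountPerBlock) {
            Block block = new Block(columnCount);
            if (shared == null) {
                // First block: keep its buffers as the shared set.
                shared = block.buffers;
            } else if (shared.length == block.buffers.length) {
                // Same column count: point this block at the first block's
                // buffers instead of the ones it just created.
                block.buffers = shared;
            }
            blocks.add(block);
        }
        return blocks;
    }

    public static void main(String[] args) {
        List<Block> blocks = planBlocks(new int[] { 3, 3, 3 });
        // All three blocks end up pointing at the same scratch array.
        System.out.println(blocks.get(0).buffers == blocks.get(2).buffers);
    }
}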

Example 2 with ReusableDataBuffer

Use of org.apache.carbondata.core.datastore.ReusableDataBuffer in project carbondata by apache.

The class AdaptiveDeltaIntegralCodec, method createDecoder.

@Override
public ColumnPageDecoder createDecoder(final ColumnPageEncoderMeta meta) {
    return new ColumnPageDecoder() {

        @Override
        public ColumnPage decode(byte[] input, int offset, int length) {
            ColumnPage page = null;
            if (DataTypes.isDecimal(meta.getSchemaDataType())) {
                page = ColumnPage.decompressDecimalPage(meta, input, offset, length);
            } else {
                page = ColumnPage.decompress(meta, input, offset, length, false, false);
            }
            return LazyColumnPage.newPage(page, converter);
        }

        @Override
        public void decodeAndFillVector(byte[] input, int offset, int length, ColumnVectorInfo vectorInfo, BitSet nullBits, boolean isLVEncoded, int pageSize, ReusableDataBuffer reusableDataBuffer) {
            Compressor compressor = CompressorFactory.getInstance().getCompressor(meta.getCompressorName());
            byte[] unCompressData;
            if (null != reusableDataBuffer && compressor.supportReusableBuffer()) {
                int uncompressedLength = compressor.unCompressedLength(input, offset, length);
                unCompressData = reusableDataBuffer.getDataBuffer(uncompressedLength);
                compressor.rawUncompress(input, offset, length, unCompressData);
            } else {
                unCompressData = compressor.unCompressByte(input, offset, length);
            }
            if (DataTypes.isDecimal(meta.getSchemaDataType())) {
                TableSpec.ColumnSpec columnSpec = meta.getColumnSpec();
                vectorInfo.decimalConverter = DecimalConverterFactory.INSTANCE.getDecimalConverter(columnSpec.getPrecision(), columnSpec.getScale());
            }
            converter.decodeAndFillVector(unCompressData, vectorInfo, nullBits, meta.getStoreDataType(), pageSize);
        }

        @Override
        public ColumnPage decode(byte[] input, int offset, int length, boolean isLVEncoded) {
            return decode(input, offset, length);
        }
    };
}
Also used : TableSpec(org.apache.carbondata.core.datastore.TableSpec) ColumnPage(org.apache.carbondata.core.datastore.page.ColumnPage) LazyColumnPage(org.apache.carbondata.core.datastore.page.LazyColumnPage) ColumnVectorInfo(org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo) BitSet(java.util.BitSet) ReusableDataBuffer(org.apache.carbondata.core.datastore.ReusableDataBuffer) Compressor(org.apache.carbondata.core.datastore.compression.Compressor) ColumnPageDecoder(org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder)
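
The decodeAndFillVector implementations in this example and in Examples 3 and 4 share the same decompression branch. As a reading aid, here is that branch isolated into a hypothetical helper (UncompressSketch is not an actual CarbonData class); it uses only the Compressor and ReusableDataBuffer calls that appear in the examples.

import org.apache.carbondata.core.datastore.ReusableDataBuffer;
import org.apache.carbondata.core.datastore.compression.Compressor;

// Hypothetical helper, not part of CarbonData: decompress into the caller's
// reusable scratch buffer when one is supplied and the compressor supports
// it, otherwise fall back to a freshly allocated array.
final class UncompressSketch {

    static byte[] uncompress(Compressor compressor, byte[] input, int offset, int length,
            ReusableDataBuffer reusableDataBuffer) {
        if (reusableDataBuffer != null && compressor.supportReusableBuffer()) {
            // Ask the compressor for the exact uncompressed size, borrow a
            // buffer at least that large, and decompress into it.
            int uncompressedLength = compressor.unCompressedLength(input, offset, length);
            byte[] target = reusableDataBuffer.getDataBuffer(uncompressedLength);
            compressor.rawUncompress(input, offset, length, target);
            return target;
        }
        // No reusable buffer (or not supported): allocate a new array per call.
        return compressor.unCompressByte(input, offset, length);
    }

    private UncompressSketch() {
    }
}

Examples 3 and 4 repeat this branch verbatim; Example 5 adds decimal handling and tracks the uncompressed length explicitly, for the reason noted after that example.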

Example 3 with ReusableDataBuffer

Use of org.apache.carbondata.core.datastore.ReusableDataBuffer in project carbondata by apache.

The class AdaptiveDeltaFloatingCodec, method createDecoder.

@Override
public ColumnPageDecoder createDecoder(final ColumnPageEncoderMeta meta) {
    return new ColumnPageDecoder() {

        @Override
        public ColumnPage decode(byte[] input, int offset, int length) {
            ColumnPage page = ColumnPage.decompress(meta, input, offset, length, false, false);
            return LazyColumnPage.newPage(page, converter);
        }

        @Override
        public void decodeAndFillVector(byte[] input, int offset, int length, ColumnVectorInfo vectorInfo, BitSet nullBits, boolean isLVEncoded, int pageSize, ReusableDataBuffer reusableDataBuffer) {
            Compressor compressor = CompressorFactory.getInstance().getCompressor(meta.getCompressorName());
            byte[] unCompressData;
            if (null != reusableDataBuffer && compressor.supportReusableBuffer()) {
                int uncompressedLength = compressor.unCompressedLength(input, offset, length);
                unCompressData = reusableDataBuffer.getDataBuffer(uncompressedLength);
                compressor.rawUncompress(input, offset, length, unCompressData);
            } else {
                unCompressData = compressor.unCompressByte(input, offset, length);
            }
            converter.decodeAndFillVector(unCompressData, vectorInfo, nullBits, meta.getStoreDataType(), pageSize);
        }

        @Override
        public ColumnPage decode(byte[] input, int offset, int length, boolean isLVEncoded) {
            return decode(input, offset, length);
        }
    };
}
Also used : ColumnPage(org.apache.carbondata.core.datastore.page.ColumnPage) LazyColumnPage(org.apache.carbondata.core.datastore.page.LazyColumnPage) ColumnVectorInfo(org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo) BitSet(java.util.BitSet) ReusableDataBuffer(org.apache.carbondata.core.datastore.ReusableDataBuffer) Compressor(org.apache.carbondata.core.datastore.compression.Compressor) ColumnPageDecoder(org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder)

Example 4 with ReusableDataBuffer

Use of org.apache.carbondata.core.datastore.ReusableDataBuffer in project carbondata by apache.

The class AdaptiveFloatingCodec, method createDecoder.

@Override
public ColumnPageDecoder createDecoder(final ColumnPageEncoderMeta meta) {
    return new ColumnPageDecoder() {

        @Override
        public ColumnPage decode(byte[] input, int offset, int length) {
            ColumnPage page = ColumnPage.decompress(meta, input, offset, length, false, false);
            return LazyColumnPage.newPage(page, converter);
        }

        @Override
        public void decodeAndFillVector(byte[] input, int offset, int length, ColumnVectorInfo vectorInfo, BitSet nullBits, boolean isLVEncoded, int pageSize, ReusableDataBuffer reusableDataBuffer) {
            Compressor compressor = CompressorFactory.getInstance().getCompressor(meta.getCompressorName());
            byte[] unCompressData;
            if (null != reusableDataBuffer && compressor.supportReusableBuffer()) {
                int uncompressedLength = compressor.unCompressedLength(input, offset, length);
                unCompressData = reusableDataBuffer.getDataBuffer(uncompressedLength);
                compressor.rawUncompress(input, offset, length, unCompressData);
            } else {
                unCompressData = compressor.unCompressByte(input, offset, length);
            }
            converter.decodeAndFillVector(unCompressData, vectorInfo, nullBits, meta.getStoreDataType(), pageSize);
        }

        @Override
        public ColumnPage decode(byte[] input, int offset, int length, boolean isLVEncoded) {
            return decode(input, offset, length);
        }
    };
}
Also used : ColumnPage(org.apache.carbondata.core.datastore.page.ColumnPage) LazyColumnPage(org.apache.carbondata.core.datastore.page.LazyColumnPage) ColumnVectorInfo(org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo) BitSet(java.util.BitSet) ReusableDataBuffer(org.apache.carbondata.core.datastore.ReusableDataBuffer) Compressor(org.apache.carbondata.core.datastore.compression.Compressor) ColumnPageDecoder(org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder)
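
Taken together with Example 1, the implied contract is that the caller owns one ReusableDataBuffer per projected column and passes the same instance into decodeAndFillVector for every page of that column, so the scratch array grows to the largest page seen and is then reused. The sketch below shows that calling pattern in heavily simplified form; PageDecoder, decodePage, and scanColumn are hypothetical stand-ins for CarbonData's chunk-reader plumbing, which is not shown in these examples.

import org.apache.carbondata.core.datastore.ReusableDataBuffer;

// Hypothetical calling pattern, heavily simplified; the real wiring lives in
// CarbonData's chunk readers.
final class ColumnScanSketch {

    // Stand-in for ColumnPageDecoder.decodeAndFillVector(...), with the
    // arguments that do not matter for buffer reuse omitted.
    interface PageDecoder {
        void decodePage(byte[] compressedPage, ReusableDataBuffer scratch);
    }

    // One ReusableDataBuffer per column: the same instance is handed to the
    // decoder for every page of that column.
    static void scanColumn(byte[][] compressedPages, PageDecoder decoder,
            ReusableDataBuffer columnScratch) {
        for (byte[] page : compressedPages) {
            decoder.decodePage(page, columnScratch);
        }
    }

    private ColumnScanSketch() {
    }
}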

Example 5 with ReusableDataBuffer

Use of org.apache.carbondata.core.datastore.ReusableDataBuffer in project carbondata by apache.

The class DirectCompressCodec, method createDecoder.

@Override
public ColumnPageDecoder createDecoder(final ColumnPageEncoderMeta meta) {
    return new ColumnPageDecoder() {

        @Override
        public ColumnPage decode(byte[] input, int offset, int length) {
            ColumnPage decodedPage;
            if (DataTypes.isDecimal(dataType)) {
                decodedPage = ColumnPage.decompressDecimalPage(meta, input, offset, length);
            } else {
                decodedPage = ColumnPage.decompress(meta, input, offset, length, false, isComplexPrimitiveIntLengthEncoding);
            }
            return LazyColumnPage.newPage(decodedPage, converter);
        }

        @Override
        public void decodeAndFillVector(byte[] input, int offset, int length, ColumnVectorInfo vectorInfo, BitSet nullBits, boolean isLVEncoded, int pageSize, ReusableDataBuffer reusableDataBuffer) {
            Compressor compressor = CompressorFactory.getInstance().getCompressor(meta.getCompressorName());
            int uncompressedLength;
            byte[] unCompressData;
            if (null != reusableDataBuffer && compressor.supportReusableBuffer()) {
                uncompressedLength = compressor.unCompressedLength(input, offset, length);
                unCompressData = reusableDataBuffer.getDataBuffer(uncompressedLength);
                compressor.rawUncompress(input, offset, length, unCompressData);
            } else {
                unCompressData = compressor.unCompressByte(input, offset, length);
                uncompressedLength = unCompressData.length;
            }
            if (DataTypes.isDecimal(dataType)) {
                TableSpec.ColumnSpec columnSpec = meta.getColumnSpec();
                DecimalConverterFactory.DecimalConverter decimalConverter = DecimalConverterFactory.INSTANCE.getDecimalConverter(columnSpec.getPrecision(), columnSpec.getScale());
                vectorInfo.decimalConverter = decimalConverter;
                if (DataTypes.isDecimal(meta.getStoreDataType())) {
                    ColumnPage decimalColumnPage = VarLengthColumnPageBase.newDecimalColumnPage(meta, unCompressData, uncompressedLength);
                    decimalConverter.fillVector(decimalColumnPage.getByteArrayPage(), pageSize, vectorInfo, nullBits, meta.getStoreDataType());
                } else {
                    converter.decodeAndFillVector(unCompressData, vectorInfo, nullBits, meta.getStoreDataType(), pageSize);
                }
            } else {
                converter.decodeAndFillVector(unCompressData, vectorInfo, nullBits, meta.getStoreDataType(), pageSize);
            }
        }

        @Override
        public ColumnPage decode(byte[] input, int offset, int length, boolean isLVEncoded) {
            return LazyColumnPage.newPage(ColumnPage.decompress(meta, input, offset, length, isLVEncoded, isComplexPrimitiveIntLengthEncoding), converter);
        }
    };
}
Also used : TableSpec(org.apache.carbondata.core.datastore.TableSpec) VariableLengthDimensionColumnPage(org.apache.carbondata.core.datastore.chunk.impl.VariableLengthDimensionColumnPage) ColumnPage(org.apache.carbondata.core.datastore.page.ColumnPage) LazyColumnPage(org.apache.carbondata.core.datastore.page.LazyColumnPage) ColumnVectorInfo(org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo) BitSet(java.util.BitSet) ReusableDataBuffer(org.apache.carbondata.core.datastore.ReusableDataBuffer) DecimalConverterFactory(org.apache.carbondata.core.metadata.datatype.DecimalConverterFactory) Compressor(org.apache.carbondata.core.datastore.compression.Compressor) ColumnPageDecoder(org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder)
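
A detail this example makes explicit: when the reusable-buffer path is taken, the returned byte[] can be longer than the uncompressed payload (a reusable scratch array only grows), so the decoder tracks uncompressedLength separately and passes it to newDecimalColumnPage instead of relying on unCompressData.length. The following is a minimal, self-contained illustration of that pitfall, using a plain byte[] in place of the real ReusableDataBuffer.

// Minimal illustration (plain byte[] instead of the CarbonData class): the
// scratch array can be larger than the data it currently holds, so the
// logical length must travel alongside the array.
final class OversizedBufferDemo {

    public static void main(String[] args) {
        byte[] scratch = new byte[0];

        // First page needs 64 bytes: the scratch array grows to 64.
        int firstPageLength = 64;
        if (scratch.length < firstPageLength) {
            scratch = new byte[firstPageLength];
        }

        // Second page needs only 16 bytes: the 64-byte array is reused, so
        // scratch.length (64) no longer equals the payload length (16).
        int secondPageLength = 16;
        if (scratch.length < secondPageLength) {
            scratch = new byte[secondPageLength];
        }

        System.out.println("array length = " + scratch.length
                + ", payload length = " + secondPageLength);
        // Prints: array length = 64, payload length = 16
    }
}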

Aggregations

ReusableDataBuffer (org.apache.carbondata.core.datastore.ReusableDataBuffer): 7 usages
BitSet (java.util.BitSet): 5 usages
Compressor (org.apache.carbondata.core.datastore.compression.Compressor): 5 usages
ColumnPage (org.apache.carbondata.core.datastore.page.ColumnPage): 5 usages
LazyColumnPage (org.apache.carbondata.core.datastore.page.LazyColumnPage): 5 usages
ColumnPageDecoder (org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder): 5 usages
ColumnVectorInfo (org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo): 5 usages
TableSpec (org.apache.carbondata.core.datastore.TableSpec): 3 usages
ArrayList (java.util.ArrayList): 2 usages
BlockExecutionInfo (org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo): 2 usages
LinkedHashSet (java.util.LinkedHashSet): 1 usage
AbstractIndex (org.apache.carbondata.core.datastore.block.AbstractIndex): 1 usage
SegmentProperties (org.apache.carbondata.core.datastore.block.SegmentProperties): 1 usage
TableBlockInfo (org.apache.carbondata.core.datastore.block.TableBlockInfo): 1 usage
VariableLengthDimensionColumnPage (org.apache.carbondata.core.datastore.chunk.impl.VariableLengthDimensionColumnPage): 1 usage
BlockletDataRefNode (org.apache.carbondata.core.indexstore.blockletindex.BlockletDataRefNode): 1 usage
DecimalConverterFactory (org.apache.carbondata.core.metadata.datatype.DecimalConverterFactory): 1 usage
CarbonDimension (org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension): 1 usage
CarbonMeasure (org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure): 1 usage
FilterResolverIntf (org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf): 1 usage