Use of org.apache.carbondata.core.datastore.ReusableDataBuffer in project carbondata by apache.
The class AbstractQueryExecutor, method getBlockExecutionInfos:
protected List<BlockExecutionInfo> getBlockExecutionInfos(QueryModel queryModel)
    throws IOException {
  initQuery(queryModel);
  List<BlockExecutionInfo> blockExecutionInfoList = new ArrayList<BlockExecutionInfo>();
  // Fill the block execution info for every block selected by the query;
  // the query will then be executed based on those infos.
  ReusableDataBuffer[] dimensionReusableDataBuffers = null;
  ReusableDataBuffer[] measureReusableDataBuffers = null;
  for (int i = 0; i < queryProperties.dataBlocks.size(); i++) {
    AbstractIndex abstractIndex = queryProperties.dataBlocks.get(i);
    BlockletDataRefNode dataRefNode = (BlockletDataRefNode) abstractIndex.getDataRefNode();
    final BlockExecutionInfo blockExecutionInfoForBlock =
        getBlockExecutionInfoForBlock(queryModel, abstractIndex, dataRefNode.numberOfNodes(),
            dataRefNode.getTableBlockInfo().getFilePath(),
            dataRefNode.getTableBlockInfo().getDeletedDeltaFilePath(),
            dataRefNode.getTableBlockInfo().getSegment());
    if (null == dimensionReusableDataBuffers || null == measureReusableDataBuffers) {
      dimensionReusableDataBuffers = blockExecutionInfoForBlock.getDimensionReusableDataBuffer();
      measureReusableDataBuffers = blockExecutionInfoForBlock.getMeasureReusableDataBuffer();
    } else {
      // Share the buffers across blocks only when the column counts match.
      if (dimensionReusableDataBuffers.length
          == blockExecutionInfoForBlock.getDimensionReusableDataBuffer().length) {
        blockExecutionInfoForBlock.setDimensionReusableDataBuffer(dimensionReusableDataBuffers);
      }
      if (measureReusableDataBuffers.length
          == blockExecutionInfoForBlock.getMeasureReusableDataBuffer().length) {
        blockExecutionInfoForBlock.setMeasureReusableDataBuffer(measureReusableDataBuffers);
      }
    }
    blockExecutionInfoList.add(blockExecutionInfoForBlock);
  }
  if (null != queryModel.getStatisticsRecorder()) {
    QueryStatistic queryStatistic = new QueryStatistic();
    queryStatistic.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKS_NUM,
        blockExecutionInfoList.size());
    queryModel.getStatisticsRecorder().recordStatistics(queryStatistic);
  }
  return blockExecutionInfoList;
}
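The branch above shares the dimension and measure ReusableDataBuffer arrays across blocks only when the column counts match, so a scan thread ends up with at most one buffer per column no matter how many blocks it reads. Below is a minimal sketch of the grow-on-demand contract such a buffer has to honor; the class is a hypothetical stand-in for illustration, not the carbondata implementation:

// Hypothetical stand-in for ReusableDataBuffer: getDataBuffer returns an
// array of at least the requested size, reallocating only when a request
// outgrows the array it already holds.
final class GrowOnlyBuffer {
  private byte[] data = new byte[0];

  byte[] getDataBuffer(int requestedSize) {
    if (data.length < requestedSize) {
      // Over-allocate by 25% so steadily growing pages do not force a
      // reallocation on every call (this growth policy is an assumption).
      data = new byte[requestedSize + (requestedSize >> 2)];
    }
    return data;
  }
}

Note that the returned array may be longer than requested, so callers must carry the logical length separately; the decoders below do exactly that with uncompressedLength.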
Use of org.apache.carbondata.core.datastore.ReusableDataBuffer in project carbondata by apache.
The class AdaptiveDeltaIntegralCodec, method createDecoder:
@Override
public ColumnPageDecoder createDecoder(final ColumnPageEncoderMeta meta) {
  return new ColumnPageDecoder() {
    @Override
    public ColumnPage decode(byte[] input, int offset, int length) {
      ColumnPage page = null;
      if (DataTypes.isDecimal(meta.getSchemaDataType())) {
        page = ColumnPage.decompressDecimalPage(meta, input, offset, length);
      } else {
        page = ColumnPage.decompress(meta, input, offset, length, false, false);
      }
      return LazyColumnPage.newPage(page, converter);
    }

    @Override
    public void decodeAndFillVector(byte[] input, int offset, int length,
        ColumnVectorInfo vectorInfo, BitSet nullBits, boolean isLVEncoded, int pageSize,
        ReusableDataBuffer reusableDataBuffer) {
      Compressor compressor =
          CompressorFactory.getInstance().getCompressor(meta.getCompressorName());
      byte[] unCompressData;
      if (null != reusableDataBuffer && compressor.supportReusableBuffer()) {
        int uncompressedLength = compressor.unCompressedLength(input, offset, length);
        unCompressData = reusableDataBuffer.getDataBuffer(uncompressedLength);
        compressor.rawUncompress(input, offset, length, unCompressData);
      } else {
        unCompressData = compressor.unCompressByte(input, offset, length);
      }
      if (DataTypes.isDecimal(meta.getSchemaDataType())) {
        TableSpec.ColumnSpec columnSpec = meta.getColumnSpec();
        vectorInfo.decimalConverter = DecimalConverterFactory.INSTANCE
            .getDecimalConverter(columnSpec.getPrecision(), columnSpec.getScale());
      }
      converter.decodeAndFillVector(unCompressData, vectorInfo, nullBits,
          meta.getStoreDataType(), pageSize);
    }

    @Override
    public ColumnPage decode(byte[] input, int offset, int length, boolean isLVEncoded) {
      return decode(input, offset, length);
    }
  };
}
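Each decoder in this section repeats the same branch: when a reusable buffer is supplied and the compressor can decompress into a caller-provided array, it borrows a buffer of the exact uncompressed size and decompresses in place; otherwise it falls back to the allocating unCompressByte. Extracted as a hypothetical helper (the method and its name are illustrative, not a carbondata API):

// Illustrative helper mirroring the branch used by the decoders in this
// section; uncompressIntoReusable is not part of carbondata.
static byte[] uncompressIntoReusable(Compressor compressor, byte[] input, int offset,
    int length, ReusableDataBuffer reusableDataBuffer) {
  if (null != reusableDataBuffer && compressor.supportReusableBuffer()) {
    // Read the uncompressed size from the compressed data, borrow a
    // buffer at least that big, and decompress straight into it.
    int uncompressedLength = compressor.unCompressedLength(input, offset, length);
    byte[] unCompressData = reusableDataBuffer.getDataBuffer(uncompressedLength);
    compressor.rawUncompress(input, offset, length, unCompressData);
    return unCompressData;
  }
  // Fallback: the compressor allocates and returns a fresh array per call.
  return compressor.unCompressByte(input, offset, length);
}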
Use of org.apache.carbondata.core.datastore.ReusableDataBuffer in project carbondata by apache.
The class AdaptiveDeltaFloatingCodec, method createDecoder:
@Override
public ColumnPageDecoder createDecoder(final ColumnPageEncoderMeta meta) {
  return new ColumnPageDecoder() {
    @Override
    public ColumnPage decode(byte[] input, int offset, int length) {
      ColumnPage page = ColumnPage.decompress(meta, input, offset, length, false, false);
      return LazyColumnPage.newPage(page, converter);
    }

    @Override
    public void decodeAndFillVector(byte[] input, int offset, int length,
        ColumnVectorInfo vectorInfo, BitSet nullBits, boolean isLVEncoded, int pageSize,
        ReusableDataBuffer reusableDataBuffer) {
      Compressor compressor =
          CompressorFactory.getInstance().getCompressor(meta.getCompressorName());
      byte[] unCompressData;
      if (null != reusableDataBuffer && compressor.supportReusableBuffer()) {
        int uncompressedLength = compressor.unCompressedLength(input, offset, length);
        unCompressData = reusableDataBuffer.getDataBuffer(uncompressedLength);
        compressor.rawUncompress(input, offset, length, unCompressData);
      } else {
        unCompressData = compressor.unCompressByte(input, offset, length);
      }
      converter.decodeAndFillVector(unCompressData, vectorInfo, nullBits,
          meta.getStoreDataType(), pageSize);
    }

    @Override
    public ColumnPage decode(byte[] input, int offset, int length, boolean isLVEncoded) {
      return decode(input, offset, length);
    }
  };
}
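The reusable-buffer branch leans on three Compressor methods besides the plain unCompressByte. Here is a partial sketch of that contract, inferred only from the call sites in this section (member names match the calls above; the rawUncompress return type is an assumption, and the real interface has more members):

// Partial, inferred view of the Compressor contract; not the full
// org.apache.carbondata Compressor interface.
interface CompressorView {
  // Whether rawUncompress can write into a caller-supplied array.
  boolean supportReusableBuffer();

  // Uncompressed size of the payload, read without decompressing it.
  int unCompressedLength(byte[] data, int offset, int length);

  // Decompress input[offset, offset + length) into output.
  // (Return type assumed here; the call sites above ignore it.)
  void rawUncompress(byte[] input, int offset, int length, byte[] output);

  // Allocating fallback: decompress into a newly created array.
  byte[] unCompressByte(byte[] input, int offset, int length);
}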
Use of org.apache.carbondata.core.datastore.ReusableDataBuffer in project carbondata by apache.
The class AdaptiveFloatingCodec, method createDecoder:
@Override
public ColumnPageDecoder createDecoder(final ColumnPageEncoderMeta meta) {
  return new ColumnPageDecoder() {
    @Override
    public ColumnPage decode(byte[] input, int offset, int length) {
      ColumnPage page = ColumnPage.decompress(meta, input, offset, length, false, false);
      return LazyColumnPage.newPage(page, converter);
    }

    @Override
    public void decodeAndFillVector(byte[] input, int offset, int length,
        ColumnVectorInfo vectorInfo, BitSet nullBits, boolean isLVEncoded, int pageSize,
        ReusableDataBuffer reusableDataBuffer) {
      Compressor compressor =
          CompressorFactory.getInstance().getCompressor(meta.getCompressorName());
      byte[] unCompressData;
      if (null != reusableDataBuffer && compressor.supportReusableBuffer()) {
        int uncompressedLength = compressor.unCompressedLength(input, offset, length);
        unCompressData = reusableDataBuffer.getDataBuffer(uncompressedLength);
        compressor.rawUncompress(input, offset, length, unCompressData);
      } else {
        unCompressData = compressor.unCompressByte(input, offset, length);
      }
      converter.decodeAndFillVector(unCompressData, vectorInfo, nullBits,
          meta.getStoreDataType(), pageSize);
    }

    @Override
    public ColumnPage decode(byte[] input, int offset, int length, boolean isLVEncoded) {
      return decode(input, offset, length);
    }
  };
}
Use of org.apache.carbondata.core.datastore.ReusableDataBuffer in project carbondata by apache.
The class DirectCompressCodec, method createDecoder:
@Override
public ColumnPageDecoder createDecoder(final ColumnPageEncoderMeta meta) {
  return new ColumnPageDecoder() {
    @Override
    public ColumnPage decode(byte[] input, int offset, int length) {
      ColumnPage decodedPage;
      if (DataTypes.isDecimal(dataType)) {
        decodedPage = ColumnPage.decompressDecimalPage(meta, input, offset, length);
      } else {
        decodedPage = ColumnPage.decompress(meta, input, offset, length, false,
            isComplexPrimitiveIntLengthEncoding);
      }
      return LazyColumnPage.newPage(decodedPage, converter);
    }

    @Override
    public void decodeAndFillVector(byte[] input, int offset, int length,
        ColumnVectorInfo vectorInfo, BitSet nullBits, boolean isLVEncoded, int pageSize,
        ReusableDataBuffer reusableDataBuffer) {
      Compressor compressor =
          CompressorFactory.getInstance().getCompressor(meta.getCompressorName());
      int uncompressedLength;
      byte[] unCompressData;
      if (null != reusableDataBuffer && compressor.supportReusableBuffer()) {
        uncompressedLength = compressor.unCompressedLength(input, offset, length);
        unCompressData = reusableDataBuffer.getDataBuffer(uncompressedLength);
        compressor.rawUncompress(input, offset, length, unCompressData);
      } else {
        unCompressData = compressor.unCompressByte(input, offset, length);
        uncompressedLength = unCompressData.length;
      }
      if (DataTypes.isDecimal(dataType)) {
        TableSpec.ColumnSpec columnSpec = meta.getColumnSpec();
        DecimalConverterFactory.DecimalConverter decimalConverter =
            DecimalConverterFactory.INSTANCE.getDecimalConverter(
                columnSpec.getPrecision(), columnSpec.getScale());
        vectorInfo.decimalConverter = decimalConverter;
        if (DataTypes.isDecimal(meta.getStoreDataType())) {
          ColumnPage decimalColumnPage = VarLengthColumnPageBase.newDecimalColumnPage(
              meta, unCompressData, uncompressedLength);
          decimalConverter.fillVector(decimalColumnPage.getByteArrayPage(), pageSize,
              vectorInfo, nullBits, meta.getStoreDataType());
        } else {
          converter.decodeAndFillVector(unCompressData, vectorInfo, nullBits,
              meta.getStoreDataType(), pageSize);
        }
      } else {
        converter.decodeAndFillVector(unCompressData, vectorInfo, nullBits,
            meta.getStoreDataType(), pageSize);
      }
    }

    @Override
    public ColumnPage decode(byte[] input, int offset, int length, boolean isLVEncoded) {
      return LazyColumnPage.newPage(
          ColumnPage.decompress(meta, input, offset, length, isLVEncoded,
              isComplexPrimitiveIntLengthEncoding),
          converter);
    }
  };
}
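DirectCompressCodec is the only decoder here that keeps uncompressedLength after the branch: its decimal path passes the logical length to VarLengthColumnPageBase.newDecimalColumnPage, because a reused buffer is usually longer than the payload currently sitting in it. A self-contained demonstration of that pitfall, using the hypothetical GrowOnlyBuffer sketched earlier:

// Why the logical length must travel with a reused buffer: the backing
// array can be longer than the current payload.
public class ReuseLengthDemo {
  public static void main(String[] args) {
    GrowOnlyBuffer buffer = new GrowOnlyBuffer();
    byte[] first = buffer.getDataBuffer(100); // page 1 needs 100 bytes
    byte[] second = buffer.getDataBuffer(40); // page 2 needs only 40
    System.out.println(first == second);      // true: same array reused
    System.out.println(second.length);        // 125, not 40: array length
                                              // is not the payload length
  }
}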