use of org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder in project carbondata by apache.
the class DirectCompressCodec method createDecoder.
@Override
public ColumnPageDecoder createDecoder(final ColumnPageEncoderMeta meta) {
  return new ColumnPageDecoder() {

    @Override
    public ColumnPage decode(byte[] input, int offset, int length) {
      ColumnPage decodedPage;
      if (DataTypes.isDecimal(dataType)) {
        decodedPage = ColumnPage.decompressDecimalPage(meta, input, offset, length);
      } else {
        decodedPage = ColumnPage.decompress(meta, input, offset, length, false,
            isComplexPrimitiveIntLengthEncoding);
      }
      return LazyColumnPage.newPage(decodedPage, converter);
    }

    @Override
    public void decodeAndFillVector(byte[] input, int offset, int length,
        ColumnVectorInfo vectorInfo, BitSet nullBits, boolean isLVEncoded, int pageSize,
        ReusableDataBuffer reusableDataBuffer) {
      Compressor compressor =
          CompressorFactory.getInstance().getCompressor(meta.getCompressorName());
      int uncompressedLength;
      byte[] unCompressData;
      if (null != reusableDataBuffer && compressor.supportReusableBuffer()) {
        uncompressedLength = compressor.unCompressedLength(input, offset, length);
        unCompressData = reusableDataBuffer.getDataBuffer(uncompressedLength);
        compressor.rawUncompress(input, offset, length, unCompressData);
      } else {
        unCompressData = compressor.unCompressByte(input, offset, length);
        uncompressedLength = unCompressData.length;
      }
      if (DataTypes.isDecimal(dataType)) {
        TableSpec.ColumnSpec columnSpec = meta.getColumnSpec();
        DecimalConverterFactory.DecimalConverter decimalConverter =
            DecimalConverterFactory.INSTANCE.getDecimalConverter(
                columnSpec.getPrecision(), columnSpec.getScale());
        vectorInfo.decimalConverter = decimalConverter;
        if (DataTypes.isDecimal(meta.getStoreDataType())) {
          ColumnPage decimalColumnPage =
              VarLengthColumnPageBase.newDecimalColumnPage(meta, unCompressData, uncompressedLength);
          decimalConverter.fillVector(decimalColumnPage.getByteArrayPage(), pageSize, vectorInfo,
              nullBits, meta.getStoreDataType());
        } else {
          converter.decodeAndFillVector(unCompressData, vectorInfo, nullBits,
              meta.getStoreDataType(), pageSize);
        }
      } else {
        converter.decodeAndFillVector(unCompressData, vectorInfo, nullBits,
            meta.getStoreDataType(), pageSize);
      }
    }

    @Override
    public ColumnPage decode(byte[] input, int offset, int length, boolean isLVEncoded) {
      return LazyColumnPage.newPage(
          ColumnPage.decompress(meta, input, offset, length, isLVEncoded,
              isComplexPrimitiveIntLengthEncoding),
          converter);
    }
  };
}
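A minimal usage sketch for the decoder returned above. It assumes the DirectCompressCodec instance, its ColumnPageEncoderMeta and the compressed page bytes are supplied by the surrounding read path; the wrapper class, the `throws Exception` clause and the import package guessed for DirectCompressCodec are assumptions, not code from the project.

import org.apache.carbondata.core.datastore.page.ColumnPage;
import org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder;
import org.apache.carbondata.core.datastore.page.encoding.ColumnPageEncoderMeta;
import org.apache.carbondata.core.datastore.page.encoding.compress.DirectCompressCodec;

// Hypothetical helper, not part of CarbonData: decodes one direct-compressed page.
public final class DirectCompressDecodeSketch {

  // 'throws Exception' stands in for the checked exceptions that decode() declares
  // in some CarbonData versions.
  static ColumnPage decodePage(DirectCompressCodec codec, ColumnPageEncoderMeta meta,
      byte[] compressed, int offset, int length, boolean isLVEncoded) throws Exception {
    ColumnPageDecoder decoder = codec.createDecoder(meta);
    // The boolean overload forwards the length-value flag to ColumnPage.decompress;
    // the three-argument overload is equivalent to passing false.
    return isLVEncoded
        ? decoder.decode(compressed, offset, length, true)
        : decoder.decode(compressed, offset, length);
  }

  private DirectCompressDecodeSketch() { }
}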
use of org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder in project carbondata by apache.
the class CompressedMeasureChunkFileBasedReaderV1 method decodeColumnPage.
@Override
public ColumnPage decodeColumnPage(MeasureRawColumnChunk measureRawColumnChunk, int pageNumber)
    throws IOException, MemoryException {
  int blockIndex = measureRawColumnChunk.getColumnIndex();
  DataChunk dataChunk = measureColumnChunks.get(blockIndex);
  ValueEncoderMeta meta = dataChunk.getValueEncoderMeta().get(0);
  ColumnPageDecoder codec = encodingFactory.createDecoderLegacy(meta);
  ColumnPage decodedPage = codec.decode(measureRawColumnChunk.getRawData().array(),
      (int) measureRawColumnChunk.getOffSet(), dataChunk.getDataPageLength());
  decodedPage.setNullBits(dataChunk.getNullValueIndexForColumn());
  return decodedPage;
}
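A hedged sketch of a caller for this legacy V1 path. The wrapper class and the import packages guessed for the reader and chunk types are assumptions; decodeColumnPage comes from the snippet above, while getNullBits, getDouble and freeMemory are ColumnPage accessors expected to exist.

import java.util.BitSet;
import org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk;
import org.apache.carbondata.core.datastore.chunk.reader.measure.v1.CompressedMeasureChunkFileBasedReaderV1;
import org.apache.carbondata.core.datastore.page.ColumnPage;

// Hypothetical caller, not part of CarbonData: V1 files store one page per measure chunk,
// so page 0 is requested and the decoded values are copied out before the page is freed.
public final class V1MeasureReadSketch {

  static double[] readDoubles(CompressedMeasureChunkFileBasedReaderV1 reader,
      MeasureRawColumnChunk rawChunk, int rowCount) throws Exception {
    ColumnPage page = reader.decodeColumnPage(rawChunk, 0);
    BitSet nullBits = page.getNullBits();
    double[] values = new double[rowCount];
    for (int rowId = 0; rowId < rowCount; rowId++) {
      // Null rows are left at 0 here; a real scanner would consult nullBits downstream.
      values[rowId] = nullBits.get(rowId) ? 0d : page.getDouble(rowId);
    }
    page.freeMemory();
    return values;
  }

  private V1MeasureReadSketch() { }
}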
use of org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder in project carbondata by apache.
the class CompressedMeasureChunkFileBasedReaderV3 method decodeMeasure.
/**
 * Decode measure column page with page header and raw data starting from offset
 */
protected ColumnPage decodeMeasure(DataChunk2 pageMetadata, ByteBuffer pageData, int offset)
    throws MemoryException, IOException {
  List<Encoding> encodings = pageMetadata.getEncoders();
  List<ByteBuffer> encoderMetas = pageMetadata.getEncoder_meta();
  ColumnPageDecoder codec = encodingFactory.createDecoder(encodings, encoderMetas);
  return codec.decode(pageData.array(), offset, pageMetadata.data_page_length);
}
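The encodingFactory field used above is typically the DefaultEncodingFactory singleton. A self-contained equivalent of the same two-step pattern (build a decoder from the page's encodings and encoder metas, then decode at an explicit offset into the shared chunk buffer) might look like the sketch below; the factory choice, the wrapper class and the `throws Exception` clause are assumptions.

import java.nio.ByteBuffer;
import java.util.List;
import org.apache.carbondata.core.datastore.page.ColumnPage;
import org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder;
import org.apache.carbondata.core.datastore.page.encoding.DefaultEncodingFactory;
import org.apache.carbondata.core.datastore.page.encoding.EncodingFactory;
import org.apache.carbondata.format.DataChunk2;
import org.apache.carbondata.format.Encoding;

// Hypothetical standalone version of the decode step, not a CarbonData class.
public final class V3MeasureDecodeSketch {

  static ColumnPage decodeMeasurePage(DataChunk2 pageMetadata, ByteBuffer pageData, int offset)
      throws Exception {
    EncodingFactory factory = DefaultEncodingFactory.getInstance();
    List<Encoding> encodings = pageMetadata.getEncoders();
    List<ByteBuffer> encoderMetas = pageMetadata.getEncoder_meta();
    ColumnPageDecoder decoder = factory.createDecoder(encodings, encoderMetas);
    // data_page_length bounds the slice of the shared buffer that belongs to this page.
    return decoder.decode(pageData.array(), offset, pageMetadata.data_page_length);
  }

  private V3MeasureDecodeSketch() { }
}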
use of org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder in project carbondata by apache.
the class CompressedDimensionChunkFileBasedReaderV3 method decodeDimensionByMeta.
private ColumnPage decodeDimensionByMeta(DataChunk2 pageMetadata, ByteBuffer pageData, int offset)
    throws IOException, MemoryException {
  List<Encoding> encodings = pageMetadata.getEncoders();
  List<ByteBuffer> encoderMetas = pageMetadata.getEncoder_meta();
  ColumnPageDecoder decoder = encodingFactory.createDecoder(encodings, encoderMetas);
  return decoder.decode(pageData.array(), offset, pageMetadata.data_page_length);
}
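The dimension path builds its decoder exactly the same way as the measure path; the extra wrinkle is that length-value (LV) encoded pages go through the four-argument decode overload shown earlier. A hypothetical variant that exposes that flag (the class name, factory choice and `throws Exception` clause are assumptions):

import java.nio.ByteBuffer;
import java.util.List;
import org.apache.carbondata.core.datastore.page.ColumnPage;
import org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder;
import org.apache.carbondata.core.datastore.page.encoding.DefaultEncodingFactory;
import org.apache.carbondata.format.DataChunk2;
import org.apache.carbondata.format.Encoding;

// Hypothetical variant of decodeDimensionByMeta that forwards an LV-encoding flag.
public final class V3DimensionDecodeSketch {

  static ColumnPage decodeDimensionPage(DataChunk2 pageMetadata, ByteBuffer pageData, int offset,
      boolean isLVEncoded) throws Exception {
    List<Encoding> encodings = pageMetadata.getEncoders();
    List<ByteBuffer> encoderMetas = pageMetadata.getEncoder_meta();
    ColumnPageDecoder decoder =
        DefaultEncodingFactory.getInstance().createDecoder(encodings, encoderMetas);
    return decoder.decode(pageData.array(), offset, pageMetadata.data_page_length, isLVEncoded);
  }

  private V3DimensionDecodeSketch() { }
}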
use of org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder in project carbondata by apache.
the class AdaptiveIntegralCodec method createDecoder.
@Override
public ColumnPageDecoder createDecoder(final ColumnPageEncoderMeta meta) {
  return new ColumnPageDecoder() {

    @Override
    public ColumnPage decode(byte[] input, int offset, int length) {
      ColumnPage page = null;
      if (DataTypes.isDecimal(meta.getSchemaDataType())) {
        page = ColumnPage.decompressDecimalPage(meta, input, offset, length);
      } else {
        page = ColumnPage.decompress(meta, input, offset, length, false, false);
      }
      return LazyColumnPage.newPage(page, converter);
    }

    @Override
    public void decodeAndFillVector(byte[] input, int offset, int length,
        ColumnVectorInfo vectorInfo, BitSet nullBits, boolean isLVEncoded, int pageSize,
        ReusableDataBuffer reusableDataBuffer) {
      Compressor compressor =
          CompressorFactory.getInstance().getCompressor(meta.getCompressorName());
      byte[] unCompressData;
      if (null != reusableDataBuffer && compressor.supportReusableBuffer()) {
        int uncompressedLength = compressor.unCompressedLength(input, offset, length);
        unCompressData = reusableDataBuffer.getDataBuffer(uncompressedLength);
        compressor.rawUncompress(input, offset, length, unCompressData);
      } else {
        unCompressData = compressor.unCompressByte(input, offset, length);
      }
      if (DataTypes.isDecimal(meta.getSchemaDataType())) {
        TableSpec.ColumnSpec columnSpec = meta.getColumnSpec();
        vectorInfo.decimalConverter = DecimalConverterFactory.INSTANCE.getDecimalConverter(
            columnSpec.getPrecision(), columnSpec.getScale());
      }
      converter.decodeAndFillVector(unCompressData, vectorInfo, nullBits,
          meta.getStoreDataType(), pageSize);
    }

    @Override
    public ColumnPage decode(byte[] input, int offset, int length, boolean isLVEncoded) {
      return decode(input, offset, length);
    }
  };
}
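Both codecs repeat the same reusable-buffer decompression branch inside decodeAndFillVector. That pattern can be isolated as a small helper; this is a sketch of the technique, not an existing CarbonData utility, and the import packages guessed for Compressor and ReusableDataBuffer are assumptions.

import org.apache.carbondata.core.datastore.ReusableDataBuffer;
import org.apache.carbondata.core.datastore.compression.Compressor;

// Hypothetical helper capturing the shared decompression pattern: if the compressor can
// report the uncompressed size up front, decompress into a caller-supplied pooled buffer;
// otherwise let the compressor allocate a fresh byte[].
public final class UncompressSketch {

  static byte[] uncompress(Compressor compressor, byte[] input, int offset, int length,
      ReusableDataBuffer reusableDataBuffer) {
    if (reusableDataBuffer != null && compressor.supportReusableBuffer()) {
      int uncompressedLength = compressor.unCompressedLength(input, offset, length);
      byte[] target = reusableDataBuffer.getDataBuffer(uncompressedLength);
      compressor.rawUncompress(input, offset, length, target);
      // 'target' may be longer than uncompressedLength when the buffer is reused, which is
      // why the codecs track the uncompressed length separately from the array length.
      return target;
    }
    return compressor.unCompressByte(input, offset, length);
  }

  private UncompressSketch() { }
}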