Usage example of org.apache.carbondata.core.datastore.compression.ValueCompressionHolder in the Apache CarbonData project — class CompressedMeasureChunkFileBasedReaderV2, method convertToMeasureChunk:
/**
 * Converts a compressed V2-format measure raw column chunk into an
 * uncompressed {@link MeasureColumnDataChunk}.
 *
 * @param measureRawColumnChunk raw (compressed) measure chunk read from file
 * @param pageNumber            page index (unused in V2 — one page per chunk)
 * @return data chunk holding the uncompressed values and null-value indexes
 * @throws IOException if the data page cannot be read from the file
 */
public MeasureColumnDataChunk convertToMeasureChunk(MeasureRawColumnChunk measureRawColumnChunk,
    int pageNumber) throws IOException {
  MeasureColumnDataChunk datChunk = new MeasureColumnDataChunk();
  DataChunk2 measureColumnChunk = null;
  int copyPoint = measureRawColumnChunk.getOffSet();
  int blockIndex = measureRawColumnChunk.getBlockletId();
  ByteBuffer rawData = measureRawColumnChunk.getRawData();
  if (measureColumnChunkOffsets.size() - 1 == blockIndex) {
    // Last column chunk: the prefetched buffer holds only the chunk header,
    // so the data page must be read separately from the file.
    measureColumnChunk =
        CarbonUtil.readDataChunk(rawData, copyPoint, measureColumnChunkLength.get(blockIndex));
    synchronized (measureRawColumnChunk.getFileReader()) {
      rawData = measureRawColumnChunk.getFileReader().readByteBuffer(filePath,
          measureColumnChunkOffsets.get(blockIndex) + measureColumnChunkLength.get(blockIndex),
          measureColumnChunk.data_page_length);
    }
    // BUGFIX: the freshly read buffer begins at the data page, so the copy
    // point must be reset to 0; keeping the old offset would index the new
    // (data_page_length-sized) buffer with a stale file offset.
    copyPoint = 0;
  } else {
    measureColumnChunk =
        CarbonUtil.readDataChunk(rawData, copyPoint, measureColumnChunkLength.get(blockIndex));
    // Data page follows the chunk header inside the same buffer.
    copyPoint += measureColumnChunkLength.get(blockIndex);
  }
  // Deserialize the per-value encoder metadata written alongside the page.
  List<ValueEncoderMeta> valueEncodeMeta = new ArrayList<>();
  for (int i = 0; i < measureColumnChunk.getEncoder_meta().size(); i++) {
    valueEncodeMeta
        .add(CarbonUtil.deserializeEncoderMeta(measureColumnChunk.getEncoder_meta().get(i).array()));
  }
  WriterCompressModel compressionModel = CarbonUtil.getValueCompressionModel(valueEncodeMeta);
  ValueCompressionHolder values = compressionModel.getValueCompressionHolder()[0];
  // uncompress the data page into the holder
  values.uncompress(compressionModel.getConvertedDataType()[0], rawData.array(), copyPoint,
      measureColumnChunk.data_page_length, compressionModel.getMantissa()[0],
      compressionModel.getMaxValue()[0], numberOfRows);
  CarbonReadDataHolder measureDataHolder = new CarbonReadDataHolder(values);
  // set the data chunk
  datChunk.setMeasureDataHolder(measureDataHolder);
  // set the null value indexes
  datChunk.setNullValueIndexHolder(getPresenceMeta(measureColumnChunk.presence));
  return datChunk;
}
Usage example of org.apache.carbondata.core.datastore.compression.ValueCompressionHolder in the Apache CarbonData project — class CompressedMeasureChunkFileBasedReaderV3, method convertToMeasureChunk:
/**
 * Below method will be used to convert the compressed measure chunk raw data to actual data.
 * The V3 raw chunk holds multiple pages; this method decompresses the requested page only.
 *
 * @param measureRawColumnChunk measure raw chunk (may contain several pages)
 * @param pageNumber index of the page to decompress within the raw chunk
 * @return MeasureColumnDataChunk holding the uncompressed values and null-value indexes
 */
@Override
public MeasureColumnDataChunk convertToMeasureChunk(MeasureRawColumnChunk measureRawColumnChunk, int pageNumber) throws IOException {
MeasureColumnDataChunk datChunk = new MeasureColumnDataChunk();
// data chunk of blocklet column (container for all pages of this column)
DataChunk3 dataChunk3 = measureRawColumnChunk.getDataChunkV3();
// data chunk of the requested page
DataChunk2 measureColumnChunk = dataChunk3.getData_chunk_list().get(pageNumber);
// calculating the start point of data
// as buffer can contain multiple column data, start point will be datachunkoffset +
// data chunk length + page offset
int copyPoint = measureRawColumnChunk.getOffSet() + measureColumnChunkLength.get(measureRawColumnChunk.getBlockletId()) + dataChunk3.getPage_offset().get(pageNumber);
// deserialize the per-value encoder metadata stored with the page
List<ValueEncoderMeta> valueEncodeMeta = new ArrayList<>();
for (int i = 0; i < measureColumnChunk.getEncoder_meta().size(); i++) {
valueEncodeMeta.add(CarbonUtil.deserializeEncoderMetaNew(measureColumnChunk.getEncoder_meta().get(i).array()));
}
WriterCompressModel compressionModel = CarbonUtil.getValueCompressionModel(valueEncodeMeta);
ValueCompressionHolder values = compressionModel.getValueCompressionHolder()[0];
// uncompress the page's data region directly out of the raw buffer;
// row count is per-page in V3, unlike earlier format versions
ByteBuffer rawData = measureRawColumnChunk.getRawData();
values.uncompress(compressionModel.getConvertedDataType()[0], rawData.array(), copyPoint, measureColumnChunk.data_page_length, compressionModel.getMantissa()[0], compressionModel.getMaxValue()[0], measureRawColumnChunk.getRowCount()[pageNumber]);
CarbonReadDataHolder measureDataHolder = new CarbonReadDataHolder(values);
// set the data chunk
datChunk.setMeasureDataHolder(measureDataHolder);
// set the null value indexes
datChunk.setNullValueIndexHolder(getPresenceMeta(measureColumnChunk.presence));
return datChunk;
}
Usage example of org.apache.carbondata.core.datastore.compression.ValueCompressionHolder in the Apache CarbonData project — class ValueCompressionUtil, method getWriterCompressModel:
/**
 * Creates the {@link WriterCompressModel} used on the write path from the
 * supplied measure metadata (min/max/unique values, mantissa, data types).
 *
 * @param measureMDMdl per-measure metadata collected while writing
 * @return fully populated compression model covering every measure column
 */
public static WriterCompressModel getWriterCompressModel(MeasureMetaDataModel measureMDMdl) {
  int count = measureMDMdl.getMeasureCount();
  Object[] minValues = measureMDMdl.getMinValue();
  Object[] maxValues = measureMDMdl.getMaxValue();
  Object[] uniqueValues = measureMDMdl.getUniqueValue();
  int[] mantissas = measureMDMdl.getMantissa();
  DataType[] types = measureMDMdl.getType();
  byte[] selectedDataTypes = measureMDMdl.getDataTypeSelected();

  CompressionFinder[] finders = new CompressionFinder[count];
  DataType[] actualTypes = new DataType[count];
  DataType[] convertedTypes = new DataType[count];
  // Determine, per measure, how its values should be compressed.
  for (int i = 0; i < count; i++) {
    CompressionFinder finder = ValueCompressionUtil
        .getCompressionFinder(maxValues[i], minValues[i], mantissas[i], types[i],
            selectedDataTypes[i]);
    finders[i] = finder;
    actualTypes[i] = finder.getActualDataType();
    convertedTypes[i] = finder.getConvertedDataType();
  }

  WriterCompressModel compressionModel = new WriterCompressModel();
  compressionModel.setCompressionFinders(finders);
  compressionModel.setMaxValue(maxValues);
  compressionModel.setMantissa(mantissas);
  compressionModel.setConvertedDataType(convertedTypes);
  compressionModel.setActualDataType(actualTypes);
  compressionModel.setMinValue(minValues);
  compressionModel.setUniqueValue(uniqueValues);
  compressionModel.setType(types);
  compressionModel.setDataTypeSelected(selectedDataTypes);
  compressionModel
      .setValueCompressionHolder(ValueCompressionUtil.getValueCompressionHolder(finders));
  return compressionModel;
}
Usage example of org.apache.carbondata.core.datastore.compression.ValueCompressionHolder in the Apache CarbonData project — class CompressedMeasureChunkFileBasedReaderV1, method convertToMeasureChunk:
/**
 * Decompresses a V1-format measure raw column chunk into a
 * {@link MeasureColumnDataChunk}.
 *
 * @param measureRawColumnChunk raw (compressed) measure chunk read from file
 * @param pageNumber            page index (unused in V1 — one page per blocklet)
 * @return data chunk holding the uncompressed values and null-value indexes
 * @throws IOException if the raw data cannot be read
 */
@Override
public MeasureColumnDataChunk convertToMeasureChunk(MeasureRawColumnChunk measureRawColumnChunk,
    int pageNumber) throws IOException {
  int blockletIndex = measureRawColumnChunk.getBlockletId();
  DataChunk metaChunk = measureColumnChunks.get(blockletIndex);
  // V1 stores exactly one encoder meta per column chunk.
  ValueEncoderMeta encoderMeta = metaChunk.getValueEncoderMeta().get(0);
  ReaderCompressModel compressModel = ValueCompressionUtil.getReaderCompressModel(encoderMeta);
  ValueCompressionHolder holder = compressModel.getValueCompressionHolder();
  ByteBuffer buffer = measureRawColumnChunk.getRawData();
  // Uncompress the data page directly out of the raw buffer.
  holder.uncompress(compressModel.getConvertedDataType(), buffer.array(),
      measureRawColumnChunk.getOffSet(), metaChunk.getDataPageLength(),
      compressModel.getMantissa(), compressModel.getMaxValue(), numberOfRows);
  // Create the result chunk and attach the decoded values.
  MeasureColumnDataChunk datChunk = new MeasureColumnDataChunk();
  datChunk.setMeasureDataHolder(new CarbonReadDataHolder(holder));
  // Attach the null-value indexes so readers can skip absent values.
  datChunk.setNullValueIndexHolder(metaChunk.getNullValueIndexForColumn());
  return datChunk;
}
Usage example of org.apache.carbondata.core.datastore.compression.ValueCompressionHolder in the Apache CarbonData project — class ValueCompressionUtilTest, method testToUnCompressNonDecimalForLong:
/**
 * Verifies that requesting a non-decimal compression holder for LONG data
 * yields a {@link CompressionNonDecimalLong} instance.
 */
@Test
public void testToUnCompressNonDecimalForLong() {
  ValueCompressionHolder result = ValueCompressionUtil.getCompressionNonDecimal(DataType.LONG);
  // JUnit convention: expected value first, actual second — the original had
  // them reversed, which produces misleading failure messages.
  assertEquals(CompressionNonDecimalLong.class, result.getClass());
}
Aggregations