Use of org.apache.carbondata.core.metadata.blocklet.datachunk.DataChunk in project carbondata by apache:
class AbstractDataFileFooterConverter, method getDataChunk.
/**
 * Converts a thrift-generated data chunk into its wrapper {@link DataChunk}
 * representation, copying page offsets/lengths, encodings and (optionally)
 * presence metadata.
 *
 * @param datachunkThrift       thrift data chunk read from the file footer
 * @param isPresenceMetaPresent whether the chunk's presence (null-value) metadata
 *                              should be copied into the wrapper
 * @return wrapper data chunk
 */
protected DataChunk getDataChunk(org.apache.carbondata.format.DataChunk datachunkThrift, boolean isPresenceMetaPresent) {
  DataChunk wrapperChunk = new DataChunk();
  wrapperChunk.setDataPageLength(datachunkThrift.getData_page_length());
  wrapperChunk.setDataPageOffset(datachunkThrift.getData_page_offset());
  if (isPresenceMetaPresent) {
    wrapperChunk.setNullValueIndexForColumn(getPresenceMeta(datachunkThrift.getPresence()));
  }
  wrapperChunk.setRlePageLength(datachunkThrift.getRle_page_length());
  wrapperChunk.setRlePageOffset(datachunkThrift.getRle_page_offset());
  wrapperChunk.setRowMajor(datachunkThrift.isRowMajor());
  wrapperChunk.setRowIdPageLength(datachunkThrift.getRowid_page_length());
  wrapperChunk.setRowIdPageOffset(datachunkThrift.getRowid_page_offset());
  // translate each thrift encoder constant into the wrapper Encoding enum
  List<Encoding> encodings = new ArrayList<Encoding>(datachunkThrift.getEncoders().size());
  for (org.apache.carbondata.format.Encoding thriftEncoding : datachunkThrift.getEncoders()) {
    encodings.add(fromExternalToWrapperEncoding(thriftEncoding));
  }
  wrapperChunk.setEncodingList(encodings);
  // DELTA-encoded chunks carry serialized encoder metadata that must be deserialized
  if (encodings.contains(Encoding.DELTA)) {
    List<ByteBuffer> serializedMetas = datachunkThrift.getEncoder_meta();
    List<ValueEncoderMeta> encoderMetas = new ArrayList<ValueEncoderMeta>(serializedMetas.size());
    for (ByteBuffer serializedMeta : serializedMetas) {
      encoderMetas.add(CarbonUtil.deserializeEncoderMeta(serializedMeta.array()));
    }
    wrapperChunk.setValueEncoderMeta(encoderMetas);
  }
  return wrapperChunk;
}
Use of org.apache.carbondata.core.metadata.blocklet.datachunk.DataChunk in project carbondata by apache:
class DataFileFooterConverter, method getBlockletInfo.
/**
 * Converts a thrift blocklet info into its wrapper {@link BlockletInfo},
 * classifying each column data chunk as either a dimension or a measure chunk.
 *
 * @param blockletInfoThrift blocklet info read from the thrift footer
 * @return wrapper blocklet info
 */
private BlockletInfo getBlockletInfo(org.apache.carbondata.format.BlockletInfo blockletInfoThrift) {
  BlockletInfo blockletInfo = new BlockletInfo();
  List<DataChunk> dimensionChunks = new ArrayList<DataChunk>();
  List<DataChunk> measureChunks = new ArrayList<DataChunk>();
  Iterator<org.apache.carbondata.format.DataChunk> chunkIterator = blockletInfoThrift.getColumn_data_chunksIterator();
  if (null != chunkIterator) {
    while (chunkIterator.hasNext()) {
      org.apache.carbondata.format.DataChunk thriftChunk = chunkIterator.next();
      // A chunk is a measure only when it is column-major AND DELTA encoded;
      // row-major chunks and non-DELTA column-major chunks are dimensions.
      // Only measure chunks carry presence (null-value) metadata.
      boolean isMeasure = !thriftChunk.isRowMajor() && thriftChunk.getEncoders().contains(org.apache.carbondata.format.Encoding.DELTA);
      if (isMeasure) {
        measureChunks.add(getDataChunk(thriftChunk, true));
      } else {
        dimensionChunks.add(getDataChunk(thriftChunk, false));
      }
    }
  }
  blockletInfo.setDimensionColumnChunk(dimensionChunks);
  blockletInfo.setMeasureColumnChunk(measureChunks);
  blockletInfo.setNumberOfRows(blockletInfoThrift.getNum_rows());
  return blockletInfo;
}
Use of org.apache.carbondata.core.metadata.blocklet.datachunk.DataChunk in project carbondata by apache:
class CarbonUtilTest, method testToGetValueCompressionModel.
@Test
public void testToGetValueCompressionModel() {
  // build one value-encoder meta describing a numeric column
  ValueEncoderMeta encoderMeta = new ValueEncoderMeta();
  encoderMeta.setMaxValue(5.0);
  encoderMeta.setMinValue(1.0);
  encoderMeta.setUniqueValue(2.0);
  encoderMeta.setType('n');
  encoderMeta.setDataTypeSelected((byte) 'v');
  List<ValueEncoderMeta> encoderMetas = new ArrayList<>();
  encoderMetas.add(encoderMeta);
  // wrap it in a single DELTA-encoded data chunk
  List<Encoding> encodings = new ArrayList<>();
  encodings.add(Encoding.DELTA);
  DataChunk chunk = new DataChunk();
  chunk.setEncodingList(encodings);
  chunk.setValueEncoderMeta(encoderMetas);
  List<DataChunk> chunks = new ArrayList<>();
  chunks.add(chunk);
  // the derived compression model should describe exactly one measure column
  WriterCompressModel compressModel = CarbonUtil.getValueCompressionModel(chunks.get(0).getValueEncoderMeta());
  assertEquals(1, compressModel.getMaxValue().length);
}
Use of org.apache.carbondata.core.metadata.blocklet.datachunk.DataChunk in project carbondata by apache:
class CompressedMeasureChunkFileBasedReaderV1, method readRawMeasureChunk.
/**
 * Reads the raw (still compressed) measure column data for the given block index
 * straight from the carbon data file.
 *
 * @param fileReader file reader used to fetch the bytes
 * @param blockIndex index of the measure column block to read
 * @return raw measure column chunk backed by the bytes read from disk
 */
@Override
public MeasureRawColumnChunk readRawMeasureChunk(FileHolder fileReader, int blockIndex) throws IOException {
  DataChunk chunkMeta = measureColumnChunks.get(blockIndex);
  int pageLength = chunkMeta.getDataPageLength();
  ByteBuffer rawBytes = fileReader.readByteBuffer(filePath, chunkMeta.getDataPageOffset(), pageLength);
  MeasureRawColumnChunk rawChunk = new MeasureRawColumnChunk(blockIndex, rawBytes, 0, pageLength, this);
  rawChunk.setFileReader(fileReader);
  // V1 format stores a single page per blocklet
  rawChunk.setPagesCount(1);
  rawChunk.setRowCount(new int[] { numberOfRows });
  return rawChunk;
}
Use of org.apache.carbondata.core.metadata.blocklet.datachunk.DataChunk in project carbondata by apache:
class CompressedMeasureChunkFileBasedReaderV1, method convertToMeasureChunk.
/**
 * Decompresses a raw measure column chunk into a {@link MeasureColumnDataChunk},
 * using the compression model derived from the chunk's value-encoder metadata.
 *
 * @param measureRawColumnChunk raw (compressed) chunk previously read from disk
 * @param pageNumber            page number (unused in V1, which has one page per blocklet)
 * @return decompressed measure column data chunk
 */
@Override
public MeasureColumnDataChunk convertToMeasureChunk(MeasureRawColumnChunk measureRawColumnChunk, int pageNumber) throws IOException {
  int blockIndex = measureRawColumnChunk.getBlockletId();
  DataChunk chunkMeta = measureColumnChunks.get(blockIndex);
  // the first encoder meta drives the reader compression model
  ValueEncoderMeta encoderMeta = chunkMeta.getValueEncoderMeta().get(0);
  ReaderCompressModel compressModel = ValueCompressionUtil.getReaderCompressModel(encoderMeta);
  ValueCompressionHolder valueHolder = compressModel.getValueCompressionHolder();
  ByteBuffer rawData = measureRawColumnChunk.getRawData();
  // decompress the raw bytes into the value holder
  valueHolder.uncompress(compressModel.getConvertedDataType(), rawData.array(), measureRawColumnChunk.getOffSet(), chunkMeta.getDataPageLength(), compressModel.getMantissa(), compressModel.getMaxValue(), numberOfRows);
  MeasureColumnDataChunk resultChunk = new MeasureColumnDataChunk();
  resultChunk.setMeasureDataHolder(new CarbonReadDataHolder(valueHolder));
  // carry over the null-value index metadata
  resultChunk.setNullValueIndexHolder(chunkMeta.getNullValueIndexForColumn());
  return resultChunk;
}
Aggregations