Use of org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder in the Apache CarbonData project.
The decodeMeasure method of the MeasureChunkReaderV3 class.
/**
 * Decode a measure column page, given its page header and the raw data
 * starting from the given offset.
 */
protected ColumnPage decodeMeasure(DataChunk2 pageMetadata, ByteBuffer pageData, int offset,
    ColumnVectorInfo vectorInfo, BitSet nullBitSet, ReusableDataBuffer reusableDataBuffer)
    throws IOException {
  List<Encoding> encodings = pageMetadata.getEncoders();
  org.apache.carbondata.core.metadata.encoder.Encoding.validateEncodingTypes(encodings);
  List<ByteBuffer> encoderMetas = pageMetadata.getEncoder_meta();
  String compressorName =
      CarbonMetadataUtil.getCompressorNameFromChunkMeta(pageMetadata.getChunk_meta());
  ColumnPageDecoder codec =
      encodingFactory.createDecoder(encodings, encoderMetas, compressorName, vectorInfo != null);
  if (vectorInfo != null) {
    // Vectorized path: decode directly into the caller's column vector and
    // return null, so no intermediate ColumnPage is materialized.
    vectorInfo.vector.setCarbonDataFileWrittenVersion(vectorInfo.carbonDataFileWrittenVersion);
    codec.decodeAndFillVector(pageData.array(), offset, pageMetadata.data_page_length, vectorInfo,
        nullBitSet, false, pageMetadata.numberOfRowsInpage, reusableDataBuffer);
    return null;
  } else {
    return codec.decode(pageData.array(), offset, pageMetadata.data_page_length);
  }
}
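For reference, a minimal sketch of invoking this method on the non-vectorized path. The variable names pageMetadata, rawData, copyPoint, nullBitSet, and reusableDataBuffer are illustrative, not taken from the original:

// Hypothetical call site: passing null for ColumnVectorInfo selects the
// branch that materializes and returns a ColumnPage.
ColumnPage measurePage =
    decodeMeasure(pageMetadata, rawData, copyPoint, null, nullBitSet, reusableDataBuffer);
// ... consume the decoded values, then release the page's memory
measurePage.freeMemory();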
Use of org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder in the Apache CarbonData project.
The testPageLevelDictionaryContainsOnlyUsedDictionaryValuesWhenMultiplePagesUseSameDictionary method of the TestPageLevelDictionary class.
@Test
public void testPageLevelDictionaryContainsOnlyUsedDictionaryValuesWhenMultiplePagesUseSameDictionary() {
  LocalDictionaryGenerator generator = new ColumnLocalDictionaryGenerator(1000, 2);
  String columnName = "column1";
  PageLevelDictionary pageLevelDictionary1 =
      new PageLevelDictionary(generator, columnName, DataTypes.STRING, false, compressorName);
  byte[][] validateData = new byte[10][];
  int index = 0;
  try {
    // First page: add "vishal1".."vishal5" in the 2-byte length-prefixed format.
    for (int i = 1; i <= 5; i++) {
      byte[] data = ("vishal" + i).getBytes();
      ByteBuffer byteBuffer = ByteBuffer.allocate(data.length + 2);
      byteBuffer.putShort((short) data.length);
      byteBuffer.put(data);
      validateData[index] = data;
      pageLevelDictionary1.getDictionaryValue(byteBuffer.array());
      index++;
    }
  } catch (DictionaryThresholdReachedException e) {
    Assert.fail(e.getMessage());
  }
  PageLevelDictionary pageLevelDictionary2 =
      new PageLevelDictionary(generator, columnName, DataTypes.STRING, false, compressorName);
  try {
    // Second page shares the same generator but adds a disjoint set of values.
    for (int i = 1; i <= 5; i++) {
      byte[] data = ("vikas" + i).getBytes();
      ByteBuffer byteBuffer = ByteBuffer.allocate(data.length + 2);
      byteBuffer.putShort((short) data.length);
      byteBuffer.put(data);
      pageLevelDictionary2.getDictionaryValue(byteBuffer.array());
    }
  } catch (DictionaryThresholdReachedException e) {
    Assert.fail(e.getMessage());
  }
  try {
    for (int i = 6; i <= 10; i++) {
      byte[] data = ("vishal" + i).getBytes();
      ByteBuffer byteBuffer = ByteBuffer.allocate(data.length + 2);
      byteBuffer.putShort((short) data.length);
      byteBuffer.put(data);
      validateData[index] = data;
      pageLevelDictionary1.getDictionaryValue(byteBuffer.array());
      index++;
    }
  } catch (DictionaryThresholdReachedException e) {
    Assert.fail(e.getMessage());
  }
  try {
    for (int i = 6; i <= 10; i++) {
      byte[] data = ("vikas" + i).getBytes();
      ByteBuffer byteBuffer = ByteBuffer.allocate(data.length + 2);
      byteBuffer.putShort((short) data.length);
      byteBuffer.put(data);
      pageLevelDictionary2.getDictionaryValue(byteBuffer.array());
    }
  } catch (DictionaryThresholdReachedException e) {
    Assert.fail(e.getMessage());
  }
  try {
    // Decode the blocklet-level dictionary of page 1 and verify it contains
    // exactly the values that page added, in insertion order.
    LocalDictionaryChunk localDictionaryChunkForBlocklet =
        pageLevelDictionary1.getLocalDictionaryChunkForBlocklet();
    List<Encoding> encodings = localDictionaryChunkForBlocklet.getDictionary_meta().getEncoders();
    EncodingFactory encodingFactory = DefaultEncodingFactory.getInstance();
    List<ByteBuffer> encoderMetas =
        localDictionaryChunkForBlocklet.getDictionary_meta().getEncoder_meta();
    ColumnPageDecoder decoder =
        encodingFactory.createDecoder(encodings, encoderMetas, compressorName);
    ColumnPage decode = decoder.decode(localDictionaryChunkForBlocklet.getDictionary_data(), 0,
        localDictionaryChunkForBlocklet.getDictionary_data().length);
    BitSet bitSet = BitSet.valueOf(CompressorFactory.getInstance()
        .getCompressor(compressorName)
        .unCompressByte(localDictionaryChunkForBlocklet.getDictionary_values()));
    Assert.assertEquals(validateData.length, bitSet.cardinality());
    for (int i = 0; i < validateData.length; i++) {
      Assert.assertTrue(Arrays.equals(decode.getBytes(i), validateData[i]));
    }
  } catch (IOException e) {
    Assert.fail(e.getMessage());
  }
}
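The two-byte length prefix written before each value in these loops is the length-value layout that PageLevelDictionary.getDictionaryValue expects for variable-length string bytes. A small helper capturing that step, as a sketch under the same format used throughout this test class (the name toLVBytes is illustrative):

// Hypothetical helper: wrap raw value bytes in the 2-byte length-prefixed
// layout built inline in the loops above.
private static byte[] toLVBytes(byte[] data) {
  ByteBuffer byteBuffer = ByteBuffer.allocate(data.length + 2);
  byteBuffer.putShort((short) data.length);
  byteBuffer.put(data);
  return byteBuffer.array();
}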
Use of org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder in the Apache CarbonData project.
The testPageLevelDictionaryContainsOnlyUsedDictionaryValues method of the TestPageLevelDictionary class.
@Test
public void testPageLevelDictionaryContainsOnlyUsedDictionaryValues() {
  LocalDictionaryGenerator generator = new ColumnLocalDictionaryGenerator(1000, 2);
  String columnName = "column1";
  PageLevelDictionary pageLevelDictionary1 =
      new PageLevelDictionary(generator, columnName, DataTypes.STRING, false, compressorName);
  byte[][] validateData = new byte[500][];
  try {
    for (int i = 1; i <= 500; i++) {
      byte[] data = ("vishal" + i).getBytes();
      ByteBuffer byteBuffer = ByteBuffer.allocate(data.length + 2);
      byteBuffer.putShort((short) data.length);
      byteBuffer.put(data);
      validateData[i - 1] = data;
      pageLevelDictionary1.getDictionaryValue(byteBuffer.array());
    }
  } catch (DictionaryThresholdReachedException e) {
    Assert.fail(e.getMessage());
  }
  PageLevelDictionary pageLevelDictionary2 =
      new PageLevelDictionary(generator, columnName, DataTypes.STRING, false, compressorName);
  try {
    for (int i = 1; i <= 500; i++) {
      byte[] data = ("vikas" + i).getBytes();
      ByteBuffer byteBuffer = ByteBuffer.allocate(data.length + 2);
      byteBuffer.putShort((short) data.length);
      byteBuffer.put(data);
      pageLevelDictionary2.getDictionaryValue(byteBuffer.array());
    }
  } catch (DictionaryThresholdReachedException e) {
    Assert.fail(e.getMessage());
  }
  try {
    LocalDictionaryChunk localDictionaryChunkForBlocklet =
        pageLevelDictionary1.getLocalDictionaryChunkForBlocklet();
    List<Encoding> encodings = localDictionaryChunkForBlocklet.getDictionary_meta().getEncoders();
    EncodingFactory encodingFactory = DefaultEncodingFactory.getInstance();
    List<ByteBuffer> encoderMetas =
        localDictionaryChunkForBlocklet.getDictionary_meta().getEncoder_meta();
    ColumnPageDecoder decoder =
        encodingFactory.createDecoder(encodings, encoderMetas, compressorName);
    ColumnPage decode = decoder.decode(localDictionaryChunkForBlocklet.getDictionary_data(), 0,
        localDictionaryChunkForBlocklet.getDictionary_data().length);
    for (int i = 0; i < 500; i++) {
      // Assert the comparison so that a mismatch actually fails the test.
      Assert.assertTrue(Arrays.equals(decode.getBytes(i), validateData[i]));
    }
  } catch (IOException e) {
    Assert.fail(e.getMessage());
  }
}
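The used-values bitmap checked in the previous test could be validated here as well; a sketch reusing the accessors shown above, assuming all 500 generated values were added to pageLevelDictionary1:

// Optional mirror of the previous test's check: all 500 values should be
// marked as used in the dictionary's value bitmap.
BitSet used = BitSet.valueOf(CompressorFactory.getInstance()
    .getCompressor(compressorName)
    .unCompressByte(localDictionaryChunkForBlocklet.getDictionary_values()));
Assert.assertEquals(500, used.cardinality());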
Use of org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder in the Apache CarbonData project.
The decodeDimensionByMeta method of the DimensionChunkReaderV3 class.
private ColumnPage decodeDimensionByMeta(DataChunk2 pageMetadata, ByteBuffer pageData, int offset,
    boolean isLocalDictEncodedPage, ColumnVectorInfo vectorInfo, BitSet nullBitSet,
    ReusableDataBuffer reusableDataBuffer) throws IOException {
  List<Encoding> encodings = pageMetadata.getEncoders();
  List<ByteBuffer> encoderMetas = pageMetadata.getEncoder_meta();
  String compressorName =
      CarbonMetadataUtil.getCompressorNameFromChunkMeta(pageMetadata.getChunk_meta());
  ColumnPageDecoder decoder =
      encodingFactory.createDecoder(encodings, encoderMetas, compressorName, vectorInfo != null);
  if (vectorInfo != null) {
    // Set the encodings of the current page in the vectorInfo; used for decoding
    // the complex child page.
    vectorInfo.encodings = encodings;
    vectorInfo.vector.setCarbonDataFileWrittenVersion(vectorInfo.carbonDataFileWrittenVersion);
    decoder.decodeAndFillVector(pageData.array(), offset, pageMetadata.data_page_length, vectorInfo,
        nullBitSet, isLocalDictEncodedPage, pageMetadata.numberOfRowsInpage, reusableDataBuffer);
    if (vectorInfo.vector.getType().isComplexType() && !vectorInfo.vectorStack.isEmpty()) {
      // For complex types, the top of the vector stack is always what
      // decodeAndFillVector processed, so pop() it now that it is finished.
      vectorInfo.vectorStack.pop();
    }
    return null;
  } else {
    return decoder.decode(pageData.array(), offset, pageMetadata.data_page_length,
        isLocalDictEncodedPage);
  }
}
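A minimal sketch of the vectorized invocation path, as it might be driven from within the same reader; vectorInfo would be populated by the query engine, and all variable names here are illustrative:

// Hypothetical call site inside the reader: the vectorized branch fills
// vectorInfo.vector in place and returns null by design.
ColumnPage page = decodeDimensionByMeta(pageMetadata, rawData, copyPoint,
    isLocalDictEncodedPage, vectorInfo, nullBitSet, reusableDataBuffer);
assert page == null; // no ColumnPage is materialized on the vector path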
Use of org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder in the Apache CarbonData project.
The validateDictionary method of the CarbonTestUtil class.
public static Boolean validateDictionary(DimensionRawColumnChunk rawColumnPage, String[] data)
    throws IOException {
  LocalDictionaryChunk local_dictionary = rawColumnPage.getDataChunkV3().local_dictionary;
  if (null != local_dictionary) {
    // Decode the local dictionary page using the same compressor that wrote the chunk.
    String compressorName = CarbonMetadataUtil.getCompressorNameFromChunkMeta(
        rawColumnPage.getDataChunkV3().getData_chunk_list().get(0).getChunk_meta());
    List<org.apache.carbondata.format.Encoding> encodings =
        local_dictionary.getDictionary_meta().encoders;
    DefaultEncodingFactory encodingFactory =
        (DefaultEncodingFactory) DefaultEncodingFactory.getInstance();
    ColumnPageDecoder decoder = encodingFactory.createDecoder(encodings,
        local_dictionary.getDictionary_meta().getEncoder_meta(), compressorName);
    LazyColumnPage dictionaryPage = (LazyColumnPage) decoder.decode(
        local_dictionary.getDictionary_data(), 0, local_dictionary.getDictionary_data().length);
    // Walk the used-values bitmap and map each dictionary entry's bytes to its
    // surrogate key (the bit position).
    HashMap<DictionaryByteArrayWrapper, Integer> dictionaryMap = new HashMap<>();
    BitSet usedDictionaryValues = BitSet.valueOf(CompressorFactory.getInstance()
        .getCompressor(compressorName).unCompressByte(local_dictionary.getDictionary_values()));
    int index = 0;
    int i = usedDictionaryValues.nextSetBit(0);
    while (i >= 0) {
      dictionaryMap.put(new DictionaryByteArrayWrapper(dictionaryPage.getBytes(index)), i);
      i = usedDictionaryValues.nextSetBit(i + 1);
      index += 1;
    }
    // Every expected value must be present in the decoded dictionary.
    for (i = 0; i < data.length; i++) {
      if (null == dictionaryMap.get(new DictionaryByteArrayWrapper(
          data[i].getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET))))) {
        return false;
      }
    }
    return true;
  }
  return false;
}
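A hedged usage sketch for this helper, assuming a DimensionRawColumnChunk already read from a blocklet; the rawColumnChunk variable and the expected values are hypothetical:

// Hypothetical call site: check that all expected strings made it into the
// page's local dictionary.
String[] expected = { "vishal1", "vikas1" };
Assert.assertTrue(CarbonTestUtil.validateDictionary(rawColumnChunk, expected));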