Usage of org.apache.carbondata.core.datastore.compression.Compressor in the Apache CarbonData project:
the createEncoder method of the AdaptiveDeltaFloatingCodec class.
@Override
public ColumnPageEncoder createEncoder(Map<String, String> parameter) {
  final Compressor compressor = CompressorFactory.getInstance().getCompressor();
  // Anonymous encoder that converts the input page to the adaptive-delta
  // target data type and compresses it with the default compressor.
  return new ColumnPageEncoder() {
    @Override
    protected byte[] encodeData(ColumnPage input) throws MemoryException, IOException {
      if (encodedPage != null) {
        throw new IllegalStateException("already encoded");
      }
      encodedPage = ColumnPage.newPage(input.getColumnSpec(), targetDataType, input.getPageSize());
      // converter (from the enclosing codec) writes the converted values into encodedPage
      input.convertValue(converter);
      try {
        return encodedPage.compress(compressor);
      } finally {
        // free the page's memory even if compression throws, to avoid a leak
        encodedPage.freeMemory();
      }
    }

    @Override
    protected List<Encoding> getEncodingList() {
      // single encoding identifying this codec in the page header
      List<Encoding> encodings = new ArrayList<>();
      encodings.add(Encoding.ADAPTIVE_DELTA_FLOATING);
      return encodings;
    }

    @Override
    protected ColumnPageEncoderMeta getEncoderMeta(ColumnPage inputPage) {
      // meta records the target type, page statistics and compressor name for decoding
      return new ColumnPageEncoderMeta(inputPage.getColumnSpec(), targetDataType, stats,
          compressor.getName());
    }
  };
}
Usage of org.apache.carbondata.core.datastore.compression.Compressor in the Apache CarbonData project:
the createEncoder method of the AdaptiveIntegralCodec class.
@Override
public ColumnPageEncoder createEncoder(Map<String, String> parameter) {
  final Compressor compressor = CompressorFactory.getInstance().getCompressor();
  // Anonymous encoder that converts the input page to the adaptive integral
  // target data type and compresses it with the default compressor.
  return new ColumnPageEncoder() {
    @Override
    protected byte[] encodeData(ColumnPage input) throws MemoryException, IOException {
      if (encodedPage != null) {
        throw new IllegalStateException("already encoded");
      }
      encodedPage = ColumnPage.newPage(input.getColumnSpec(), targetDataType, input.getPageSize());
      // converter (from the enclosing codec) writes the converted values into encodedPage
      input.convertValue(converter);
      try {
        return encodedPage.compress(compressor);
      } finally {
        // free the page's memory even if compression throws, to avoid a leak
        encodedPage.freeMemory();
      }
    }

    @Override
    protected List<Encoding> getEncodingList() {
      // single encoding identifying this codec in the page header
      List<Encoding> encodings = new ArrayList<>();
      encodings.add(Encoding.ADAPTIVE_INTEGRAL);
      return encodings;
    }

    @Override
    protected ColumnPageEncoderMeta getEncoderMeta(ColumnPage inputPage) {
      // meta records the target type, page statistics and compressor name for decoding
      return new ColumnPageEncoderMeta(inputPage.getColumnSpec(), targetDataType, stats,
          compressor.getName());
    }
  };
}
Usage of org.apache.carbondata.core.datastore.compression.Compressor in the Apache CarbonData project:
the decompress method of the ColumnPage class.
/**
 * Decompress data and create a column page using the decompressed data,
 * except for decimal page.
 *
 * @param meta           encoder meta carrying the store data type and the compressor name
 * @param compressedData compressed page bytes
 * @param offset         start offset of the compressed region within {@code compressedData}
 * @param length         number of compressed bytes to read
 * @return a new ColumnPage holding the decompressed values
 * @throws MemoryException if memory for the new page cannot be allocated
 * @throws UnsupportedOperationException if the store data type is not handled here
 */
public static ColumnPage decompress(ColumnPageEncoderMeta meta, byte[] compressedData, int offset, int length) throws MemoryException {
  Compressor compressor = CompressorFactory.getInstance().getCompressor(meta.getCompressorName());
  TableSpec.ColumnSpec columnSpec = meta.getColumnSpec();
  DataType storeDataType = meta.getStoreDataType();
  if (storeDataType == DataTypes.BOOLEAN || storeDataType == DataTypes.BYTE) {
    // booleans are stored as one byte per value, so they share the byte path
    byte[] byteData = compressor.unCompressByte(compressedData, offset, length);
    return newBytePage(columnSpec, byteData);
  } else if (storeDataType == DataTypes.SHORT) {
    short[] shortData = compressor.unCompressShort(compressedData, offset, length);
    return newShortPage(columnSpec, shortData);
  } else if (storeDataType == DataTypes.SHORT_INT) {
    // SHORT_INT (3-byte integers) is stored as raw bytes, not as short[]
    byte[] shortIntData = compressor.unCompressByte(compressedData, offset, length);
    return newShortIntPage(columnSpec, shortIntData);
  } else if (storeDataType == DataTypes.INT) {
    int[] intData = compressor.unCompressInt(compressedData, offset, length);
    return newIntPage(columnSpec, intData);
  } else if (storeDataType == DataTypes.LONG) {
    long[] longData = compressor.unCompressLong(compressedData, offset, length);
    return newLongPage(columnSpec, longData);
  } else if (storeDataType == DataTypes.FLOAT) {
    float[] floatData = compressor.unCompressFloat(compressedData, offset, length);
    return newFloatPage(columnSpec, floatData);
  } else if (storeDataType == DataTypes.DOUBLE) {
    double[] doubleData = compressor.unCompressDouble(compressedData, offset, length);
    return newDoublePage(columnSpec, doubleData);
  } else if (storeDataType == DataTypes.BYTE_ARRAY) {
    // variable-length values stored in length-value (LV) encoded byte form
    byte[] lvVarBytes = compressor.unCompressByte(compressedData, offset, length);
    return newLVBytesPage(columnSpec, lvVarBytes);
  } else {
    // fixed message typo ("unsupport uncompress") and reuse the local storeDataType
    throw new UnsupportedOperationException(
        "unsupported uncompress column page: " + storeDataType);
  }
}
Usage of org.apache.carbondata.core.datastore.compression.Compressor in the Apache CarbonData project:
the fillNullBitSet method of the ColumnPageEncoder class.
/**
 * Records the page's null-value bitmap in the data chunk metadata,
 * compressed with the default compressor.
 */
private void fillNullBitSet(ColumnPage inputPage, DataChunk2 dataChunk) {
  // serialize the null bitset and compress it before storing in the thrift meta
  byte[] nullBits = inputPage.getNullBits().toByteArray();
  byte[] compressedBits =
      CompressorFactory.getInstance().getCompressor().compressByte(nullBits);
  PresenceMeta presenceMeta = new PresenceMeta();
  presenceMeta.setPresent_bit_streamIsSet(true);
  presenceMeta.setPresent_bit_stream(compressedBits);
  dataChunk.setPresence(presenceMeta);
}
Usage of org.apache.carbondata.core.datastore.compression.Compressor in the Apache CarbonData project:
the createEncoder method of the AdaptiveFloatingCodec class.
@Override
public ColumnPageEncoder createEncoder(Map<String, String> parameter) {
  final Compressor compressor = CompressorFactory.getInstance().getCompressor();
  // Anonymous encoder that converts the input page to the adaptive floating
  // target data type and compresses it with the default compressor.
  return new ColumnPageEncoder() {
    @Override
    protected byte[] encodeData(ColumnPage input) throws MemoryException, IOException {
      if (encodedPage != null) {
        throw new IllegalStateException("already encoded");
      }
      encodedPage = ColumnPage.newPage(input.getColumnSpec(), targetDataType, input.getPageSize());
      // converter (from the enclosing codec) writes the converted values into encodedPage
      input.convertValue(converter);
      try {
        return encodedPage.compress(compressor);
      } finally {
        // free the page's memory even if compression throws, to avoid a leak
        encodedPage.freeMemory();
      }
    }

    @Override
    protected List<Encoding> getEncodingList() {
      // single encoding identifying this codec in the page header
      List<Encoding> encodings = new ArrayList<>();
      encodings.add(Encoding.ADAPTIVE_FLOATING);
      return encodings;
    }

    @Override
    protected ColumnPageEncoderMeta getEncoderMeta(ColumnPage inputPage) {
      // meta records the target type, page statistics and compressor name for decoding
      return new ColumnPageEncoderMeta(inputPage.getColumnSpec(), targetDataType, stats,
          compressor.getName());
    }
  };
}
Aggregations