Use of org.apache.carbondata.core.datastore.page.encoding.adaptive.AdaptiveDeltaIntegralCodec in the Apache CarbonData project.
From the class TestEncodingFactory, method testSelectProperDeltaType2.
// Verifies that selectCodecByAlgorithmForIntegral widens the target type as the
// observed value range grows (BYTE -> SHORT_INT -> INT) and falls back to plain
// direct compression once the range requires a full LONG.
@Test
public void testSelectProperDeltaType2() {
PrimitivePageStatsCollector primitivePageStatsCollector = PrimitivePageStatsCollector.newInstance(DataTypes.LONG);
// for Byte
primitivePageStatsCollector.update((long) 200);
ColumnPageCodec columnPageCodec = DefaultEncodingFactory.selectCodecByAlgorithmForIntegral(primitivePageStatsCollector);
assert (columnPageCodec instanceof AdaptiveDeltaIntegralCodec);
assert (DataTypes.BYTE == ((AdaptiveDeltaIntegralCodec) columnPageCodec).getTargetDataType());
// for Short
primitivePageStatsCollector.update((long) 634767);
columnPageCodec = DefaultEncodingFactory.selectCodecByAlgorithmForIntegral(primitivePageStatsCollector);
assert (columnPageCodec instanceof AdaptiveIntegralCodec);
assert (DataTypes.SHORT_INT == ((AdaptiveIntegralCodec) columnPageCodec).getTargetDataType());
// for int
// NOTE(review): Integer.MAX_VALUE + 200 overflows int arithmetic BEFORE the
// cast to long, yielding a negative value near Integer.MIN_VALUE. The INT
// assertion below appears to rely on that overflowed value (a true
// (long) Integer.MAX_VALUE + 200 would exceed int range and change the
// selected codec) — confirm the overflow is intentional.
primitivePageStatsCollector.update((long) (Integer.MAX_VALUE + 200));
columnPageCodec = DefaultEncodingFactory.selectCodecByAlgorithmForIntegral(primitivePageStatsCollector);
assert (columnPageCodec instanceof AdaptiveIntegralCodec);
assert (DataTypes.INT == ((AdaptiveIntegralCodec) columnPageCodec).getTargetDataType());
// for long: a LONG-range value cannot be narrowed, so only compression is applied
primitivePageStatsCollector.update(Long.MAX_VALUE);
columnPageCodec = DefaultEncodingFactory.selectCodecByAlgorithmForIntegral(primitivePageStatsCollector);
assert (columnPageCodec instanceof DirectCompressCodec);
assert ("DirectCompressCodec".equals(columnPageCodec.getName()));
}
Use of org.apache.carbondata.core.datastore.page.encoding.adaptive.AdaptiveDeltaIntegralCodec in the Apache CarbonData project.
From the class DefaultEncodingFactory, method selectCodecByAlgorithmForDecimal.
/**
 * Chooses between the adaptive codec and the delta-adaptive codec for a
 * decimal column: whichever produces the narrower target data type wins.
 * When neither target is narrower than the source type, the page is only
 * direct-compressed.
 */
static ColumnPageCodec selectCodecByAlgorithmForDecimal(SimpleStatsResult stats, DecimalConverterFactory.DecimalConverterType decimalConverterType) {
  DataType srcType = stats.getDataType();
  DataType adaptiveType = fitMinMaxForDecimalType(srcType, stats.getMax(), stats.getMin(), decimalConverterType);
  // Once the adaptive fit already needs a LONG, delta encoding cannot improve on it.
  DataType deltaType = (adaptiveType == DataTypes.LONG)
      ? DataTypes.LONG
      : fitDeltaForDecimalType(srcType, stats.getMax(), stats.getMin(), decimalConverterType);
  // Compare candidate sizes (in bytes) against the source type's size.
  int narrowest = Math.min(adaptiveType.getSizeInBytes(), deltaType.getSizeInBytes());
  if (narrowest == srcType.getSizeInBytes()) {
    // Neither scheme shrinks the stored type, so use compression only.
    return new DirectCompressCodec(srcType);
  }
  return (adaptiveType.getSizeInBytes() <= deltaType.getSizeInBytes())
      ? new AdaptiveIntegralCodec(srcType, adaptiveType, stats)
      : new AdaptiveDeltaIntegralCodec(srcType, deltaType, stats);
}
Use of org.apache.carbondata.core.datastore.page.encoding.adaptive.AdaptiveDeltaIntegralCodec in the Apache CarbonData project.
From the class EncodingFactory, method createDecoderLegacy.
/**
 * Old way of creating a decoder, based on the selection algorithm: the codec
 * is re-derived from the recovered page statistics and the encoder metadata
 * is rebuilt from that codec's target data type.
 */
public ColumnPageDecoder createDecoderLegacy(ValueEncoderMeta metadata) {
  SimpleStatsResult stats = PrimitivePageStatsCollector.newInstance(metadata);
  TableSpec.ColumnSpec spec = TableSpec.ColumnSpec.newInstanceLegacy("legacy", stats.getDataType(), ColumnType.MEASURE);
  String compressor = "snappy";
  DataType dataType = DataType.getDataType(metadata.getType());
  if (dataType == DataTypes.BYTE || dataType == DataTypes.SHORT || dataType == DataTypes.INT || dataType == DataTypes.LONG) {
    // Re-run codec selection for integral types, then rebuild the metadata
    // the encoder would have produced for the chosen codec.
    ColumnPageCodec chosen = DefaultEncodingFactory.selectCodecByAlgorithmForIntegral(stats);
    DataType targetType;
    if (chosen instanceof AdaptiveIntegralCodec) {
      targetType = ((AdaptiveIntegralCodec) chosen).getTargetDataType();
    } else if (chosen instanceof AdaptiveDeltaIntegralCodec) {
      targetType = ((AdaptiveDeltaIntegralCodec) chosen).getTargetDataType();
    } else if (chosen instanceof DirectCompressCodec) {
      targetType = DataType.getDataType(metadata.getType());
    } else {
      throw new RuntimeException("internal error");
    }
    return chosen.createDecoder(new ColumnPageEncoderMeta(spec, targetType, stats, compressor));
  } else if (dataType == DataTypes.FLOAT || dataType == DataTypes.DOUBLE) {
    // Same recovery scheme for floating-point types.
    ColumnPageCodec chosen = DefaultEncodingFactory.selectCodecByAlgorithmForFloating(stats);
    DataType targetType;
    if (chosen instanceof AdaptiveFloatingCodec) {
      targetType = ((AdaptiveFloatingCodec) chosen).getTargetDataType();
    } else if (chosen instanceof DirectCompressCodec) {
      targetType = DataType.getDataType(metadata.getType());
    } else if (chosen instanceof AdaptiveDeltaFloatingCodec) {
      targetType = ((AdaptiveDeltaFloatingCodec) chosen).getTargetDataType();
    } else {
      throw new RuntimeException("internal error");
    }
    return chosen.createDecoder(new ColumnPageEncoderMeta(spec, targetType, stats, compressor));
  } else if (DataTypes.isDecimal(dataType) || dataType == DataTypes.BYTE_ARRAY) {
    // no dictionary dimension
    return new DirectCompressCodec(stats.getDataType()).createDecoder(new ColumnPageEncoderMeta(spec, stats.getDataType(), stats, compressor));
  } else if (dataType == DataTypes.LEGACY_LONG) {
    // In case of older versions like in V1 format it has special datatype to handle
    AdaptiveIntegralCodec legacyCodec = new AdaptiveIntegralCodec(DataTypes.LONG, DataTypes.LONG, stats);
    return legacyCodec.createDecoder(new ColumnPageEncoderMeta(spec, legacyCodec.getTargetDataType(), stats, compressor));
  } else {
    throw new RuntimeException("unsupported data type: " + stats.getDataType());
  }
}
Use of org.apache.carbondata.core.datastore.page.encoding.adaptive.AdaptiveDeltaIntegralCodec in the Apache CarbonData project.
From the class DefaultEncodingFactory, method selectCodecByAlgorithmForIntegral.
/**
 * Chooses between the adaptive codec and the delta-adaptive codec by
 * comparing the byte sizes of their target data types; when neither is
 * narrower than the source type, plain direct compression is used instead.
 */
static ColumnPageCodec selectCodecByAlgorithmForIntegral(SimpleStatsResult stats) {
  DataType srcType = stats.getDataType();
  DataType adaptiveType = fitMinMax(srcType, stats.getMax(), stats.getMin());
  // A LONG adaptive fit cannot be beaten by delta encoding.
  DataType deltaType = (adaptiveType == DataTypes.LONG)
      ? DataTypes.LONG
      : fitDelta(srcType, stats.getMax(), stats.getMin());
  // Compare candidate sizes (in bytes) against the source type's size.
  int smallest = Math.min(adaptiveType.getSizeInBytes(), deltaType.getSizeInBytes());
  if (smallest == srcType.getSizeInBytes()) {
    // No narrowing is possible; compression alone is the best option.
    return new DirectCompressCodec(srcType);
  }
  return (adaptiveType.getSizeInBytes() <= deltaType.getSizeInBytes())
      ? new AdaptiveIntegralCodec(srcType, adaptiveType, stats)
      : new AdaptiveDeltaIntegralCodec(srcType, deltaType, stats);
}
Use of org.apache.carbondata.core.datastore.page.encoding.adaptive.AdaptiveDeltaIntegralCodec in the Apache CarbonData project.
From the class EncodingFactory, method createDecoder.
/**
 * Returns a new decoder based on the encoder metadata read from the file.
 * Exactly one encoding and one serialized metadata blob are expected; an
 * unrecognized encoding falls back to the legacy (V3) metadata format.
 *
 * @param encodings    encodings applied to the page (must contain exactly one)
 * @param encoderMetas serialized encoder metadata (must contain exactly one)
 * @return decoder matching the page's encoding
 * @throws IOException if the metadata cannot be deserialized
 */
public ColumnPageDecoder createDecoder(List<Encoding> encodings, List<ByteBuffer> encoderMetas) throws IOException {
  assert (encodings.size() == 1);
  assert (encoderMetas.size() == 1);
  Encoding encoding = encodings.get(0);
  byte[] encoderMeta = encoderMetas.get(0).array();
  DataInputStream in = new DataInputStream(new ByteArrayInputStream(encoderMeta));
  if (encoding == DIRECT_COMPRESS) {
    ColumnPageEncoderMeta metadata = readColumnPageMeta(in);
    return new DirectCompressCodec(metadata.getStoreDataType()).createDecoder(metadata);
  } else if (encoding == ADAPTIVE_INTEGRAL) {
    ColumnPageEncoderMeta metadata = readColumnPageMeta(in);
    SimpleStatsResult stats = PrimitivePageStatsCollector.newInstance(metadata);
    return new AdaptiveIntegralCodec(metadata.getSchemaDataType(), metadata.getStoreDataType(), stats).createDecoder(metadata);
  } else if (encoding == ADAPTIVE_DELTA_INTEGRAL) {
    ColumnPageEncoderMeta metadata = readColumnPageMeta(in);
    SimpleStatsResult stats = PrimitivePageStatsCollector.newInstance(metadata);
    return new AdaptiveDeltaIntegralCodec(metadata.getSchemaDataType(), metadata.getStoreDataType(), stats).createDecoder(metadata);
  } else if (encoding == ADAPTIVE_FLOATING) {
    ColumnPageEncoderMeta metadata = readColumnPageMeta(in);
    SimpleStatsResult stats = PrimitivePageStatsCollector.newInstance(metadata);
    return new AdaptiveFloatingCodec(metadata.getSchemaDataType(), metadata.getStoreDataType(), stats).createDecoder(metadata);
  } else if (encoding == ADAPTIVE_DELTA_FLOATING) {
    ColumnPageEncoderMeta metadata = readColumnPageMeta(in);
    SimpleStatsResult stats = PrimitivePageStatsCollector.newInstance(metadata);
    return new AdaptiveDeltaFloatingCodec(metadata.getSchemaDataType(), metadata.getStoreDataType(), stats).createDecoder(metadata);
  } else if (encoding == RLE_INTEGRAL) {
    // RLE pages carry their own metadata class.
    RLEEncoderMeta metadata = new RLEEncoderMeta();
    metadata.readFields(in);
    return new RLECodec().createDecoder(metadata);
  } else if (encoding == BOOL_BYTE) {
    ColumnPageEncoderMeta metadata = readColumnPageMeta(in);
    return new DirectCompressCodec(metadata.getStoreDataType()).createDecoder(metadata);
  } else {
    // for backward compatibility: older stores serialized a ValueEncoderMeta
    ValueEncoderMeta metadata = CarbonUtil.deserializeEncoderMetaV3(encoderMeta);
    return createDecoderLegacy(metadata);
  }
}

/** Deserializes one ColumnPageEncoderMeta from the stream. */
private static ColumnPageEncoderMeta readColumnPageMeta(DataInputStream in) throws IOException {
  ColumnPageEncoderMeta metadata = new ColumnPageEncoderMeta();
  metadata.readFields(in);
  return metadata;
}
Aggregations