Example usage of org.apache.carbondata.core.datastore.compression.WriterCompressModel in the Apache CarbonData project.
From class ValueCompressionUtilTest, method testToGetValueCompressionModelForShortAndByte:
@Test
public void testToGetValueCompressionModelForShortAndByte() {
  // Single DOUBLE measure whose values span a narrow delta range (32500..32600).
  DataType[] measureTypes = { DataType.DOUBLE };
  Object[] maxima = { 32600.00 };
  Object[] minima = { 32500.00 };
  Object[] distinctValues = { 5 };
  int[] mantissaLengths = { 0 };
  byte[] selectedDataTypes = { 1 };
  // Build the measure metadata for one measure column and derive the compression model.
  MeasureMetaDataModel metaModel = new MeasureMetaDataModel(maxima, minima, mantissaLengths, 1, distinctValues, measureTypes, selectedDataTypes);
  WriterCompressModel compressModel = ValueCompressionUtil.getWriterCompressModel(metaModel);
  // A tight min/max spread on a double measure should select delta-of-double compression.
  assertEquals(ValueCompressionUtil.COMPRESSION_TYPE.DELTA_DOUBLE, compressModel.getCompType(0));
}
Example usage of org.apache.carbondata.core.datastore.compression.WriterCompressModel in the Apache CarbonData project.
From class ValueCompressionUtilTest, method testToGetValueCompressionModelForByteAndIntAndDecimal1:
@Test
public void testToGetValueCompressionModelForByteAndIntAndDecimal1() {
  // Single DOUBLE measure with one decimal place.
  // NOTE(review): max (-32766.00) is smaller than min (32744.0) — looks inverted,
  // but the fixture is kept as-is since the expected compression type pins it.
  DataType[] measureTypes = { DataType.DOUBLE };
  Object[] maxima = { -32766.00 };
  Object[] minima = { 32744.0 };
  Object[] distinctValues = { 5 };
  int[] mantissaLengths = { 1 };
  byte[] selectedDataTypes = { 1 };
  // One measure column; derive the writer compression model from its metadata.
  MeasureMetaDataModel metaModel = new MeasureMetaDataModel(maxima, minima, mantissaLengths, 1, distinctValues, measureTypes, selectedDataTypes);
  WriterCompressModel compressModel = ValueCompressionUtil.getWriterCompressModel(metaModel);
  // Expect delta-of-double compression for this configuration.
  assertEquals(ValueCompressionUtil.COMPRESSION_TYPE.DELTA_DOUBLE, compressModel.getCompType(0));
}
Example usage of org.apache.carbondata.core.datastore.compression.WriterCompressModel in the Apache CarbonData project.
From class CarbonMetadataUtilTest, method testConvertFileFooter:
@Test
public void testConvertFileFooter() throws Exception {
// Shared fixture arrays reused across both blocklet metadata objects below.
int[] intArr = { 1, 2, 3, 4, 5 };
boolean[] boolArr = { true, true, true, true, true };
long[] longArr = { 1, 2, 3, 4, 5 };
// Per-column max bytes; also reused as the min bytes when populating the blocklets.
byte[][] maxByteArr = { { 1, 2 }, { 3, 4 }, { 5, 6 }, { 2, 4 }, { 1, 2 } };
int[] cardinality = { 1, 2, 3, 4, 5 };
// Five INT measure types for the compression model.
org.apache.carbondata.core.metadata.datatype.DataType[] dataType = { org.apache.carbondata.core.metadata.datatype.DataType.INT, org.apache.carbondata.core.metadata.datatype.DataType.INT, org.apache.carbondata.core.metadata.datatype.DataType.INT, org.apache.carbondata.core.metadata.datatype.DataType.INT, org.apache.carbondata.core.metadata.datatype.DataType.INT };
// Two empty column schemas are enough to build the SegmentProperties fixture.
org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema colSchema = new org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema();
org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema colSchema1 = new org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema();
List<org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema> columnSchemaList = new ArrayList<>();
columnSchemaList.add(colSchema);
columnSchemaList.add(colSchema1);
SegmentProperties segmentProperties = new SegmentProperties(columnSchemaList, cardinality);
// Canned list returned by the mocked CarbonUtil.convertToIntegerList below.
final List<Integer> integerList = new ArrayList<>();
integerList.add(new Integer("1"));
integerList.add(new Integer("2"));
// Compression model attached to both blocklets.
// NOTE(review): objMaxArr, objMinArr and byteArr are not declared in this excerpt —
// presumably test-class fields or lost in extraction; confirm against the full source.
WriterCompressModel writerCompressModel = new WriterCompressModel();
writerCompressModel.setMaxValue(objMaxArr);
writerCompressModel.setMinValue(objMinArr);
writerCompressModel.setDataTypeSelected(byteArr);
writerCompressModel.setMantissa(intArr);
writerCompressModel.setType(dataType);
writerCompressModel.setUniqueValue(objMinArr);
// First blocklet: populate every metadata field the footer conversion reads.
BlockletInfoColumnar blockletInfoColumnar = new BlockletInfoColumnar();
BitSet[] bitSetArr = new BitSet[6];
bitSetArr[0] = new BitSet();
bitSetArr[1] = new BitSet();
bitSetArr[2] = new BitSet();
bitSetArr[3] = new BitSet();
bitSetArr[4] = new BitSet();
bitSetArr[5] = new BitSet();
blockletInfoColumnar.setColumnMaxData(maxByteArr);
blockletInfoColumnar.setColumnMinData(maxByteArr);
blockletInfoColumnar.setKeyLengths(intArr);
blockletInfoColumnar.setColGrpBlocks(boolArr);
blockletInfoColumnar.setKeyOffSets(longArr);
blockletInfoColumnar.setDataIndexMapOffsets(longArr);
blockletInfoColumnar.setAggKeyBlock(boolArr);
blockletInfoColumnar.setDataIndexMapLength(intArr);
blockletInfoColumnar.setIsSortedKeyColumn(boolArr);
blockletInfoColumnar.setKeyOffSets(longArr);
blockletInfoColumnar.setMeasureLength(intArr);
blockletInfoColumnar.setMeasureOffset(longArr);
blockletInfoColumnar.setMeasureNullValueIndex(bitSetArr);
blockletInfoColumnar.setCompressionModel(writerCompressModel);
// Second blocklet: same fixture data (setColGrpBlocks/setKeyOffSets repeated as in original).
BlockletInfoColumnar blockletInfoColumnar1 = new BlockletInfoColumnar();
blockletInfoColumnar1.setColumnMaxData(maxByteArr);
blockletInfoColumnar1.setColumnMinData(maxByteArr);
blockletInfoColumnar1.setKeyLengths(intArr);
blockletInfoColumnar1.setKeyOffSets(longArr);
blockletInfoColumnar1.setDataIndexMapOffsets(longArr);
blockletInfoColumnar1.setAggKeyBlock(boolArr);
blockletInfoColumnar1.setDataIndexMapLength(intArr);
blockletInfoColumnar1.setIsSortedKeyColumn(boolArr);
blockletInfoColumnar1.setColGrpBlocks(boolArr);
blockletInfoColumnar1.setKeyOffSets(longArr);
blockletInfoColumnar1.setMeasureLength(intArr);
blockletInfoColumnar1.setMeasureOffset(longArr);
blockletInfoColumnar1.setMeasureNullValueIndex(bitSetArr);
blockletInfoColumnar1.setCompressionModel(writerCompressModel);
blockletInfoColumnar1.setColGrpBlocks(boolArr);
List<BlockletInfoColumnar> blockletInfoColumnarList = new ArrayList<>();
blockletInfoColumnarList.add(blockletInfoColumnar);
blockletInfoColumnarList.add(blockletInfoColumnar1);
// JMockit: stub CarbonUtil.convertToIntegerList to return the canned list.
new MockUp<CarbonUtil>() {
@SuppressWarnings("unused")
@Mock
public List<Integer> convertToIntegerList(int[] array) {
return integerList;
}
};
final Set<Integer> integerSet = new HashSet<>();
integerSet.add(new Integer("1"));
integerSet.add(new Integer("2"));
// JMockit: stub SegmentProperties.getDimensionOrdinalForBlock to return the canned set.
new MockUp<SegmentProperties>() {
@SuppressWarnings("unused")
@Mock
public Set<Integer> getDimensionOrdinalForBlock(int blockIndex) {
return integerSet;
}
};
// Thrift footer objects; set up but (in this excerpt) not passed to the call under test.
SegmentInfo segmentInfo = new SegmentInfo();
segmentInfo.setNum_cols(4);
segmentInfo.setColumn_cardinalities(integerList);
FileFooter fileFooter = new FileFooter();
fileFooter.setNum_rows(4);
fileFooter.setSegment_info(segmentInfo);
byte[] byteMaxArr = "1".getBytes();
byte[] byteMinArr = "2".getBytes();
BlockletMinMaxIndex blockletMinMaxIndex = new BlockletMinMaxIndex();
blockletMinMaxIndex.addToMax_values(ByteBuffer.wrap(byteMaxArr));
blockletMinMaxIndex.addToMin_values(ByteBuffer.wrap(byteMinArr));
// Call under test; the footer's column list should echo the input schemas.
// NOTE(review): convertFileFooter (static import?) and columnSchemas are not declared
// in this excerpt — verify against the full test class.
FileFooter result = convertFileFooter(blockletInfoColumnarList, 4, cardinality, columnSchemas, segmentProperties);
assertEquals(result.getTable_columns(), columnSchemas);
}
Example usage of org.apache.carbondata.core.datastore.compression.WriterCompressModel in the Apache CarbonData project.
From class CarbonUtilTest, method testToGetValueCompressionModel:
@Test
public void testToGetValueCompressionModel() {
  // Encoder metadata for a single measure: range 1.0..5.0, non-decimal ('n') type.
  ValueEncoderMeta encoderMeta = new ValueEncoderMeta();
  encoderMeta.setMaxValue(5.0);
  encoderMeta.setMinValue(1.0);
  encoderMeta.setUniqueValue(2.0);
  encoderMeta.setType('n');
  encoderMeta.setDataTypeSelected((byte) 'v');
  List<ValueEncoderMeta> encoderMetas = new ArrayList<>();
  encoderMetas.add(encoderMeta);
  // Data chunk carrying a DELTA encoding and the metadata above.
  List<Encoding> encodings = new ArrayList<>();
  encodings.add(Encoding.DELTA);
  DataChunk chunk = new DataChunk();
  chunk.setEncodingList(encodings);
  chunk.setValueEncoderMeta(encoderMetas);
  List<DataChunk> chunks = new ArrayList<>();
  chunks.add(chunk);
  // One encoder meta entry should yield a model with exactly one max value.
  WriterCompressModel compressModel = CarbonUtil.getValueCompressionModel(chunks.get(0).getValueEncoderMeta());
  assertEquals(1, compressModel.getMaxValue().length);
}
Aggregations