Use of org.apache.carbondata.core.metadata.BlockletInfoColumnar in project carbondata (by apache):
class CarbonMetadataUtil, method getFileFooter.
/**
 * Builds the common thrift {@link FileFooter} skeleton: segment info,
 * format version, total row count, table columns and one index entry per
 * blocklet. Callers attach the version-specific blocklet info lists.
 *
 * @param infoList metadata of every blocklet in the file
 * @param cardinalities cardinality of each dimension column
 * @param columnSchemaList thrift schema of every column
 * @return partially populated file footer
 */
private static FileFooter getFileFooter(List<BlockletInfoColumnar> infoList, int[] cardinalities, List<ColumnSchema> columnSchemaList) {
  SegmentInfo segment = new SegmentInfo();
  segment.setNum_cols(columnSchemaList.size());
  segment.setColumn_cardinalities(CarbonUtil.convertToIntegerList(cardinalities));
  FileFooter fileFooter = new FileFooter();
  // format version is taken from the configured carbon properties
  fileFooter.setVersion(CarbonProperties.getInstance().getFormatVersion().number());
  fileFooter.setNum_rows(getTotalNumberOfRows(infoList));
  fileFooter.setSegment_info(segment);
  fileFooter.setTable_columns(columnSchemaList);
  // one index entry (start/end key and min/max) per blocklet
  for (BlockletInfoColumnar blockletInfo : infoList) {
    fileFooter.addToBlocklet_index_list(getBlockletIndex(blockletInfo));
  }
  return fileFooter;
}
Use of org.apache.carbondata.core.metadata.BlockletInfoColumnar in project carbondata (by apache):
class CarbonMetadataUtil, method convertFilterFooter2.
/**
 * Converts blocklet metadata into a V2-format thrift file footer, attaching
 * the per-blocklet data chunk offsets and lengths to the shared footer
 * skeleton built by {@code getFileFooter}.
 *
 * @param infoList blocklet metadata, one entry per blocklet
 * @param cardinalities cardinality of each column
 * @param columnSchemaList thrift schema of every column
 * @param dataChunksOffset data chunk offsets, indexed parallel to infoList
 * @param dataChunksLength data chunk lengths, indexed parallel to infoList
 * @return file footer thrift object
 */
public static FileFooter convertFilterFooter2(List<BlockletInfoColumnar> infoList, int[] cardinalities, List<ColumnSchema> columnSchemaList, List<List<Long>> dataChunksOffset, List<List<Short>> dataChunksLength) {
  FileFooter footer = getFileFooter(infoList, cardinalities, columnSchemaList);
  // offsets/lengths lists are parallel to infoList, so iterate by index
  for (int i = 0; i < infoList.size(); i++) {
    footer.addToBlocklet_info_list2(
        getBlockletInfo2(infoList.get(i), dataChunksOffset.get(i), dataChunksLength.get(i)));
  }
  return footer;
}
Use of org.apache.carbondata.core.metadata.BlockletInfoColumnar in project carbondata (by apache):
class CarbonFooterWriterTest, method testReadFactMetadata.
/**
 * Verifies that a footer written through CarbonFooterWriter can be read back
 * and converted into the same number of blocklet metadata entries.
 */
@Test
public void testReadFactMetadata() throws IOException {
  deleteFile();
  createFile();
  CarbonFooterWriter footerWriter = new CarbonFooterWriter(filePath);
  List<BlockletInfoColumnar> writtenBlocklets = getBlockletInfoColumnars();
  int[] cardinalities = new int[] { 2, 4, 5, 7, 9, 10 };
  // thrift-model schemas for six dimension columns
  List<ColumnSchema> thriftColumns = Arrays.asList(
      getDimensionColumn("IMEI1"), getDimensionColumn("IMEI2"), getDimensionColumn("IMEI3"),
      getDimensionColumn("IMEI4"), getDimensionColumn("IMEI5"), getDimensionColumn("IMEI6"));
  // matching wrapper (core-model) schemas for the same columns
  List<org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema> wrapperColumns =
      Arrays.asList(
          getWrapperDimensionColumn("IMEI1"), getWrapperDimensionColumn("IMEI2"),
          getWrapperDimensionColumn("IMEI3"), getWrapperDimensionColumn("IMEI4"),
          getWrapperDimensionColumn("IMEI5"), getWrapperDimensionColumn("IMEI6"));
  int[] formattedCardinality = CarbonUtil.getFormattedCardinality(cardinalities, wrapperColumns);
  SegmentProperties segmentProperties = new SegmentProperties(wrapperColumns, cardinalities);
  footerWriter.writeFooter(
      CarbonMetadataUtil.convertFileFooter(
          writtenBlocklets, 6, formattedCardinality, thriftColumns, segmentProperties), 0);
  // round-trip: read the footer back and compare blocklet counts
  CarbonFooterReader footerReader = new CarbonFooterReader(filePath, 0);
  List<BlockletInfoColumnar> readBlocklets =
      CarbonMetadataUtil.convertBlockletInfo(footerReader.readFooter());
  assertTrue(readBlocklets.size() == writtenBlocklets.size());
}
Use of org.apache.carbondata.core.metadata.BlockletInfoColumnar in project carbondata (by apache):
class CarbonFooterWriterTest, method getBlockletInfoColumnars.
/**
 * Builds a single-element fixture list of blocklet metadata with fixed key,
 * measure and index layout values plus a two-measure compression model.
 */
private List<BlockletInfoColumnar> getBlockletInfoColumnars() {
  BlockletInfoColumnar blocklet = new BlockletInfoColumnar();
  // key range of the blocklet
  blocklet.setStartKey(new byte[] { 1, 2, 3 });
  blocklet.setEndKey(new byte[] { 8, 9, 10 });
  // four key columns: lengths, offsets, sort flags and min/max bytes
  blocklet.setKeyLengths(new int[] { 1, 2, 3, 4 });
  blocklet.setKeyOffSets(new long[] { 22, 44, 55, 77 });
  blocklet.setIsSortedKeyColumn(new boolean[] { false, true, false, true });
  blocklet.setColumnMaxData(new byte[][] {
      new byte[] { 1, 2 }, new byte[] { 3, 4 }, new byte[] { 4, 5 }, new byte[] { 5, 6 } });
  blocklet.setColumnMinData(new byte[][] {
      new byte[] { 1, 2 }, new byte[] { 3, 4 }, new byte[] { 4, 5 }, new byte[] { 5, 6 } });
  // key block index layout (two index blocks)
  blocklet.setKeyBlockIndexLength(new int[] { 4, 7 });
  blocklet.setKeyBlockIndexOffSets(new long[] { 55, 88 });
  // inverted-index map layout for the four key columns
  blocklet.setDataIndexMapLength(new int[] { 2, 6, 7, 8 });
  blocklet.setDataIndexMapOffsets(new long[] { 77, 88, 99, 111 });
  // two measure columns
  blocklet.setMeasureLength(new int[] { 6, 7 });
  blocklet.setMeasureOffset(new long[] { 33, 99 });
  blocklet.setAggKeyBlock(new boolean[] { true, true, true, true });
  blocklet.setColGrpBlocks(new boolean[] { false, false, false, false });
  blocklet.setMeasureNullValueIndex(new BitSet[] { new BitSet(), new BitSet() });
  // compression metadata for the two double measures
  WriterCompressModel model = new WriterCompressModel();
  model.setMaxValue(new Object[] { 44d, 55d });
  model.setMinValue(new Object[] { 0d, 0d });
  model.setMantissa(new int[] { 0, 0 });
  model.setType(new DataType[] { DataType.DOUBLE, DataType.DOUBLE });
  model.setUniqueValue(new Object[] { 0d, 0d });
  model.setDataTypeSelected(new byte[2]);
  blocklet.setCompressionModel(model);
  List<BlockletInfoColumnar> blocklets = new ArrayList<BlockletInfoColumnar>();
  blocklets.add(blocklet);
  return blocklets;
}
Use of org.apache.carbondata.core.metadata.BlockletInfoColumnar in project carbondata (by apache):
class CarbonFactDataWriterImplV1, method getBlockletInfo.
/**
 * Builds the blocklet metadata for one leaf node, assigning sequential file
 * offsets — starting at {@code offset} — first to the key blocks, then the
 * measure blocks, then the key block indexes, then the data index maps.
 *
 * @param nodeHolder holder with the lengths, keys and stats of the blocklet
 * @param offset file offset where this blocklet's key data begins
 * @return BlockletInfoColumnar - blocklet metadata
 */
protected BlockletInfoColumnar getBlockletInfo(NodeHolder nodeHolder, long offset) {
  BlockletInfoColumnar blockletInfo = new BlockletInfoColumnar();
  // aggregated-key flags per key block
  blockletInfo.setAggKeyBlock(nodeHolder.getAggBlocks());
  // total number of entries in this leaf
  blockletInfo.setNumberOfKeys(nodeHolder.getEntryCount());
  blockletInfo.setKeyLengths(nodeHolder.getKeyLengths());
  // null-value bitsets for the measure columns
  blockletInfo.setMeasureNullValueIndex(nodeHolder.getMeasureNullValueIndex());
  // per-column min/max statistics
  blockletInfo.setColumnMaxData(nodeHolder.getColumnMaxData());
  blockletInfo.setColumnMinData(nodeHolder.getColumnMinData());
  // key blocks are laid out back to back starting at the given offset
  long[] keyOffsets = new long[nodeHolder.getKeyLengths().length];
  for (int i = 0; i < keyOffsets.length; i++) {
    keyOffsets[i] = offset;
    offset += nodeHolder.getKeyLengths()[i];
  }
  blockletInfo.setKeyOffSets(keyOffsets);
  blockletInfo.setMeasureLength(nodeHolder.getMeasureLenght());
  // measure blocks follow the key blocks, one per measure column
  long[] measureOffsets = new long[dataWriterVo.getMeasureCount()];
  for (int i = 0; i < measureOffsets.length; i++) {
    measureOffsets[i] = offset;
    offset += nodeHolder.getMeasureLenght()[i];
  }
  blockletInfo.setMeasureOffset(measureOffsets);
  blockletInfo.setIsSortedKeyColumn(nodeHolder.getIsSortedKeyBlock());
  blockletInfo.setKeyBlockIndexLength(nodeHolder.getKeyBlockIndexLength());
  // key block indexes come after the measures
  long[] keyBlockIndexOffsets = new long[nodeHolder.getKeyBlockIndexLength().length];
  for (int i = 0; i < keyBlockIndexOffsets.length; i++) {
    keyBlockIndexOffsets[i] = offset;
    offset += nodeHolder.getKeyBlockIndexLength()[i];
  }
  blockletInfo.setDataIndexMapLength(nodeHolder.getDataIndexMapLength());
  // data index maps are the last section of the blocklet
  long[] dataIndexMapOffsets = new long[nodeHolder.getDataIndexMapLength().length];
  for (int i = 0; i < dataIndexMapOffsets.length; i++) {
    dataIndexMapOffsets[i] = offset;
    offset += nodeHolder.getDataIndexMapLength()[i];
  }
  blockletInfo.setDataIndexMapOffsets(dataIndexMapOffsets);
  blockletInfo.setKeyBlockIndexOffSets(keyBlockIndexOffsets);
  // key range covered by this blocklet
  blockletInfo.setStartKey(nodeHolder.getStartKey());
  blockletInfo.setEndKey(nodeHolder.getEndKey());
  blockletInfo.setCompressionModel(nodeHolder.getCompressionModel());
  // flags marking which blocks are column-group blocks
  blockletInfo.setColGrpBlocks(nodeHolder.getColGrpBlocks());
  return blockletInfo;
}
Aggregations