use of org.apache.carbondata.core.reader.CarbonFooterReader in project carbondata by apache.
the class CarbonFooterWriterTest method testReadFactMetadata.
/**
* test reading fact metadata back from a written footer.
*/
@Test
public void testReadFactMetadata() throws IOException {
  deleteFile();
  createFile();
  CarbonFooterWriter writer = new CarbonFooterWriter(filePath);
  List<BlockletInfoColumnar> infoColumnars = getBlockletInfoColumnars();
  int[] cardinalities = new int[] { 2, 4, 5, 7, 9, 10 };
  List<ColumnSchema> columnSchema = Arrays.asList(new ColumnSchema[] {
      getDimensionColumn("IMEI1"), getDimensionColumn("IMEI2"), getDimensionColumn("IMEI3"),
      getDimensionColumn("IMEI4"), getDimensionColumn("IMEI5"), getDimensionColumn("IMEI6") });
  List<org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema> wrapperColumnSchema =
      Arrays.asList(new org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema[] {
          getWrapperDimensionColumn("IMEI1"), getWrapperDimensionColumn("IMEI2"),
          getWrapperDimensionColumn("IMEI3"), getWrapperDimensionColumn("IMEI4"),
          getWrapperDimensionColumn("IMEI5"), getWrapperDimensionColumn("IMEI6") });
  int[] colCardinality = CarbonUtil.getFormattedCardinality(cardinalities, wrapperColumnSchema);
  SegmentProperties segmentProperties = new SegmentProperties(wrapperColumnSchema, cardinalities);
  writer.writeFooter(
      CarbonMetadataUtil.convertFileFooter(infoColumnars, 6, colCardinality, columnSchema, segmentProperties),
      0);
  CarbonFooterReader metaDataReader = new CarbonFooterReader(filePath, 0);
  List<BlockletInfoColumnar> nodeInfoColumnars =
      CarbonMetadataUtil.convertBlockletInfo(metaDataReader.readFooter());
  assertTrue(nodeInfoColumnars.size() == infoColumnars.size());
}
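In this test the footer is written and read back at a fixed offset 0. In a real fact file the footer sits at the end of the data, and its start offset is stored in the file's last 8 bytes; the FileFooterValidator setup and DataFileFooterConverter snippets below use that trailing pointer. A minimal sketch of locating the footer that way, reusing only the FileFactory, FileHolder, and CarbonFooterReader calls that appear on this page (the fact file path is a hypothetical input):

// Sketch: locate and read the footer of a carbondata fact file via the trailing 8-byte offset.
// Assumes factFilePath points to an existing .carbondata file (hypothetical input).
public FileFooter readFooterFromFactFile(String factFilePath) throws IOException {
  CarbonFile factFile =
      FileFactory.getCarbonFile(factFilePath, FileFactory.getFileType(factFilePath));
  // The last LONG_SIZE_IN_BYTE (8) bytes of the file hold the footer start offset.
  long offsetOfFooterOffset = factFile.getSize() - CarbonCommonConstants.LONG_SIZE_IN_BYTE;
  FileHolder fileHolder = FileFactory.getFileHolder(FileFactory.getFileType(factFilePath));
  long footerOffset = fileHolder.readLong(factFilePath, offsetOfFooterOffset);
  // CarbonFooterReader deserializes the thrift FileFooter starting at that offset.
  return new CarbonFooterReader(factFilePath, footerOffset).readFooter();
}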
use of org.apache.carbondata.core.reader.CarbonFooterReader in project carbondata by apache.
the class FileFooterValidator method setUp.
@Before
public void setUp() throws Exception {
  if (setUpIsDone) {
    return;
  }
  CarbonHiveContext.sql("CREATE CUBE validatefooter DIMENSIONS (empno Integer, empname String,"
      + " designation String," + " doj Timestamp, workgroupcategory Integer, workgroupcategoryname String, "
      + "deptno Integer, deptname String, projectcode Integer, projectjoindate Timestamp,"
      + " projectenddate Timestamp) MEASURES (attendance Integer,utilization Integer,"
      + "salary Integer) OPTIONS (PARTITIONER [PARTITION_COUNT=1])");
  CarbonHiveContext.sql("LOAD DATA fact from './src/test/resources/data.csv' INTO CUBE validatefooter "
      + "PARTITIONDATA(DELIMITER ',', QUOTECHAR '\"')");
  String storePath =
      CarbonProperties.getInstance().getProperty(CarbonCommonConstants.STORE_LOCATION);
  CarbonTableIdentifier tableIdentifier =
      new CarbonTableIdentifier(CarbonCommonConstants.DATABASE_DEFAULT_NAME, "validatefooter", "1");
  String segmentPath = CarbonStorePath.getCarbonTablePath(storePath, tableIdentifier)
      .getCarbonDataDirectoryPath("0", "0");
  CarbonFile carbonFile =
      FileFactory.getCarbonFile(segmentPath, FileFactory.getFileType(segmentPath));
  CarbonFile[] list = carbonFile.listFiles(new CarbonFileFilter() {
    @Override
    public boolean accept(CarbonFile file) {
      if (file.getName().endsWith(CarbonCommonConstants.FACT_FILE_EXT)) {
        return true;
      }
      return false;
    }
  });
  for (CarbonFile file : list) {
    String fileLocation = file.getAbsolutePath();
    CarbonFile factFile =
        FileFactory.getCarbonFile(fileLocation, FileFactory.getFileType(fileLocation));
    long offset = factFile.getSize() - CarbonCommonConstants.LONG_SIZE_IN_BYTE;
    FileHolder fileHolder = FileFactory.getFileHolder(FileFactory.getFileType(fileLocation));
    offset = fileHolder.readLong(fileLocation, offset);
    CarbonFooterReader metaDataReader = new CarbonFooterReader(fileLocation, offset);
    fileFooter = metaDataReader.readFooter();
  }
  setUpIsDone = true;
}
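The setup above loads a cube, finds the fact files of segment 0, and keeps the last file's footer in the fileFooter field, which the validator's test methods then assert on. A hedged sketch of the kind of checks such a test might make, using only the FileFooter getters that appear elsewhere on this page (the test name and assertions are illustrative, not the actual FileFooterValidator tests):

@Test
public void testFooterHasExpectedStructure() {
  // fileFooter was populated in setUp() from the last fact file of segment 0.
  assertTrue(fileFooter != null);
  // Every written footer records the format version and total row count.
  assertTrue(fileFooter.getVersion() >= 0);
  assertTrue(fileFooter.getNum_rows() > 0);
  // The table schema and the per-blocklet index/info structures must be present.
  assertTrue(fileFooter.getTable_columns() != null && !fileFooter.getTable_columns().isEmpty());
  assertTrue(fileFooter.getBlocklet_index_list() != null);
  assertTrue(fileFooter.getSegment_info() != null);
}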
use of org.apache.carbondata.core.reader.CarbonFooterReader in project carbondata by apache.
the class DataFileFooterConverter method readDataFileFooter.
/**
* Below method will be used to convert thrift file meta to wrapper file meta
*/
@Override
public DataFileFooter readDataFileFooter(TableBlockInfo tableBlockInfo) throws IOException {
  DataFileFooter dataFileFooter = new DataFileFooter();
  FileReader fileReader = null;
  try {
    long completeBlockLength = tableBlockInfo.getBlockLength();
    long footerPointer = completeBlockLength - 8;
    fileReader = FileFactory.getFileHolder(FileFactory.getFileType(tableBlockInfo.getFilePath()));
    long actualFooterOffset = fileReader.readLong(tableBlockInfo.getFilePath(), footerPointer);
    CarbonFooterReader reader =
        new CarbonFooterReader(tableBlockInfo.getFilePath(), actualFooterOffset);
    FileFooter footer = reader.readFooter();
    dataFileFooter.setVersionId(ColumnarFormatVersion.valueOf((short) footer.getVersion()));
    dataFileFooter.setNumberOfRows(footer.getNum_rows());
    dataFileFooter.setSegmentInfo(getSegmentInfo(footer.getSegment_info()));
    List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
    List<org.apache.carbondata.format.ColumnSchema> table_columns = footer.getTable_columns();
    for (int i = 0; i < table_columns.size(); i++) {
      columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
    }
    dataFileFooter.setColumnInTable(columnSchemaList);
    List<org.apache.carbondata.format.BlockletIndex> leaf_node_indices_Thrift =
        footer.getBlocklet_index_list();
    List<BlockletIndex> blockletIndexList = new ArrayList<BlockletIndex>();
    for (int i = 0; i < leaf_node_indices_Thrift.size(); i++) {
      BlockletIndex blockletIndex = getBlockletIndex(leaf_node_indices_Thrift.get(i));
      blockletIndexList.add(blockletIndex);
    }
    List<org.apache.carbondata.format.BlockletInfo> leaf_node_infos_Thrift =
        footer.getBlocklet_info_list();
    List<BlockletInfo> blockletInfoList = new ArrayList<BlockletInfo>();
    for (int i = 0; i < leaf_node_infos_Thrift.size(); i++) {
      BlockletInfo blockletInfo = getBlockletInfo(leaf_node_infos_Thrift.get(i));
      blockletInfo.setBlockletIndex(blockletIndexList.get(i));
      blockletInfoList.add(blockletInfo);
    }
    dataFileFooter.setBlockletList(blockletInfoList);
    dataFileFooter.setBlockletIndex(getBlockletIndexForDataFileFooter(blockletIndexList));
  } finally {
    if (null != fileReader) {
      fileReader.finish();
    }
  }
  return dataFileFooter;
}
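A short usage sketch of the wrapper object this converter produces, reading back the fields the method sets above. It assumes the usual getter for each setter shown (getVersionId, getNumberOfRows, getColumnInTable, getBlockletList, getBlockletIndex); blockInfo is a hypothetical TableBlockInfo for an existing block and the printing is illustrative only:

// Sketch: convert a block's footer and inspect the resulting wrapper metadata.
DataFileFooterConverter converter = new DataFileFooterConverter();
DataFileFooter footer = converter.readDataFileFooter(blockInfo);
System.out.println("format version : " + footer.getVersionId());
System.out.println("rows in block  : " + footer.getNumberOfRows());
System.out.println("columns        : " + footer.getColumnInTable().size());
for (BlockletInfo blockletInfo : footer.getBlockletList()) {
  // Each blocklet carries the index that was attached in the conversion loop above.
  System.out.println("blocklet index : " + blockletInfo.getBlockletIndex());
}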
use of org.apache.carbondata.core.reader.CarbonFooterReader in project carbondata by apache.
the class DataFileFooterConverter2 method readDataFileFooter.
/**
* Below method will be used to convert thrift file meta to wrapper file meta
*/
@Override
public DataFileFooter readDataFileFooter(TableBlockInfo tableBlockInfo) throws IOException {
  DataFileFooter dataFileFooter = new DataFileFooter();
  CarbonFooterReader reader =
      new CarbonFooterReader(tableBlockInfo.getFilePath(), tableBlockInfo.getBlockOffset());
  FileFooter footer = reader.readFooter();
  dataFileFooter.setVersionId(ColumnarFormatVersion.valueOf((short) footer.getVersion()));
  dataFileFooter.setNumberOfRows(footer.getNum_rows());
  dataFileFooter.setSegmentInfo(getSegmentInfo(footer.getSegment_info()));
  List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
  List<org.apache.carbondata.format.ColumnSchema> table_columns = footer.getTable_columns();
  for (int i = 0; i < table_columns.size(); i++) {
    columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
  }
  dataFileFooter.setColumnInTable(columnSchemaList);
  List<org.apache.carbondata.format.BlockletIndex> leaf_node_indices_Thrift =
      footer.getBlocklet_index_list();
  List<BlockletIndex> blockletIndexList = new ArrayList<BlockletIndex>();
  for (int i = 0; i < leaf_node_indices_Thrift.size(); i++) {
    BlockletIndex blockletIndex = getBlockletIndex(leaf_node_indices_Thrift.get(i));
    blockletIndexList.add(blockletIndex);
  }
  List<org.apache.carbondata.format.BlockletInfo2> leaf_node_infos_Thrift =
      footer.getBlocklet_info_list2();
  List<BlockletInfo> blockletInfoList = new ArrayList<BlockletInfo>();
  for (int i = 0; i < leaf_node_infos_Thrift.size(); i++) {
    BlockletInfo blockletInfo =
        getBlockletInfo(leaf_node_infos_Thrift.get(i), getNumberOfDimensionColumns(columnSchemaList));
    blockletInfo.setBlockletIndex(blockletIndexList.get(i));
    blockletInfoList.add(blockletInfo);
  }
  dataFileFooter.setBlockletList(blockletInfoList);
  dataFileFooter.setBlockletIndex(getBlockletIndexForDataFileFooter(blockletIndexList));
  return dataFileFooter;
}
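Compared with DataFileFooterConverter above, this V2 variant does not dereference the trailing 8-byte pointer: the footer offset comes straight from TableBlockInfo.getBlockOffset(), and blocklet details are read from the BlockletInfo2 list. A sketch of choosing between the two converters by format version; the selector itself is illustrative and mirrors the split shown on this page rather than reproducing carbondata's own factory, and it assumes both classes share the AbstractDataFileFooterConverter parent implied by the @Override annotations:

// Sketch: pick a footer converter based on the block's columnar format version.
static AbstractDataFileFooterConverter pickConverter(ColumnarFormatVersion version) {
  if (version == ColumnarFormatVersion.V1) {
    // V1 blocks store the footer offset in the last 8 bytes of the file itself.
    return new DataFileFooterConverter();
  }
  // V2 blocks carry the footer offset in TableBlockInfo.getBlockOffset(),
  // so DataFileFooterConverter2 can seek to it directly.
  return new DataFileFooterConverter2();
}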
use of org.apache.carbondata.core.reader.CarbonFooterReader in project carbondata by apache.
the class CarbonFooterWriterTest method testWriteFactMetadata.
/**
* test writing fact metadata.
*/
@Test
public void testWriteFactMetadata() throws IOException {
  deleteFile();
  createFile();
  CarbonFooterWriter writer = new CarbonFooterWriter(filePath);
  List<BlockletInfoColumnar> infoColumnars = getBlockletInfoColumnars();
  int[] cardinalities = new int[] { 2, 4, 5, 7, 9, 10 };
  List<ColumnSchema> columnSchema = Arrays.asList(new ColumnSchema[] {
      getDimensionColumn("IMEI1"), getDimensionColumn("IMEI2"), getDimensionColumn("IMEI3"),
      getDimensionColumn("IMEI4"), getDimensionColumn("IMEI5"), getDimensionColumn("IMEI6") });
  List<org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema> wrapperColumnSchema =
      Arrays.asList(new org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema[] {
          getWrapperDimensionColumn("IMEI1"), getWrapperDimensionColumn("IMEI2"),
          getWrapperDimensionColumn("IMEI3"), getWrapperDimensionColumn("IMEI4"),
          getWrapperDimensionColumn("IMEI5"), getWrapperDimensionColumn("IMEI6") });
  int[] colCardinality = CarbonUtil.getFormattedCardinality(cardinalities, wrapperColumnSchema);
  SegmentProperties segmentProperties = new SegmentProperties(wrapperColumnSchema, colCardinality);
  writer.writeFooter(
      CarbonMetadataUtil.convertFileFooter(infoColumnars, 6, cardinalities, columnSchema, segmentProperties),
      0);
  CarbonFooterReader metaDataReader = new CarbonFooterReader(filePath, 0);
  assertTrue(metaDataReader.readFooter() != null);
}
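Both tests rely on a filePath field and createFile()/deleteFile() helpers that are not shown on this page. A minimal sketch of what such fixture helpers could look like for a plain local file (hypothetical; the actual CarbonFooterWriterTest helpers may differ, for example by going through FileFactory):

// Hypothetical fixture for the tests above; the real field and helpers are not shown here.
private static final String filePath = "target/CarbonFooterWriterTest.carbondata";

private void createFile() throws IOException {
  java.io.File file = new java.io.File(filePath);
  file.getParentFile().mkdirs();
  // Create an empty file for CarbonFooterWriter to append the footer to.
  file.createNewFile();
}

private void deleteFile() {
  java.io.File file = new java.io.File(filePath);
  if (file.exists()) {
    file.delete();
  }
}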