Use of org.apache.spark.sql.secondaryindex.exception.SecondaryIndexException in project carbondata by apache.
The class SecondaryIndexQueryResultProcessor, method initDataHandler.
/**
 * initialise carbon data writer instance
 */
private void initDataHandler() throws SecondaryIndexException {
  String carbonStoreLocation =
      CarbonDataProcessorUtil.createCarbonStoreLocation(this.indexTable, segmentId);
  CarbonFactDataHandlerModel carbonFactDataHandlerModel =
      CarbonFactDataHandlerModel.getCarbonFactDataHandlerModel(carbonLoadModel, indexTable,
          segmentProperties, indexTable.getTableName(), tempStoreLocation, carbonStoreLocation);
  carbonFactDataHandlerModel.setSchemaUpdatedTimeStamp(indexTable.getTableLastUpdatedTime());
  CarbonDataFileAttributes carbonDataFileAttributes = new CarbonDataFileAttributes(
      Integer.parseInt(carbonLoadModel.getTaskNo()), carbonLoadModel.getFactTimeStamp());
  carbonFactDataHandlerModel.setCarbonDataFileAttributes(carbonDataFileAttributes);
  dataHandler = CarbonFactHandlerFactory.createCarbonFactHandler(carbonFactDataHandlerModel);
  try {
    dataHandler.initialise();
  } catch (CarbonDataWriterException e) {
    this.sortDataRows.close();
    LOGGER.error(e);
    throw new SecondaryIndexException(
        "Problem initialising data handler while creating secondary index: " + e.getMessage());
  }
}
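
The method above shows the wrap-and-rethrow pattern used throughout this class: a checked failure from the lower-level writer (CarbonDataWriterException) is logged, the partially prepared sort resource is closed, and the failure is resurfaced as the single checked SecondaryIndexException that callers of the secondary-index flow handle. A minimal sketch of that pattern, assuming only the message-only SecondaryIndexException constructor seen above; the Writer and WriterException types are hypothetical stand-ins for the CarbonData classes:

import org.apache.spark.sql.secondaryindex.exception.SecondaryIndexException;

public class IndexWriteStep {

  // Hypothetical stand-ins for the CarbonData writer and its checked exception.
  interface Writer {
    void initialise() throws WriterException;
  }

  static class WriterException extends Exception {
    WriterException(String message) {
      super(message);
    }
  }

  private Writer writer;

  // Wrap-and-rethrow: translate the low-level failure into the single
  // exception type the secondary-index flow exposes to its callers.
  void initWriter(Writer candidate) throws SecondaryIndexException {
    this.writer = candidate;
    try {
      writer.initialise();
    } catch (WriterException e) {
      // cleanup of partially prepared resources would happen here,
      // mirroring sortDataRows.close() in the original method
      throw new SecondaryIndexException(
          "Problem initialising data handler while creating secondary index: " + e.getMessage());
    }
  }
}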
Use of org.apache.spark.sql.secondaryindex.exception.SecondaryIndexException in project carbondata by apache.
The class SecondaryIndexQueryResultProcessor, method readAndLoadDataFromSortTempFiles.
/**
 * This method will read sort temp files, perform merge sort and add it to store for data loading
 */
private void readAndLoadDataFromSortTempFiles() throws SecondaryIndexException {
  Throwable throwable = null;
  try {
    Object[] previousRow = null;
    // comparator for grouping the similar data, means every record
    // should be unique in index table
    RowComparator comparator = new RowComparator(noDictionaryColMapping,
        SecondaryIndexUtil.getNoDictDataTypes(indexTable));
    intermediateFileMerger.finish();
    sortDataRows = null;
    finalMerger.startFinalMerge();
    while (finalMerger.hasNext()) {
      Object[] rowRead = finalMerger.next();
      if (null == previousRow) {
        previousRow = rowRead;
      } else {
        int compareResult = comparator.compare(previousRow, rowRead);
        if (0 == compareResult) {
          // skip similar data rows
          continue;
        } else {
          previousRow = rowRead;
        }
      }
      CarbonRow row = new CarbonRow(rowRead);
      dataHandler.addDataToStore(row);
    }
    dataHandler.finish();
  } catch (CarbonDataWriterException e) {
    LOGGER.error(e);
    throw new SecondaryIndexException(
        "Problem loading data while creating secondary index: ", e);
  } catch (CarbonSortKeyAndGroupByException e) {
    LOGGER.error(e);
    throw new SecondaryIndexException(
        "Problem in merging intermediate files while creating secondary index: ", e);
  } catch (Throwable t) {
    LOGGER.error(t);
    throw new SecondaryIndexException("Problem while creating secondary index: ", t);
  } finally {
    if (null != dataHandler) {
      try {
        dataHandler.closeHandler();
      } catch (CarbonDataWriterException e) {
        LOGGER.error(e);
        throwable = e;
      }
    }
  }
  if (null != throwable) {
    throw new SecondaryIndexException(
        "Problem closing data handler while creating secondary index: ", throwable);
  }
  dataHandler = null;
}
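
The central part of this method is a deduplication pass over an already merge-sorted stream: each row is compared with the previously kept row, and rows that compare equal are skipped, so only unique records reach the index table. A stripped-down sketch of that pattern in plain Java; the Iterator/Comparator setup and the forwardUnique helper are illustrative, not CarbonData APIs:

import java.util.Comparator;
import java.util.Iterator;
import java.util.function.Consumer;

public final class SortedDeduplicator {

  // Walk a sorted iterator and forward only rows that differ from the
  // previously forwarded row, mirroring the compare/continue loop above.
  static <T> void forwardUnique(Iterator<T> sortedRows,
                                Comparator<T> comparator,
                                Consumer<T> sink) {
    T previous = null;
    while (sortedRows.hasNext()) {
      T current = sortedRows.next();
      if (previous != null && comparator.compare(previous, current) == 0) {
        continue; // duplicate of the last forwarded row: skip it
      }
      previous = current;
      sink.accept(current);
    }
  }
}

Because the rows arrive in sorted order from the final merge, comparing only against the immediately preceding row is sufficient to drop every duplicate.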
Use of org.apache.spark.sql.secondaryindex.exception.SecondaryIndexException in project carbondata by apache.
The class SecondaryIndexQueryResultProcessor, method processResult.
/**
 * This method will iterate over the query result and perform row sorting operation
 */
private void processResult(List<CarbonIterator<RowBatch>> detailQueryResultIteratorList)
    throws SecondaryIndexException {
  for (CarbonIterator<RowBatch> detailQueryIterator : detailQueryResultIteratorList) {
    DetailQueryResultIterator queryIterator = (DetailQueryResultIterator) detailQueryIterator;
    BlockExecutionInfo blockExecutionInfo = queryIterator.getBlockExecutionInfo();
    // get complex dimension info map from block execution info
    Map<Integer, GenericQueryType> complexDimensionInfoMap =
        blockExecutionInfo.getComplexDimensionInfoMap();
    int[] complexColumnParentBlockIndexes =
        blockExecutionInfo.getComplexColumnParentBlockIndexes();
    while (detailQueryIterator.hasNext()) {
      RowBatch batchResult = detailQueryIterator.next();
      while (batchResult.hasNext()) {
        addRowForSorting(prepareRowObjectForSorting(batchResult.next(),
            complexDimensionInfoMap, complexColumnParentBlockIndexes));
        isRecordFound = true;
      }
    }
  }
  try {
    sortDataRows.startSorting();
  } catch (CarbonSortKeyAndGroupByException e) {
    this.sortDataRows.close();
    LOGGER.error(e);
    throw new SecondaryIndexException(
        "Problem loading data while creating secondary index: " + e.getMessage());
  }
}
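
Each of the three private methods above declares throws SecondaryIndexException, so whatever drives the secondary-index load only has to deal with one checked exception type. A hedged sketch of such a caller; the IndexProcessor interface, the process() method, and the failure handling are assumptions for illustration, not the actual carbondata driver:

import org.apache.spark.sql.secondaryindex.exception.SecondaryIndexException;

public class SecondaryIndexLoadDriver {

  // Hypothetical stand-in for the processor's public surface.
  interface IndexProcessor {
    void process() throws SecondaryIndexException;
  }

  // A single catch block suffices because every stage funnels its
  // low-level failures into SecondaryIndexException.
  static boolean runIndexLoad(IndexProcessor processor) {
    try {
      processor.process();
      return true;
    } catch (SecondaryIndexException e) {
      // In the real flow this is where the secondary-index load would be marked as failed.
      System.err.println("Secondary index creation failed: " + e.getMessage());
      return false;
    }
  }
}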