Use of org.apache.carbondata.core.scan.result.iterator.RawResultIterator in the Apache CarbonData project.
Class CompactionResultSortProcessor, method processResult.
/**
 * Iterates over the raw query results of every segment being compacted, feeds each row
 * into the sorter, and then triggers the actual sort step.
 *
 * @param resultIteratorList iterators over the raw rows of each segment under compaction
 * @throws Exception if the sort step fails; the underlying sort exception is chained as cause
 */
private void processResult(List<RawResultIterator> resultIteratorList) throws Exception {
  // Drain every iterator: each raw row is converted to the sort row format and buffered.
  for (RawResultIterator resultIterator : resultIteratorList) {
    while (resultIterator.hasNext()) {
      addRowForSorting(prepareRowObjectForSorting(resultIterator.next()));
      // Remember that at least one row exists so downstream steps know data was produced.
      isRecordFound = true;
    }
  }
  try {
    sortDataRows.startSorting();
  } catch (CarbonSortKeyAndGroupByException e) {
    LOGGER.error(e);
    // Chain the original exception as the cause so the full stack trace is preserved
    // for callers instead of only the flattened message text.
    throw new Exception("Problem loading data during compaction: " + e.getMessage(), e);
  }
}
Use of org.apache.carbondata.core.scan.result.iterator.RawResultIterator in the Apache CarbonData project.
Class RowResultMergerProcessor, method execute.
/**
 * Merge function: performs a heap-based k-way merge of the sorted per-segment
 * iterators and writes the merged rows through the data handler.
 *
 * @param resultIteratorList sorted raw-result iterators, one per segment
 * @return true if the merge completed successfully, false otherwise
 */
public boolean execute(List<RawResultIterator> resultIteratorList) {
  initRecordHolderHeap(resultIteratorList);
  boolean mergeStatus = false;
  int index = 0;
  // Lazily initialise the handler only once real data shows up, so empty
  // merges never open (or need to close) the writer.
  boolean isDataPresent = false;
  try {
    // add all iterators to the queue
    for (RawResultIterator leaftTupleIterator : resultIteratorList) {
      this.recordHolderHeap.add(leaftTupleIterator);
      index++;
    }
    RawResultIterator iterator = null;
    while (index > 1) {
      // poll the iterator holding the smallest current record
      iterator = this.recordHolderHeap.poll();
      Object[] convertedRow = iterator.next();
      if (null == convertedRow) {
        // exhausted iterator: drop it from the merge
        index--;
        continue;
      }
      if (!isDataPresent) {
        dataHandler.initialise();
        isDataPresent = true;
      }
      // get the mdkey
      addRow(convertedRow);
      // exhausted after consuming this row: drop it from the merge
      if (!iterator.hasNext()) {
        index--;
        continue;
      }
      // re-insert so the heap re-orders on the iterator's next record
      this.recordHolderHeap.add(iterator);
    }
    // Only one (or zero) iterators remain; drain the survivor sequentially.
    iterator = this.recordHolderHeap.poll();
    // Guard against an empty input list: poll() returns null on an empty heap,
    // which previously caused a spurious NPE that was caught and misreported
    // as a merge failure.
    if (null != iterator) {
      while (true) {
        Object[] convertedRow = iterator.next();
        if (null == convertedRow) {
          break;
        }
        // do it only once
        if (!isDataPresent) {
          dataHandler.initialise();
          isDataPresent = true;
        }
        addRow(convertedRow);
        // check if leaf contains no record
        if (!iterator.hasNext()) {
          break;
        }
      }
    }
    if (isDataPresent) {
      this.dataHandler.finish();
    }
    mergeStatus = true;
  } catch (Exception e) {
    LOGGER.error(e, e.getMessage());
    LOGGER.error("Exception in compaction merger " + e.getMessage());
    mergeStatus = false;
  } finally {
    try {
      // Close only if the handler was ever initialised.
      if (isDataPresent) {
        this.dataHandler.closeHandler();
      }
    } catch (CarbonDataWriterException e) {
      LOGGER.error("Exception while closing the handler in compaction merger " + e.getMessage());
      mergeStatus = false;
    }
  }
  return mergeStatus;
}
Use of org.apache.carbondata.core.scan.result.iterator.RawResultIterator in the Apache CarbonData project.
Class CarbonCompactionExecutor, method processTableBlocks.
/**
 * For processing of the table blocks: builds one raw-result iterator per task
 * of every segment that takes part in the compaction.
 *
 * @return List of Carbon iterators
 */
public List<RawResultIterator> processTableBlocks() throws QueryExecutionException, IOException {
  List<RawResultIterator> rawIterators =
      new ArrayList<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
  // Build the query model once up front; block infos are set per task below.
  List<TableBlockInfo> blockInfos = null;
  queryModel = prepareQueryModel(blockInfos);
  // walk every segment id present in the mapping
  for (Map.Entry<String, TaskBlockInfo> segmentEntry : segmentMapping.entrySet()) {
    String segmentId = segmentEntry.getKey();
    List<DataFileFooter> footerList = dataFileMetadataSegMapping.get(segmentId);
    SegmentProperties sourceSegProperties = getSourceSegmentProperties(footerList);
    // each segment carries a set of tasks, each with its own block list
    TaskBlockInfo taskBlockInfo = segmentEntry.getValue();
    for (String task : taskBlockInfo.getTaskSet()) {
      blockInfos = taskBlockInfo.getTableBlockInfoList(task);
      Collections.sort(blockInfos);
      LOGGER.info("for task -" + task + "-block size is -" + blockInfos.size());
      queryModel.setTableBlockInfos(blockInfos);
      rawIterators.add(new RawResultIterator(
          executeBlockList(blockInfos), sourceSegProperties, destinationSegProperties));
    }
  }
  return rawIterators;
}
Aggregations