use of org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException in project carbondata by apache.
the class SortIntermediateFileMerger method finish.
public void finish() throws CarbonSortKeyAndGroupByException {
  try {
    executorService.shutdown();
    executorService.awaitTermination(2, TimeUnit.DAYS);
  } catch (InterruptedException e) {
    throw new CarbonSortKeyAndGroupByException("Problem while shutdown the server ", e);
  }
  procFiles.clear();
  procFiles = null;
  checkForFailure();
}
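For reference, the shutdown pattern used here (stop the pool, wait for in-flight work, and convert an interrupt into the module's checked exception) can be reduced to the following minimal, self-contained sketch built only on JDK types. ShutdownException is a hypothetical stand-in for CarbonSortKeyAndGroupByException and is not part of CarbonData.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ShutdownPatternSketch {
  // Hypothetical stand-in for CarbonSortKeyAndGroupByException.
  static class ShutdownException extends Exception {
    ShutdownException(String message, Throwable cause) {
      super(message, cause);
    }
  }

  private final ExecutorService executorService = Executors.newFixedThreadPool(2);

  // Mirrors finish(): shut the pool down, wait for running tasks,
  // and rethrow an interrupt as the module's checked exception type.
  public void finish() throws ShutdownException {
    try {
      executorService.shutdown();
      executorService.awaitTermination(2, TimeUnit.DAYS);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new ShutdownException("Problem while shutting down the executor", e);
    }
  }
}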
use of org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException in project carbondata by apache.
the class UnsafeParallelReadMergeSorterImpl method sort.
@Override
public Iterator<CarbonRowBatch>[] sort(Iterator<CarbonRowBatch>[] iterators)
    throws CarbonDataLoadingException {
  int inMemoryChunkSizeInMB = CarbonProperties.getInstance().getSortMemoryChunkSizeInMB();
  UnsafeSortDataRows sortDataRow =
      new UnsafeSortDataRows(sortParameters, unsafeIntermediateFileMerger, inMemoryChunkSizeInMB);
  final int batchSize = CarbonProperties.getInstance().getBatchSize();
  try {
    sortDataRow.initialize();
  } catch (MemoryException e) {
    throw new CarbonDataLoadingException(e);
  }
  this.executorService = Executors.newFixedThreadPool(iterators.length,
      new CarbonThreadFactory("UnsafeParallelSorterPool:" + sortParameters.getTableName()));
  this.threadStatusObserver = new ThreadStatusObserver(executorService);
  try {
    for (int i = 0; i < iterators.length; i++) {
      executorService.execute(
          new SortIteratorThread(iterators[i], sortDataRow, batchSize, rowCounter,
              this.threadStatusObserver));
    }
    executorService.shutdown();
    executorService.awaitTermination(2, TimeUnit.DAYS);
    processRowToNextStep(sortDataRow, sortParameters);
  } catch (Exception e) {
    checkError();
    throw new CarbonDataLoadingException("Problem while shutdown the server ", e);
  }
  checkError();
  try {
    unsafeIntermediateFileMerger.finish();
    List<UnsafeCarbonRowPage> rowPages = unsafeIntermediateFileMerger.getRowPages();
    finalMerger.startFinalMerge(rowPages.toArray(new UnsafeCarbonRowPage[rowPages.size()]),
        unsafeIntermediateFileMerger.getMergedPages());
  } catch (CarbonDataWriterException e) {
    throw new CarbonDataLoadingException(e);
  } catch (CarbonSortKeyAndGroupByException e) {
    throw new CarbonDataLoadingException(e);
  }
  // Creates the iterator to read from merge sorter.
  Iterator<CarbonRowBatch> batchIterator = new CarbonIterator<CarbonRowBatch>() {
    @Override
    public boolean hasNext() {
      return finalMerger.hasNext();
    }

    @Override
    public CarbonRowBatch next() {
      int counter = 0;
      CarbonRowBatch rowBatch = new CarbonRowBatch(batchSize);
      while (finalMerger.hasNext() && counter < batchSize) {
        rowBatch.addRow(new CarbonRow(finalMerger.next()));
        counter++;
      }
      return rowBatch;
    }
  };
  return new Iterator[] { batchIterator };
}
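The anonymous iterator at the end drains the final merger into fixed-size batches. The following minimal, self-contained sketch shows that batching pattern with plain JDK collections; the names BatchIteratorSketch and batching are illustrative only and do not exist in CarbonData.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class BatchIteratorSketch {
  // Wraps a row iterator so each next() returns up to batchSize rows,
  // the same shape as the CarbonRowBatch iterator returned by sort().
  static <T> Iterator<List<T>> batching(final Iterator<T> source, final int batchSize) {
    return new Iterator<List<T>>() {
      @Override
      public boolean hasNext() {
        return source.hasNext();
      }

      @Override
      public List<T> next() {
        List<T> batch = new ArrayList<>(batchSize);
        int counter = 0;
        while (source.hasNext() && counter < batchSize) {
          batch.add(source.next());
          counter++;
        }
        return batch;
      }
    };
  }

  public static void main(String[] args) {
    Iterator<List<Integer>> batches = batching(Arrays.asList(1, 2, 3, 4, 5).iterator(), 2);
    while (batches.hasNext()) {
      System.out.println(batches.next()); // prints [1, 2], [3, 4], [5]
    }
  }
}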
use of org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException in project carbondata by apache.
the class UnsafeSortDataRows method writeDataToFile.
/**
* write a page to sort temp file
* @param rowPage page
* @param file file
* @throws CarbonSortKeyAndGroupByException
*/
private void writeDataToFile(UnsafeCarbonRowPage rowPage, File file)
    throws CarbonSortKeyAndGroupByException {
  DataOutputStream stream = null;
  try {
    // open stream
    stream = FileFactory.getDataOutputStream(file.getPath(), FileFactory.FileType.LOCAL,
        parameters.getFileWriteBufferSize(), parameters.getSortTempCompressorName());
    int actualSize = rowPage.getBuffer().getActualSize();
    // write number of entries to the file
    stream.writeInt(actualSize);
    for (int i = 0; i < actualSize; i++) {
      rowPage.writeRow(rowPage.getBuffer().get(i) + rowPage.getDataBlock().getBaseOffset(), stream);
    }
  } catch (IOException e) {
    throw new CarbonSortKeyAndGroupByException("Problem while writing the file", e);
  } finally {
    // close streams
    CarbonUtil.closeStreams(stream);
  }
}
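The method writes a count header followed by the serialized rows. Below is a minimal sketch of that count-prefixed file layout using only java.io; CountPrefixedWriterSketch and writeRows are hypothetical names, and the sketch deliberately omits CarbonData's compression and unsafe-memory handling.

import java.io.BufferedOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

public class CountPrefixedWriterSketch {
  // Writes a record count followed by the records themselves, the same
  // overall layout writeDataToFile() produces for a sort temp file.
  static void writeRows(File file, long[] rows) throws IOException {
    try (DataOutputStream stream =
        new DataOutputStream(new BufferedOutputStream(new FileOutputStream(file)))) {
      stream.writeInt(rows.length); // header: number of entries
      for (long row : rows) {
        stream.writeLong(row);      // body: one record per entry
      }
    }
  }
}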
use of org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException in project carbondata by apache.
the class UnsafeSortTempFileChunkHolder method readRow.
/**
* This method will be used to read new row from file
*
* @throws CarbonSortKeyAndGroupByException problem while reading
*/
@Override
public void readRow() throws CarbonSortKeyAndGroupByException {
  if (prefetch) {
    fillDataForPrefetch();
  } else {
    try {
      this.returnRow = sortStepRowHandler.readIntermediateSortTempRowFromInputStream(stream);
      this.numberOfObjectRead++;
    } catch (IOException e) {
      throw new CarbonSortKeyAndGroupByException("Problems while reading row", e);
    }
  }
}
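Reading a sort temp file mirrors the write side: consume the count header, then decode exactly that many rows (here the per-row decoding is delegated to SortStepRowHandler). The sketch below illustrates the read side of the count-prefixed layout with plain java.io; CountPrefixedReaderSketch and readRows are hypothetical names.

import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

public class CountPrefixedReaderSketch {
  // Reads the count header first, then exactly that many records,
  // mirroring how the chunk holder consumes a sort temp file row by row.
  static long[] readRows(File file) throws IOException {
    try (DataInputStream stream =
        new DataInputStream(new BufferedInputStream(new FileInputStream(file)))) {
      int entryCount = stream.readInt();   // header written by the sort step
      long[] rows = new long[entryCount];
      for (int i = 0; i < entryCount; i++) {
        rows[i] = stream.readLong();       // one record per entry
      }
      return rows;
    }
  }
}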
use of org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException in project carbondata by apache.
the class UnsafeIntermediateFileMerger method initialize.
/**
* This method is responsible for initializing the out stream
*
* @throws CarbonSortKeyAndGroupByException
*/
private void initialize() throws CarbonSortKeyAndGroupByException {
  try {
    stream = FileFactory.getDataOutputStream(outPutFile.getPath(), FileFactory.FileType.LOCAL,
        writeBufferSize, compressorName);
    this.stream.writeInt(this.totalNumberOfRecords);
  } catch (FileNotFoundException e) {
    throw new CarbonSortKeyAndGroupByException("Problem while getting the file", e);
  } catch (IOException e) {
    throw new CarbonSortKeyAndGroupByException("Problem while writing the data to file", e);
  }
}
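Note the design choice: writing totalNumberOfRecords as the very first int matches the count-prefixed layout seen in writeDataToFile above, so a reader can size its loop from the header before decoding any rows of the merged output.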