Use of org.apache.carbondata.core.memory.MemoryException in project carbondata by apache.
From the class MinMaxIndexDataMapFactory, method getDataMaps:
/**
 * Factory method that initializes a MinMax DataMap for the given segment and returns it.
 *
 * @param segment the segment to load the DataMap for
 * @return a single-element list containing the initialized DataMap
 * @throws IOException if the DataMap cannot be initialized
 */
@Override
public List<CoarseGrainDataMap> getDataMaps(Segment segment) throws IOException {
  List<CoarseGrainDataMap> dataMapList = new ArrayList<>();
  // Form a dataMap of type MinMaxIndexDataMap.
  MinMaxIndexDataMap dataMap = new MinMaxIndexDataMap();
  try {
    dataMap.init(new DataMapModel(
        CarbonTablePath.getSegmentPath(identifier.getTablePath(), segment.getSegmentNo())));
  } catch (MemoryException ex) {
    throw new IOException(ex);
  }
  dataMapList.add(dataMap);
  return dataMapList;
}
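The noteworthy detail here is the exception translation: MemoryException is a checked exception that the getDataMaps contract does not declare, so it is wrapped in an IOException and rethrown. A minimal, self-contained sketch of that pattern (all class names below are hypothetical stand-ins, not CarbonData APIs):

  import java.io.IOException;

  // Hypothetical stand-in for org.apache.carbondata.core.memory.MemoryException,
  // declared locally so this sketch compiles on its own.
  class MemoryException extends Exception {
    MemoryException(String message) {
      super(message);
    }
  }

  public class WrapAndRethrowDemo {
    // May throw MemoryException, just like dataMap.init(...) above.
    static void init(boolean enoughMemory) throws MemoryException {
      if (!enoughMemory) {
        throw new MemoryException("unsafe working memory not available");
      }
    }

    // Same adaptation as getDataMaps: the caller's contract only allows
    // IOException, so the checked MemoryException is wrapped and rethrown.
    public static void initOrFail(boolean enoughMemory) throws IOException {
      try {
        init(enoughMemory);
      } catch (MemoryException ex) {
        throw new IOException(ex);
      }
    }

    public static void main(String[] args) throws IOException {
      initOrFail(true);   // succeeds
      initOrFail(false);  // throws IOException caused by MemoryException
    }
  }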
Use of org.apache.carbondata.core.memory.MemoryException in project carbondata by apache.
From the class UnsafeParallelReadMergeSorterImpl, method sort:
@Override
public Iterator<CarbonRowBatch>[] sort(Iterator<CarbonRowBatch>[] iterators)
    throws CarbonDataLoadingException {
  int inMemoryChunkSizeInMB = CarbonProperties.getInstance().getSortMemoryChunkSizeInMB();
  UnsafeSortDataRows sortDataRow =
      new UnsafeSortDataRows(sortParameters, unsafeIntermediateFileMerger, inMemoryChunkSizeInMB);
  final int batchSize = CarbonProperties.getInstance().getBatchSize();
  try {
    sortDataRow.initialize();
  } catch (MemoryException e) {
    throw new CarbonDataLoadingException(e);
  }
  this.executorService = Executors.newFixedThreadPool(iterators.length,
      new CarbonThreadFactory("UnsafeParallelSorterPool:" + sortParameters.getTableName()));
  this.threadStatusObserver = new ThreadStatusObserver(executorService);
  try {
    for (int i = 0; i < iterators.length; i++) {
      executorService.execute(new SortIteratorThread(iterators[i], sortDataRow, batchSize,
          rowCounter, this.threadStatusObserver));
    }
    executorService.shutdown();
    executorService.awaitTermination(2, TimeUnit.DAYS);
    processRowToNextStep(sortDataRow, sortParameters);
  } catch (Exception e) {
    checkError();
    throw new CarbonDataLoadingException("Problem while shutting down the sort executor", e);
  }
  checkError();
  try {
    unsafeIntermediateFileMerger.finish();
    List<UnsafeCarbonRowPage> rowPages = unsafeIntermediateFileMerger.getRowPages();
    finalMerger.startFinalMerge(rowPages.toArray(new UnsafeCarbonRowPage[rowPages.size()]),
        unsafeIntermediateFileMerger.getMergedPages());
  } catch (CarbonDataWriterException | CarbonSortKeyAndGroupByException e) {
    throw new CarbonDataLoadingException(e);
  }
  // Creates the iterator that reads sorted rows from the final merger in batches.
  Iterator<CarbonRowBatch> batchIterator = new CarbonIterator<CarbonRowBatch>() {
    @Override
    public boolean hasNext() {
      return finalMerger.hasNext();
    }

    @Override
    public CarbonRowBatch next() {
      int counter = 0;
      CarbonRowBatch rowBatch = new CarbonRowBatch(batchSize);
      while (finalMerger.hasNext() && counter < batchSize) {
        rowBatch.addRow(new CarbonRow(finalMerger.next()));
        counter++;
      }
      return rowBatch;
    }
  };
  return new Iterator[] { batchIterator };
}
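The anonymous CarbonIterator at the end re-chunks the merger's row stream into fixed-size batches: each next() call pulls up to batchSize rows. The same pattern, reduced to a self-contained generic sketch (Batcher is a hypothetical name, not a CarbonData class):

  import java.util.ArrayList;
  import java.util.Arrays;
  import java.util.Iterator;
  import java.util.List;

  // Re-chunk any row-level iterator into batches of at most batchSize
  // elements, mirroring the hasNext()/next() logic of the iterator above.
  class Batcher<T> implements Iterator<List<T>> {
    private final Iterator<T> source;
    private final int batchSize;

    Batcher(Iterator<T> source, int batchSize) {
      this.source = source;
      this.batchSize = batchSize;
    }

    @Override
    public boolean hasNext() {
      return source.hasNext();
    }

    @Override
    public List<T> next() {
      List<T> batch = new ArrayList<>(batchSize);
      int counter = 0;
      while (source.hasNext() && counter < batchSize) {
        batch.add(source.next());
        counter++;
      }
      return batch;
    }

    public static void main(String[] args) {
      Batcher<Integer> batcher = new Batcher<>(Arrays.asList(1, 2, 3, 4, 5).iterator(), 2);
      while (batcher.hasNext()) {
        System.out.println(batcher.next());  // [1, 2], then [3, 4], then [5]
      }
    }
  }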
Use of org.apache.carbondata.core.memory.MemoryException in project carbondata by apache.
From the class UnsafeParallelReadMergeSorterWithColumnRangeImpl, method sort:
@Override
public Iterator<CarbonRowBatch>[] sort(Iterator<CarbonRowBatch>[] iterators)
    throws CarbonDataLoadingException {
  UnsafeSortDataRows[] sortDataRows = new UnsafeSortDataRows[columnRangeInfo.getNumOfRanges()];
  intermediateFileMergers = new UnsafeIntermediateMerger[columnRangeInfo.getNumOfRanges()];
  SortParameters[] sortParameterArray = new SortParameters[columnRangeInfo.getNumOfRanges()];
  try {
    // One sorter and one intermediate merger per column range.
    for (int i = 0; i < columnRangeInfo.getNumOfRanges(); i++) {
      SortParameters parameters = originSortParameters.getCopy();
      parameters.setPartitionID(i + "");
      parameters.setRangeId(i);
      sortParameterArray[i] = parameters;
      setTempLocation(parameters);
      intermediateFileMergers[i] = new UnsafeIntermediateMerger(parameters);
      sortDataRows[i] =
          new UnsafeSortDataRows(parameters, intermediateFileMergers[i], inMemoryChunkSizeInMB);
      sortDataRows[i].initialize();
    }
  } catch (MemoryException e) {
    throw new CarbonDataLoadingException(e);
  }
  ExecutorService executorService = Executors.newFixedThreadPool(iterators.length);
  this.threadStatusObserver = new ThreadStatusObserver(executorService);
  final int batchSize = CarbonProperties.getInstance().getBatchSize();
  try {
    for (int i = 0; i < iterators.length; i++) {
      executorService.execute(new SortIteratorThread(iterators[i], sortDataRows, rowCounter,
          this.insideRowCounterList, this.threadStatusObserver));
    }
    executorService.shutdown();
    executorService.awaitTermination(2, TimeUnit.DAYS);
    processRowToNextStep(sortDataRows, originSortParameters);
  } catch (Exception e) {
    checkError();
    throw new CarbonDataLoadingException("Problem while shutting down the sort executor", e);
  }
  checkError();
  try {
    for (int i = 0; i < intermediateFileMergers.length; i++) {
      intermediateFileMergers[i].finish();
    }
  } catch (Exception e) {
    throw new CarbonDataLoadingException(e);
  }
  Iterator<CarbonRowBatch>[] batchIterator = new Iterator[columnRangeInfo.getNumOfRanges()];
  for (int i = 0; i < sortDataRows.length; i++) {
    batchIterator[i] =
        new MergedDataIterator(sortParameterArray[i], batchSize, intermediateFileMergers[i]);
  }
  return batchIterator;
}
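Both sort() implementations follow the same executor lifecycle: submit one task per input iterator, call shutdown() so no new work is accepted, then block in awaitTermination() until all submitted tasks finish. A minimal sketch of that lifecycle (names and tasks are illustrative):

  import java.util.concurrent.ExecutorService;
  import java.util.concurrent.Executors;
  import java.util.concurrent.TimeUnit;

  public class ExecutorLifecycleDemo {
    public static void main(String[] args) throws InterruptedException {
      Runnable[] tasks = {
          () -> System.out.println("sorting range 0"),
          () -> System.out.println("sorting range 1")
      };
      // One worker per task, as in the sorters above (one thread per iterator).
      ExecutorService executorService = Executors.newFixedThreadPool(tasks.length);
      for (Runnable task : tasks) {
        executorService.execute(task);
      }
      executorService.shutdown();                          // stop accepting new tasks
      executorService.awaitTermination(2, TimeUnit.DAYS);  // wait for submitted tasks
      System.out.println("all ranges sorted");
    }
  }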
Use of org.apache.carbondata.core.memory.MemoryException in project carbondata by apache.
From the class BlockletDataMapIndexStore, method get:
@Override
public BlockletDataMap get(TableBlockIndexUniqueIdentifier identifier) throws IOException {
  String lruCacheKey = identifier.getUniqueTableSegmentIdentifier();
  BlockletDataMap dataMap = (BlockletDataMap) lruCache.get(lruCacheKey);
  if (dataMap == null) {
    // Cache miss: read the segment index files and build the DataMap.
    try {
      SegmentIndexFileStore indexFileStore = new SegmentIndexFileStore();
      Set<String> filesRead = new HashSet<>();
      Map<String, BlockMetaInfo> blockMetaInfoMap =
          getBlockMetaInfoMap(identifier, indexFileStore, filesRead);
      dataMap = loadAndGetDataMap(identifier, indexFileStore, blockMetaInfoMap);
    } catch (MemoryException e) {
      LOGGER.error("memory exception when loading datamap: " + e.getMessage());
      throw new RuntimeException(e.getMessage(), e);
    }
  }
  return dataMap;
}
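get() is a classic look-up-then-load cache pattern: consult the LRU cache, and on a miss perform the expensive load, surfacing the checked MemoryException as an unchecked RuntimeException. A simplified, self-contained sketch using a plain LinkedHashMap in place of CarbonLRUCache (LruStore and Loader are hypothetical names; in the real code, loadAndGetDataMap also registers the result in the cache):

  import java.util.LinkedHashMap;
  import java.util.Map;

  // LRU store with a look-up-then-load get(), mirroring the structure of
  // BlockletDataMapIndexStore.get() above in a self-contained form.
  class LruStore<K, V> {
    interface Loader<K, V> {
      V load(K key) throws Exception;  // stands in for loadAndGetDataMap(...)
    }

    private final Map<K, V> lruCache;

    LruStore(final int capacity) {
      // Access-ordered LinkedHashMap that evicts the least recently used entry.
      this.lruCache = new LinkedHashMap<K, V>(16, 0.75f, true) {
        @Override
        protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
          return size() > capacity;
        }
      };
    }

    V get(K key, Loader<K, V> loader) {
      V value = lruCache.get(key);
      if (value == null) {
        try {
          value = loader.load(key);  // expensive load on a cache miss
        } catch (Exception e) {
          // As in get() above: rethrow the checked failure unchecked.
          throw new RuntimeException(e.getMessage(), e);
        }
        lruCache.put(key, value);
      }
      return value;
    }
  }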
Use of org.apache.carbondata.core.memory.MemoryException in project carbondata by apache.
From the class LuceneCoarseGrainDataMapFactory, method getDataMaps:
/**
 * Get the DataMaps for a segment id.
 */
@Override
public List<CoarseGrainDataMap> getDataMaps(Segment segment) throws IOException {
  List<CoarseGrainDataMap> lstDataMap = new ArrayList<>();
  CoarseGrainDataMap dataMap = new LuceneCoarseGrainDataMap(analyzer);
  try {
    dataMap.init(new DataMapModel(LuceneDataMapWriter.genDataMapStorePath(
        tableIdentifier.getTablePath(), segment.getSegmentNo(), dataMapName)));
  } catch (MemoryException e) {
    // Unlike MinMaxIndexDataMapFactory, failure here is logged and an empty
    // list is returned instead of rethrowing.
    LOGGER.error("failed to get lucene datamap, detail is " + e.getMessage());
    return lstDataMap;
  }
  lstDataMap.add(dataMap);
  return lstDataMap;
}
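The original log call read LOGGER.error("failed to get lucene datamap , detail is {}" + e.getMessage()) — the {} placeholder was concatenated into the message string and never substituted, so it is reduced to plain concatenation above. With a parameterized logging API such as SLF4J (an assumption for illustration; CarbonData's own logger wrapper may differ), the idiomatic form would be:

  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class LoggingDemo {
    private static final Logger LOGGER = LoggerFactory.getLogger(LoggingDemo.class);

    static void report(Exception e) {
      // Broken: the placeholder is concatenated, never substituted.
      // LOGGER.error("failed to get lucene datamap, detail is {}" + e.getMessage());

      // Idiomatic SLF4J: pass the message as an argument, and the exception
      // last so the stack trace is logged as well.
      LOGGER.error("failed to get lucene datamap, detail is {}", e.getMessage(), e);
    }
  }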