Use of org.apache.carbondata.processing.sortandgroupby.exception.CarbonSortKeyAndGroupByException in project carbondata by apache.
The class SingleThreadFinalSortFilesMerger, method getSortedRecordFromFile.
/**
 * This method will be used to get the sorted record from file
 *
 * @return sorted record
 * @throws CarbonDataWriterException
 */
private Object[] getSortedRecordFromFile() throws CarbonDataWriterException {
  Object[] row = null;
  // Poll the top object from the heap. The heap maintains a binary tree
  // ordered by the comparator passed to it; poll() always removes the root
  // and then trickles down, so the operation is O(log n).
  SortTempFileChunkHolder poll = this.recordHolderHeapLocal.poll();
  // get the row from the chunk
  row = poll.getRow();
  // check whether any entry is left in this chunk
  if (!poll.hasNext()) {
    // if the chunk is empty then close the stream
    poll.closeStream();
    // decrement the file counter
    --this.fileCounter;
    // return row
    return row;
  }
  // read the next row
  try {
    poll.readRow();
  } catch (CarbonSortKeyAndGroupByException e) {
    throw new CarbonDataWriterException(e.getMessage(), e);
  }
  // add the holder back to the heap
  this.recordHolderHeapLocal.add(poll);
  // return row
  return row;
}
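The heap-based loop above is the classic k-way merge of sorted runs. Below is a minimal standalone sketch of the same pattern using java.util.PriorityQueue; the ChunkHolder interface is a hypothetical stand-in for SortTempFileChunkHolder, and the heap is assumed to be constructed with a comparator over each holder's current row (all names here are illustrative, not CarbonData API).

import java.util.PriorityQueue;

// Hypothetical stand-in for SortTempFileChunkHolder: each holder exposes the
// current row of one sorted run and can advance to the next one.
interface ChunkHolder {
  Object[] getRow();  // current row of this run
  boolean hasNext();  // more rows left in this run?
  void readRow();     // advance to the next row
}

final class KWayMerge {
  /**
   * Polls the smallest holder, takes its current row, and re-inserts the
   * holder if its run is not exhausted; each poll/add is O(log k) for k runs.
   */
  static Object[] nextSortedRow(PriorityQueue<ChunkHolder> heap) {
    ChunkHolder top = heap.poll();  // removes the root; heap trickles down
    Object[] row = top.getRow();
    if (top.hasNext()) {
      top.readRow();                // load the run's next row
      heap.add(top);                // sift the holder back into the heap
    }
    return row;
  }
}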
Use of org.apache.carbondata.processing.sortandgroupby.exception.CarbonSortKeyAndGroupByException in project carbondata by apache.
The class SortTempFileChunkHolder, method initialise.
private void initialise() throws CarbonSortKeyAndGroupByException {
  try {
    if (isSortTempFileCompressionEnabled) {
      this.bufferSize = sortTempFileNoOFRecordsInCompression;
    }
    stream = new DataInputStream(
        new BufferedInputStream(new FileInputStream(tempFile), this.fileBufferSize));
    this.entryCount = stream.readInt();
    if (prefetch) {
      new DataFetcher(false).call();
      totalRecordFetch += currentBuffer.length;
      if (totalRecordFetch < this.entryCount) {
        submit = executorService.submit(new DataFetcher(true));
      }
    } else {
      if (isSortTempFileCompressionEnabled) {
        new DataFetcher(false).call();
      }
    }
  } catch (FileNotFoundException e) {
    LOGGER.error(e);
    throw new CarbonSortKeyAndGroupByException(tempFile + " not found", e);
  } catch (IOException e) {
    LOGGER.error(e);
    throw new CarbonSortKeyAndGroupByException("Problem while reading " + tempFile, e);
  } catch (Exception e) {
    LOGGER.error(e);
    throw new CarbonSortKeyAndGroupByException("Problem while reading " + tempFile, e);
  }
}
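initialise() follows an open-then-prefetch pattern: the record count is read from the file header, the first buffer is fetched synchronously, and a background task fetches the next buffer while rows are consumed. Here is a hypothetical, self-contained sketch of that pattern; PrefetchingReader, start, and fetchBuffer are illustrative names, not CarbonData API.

import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

final class PrefetchingReader {
  private final DataInputStream stream;
  private final int entryCount;              // total records in the file
  private final ExecutorService executor = Executors.newSingleThreadExecutor();
  private Future<Object[][]> pendingFetch;   // buffer being fetched ahead

  PrefetchingReader(String tempFile, int bufferSize) throws IOException {
    stream = new DataInputStream(
        new BufferedInputStream(new FileInputStream(tempFile), bufferSize));
    entryCount = stream.readInt();           // count written by the producer
  }

  void start(int recordsFetched) {
    if (recordsFetched < entryCount) {
      // fetch the next chunk in the background while rows are consumed
      Callable<Object[][]> task = this::fetchBuffer;
      pendingFetch = executor.submit(task);
    }
  }

  private Object[][] fetchBuffer() {
    // read the next chunk of rows from the stream (elided in this sketch)
    return new Object[0][];
  }
}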
Use of org.apache.carbondata.processing.sortandgroupby.exception.CarbonSortKeyAndGroupByException in project carbondata by apache.
The class SortTempFileChunkHolder, method getRowFromStream.
/**
 * Reads a row from the sort temp file stream
 * @return Object[]
 * @throws CarbonSortKeyAndGroupByException
 */
private Object[] getRowFromStream() throws CarbonSortKeyAndGroupByException {
  // create a new row of size 3 (1 for dims, 1 for high card, 1 for measures)
  Object[] holder = new Object[3];
  int index = 0;
  int nonDicIndex = 0;
  int[] dim = new int[this.dimensionCount];
  byte[][] nonDicArray = new byte[this.noDictionaryCount + this.complexDimensionCount][];
  Object[] measures = new Object[this.measureCount];
  try {
    // read dimension values
    for (int i = 0; i < isNoDictionaryDimensionColumn.length; i++) {
      if (isNoDictionaryDimensionColumn[i]) {
        short len = stream.readShort();
        byte[] array = new byte[len];
        stream.readFully(array);
        nonDicArray[nonDicIndex++] = array;
      } else {
        dim[index++] = stream.readInt();
      }
    }
    for (int i = 0; i < complexDimensionCount; i++) {
      short len = stream.readShort();
      byte[] array = new byte[len];
      stream.readFully(array);
      nonDicArray[nonDicIndex++] = array;
    }
    index = 0;
    // read measure values
    for (int i = 0; i < this.measureCount; i++) {
      if (stream.readByte() == 1) {
        switch (aggType[i]) {
          case SHORT:
            measures[index++] = stream.readShort();
            break;
          case INT:
            measures[index++] = stream.readInt();
            break;
          case LONG:
            measures[index++] = stream.readLong();
            break;
          case DOUBLE:
            measures[index++] = stream.readDouble();
            break;
          case DECIMAL:
            int len = stream.readInt();
            byte[] buff = new byte[len];
            stream.readFully(buff);
            measures[index++] = buff;
            break;
        }
      } else {
        measures[index++] = null;
      }
    }
    NonDictionaryUtil.prepareOutObj(holder, dim, nonDicArray, measures);
    // increment the number of records read
    this.numberOfObjectRead++;
  } catch (IOException e) {
    LOGGER.error("Problem while reading the mdkey from the sort temp file");
    throw new CarbonSortKeyAndGroupByException("Problem while reading the sort temp file ", e);
  }
  // return the out row
  return holder;
}
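The reader above implies a row layout on disk: int surrogate keys for dictionary dimensions, short-length-prefixed byte arrays for no-dictionary (and, analogously, complex) dimensions, and a presence byte before each measure (1 = value follows, 0 = null). For clarity, here is a hypothetical writer-side sketch that would produce that layout; it is an illustration of the format, not CarbonData's actual writer, and only the double, long, and decimal-as-bytes measure cases are shown.

import java.io.DataOutputStream;
import java.io.IOException;

final class SortTempRowWriter {
  static void writeRow(DataOutputStream out, boolean[] isNoDictionaryColumn,
      int[] dictValues, byte[][] noDictValues, Object[] measures) throws IOException {
    int dictIdx = 0;
    int nonDictIdx = 0;
    for (boolean noDict : isNoDictionaryColumn) {
      if (noDict) {
        byte[] value = noDictValues[nonDictIdx++];
        out.writeShort(value.length);          // length prefix read via readShort()
        out.write(value);
      } else {
        out.writeInt(dictValues[dictIdx++]);   // surrogate key read via readInt()
      }
    }
    for (Object measure : measures) {
      if (measure == null) {
        out.writeByte(0);                      // reader stores null for this measure
      } else if (measure instanceof Double) {
        out.writeByte(1);
        out.writeDouble((Double) measure);
      } else if (measure instanceof Long) {
        out.writeByte(1);
        out.writeLong((Long) measure);
      } else if (measure instanceof byte[]) {  // e.g. an encoded decimal
        out.writeByte(1);
        byte[] buff = (byte[]) measure;
        out.writeInt(buff.length);             // decimal length read via readInt()
        out.write(buff);
      }
    }
  }
}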
Use of org.apache.carbondata.processing.sortandgroupby.exception.CarbonSortKeyAndGroupByException in project carbondata by apache.
The class CompactionResultSortProcessor, method processResult.
/**
 * This method will iterate over the query result and perform the row sorting operation
 *
 * @param resultIteratorList
 */
private void processResult(List<RawResultIterator> resultIteratorList) throws Exception {
  for (RawResultIterator resultIterator : resultIteratorList) {
    while (resultIterator.hasNext()) {
      addRowForSorting(prepareRowObjectForSorting(resultIterator.next()));
      isRecordFound = true;
    }
  }
  try {
    sortDataRows.startSorting();
  } catch (CarbonSortKeyAndGroupByException e) {
    LOGGER.error(e);
    throw new Exception("Problem loading data during compaction: " + e.getMessage());
  }
}
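processResult drains every iterator before triggering the sort, so startSorting() sees the complete row set. A generic sketch of this drain-then-sort contract follows, with a hypothetical Sorter interface and plain java.util.Iterator standing in for SortDataRows and RawResultIterator.

import java.util.Iterator;
import java.util.List;

// Hypothetical stand-in for SortDataRows: rows are buffered as they arrive
// and the final sort runs only after all rows have been added.
interface Sorter {
  void addRow(Object[] row) throws Exception;
  void startSorting() throws Exception;
}

final class DrainAndSort {
  static boolean drain(List<Iterator<Object[]>> iterators, Sorter sorter) throws Exception {
    boolean recordFound = false;
    for (Iterator<Object[]> it : iterators) {
      while (it.hasNext()) {
        sorter.addRow(it.next());  // buffer the row; the sorter may spill when full
        recordFound = true;
      }
    }
    sorter.startSorting();         // sort whatever remains in memory
    return recordFound;
  }
}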
Use of org.apache.carbondata.processing.sortandgroupby.exception.CarbonSortKeyAndGroupByException in project carbondata by apache.
The class CompactionResultSortProcessor, method initSortDataRows.
/**
 * Create an instance of SortDataRows
 */
private void initSortDataRows() throws Exception {
  measureCount = carbonTable.getMeasureByTableName(tableName).size();
  List<CarbonDimension> dimensions = carbonTable.getDimensionByTableName(tableName);
  noDictionaryColMapping = new boolean[dimensions.size()];
  int i = 0;
  for (CarbonDimension dimension : dimensions) {
    if (CarbonUtil.hasEncoding(dimension.getEncoder(), Encoding.DICTIONARY)) {
      i++;
      continue;
    }
    noDictionaryColMapping[i++] = true;
    noDictionaryCount++;
  }
  dimensionColumnCount = dimensions.size();
  SortParameters parameters = createSortParameters();
  intermediateFileMerger = new SortIntermediateFileMerger(parameters);
  // TODO: currently only on-heap merge is supported, but unsafe merge could
  // be supported as well by using UnsafeSortDataRows.
  this.sortDataRows = new SortDataRows(parameters, intermediateFileMerger);
  try {
    this.sortDataRows.initialize();
  } catch (CarbonSortKeyAndGroupByException e) {
    LOGGER.error(e);
    throw new Exception("Error initializing sort data rows object during compaction: " + e.getMessage());
  }
}
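The loop above computes a per-dimension flag array in which dictionary-encoded columns keep the default false and every other column is flagged true. Here is a hypothetical, self-contained version of that mapping logic; the nested Encoding enum is a stand-in for CarbonData's Encoding and the method name is illustrative.

import java.util.List;

final class NoDictionaryMapping {
  enum Encoding { DICTIONARY, DIRECT }  // stand-in for CarbonData's Encoding

  static boolean[] build(List<List<Encoding>> dimensionEncodings) {
    boolean[] mapping = new boolean[dimensionEncodings.size()];
    for (int i = 0; i < mapping.length; i++) {
      // dictionary columns keep the default false; all others are flagged
      mapping[i] = !dimensionEncodings.get(i).contains(Encoding.DICTIONARY);
    }
    return mapping;
  }
}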