Use of org.apache.carbondata.processing.loading.row.IntermediateSortTempRow in project carbondata by apache.
The class IntermediateSortTempRowComparatorTest, method compareboolean.
@Test
public void compareboolean() {
  DataType[] noDicDataTypes = { DataTypes.BOOLEAN };
  boolean[] noDicSortColumnMapping = { true };
  new_comparator = new IntermediateSortTempRowComparator(noDicSortColumnMapping, noDicDataTypes);
  int[] dictSortDims1 = { 1, 2, 3 };
  Object[] noDictSortDims1 = { true };
  byte[] noSortDimsAndMeasures1 = { 1, 2, 3 };
  IntermediateSortTempRow a1 = new IntermediateSortTempRow(dictSortDims1, noDictSortDims1, noSortDimsAndMeasures1);
  int[] dictSortDims = { 1, 2, 3 };
  Object[] noDictSortDims = { false };
  byte[] noSortDimsAndMeasures = { 1, 2, 3 };
  IntermediateSortTempRow a = new IntermediateSortTempRow(dictSortDims, noDictSortDims, noSortDimsAndMeasures);
  int res = new_comparator.compare(a1, a);
  Assert.assertTrue(res > 0);
}
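The same comparator can be driven outside the test harness. The minimal sketch below reuses only the constructor and compare(...) calls shown above; the package of IntermediateSortTempRowComparator and the surrounding class and main method are assumptions made for illustration.

import org.apache.carbondata.core.metadata.datatype.DataType;
import org.apache.carbondata.core.metadata.datatype.DataTypes;
import org.apache.carbondata.processing.loading.row.IntermediateSortTempRow;
// the comparator's package is an assumption; adjust to where it lives in the project
import org.apache.carbondata.processing.sort.sortdata.IntermediateSortTempRowComparator;

public class BooleanComparatorSketch {
  public static void main(String[] args) {
    // one no-dictionary BOOLEAN column, marked as a sort column
    DataType[] noDicDataTypes = { DataTypes.BOOLEAN };
    boolean[] noDicSortColumnMapping = { true };
    IntermediateSortTempRowComparator comparator =
        new IntermediateSortTempRowComparator(noDicSortColumnMapping, noDicDataTypes);
    // two rows differing only in the boolean no-dictionary sort value
    IntermediateSortTempRow trueRow =
        new IntermediateSortTempRow(new int[] { 1, 2, 3 }, new Object[] { true }, new byte[] { 1, 2, 3 });
    IntermediateSortTempRow falseRow =
        new IntermediateSortTempRow(new int[] { 1, 2, 3 }, new Object[] { false }, new byte[] { 1, 2, 3 });
    // per the test above, true compares greater than false
    System.out.println(comparator.compare(trueRow, falseRow) > 0); // expected: true
  }
}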
Use of org.apache.carbondata.processing.loading.row.IntermediateSortTempRow in project carbondata by apache.
The class IntermediateSortTempRowComparatorTest, method compareintreverse.
@Test
public void compareintreverse() {
  DataType[] noDicDataTypes = { DataTypes.INT };
  boolean[] noDicSortColumnMapping = { true };
  new_comparator = new IntermediateSortTempRowComparator(noDicSortColumnMapping, noDicDataTypes);
  int[] dictSortDims1 = { 1, 2, 3 };
  Object[] noDictSortDims1 = { 1, 2, 3 };
  byte[] noSortDimsAndMeasures1 = { 1, 2, 3 };
  IntermediateSortTempRow a1 = new IntermediateSortTempRow(dictSortDims1, noDictSortDims1, noSortDimsAndMeasures1);
  int[] dictSortDims = { 1, 2, 3 };
  Object[] noDictSortDims = { 4, 5, 6 };
  byte[] noSortDimsAndMeasures = { 1, 2, 3 };
  IntermediateSortTempRow a = new IntermediateSortTempRow(dictSortDims, noDictSortDims, noSortDimsAndMeasures);
  int res = new_comparator.compare(a1, a);
  Assert.assertTrue(res < 0);
}
Use of org.apache.carbondata.processing.loading.row.IntermediateSortTempRow in project carbondata by apache.
The class InMemorySortTempChunkHolder, method readRow.
/**
 * 1. Read row from RawResultIterator
 * 2. Convert it to IntermediateSortTempRow
 * 3. Store it in memory to read through getRow() method
 */
public void readRow() {
  Object[] row = this.rawResultIterator.next();
  // TODO: get Object[] directly instead of a CarbonRow object
  CarbonRow carbonRow = WriteStepRowUtil.fromMergerRow(row, segmentProperties, noDicAndComplexColumns);
  Object[] data = carbonRow.getData();
  Object[] measuresValue = (Object[]) data[WriteStepRowUtil.MEASURE];
  for (int i = 0; i < measuresValue.length; i++) {
    measuresValue[i] = getConvertedMeasureValue(measuresValue[i], measureDataType[i]);
  }
  returnRow = new IntermediateSortTempRow((int[]) data[WriteStepRowUtil.DICTIONARY_DIMENSION],
      (Object[]) data[WriteStepRowUtil.NO_DICTIONARY_AND_COMPLEX], measuresValue);
}
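The three numbered steps imply a read-then-get contract between the holder and its caller: readRow() materializes the next row, and getRow() hands it out. The sketch below illustrates that contract against a hypothetical, reduced ChunkHolder interface; hasNext/readRow/getRow are mirrored from how SortTempFileChunkHolder is consumed further down, not taken from InMemorySortTempChunkHolder's declared API.

import org.apache.carbondata.processing.loading.row.IntermediateSortTempRow;

// hypothetical, reduced contract used only for this illustration
interface ChunkHolder {
  boolean hasNext();
  void readRow();
  IntermediateSortTempRow getRow();
}

class ChunkHolderConsumer {
  static void drain(ChunkHolder holder) {
    while (holder.hasNext()) {
      holder.readRow();                              // steps 1-2: read from the iterator and convert
      IntermediateSortTempRow row = holder.getRow(); // step 3: fetch the row stored by readRow()
      // ... hand the row to the downstream merge step
    }
  }
}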
Use of org.apache.carbondata.processing.loading.row.IntermediateSortTempRow in project carbondata by apache.
The class SingleThreadFinalSortFilesMerger, method getSortedRecordFromFile.
/**
 * This method will be used to get the sorted record from file
 *
 * @return sorted record
 * @throws CarbonDataWriterException
 */
private IntermediateSortTempRow getSortedRecordFromFile() throws CarbonDataWriterException {
  IntermediateSortTempRow row = null;
  // peek the top object from the heap; the heap maintains a binary tree whose
  // heap condition is based on the comparator passed to it. When poll() is
  // called it removes the root of the tree and then trickles down, a log(n) operation.
  SortTempFileChunkHolder poll = this.recordHolderHeapLocal.peek();
  // get the row from the chunk
  row = poll.getRow();
  // check whether any entry is left in this chunk
  if (!poll.hasNext()) {
    // if the chunk is empty then close the stream and remove it from the heap
    poll.closeStream();
    this.recordHolderHeapLocal.poll();
    // return row
    return row;
  }
  // read new row
  try {
    poll.readRow();
  } catch (CarbonSortKeyAndGroupByException e) {
    close();
    throw new CarbonDataWriterException(e);
  }
  // maintain heap
  this.recordHolderHeapLocal.siftTopDown();
  // return row
  return row;
}
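The peek / readRow / siftTopDown sequence is one step of a k-way merge over sorted chunks. The standalone sketch below expresses the same idea with a standard java.util.PriorityQueue, where poll-and-reinsert plays the role of the project's custom heap and its siftTopDown(); it is an illustration of the technique under that substitution, not the project's implementation.

import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.PriorityQueue;
import java.util.function.Consumer;

class KWayMergeSketch {
  // one heap entry per input chunk: its current head row plus the iterator it came from
  private static final class Entry<T> {
    T head;
    final Iterator<T> source;
    Entry(T head, Iterator<T> source) {
      this.head = head;
      this.source = source;
    }
  }

  static <T> void merge(List<Iterator<T>> sources, Comparator<T> comparator, Consumer<T> out) {
    // heap ordered by each chunk's current head row
    PriorityQueue<Entry<T>> heap = new PriorityQueue<>((a, b) -> comparator.compare(a.head, b.head));
    for (Iterator<T> source : sources) {
      if (source.hasNext()) {
        heap.add(new Entry<>(source.next(), source));
      }
    }
    while (!heap.isEmpty()) {
      Entry<T> top = heap.poll();       // remove the root: the smallest current row
      out.accept(top.head);
      if (top.source.hasNext()) {
        top.head = top.source.next();   // read the next row from the same chunk
        heap.add(top);                  // re-insert: log(n), like siftTopDown()
      }
      // an exhausted chunk is simply not re-inserted (the merger above also closes its stream)
    }
  }
}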
Use of org.apache.carbondata.processing.loading.row.IntermediateSortTempRow in project carbondata by apache.
The class SortTempFileChunkHolder, method readRow.
/**
 * This method will be used to read a new row from file
 *
 * @throws CarbonSortKeyAndGroupByException problem while reading
 */
public void readRow() throws CarbonSortKeyAndGroupByException {
  if (prefetch) {
    fillDataForPrefetch();
  } else {
    try {
      if (convertToActualField) {
        IntermediateSortTempRow intermediateSortTempRow =
            sortStepRowHandler.readWithNoSortFieldConvert(stream);
        this.sortTempRowUpdater.updateSortTempRow(intermediateSortTempRow);
        this.returnRow = intermediateSortTempRow;
      } else {
        this.returnRow = sortStepRowHandler.readWithoutNoSortFieldConvert(stream);
      }
      this.numberOfObjectRead++;
    } catch (IOException e) {
      throw new CarbonSortKeyAndGroupByException("Problem while reading rows", e);
    }
  }
}