Usage of org.apache.carbondata.core.metadata.datatype.DataType in the Apache CarbonData project.
From the class IntermediateFileMerger, method writeDataTofile.
/**
 * Serializes a single merged row for the sort temp file.
 *
 * When sort-file compression or prefetch is enabled, rows are buffered in
 * {@code records} and flushed through the writer in batches of
 * {@code totalSize}. Otherwise the row is written directly to {@code stream}
 * in the temp-file layout: dictionary dimensions as int surrogate keys,
 * no-dictionary dimensions as (short length + bytes), then each measure as a
 * one-byte null flag (1 = present, 0 = null) followed by its value.
 *
 * @param row merged row: row[0] holds the int[] dictionary surrogate keys,
 *            row[1] the byte[][] no-dictionary key bytes; measures are
 *            accessed via NonDictionaryUtil.getMeasure
 * @throws CarbonSortKeyAndGroupByException problem while writing
 */
private void writeDataTofile(Object[] row) throws CarbonSortKeyAndGroupByException {
  if (mergerParameters.isSortFileCompressionEnabled() || mergerParameters.isPrefetch()) {
    if (entryCount == 0) {
      records = new Object[totalSize][];
      records[entryCount++] = row;
      return;
    }
    records[entryCount++] = row;
    if (entryCount == totalSize) {
      // batch is full: flush it and start a fresh buffer
      this.writer.writeSortTempFile(records);
      entryCount = 0;
      records = new Object[totalSize][];
    }
    return;
  }
  try {
    DataType[] aggType = mergerParameters.getMeasureDataType();
    int[] mdkArray = (int[]) row[0];
    byte[][] nonDictArray = (byte[][]) row[1];
    int mdkIndex = 0;
    int nonDictKeyIndex = 0;
    // write dictionary and non dictionary dimensions here.
    for (boolean nodictinary : noDictionarycolumnMapping) {
      if (nodictinary) {
        byte[] col = nonDictArray[nonDictKeyIndex++];
        stream.writeShort(col.length);
        stream.write(col);
      } else {
        stream.writeInt(mdkArray[mdkIndex++]);
      }
    }
    for (int counter = 0; counter < mergerParameters.getMeasureColCount(); counter++) {
      // fetch once instead of re-reading the same measure for each write below
      Object measure = NonDictionaryUtil.getMeasure(counter, row);
      if (null != measure) {
        stream.write((byte) 1);
        switch (aggType[counter]) {
          case SHORT:
          case INT:
          case LONG:
            // integral measures are normalized to Long in the merged row
            stream.writeLong((Long) measure);
            break;
          case DOUBLE:
            stream.writeDouble((Double) measure);
            break;
          case DECIMAL:
            byte[] bigDecimalInBytes = (byte[]) measure;
            stream.writeInt(bigDecimalInBytes.length);
            stream.write(bigDecimalInBytes);
            break;
          default:
            // fail fast: falling through silently would leave a presence flag
            // with no value bytes, corrupting the temp file for readers
            throw new CarbonSortKeyAndGroupByException(
                "Unsupported measure data type: " + aggType[counter]);
        }
      } else {
        stream.write((byte) 0);
      }
    }
  } catch (IOException e) {
    throw new CarbonSortKeyAndGroupByException("Problem while writing the file", e);
  }
}
Usage of org.apache.carbondata.core.metadata.datatype.DataType in the Apache CarbonData project.
From the class SortDataRows, method writeData.
/**
 * Writes {@code entryCountLocal} sorted rows to the given sort temp file.
 *
 * File layout: an int row count, then per row the dictionary dimensions
 * (int surrogate keys), no-dictionary dimensions (short length + bytes),
 * complex dimensions (short length + bytes), and each measure as a one-byte
 * null flag (1 = present, 0 = null) followed by its typed value.
 *
 * @param recordHolderList sorted rows; dimensions first, then measures at
 *                         offset dimColCount within each row
 * @param entryCountLocal  number of rows from recordHolderList to write
 * @param file             destination sort temp file
 * @throws CarbonSortKeyAndGroupByException problem while writing
 */
private void writeData(Object[][] recordHolderList, int entryCountLocal, File file) throws CarbonSortKeyAndGroupByException {
  DataOutputStream stream = null;
  try {
    // open stream
    stream = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(file), parameters.getFileWriteBufferSize()));
    // write number of entries to the file
    stream.writeInt(entryCountLocal);
    int complexDimColCount = parameters.getComplexDimColCount();
    int dimColCount = parameters.getDimColCount() + complexDimColCount;
    // hoist loop-invariant parameter lookups out of the per-row loops
    int measureColCount = parameters.getMeasureColCount();
    DataType[] type = parameters.getMeasureDataType();
    boolean[] noDictionaryDimnesionMapping = parameters.getNoDictionaryDimnesionColumn();
    Object[] row = null;
    for (int i = 0; i < entryCountLocal; i++) {
      // get row from record holder list
      row = recordHolderList[i];
      int dimCount = 0;
      // write dictionary and non dictionary dimensions here.
      for (; dimCount < noDictionaryDimnesionMapping.length; dimCount++) {
        if (noDictionaryDimnesionMapping[dimCount]) {
          byte[] col = (byte[]) row[dimCount];
          stream.writeShort(col.length);
          stream.write(col);
        } else {
          stream.writeInt((int) row[dimCount]);
        }
      }
      // write complex dimensions here.
      for (; dimCount < dimColCount; dimCount++) {
        byte[] value = (byte[]) row[dimCount];
        stream.writeShort(value.length);
        stream.write(value);
      }
      // as measures are stored in separate array.
      for (int mesCount = 0; mesCount < measureColCount; mesCount++) {
        Object value = row[mesCount + dimColCount];
        if (null != value) {
          stream.write((byte) 1);
          switch(type[mesCount]) {
            case SHORT:
              stream.writeShort((Short) value);
              break;
            case INT:
              stream.writeInt((Integer) value);
              break;
            case LONG:
              stream.writeLong((Long) value);
              break;
            case DOUBLE:
              stream.writeDouble((Double) value);
              break;
            case DECIMAL:
              BigDecimal val = (BigDecimal) value;
              byte[] bigDecimalInBytes = DataTypeUtil.bigDecimalToByte(val);
              stream.writeInt(bigDecimalInBytes.length);
              stream.write(bigDecimalInBytes);
              break;
            default:
              // fail fast: falling through silently would leave a presence
              // flag with no value bytes, corrupting the temp file for readers
              throw new CarbonSortKeyAndGroupByException(
                  "Unsupported measure data type: " + type[mesCount]);
          }
        } else {
          stream.write((byte) 0);
        }
      }
    }
  } catch (IOException e) {
    throw new CarbonSortKeyAndGroupByException("Problem while writing the file", e);
  } finally {
    // close streams
    CarbonUtil.closeStreams(stream);
  }
}
Aggregations