Use of org.apache.spark.sql.types.LongType in project carbondata by apache.
The fillRow method of the class SafeVariableLengthDimensionDataChunkStore.
@Override
public void fillRow(int rowId, CarbonColumnVector vector, int vectorRow) {
  // if the column was explicitly sorted, resolve the actual row id through the
  // reverse inverted index
  if (isExplictSorted) {
    rowId = invertedIndexReverse[rowId];
  }
  // To read the row from the memory block:
  // 1. get the current row's data offset
  // 2. if it is not the last row, subtract the current offset plus 2 bytes
  //    (the stored data-length prefix) from the next row's offset to get the data length
  // 3. otherwise (last row), subtract the current offset from the total data length
  int currentDataOffset = dataOffsets[rowId];
  short length = 0;
  // calculating the length of data
  if (rowId < numberOfRows - 1) {
    length = (short) (dataOffsets[rowId + 1]
        - (currentDataOffset + CarbonCommonConstants.SHORT_SIZE_IN_BYTE));
  } else {
    // for the last record
    length = (short) (this.data.length - currentDataOffset);
  }
  if (ByteUtil.UnsafeComparer.INSTANCE.equals(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, 0,
      CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY.length, data, currentDataOffset, length)) {
    // the stored bytes are the default member value, so the cell is null
    vector.putNull(vectorRow);
  } else {
    // decode the bytes according to the Spark data type of the target vector
    DataType dt = vector.getType();
    if (dt instanceof StringType) {
      vector.putBytes(vectorRow, currentDataOffset, length, data);
    } else if (dt instanceof BooleanType) {
      vector.putBoolean(vectorRow, ByteUtil.toBoolean(data[currentDataOffset]));
    } else if (dt instanceof ShortType) {
      vector.putShort(vectorRow, ByteUtil.toShort(data, currentDataOffset, length));
    } else if (dt instanceof IntegerType) {
      vector.putInt(vectorRow, ByteUtil.toInt(data, currentDataOffset, length));
    } else if (dt instanceof FloatType) {
      vector.putFloat(vectorRow, ByteUtil.toFloat(data, currentDataOffset));
    } else if (dt instanceof DoubleType) {
      vector.putDouble(vectorRow, ByteUtil.toDouble(data, currentDataOffset));
    } else if (dt instanceof LongType) {
      vector.putLong(vectorRow, ByteUtil.toLong(data, currentDataOffset, length));
    } else if (dt instanceof DecimalType) {
      vector.putDecimal(vectorRow,
          Decimal.apply(ByteUtil.toBigDecimal(data, currentDataOffset, length)),
          DecimalType.MAX_PRECISION());
    }
  }
}
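
To make the offset arithmetic in fillRow concrete, here is a minimal, self-contained sketch, not CarbonData code: it assumes the same length-value layout (a 2-byte length prefix followed by the data bytes, with dataOffsets[i] pointing at the first data byte of row i) and uses java.nio.ByteBuffer's big-endian getLong as a stand-in for ByteUtil.toLong. The class and variable names are illustrative only.

// Minimal sketch of the length-value layout assumed above (not CarbonData code).
import java.nio.ByteBuffer;

public class VariableLengthLayoutSketch {

  public static void main(String[] args) {
    long[] values = { 42L, -7L, 1234567890123L };

    // Encode: a 2-byte length prefix followed by the 8 data bytes of each long.
    ByteBuffer buffer = ByteBuffer.allocate(values.length * (Short.BYTES + Long.BYTES));
    int[] dataOffsets = new int[values.length];
    for (int i = 0; i < values.length; i++) {
      buffer.putShort((short) Long.BYTES);   // length prefix
      dataOffsets[i] = buffer.position();    // offset of the first data byte of row i
      buffer.putLong(values[i]);             // data bytes
    }
    byte[] data = buffer.array();

    // Decode: the same arithmetic as fillRow above.
    for (int rowId = 0; rowId < values.length; rowId++) {
      int currentDataOffset = dataOffsets[rowId];
      short length;
      if (rowId < values.length - 1) {
        // next row's data offset minus this row's data offset minus the 2-byte prefix
        length = (short) (dataOffsets[rowId + 1] - (currentDataOffset + Short.BYTES));
      } else {
        // last row: everything up to the end of the block
        length = (short) (data.length - currentDataOffset);
      }
      long decoded = ByteBuffer.wrap(data, currentDataOffset, length).getLong();
      System.out.println("row " + rowId + " -> " + decoded);
    }
  }
}

Deriving each length from the neighbouring offset avoids re-reading the 2-byte prefix per row, which appears to be why fillRow never touches the prefix directly and only works with the precomputed dataOffsets array.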