Use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in project hive by apache.
The class VectorPTFEvaluatorDecimalAvg, method evaluateGroupBatch.
@Override
public void evaluateGroupBatch(VectorizedRowBatch batch) throws HiveException {
  evaluateInputExpr(batch);

  // Sum all non-null decimal column values for avg; maintain isGroupResultNull; after last row of
  // last group batch compute the group avg when sum is non-null.

  // We do not filter when PTF is in reducer.
  Preconditions.checkState(!batch.selectedInUse);

  final int size = batch.size;
  if (size == 0) {
    return;
  }
  DecimalColumnVector decimalColVector = ((DecimalColumnVector) batch.cols[inputColumnNum]);
  if (decimalColVector.isRepeating) {
    if (decimalColVector.noNulls || !decimalColVector.isNull[0]) {
      // We have a repeated value. The sum increases by value * batch.size.
      temp.setFromLong(batch.size);
      if (isGroupResultNull) {
        // First aggregation calculation for group.
        sum.set(decimalColVector.vector[0]);
        sum.mutateMultiply(temp);
        isGroupResultNull = false;
      } else {
        temp.mutateMultiply(decimalColVector.vector[0]);
        sum.mutateAdd(temp);
      }
      nonNullGroupCount += size;
    }
  } else if (decimalColVector.noNulls) {
    HiveDecimalWritable[] vector = decimalColVector.vector;
    if (isGroupResultNull) {
      // First aggregation calculation for group.
      sum.set(vector[0]);
      isGroupResultNull = false;
    } else {
      sum.mutateAdd(vector[0]);
    }
    for (int i = 1; i < size; i++) {
      sum.mutateAdd(vector[i]);
    }
    nonNullGroupCount += size;
  } else {
    boolean[] batchIsNull = decimalColVector.isNull;
    int i = 0;
    while (batchIsNull[i]) {
      if (++i >= size) {
        return;
      }
    }
    HiveDecimalWritable[] vector = decimalColVector.vector;
    if (isGroupResultNull) {
      // First aggregation calculation for group.
      sum.set(vector[i++]);
      isGroupResultNull = false;
    } else {
      sum.mutateAdd(vector[i++]);
    }
    nonNullGroupCount++;
    for (; i < size; i++) {
      if (!batchIsNull[i]) {
        sum.mutateAdd(vector[i]);
        nonNullGroupCount++;
      }
    }
  }
}
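The isRepeating branch above avoids a per-row loop: when every row of the batch carries the same value, the sum grows by value * batch.size in a single multiply. A minimal standalone sketch of that identity, using only the HiveDecimalWritable calls seen in the method (the demo class name is hypothetical, not part of Hive):

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class RepeatingSumDemo {
  public static void main(String[] args) {
    HiveDecimalWritable value = new HiveDecimalWritable();
    value.set(HiveDecimal.create("1.25"));
    final int size = 1024; // pretend the whole batch repeats this value

    // Shortcut taken for isRepeating batches: sum = value * size.
    HiveDecimalWritable temp = new HiveDecimalWritable();
    temp.setFromLong(size);
    HiveDecimalWritable fast = new HiveDecimalWritable();
    fast.set(value);
    fast.mutateMultiply(temp);

    // Naive per-row accumulation for comparison.
    HiveDecimalWritable slow = new HiveDecimalWritable();
    slow.setFromLong(0);
    for (int i = 0; i < size; i++) {
      slow.mutateAdd(value);
    }

    // Both print the same total (1.25 * 1024 = 1280).
    System.out.println(fast + " == " + slow);
  }
}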
Use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in project hive by apache.
The class VectorPTFEvaluatorDecimalFirstValue, method evaluateGroupBatch.
@Override
public void evaluateGroupBatch(VectorizedRowBatch batch) throws HiveException {
  evaluateInputExpr(batch);

  // First row determines isGroupResultNull and decimal firstValue; stream fill result as repeated.

  // We do not filter when PTF is in reducer.
  Preconditions.checkState(!batch.selectedInUse);

  if (!haveFirstValue) {
    final int size = batch.size;
    if (size == 0) {
      return;
    }
    DecimalColumnVector decimalColVector = ((DecimalColumnVector) batch.cols[inputColumnNum]);
    if (decimalColVector.isRepeating) {
      if (decimalColVector.noNulls || !decimalColVector.isNull[0]) {
        firstValue.set(decimalColVector.vector[0]);
        isGroupResultNull = false;
      }
    } else if (decimalColVector.noNulls) {
      firstValue.set(decimalColVector.vector[0]);
      isGroupResultNull = false;
    } else {
      if (!decimalColVector.isNull[0]) {
        firstValue.set(decimalColVector.vector[0]);
        isGroupResultNull = false;
      }
    }
    haveFirstValue = true;
  }

  /*
   * Do careful maintenance of the outputColVector.noNulls flag.
   */

  // First value is repeated for all batches.
  DecimalColumnVector outputColVector = (DecimalColumnVector) batch.cols[outputColumnNum];
  outputColVector.isRepeating = true;
  if (isGroupResultNull) {
    outputColVector.noNulls = false;
    outputColVector.isNull[0] = true;
  } else {
    outputColVector.isNull[0] = false;
    outputColVector.set(0, firstValue);
  }
}
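Because first_value is constant within a group, the evaluator writes it once as a repeating output column: row 0 stands in for every logical row of the batch. A small sketch of those repeating-column semantics (hypothetical demo class, not Hive code):

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class RepeatingOutputDemo {
  public static void main(String[] args) {
    DecimalColumnVector out = new DecimalColumnVector(18, 2); // precision 18, scale 2
    HiveDecimalWritable firstValue = new HiveDecimalWritable();
    firstValue.set(HiveDecimal.create("3.14"));

    // Mirror the evaluator's non-null path: mark repeating and set row 0.
    out.isRepeating = true;
    out.isNull[0] = false;
    out.set(0, firstValue);

    // Readers treat row 0 as the value of every logical row.
    System.out.println(out.vector[0]); // 3.14
  }
}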
Use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in project hive by apache.
The class VectorPTFEvaluatorDecimalSum, method evaluateGroupBatch.
@Override
public void evaluateGroupBatch(VectorizedRowBatch batch) throws HiveException {
  evaluateInputExpr(batch);

  // Sum all non-null decimal column values; maintain isGroupResultNull.

  // We do not filter when PTF is in reducer.
  Preconditions.checkState(!batch.selectedInUse);

  final int size = batch.size;
  if (size == 0) {
    return;
  }
  DecimalColumnVector decimalColVector = ((DecimalColumnVector) batch.cols[inputColumnNum]);
  if (decimalColVector.isRepeating) {
    if (decimalColVector.noNulls || !decimalColVector.isNull[0]) {
      temp.setFromLong(batch.size);
      if (isGroupResultNull) {
        // First aggregation calculation for group.
        sum.set(decimalColVector.vector[0]);
        sum.mutateMultiply(temp);
        isGroupResultNull = false;
      } else {
        temp.mutateMultiply(decimalColVector.vector[0]);
        sum.mutateAdd(temp);
      }
    }
  } else if (decimalColVector.noNulls) {
    HiveDecimalWritable[] vector = decimalColVector.vector;
    if (isGroupResultNull) {
      // First aggregation calculation for group.
      sum.set(vector[0]);
      isGroupResultNull = false;
    } else {
      sum.mutateAdd(vector[0]);
    }
    for (int i = 1; i < size; i++) {
      sum.mutateAdd(vector[i]);
    }
  } else {
    boolean[] batchIsNull = decimalColVector.isNull;
    int i = 0;
    while (batchIsNull[i]) {
      if (++i >= size) {
        return;
      }
    }
    HiveDecimalWritable[] vector = decimalColVector.vector;
    if (isGroupResultNull) {
      // First aggregation calculation for group.
      sum.set(vector[i++]);
      isGroupResultNull = false;
    } else {
      sum.mutateAdd(vector[i++]);
    }
    for (; i < size; i++) {
      if (!batchIsNull[i]) {
        sum.mutateAdd(vector[i]);
      }
    }
  }
}
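The third branch, for batches that contain nulls, follows the same pattern as the avg evaluator: skip leading nulls (returning if the whole batch is null), seed the sum from the first non-null row, then add the remaining non-null rows. A standalone sketch of that scan (hypothetical class and sample data):

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class NullAwareSumDemo {
  public static void main(String[] args) {
    final int size = 4;
    DecimalColumnVector col = new DecimalColumnVector(18, 2);
    col.noNulls = false;
    col.isNull[0] = true;                          // leading null to skip
    col.vector[1].set(HiveDecimal.create("1.50"));
    col.isNull[2] = true;                          // interior null, skipped in the loop
    col.vector[3].set(HiveDecimal.create("2.25"));

    int i = 0;
    while (col.isNull[i]) {
      if (++i >= size) {
        return;                                    // all-null batch: nothing to sum
      }
    }
    HiveDecimalWritable sum = new HiveDecimalWritable();
    sum.set(col.vector[i++]);                      // seed from the first non-null row
    for (; i < size; i++) {
      if (!col.isNull[i]) {
        sum.mutateAdd(col.vector[i]);
      }
    }
    System.out.println(sum); // 3.75
  }
}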
Use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in project hive by apache.
The class RecordReaderImpl, method nextDecimal.
static HiveDecimalWritable nextDecimal(ColumnVector vector, int row, Object previous) {
  if (vector.isRepeating) {
    // A repeating vector stores its single value at row 0.
    row = 0;
  }
  if (vector.noNulls || !vector.isNull[row]) {
    HiveDecimalWritable result;
    if (previous == null || previous.getClass() != HiveDecimalWritable.class) {
      result = new HiveDecimalWritable();
    } else {
      // Reuse the caller's writable to avoid an allocation.
      result = (HiveDecimalWritable) previous;
    }
    if (vector instanceof Decimal64ColumnVector) {
      // Decimal64 stores a scaled long; convert it back using the column's scale.
      long value = ((Decimal64ColumnVector) vector).vector[row];
      result.deserialize64(value, ((Decimal64ColumnVector) vector).scale);
    } else {
      result.set(((DecimalColumnVector) vector).vector[row]);
    }
    return result;
  } else {
    return null;
  }
}
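The Decimal64ColumnVector branch matters because that vector stores each decimal as a scaled long, and deserialize64 reapplies the column's scale when converting back to a HiveDecimalWritable. For example (hypothetical demo class):

import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class Decimal64Demo {
  public static void main(String[] args) {
    HiveDecimalWritable result = new HiveDecimalWritable();
    result.deserialize64(12345L, 2); // scaled long 12345 at scale 2
    System.out.println(result);      // 123.45
  }
}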
Use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in project hive by apache.
The class TestVectorTypeCasts, method getBatchDecimalString.
private VectorizedRowBatch getBatchDecimalString() {
  VectorizedRowBatch b = new VectorizedRowBatch(2);
  DecimalColumnVector dv;
  short scale = 2;
  // Column 0: decimal(18, 2) input; column 1: string output for the cast.
  b.cols[0] = dv = new DecimalColumnVector(18, scale);
  b.cols[1] = new BytesColumnVector();
  b.size = 3;
  dv.vector[0].set(HiveDecimal.create("1.1"));
  dv.vector[1].set(HiveDecimal.create("-2.2"));
  dv.vector[2].set(HiveDecimal.create("9999999999999999.00"));
  return b;
}