Example use of org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector in the Apache Hive project: the evaluateGroupBatch method of the VectorPTFEvaluatorDoubleLastValue class.
public void evaluateGroupBatch(VectorizedRowBatch batch, boolean isLastGroupBatch) {
  evaluateInputExpr(batch);

  // Only the last row of the group's last batch decides isGroupResultNull and lastValue.
  // PTF in the reducer never filters, so the batch must not be using 'selected'.
  Preconditions.checkState(!batch.selectedInUse);

  if (!isLastGroupBatch || batch.size == 0) {
    return;
  }

  DoubleColumnVector inputVector = (DoubleColumnVector) batch.cols[inputColumnNum];

  // For a repeating vector, entry 0 stands for every row; otherwise the
  // last row of the batch is the one that matters.
  final int lastIndex = inputVector.isRepeating ? 0 : batch.size - 1;
  final boolean lastIsNull = !inputVector.noNulls && inputVector.isNull[lastIndex];
  if (lastIsNull) {
    isGroupResultNull = true;
  } else {
    lastValue = inputVector.vector[lastIndex];
    isGroupResultNull = false;
  }
}
Example use of org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector in the Apache Hive project: the evaluateGroupBatch method of the VectorPTFEvaluatorDoubleMax class.
public void evaluateGroupBatch(VectorizedRowBatch batch, boolean isLastGroupBatch) {
  evaluateInputExpr(batch);

  // Determine the maximum of all non-null double column values; maintain isGroupResultNull
  // across batches of the same group.
  // We do not filter when PTF is in the reducer, so the batch must not be using 'selected'.
  Preconditions.checkState(!batch.selectedInUse);

  final int size = batch.size;
  if (size == 0) {
    return;
  }
  DoubleColumnVector doubleColVector = ((DoubleColumnVector) batch.cols[inputColumnNum]);
  if (doubleColVector.isRepeating) {
    // A repeating vector holds one value (entry 0) that stands for every row.
    if (doubleColVector.noNulls || !doubleColVector.isNull[0]) {
      if (isGroupResultNull) {
        // First non-null value seen for this group.
        max = doubleColVector.vector[0];
        isGroupResultNull = false;
      } else {
        final double repeatedMax = doubleColVector.vector[0];
        // BUG FIX: this is a MAX aggregate, so a larger repeated value must
        // replace the running result. The original compared with '<', which
        // computed a minimum in this branch, inconsistent with the '>'
        // comparisons used in the non-repeating branches below.
        if (repeatedMax > max) {
          max = repeatedMax;
        }
      }
    }
  } else if (doubleColVector.noNulls) {
    // No nulls: scan the whole batch for its local maximum.
    double[] vector = doubleColVector.vector;
    double varMax = vector[0];
    for (int i = 1; i < size; i++) {
      final double d = vector[i];
      if (d > varMax) {
        varMax = d;
      }
    }
    if (isGroupResultNull) {
      max = varMax;
      isGroupResultNull = false;
    } else if (varMax > max) {
      max = varMax;
    }
  } else {
    // Nulls present: skip leading nulls; if the whole batch is null, the
    // running group result is left untouched.
    boolean[] batchIsNull = doubleColVector.isNull;
    int i = 0;
    while (batchIsNull[i]) {
      if (++i >= size) {
        return;
      }
    }
    double[] vector = doubleColVector.vector;
    double varMax = vector[i++];
    for (; i < size; i++) {
      if (!batchIsNull[i]) {
        final double d = vector[i];
        if (d > varMax) {
          varMax = d;
        }
      }
    }
    if (isGroupResultNull) {
      max = varMax;
      isGroupResultNull = false;
    } else if (varMax > max) {
      max = varMax;
    }
  }
}
Example use of org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector in the Apache Hive project: the testConstantExpression method of the TestConstantVectorExpression class.
@Test
public void testConstantExpression() throws Exception {
  // One constant expression per output column, covering long, double,
  // bytes, decimal, and a null constant.
  ConstantVectorExpression longConst = new ConstantVectorExpression(0, 17, TypeInfoFactory.longTypeInfo);
  ConstantVectorExpression doubleConst = new ConstantVectorExpression(1, 17.34, TypeInfoFactory.doubleTypeInfo);
  String str = "alpha";
  ConstantVectorExpression bytesConst = new ConstantVectorExpression(2, str.getBytes(), TypeInfoFactory.stringTypeInfo);
  HiveDecimal decVal = HiveDecimal.create("25.8");
  ConstantVectorExpression decimalConst = new ConstantVectorExpression(3, decVal, TypeInfoFactory.decimalTypeInfo);
  ConstantVectorExpression nullConst = new ConstantVectorExpression(4, TypeInfoFactory.stringTypeInfo, true);

  // Build a 5-column batch and install the typed output vectors.
  int size = 20;
  VectorizedRowBatch batch = VectorizedRowGroupGenUtil.getVectorizedRowBatch(size, 5, 0);
  LongColumnVector longCol = (LongColumnVector) batch.cols[0];
  DoubleColumnVector doubleCol = new DoubleColumnVector(size);
  BytesColumnVector bytesCol = new BytesColumnVector(size);
  DecimalColumnVector decimalCol = new DecimalColumnVector(5, 1);
  BytesColumnVector nullBytesCol = new BytesColumnVector(size);
  batch.cols[1] = doubleCol;
  batch.cols[2] = bytesCol;
  batch.cols[3] = decimalCol;
  batch.cols[4] = nullBytesCol;

  longConst.evaluate(batch);
  doubleConst.evaluate(batch);
  bytesConst.evaluate(batch);
  decimalConst.evaluate(batch);
  nullConst.evaluate(batch);

  // A constant fills its column as a single repeated value.
  assertTrue(longCol.isRepeating);
  assertTrue(doubleCol.isRepeating);
  assertTrue(bytesCol.isRepeating);
  assertEquals(17, longCol.vector[0]);
  assertTrue(17.34 == doubleCol.vector[0]);

  // The null constant repeats a null entry.
  assertTrue(nullBytesCol.isRepeating);
  assertTrue(nullBytesCol.isNull[0]);
  assertTrue(!nullBytesCol.noNulls);

  byte[] alphaBytes = "alpha".getBytes();
  assertTrue(bytesCol.length[0] == alphaBytes.length);
  assertTrue(sameFirstKBytes(alphaBytes, bytesCol.vector[0], alphaBytes.length));

  // Re-evaluating after clobbering the bytes vector must restore the constant.
  ((BytesColumnVector) (batch.cols[2])).vector[0] = "beta".getBytes();
  bytesConst.evaluate(batch);
  assertTrue(bytesCol.length[0] == alphaBytes.length);
  assertTrue(sameFirstKBytes(alphaBytes, bytesCol.vector[0], alphaBytes.length));

  assertTrue(25.8 == decimalCol.vector[0].getHiveDecimal().doubleValue());
  // Same restore check for the decimal constant.
  ((DecimalColumnVector) (batch.cols[3])).vector[0].set(HiveDecimal.create("39.7"));
  decimalConst.evaluate(batch);
  assertTrue(25.8 == decimalCol.vector[0].getHiveDecimal().doubleValue());
}
Example use of org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector in the Apache Hive project: the testDoubleColumnColumnIfExpr method of the TestVectorConditionalExpressions class.
@Test
public void testDoubleColumnColumnIfExpr() {
  // Spot check only: the long-typed variant already exercises the full
  // logic, and both classes are generated from the same template file.
  VectorizedRowBatch batch = getBatch1Long3DoubleVectors();
  VectorExpression ifExpr = new IfExprDoubleColumnDoubleColumn(0, 1, 2, 3);
  ifExpr.evaluate(batch);

  // Column 3 receives the result.
  DoubleColumnVector result = (DoubleColumnVector) batch.cols[3];

  // Standard case: no nulls, not repeating.
  assertTrue(1d == result.vector[0]);
  assertTrue(2d == result.vector[1]);
  assertTrue(-3d == result.vector[2]);
  assertTrue(-4d == result.vector[3]);
  assertTrue(!result.isRepeating);
}
Example use of org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector in the Apache Hive project: the testFilterDoubleNotBetween method of the TestVectorFilterExpressions class.
@Test
public void testFilterDoubleNotBetween() {
  // Spot check only. Null and repeating behavior are checked elsewhere for
  // expressions generated from the same template.
  VectorizedRowBatch batch = VectorizedRowGroupGenUtil.getVectorizedRowBatch(5, 2, 17);
  DoubleColumnVector inputCol = new DoubleColumnVector();
  batch.cols[0] = inputCol;

  // Basic case: only the first value lies outside [10, 20].
  double[] values = {5, 20, 17, 15, 10};
  for (int i = 0; i < values.length; i++) {
    inputCol.vector[i] = values[i];
  }

  VectorExpression notBetween = new FilterDoubleColumnNotBetween(0, 10, 20);
  notBetween.evaluate(batch);

  // Exactly row 0 survives the NOT BETWEEN filter.
  assertEquals(1, batch.size);
  assertTrue(batch.selectedInUse);
  assertEquals(0, batch.selected[0]);
}
Aggregations