Example usage of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in the Apache Hive project.
Taken from the class TestVectorArithmeticExpressions, method longColAddLongScalarNoNulls.
/**
 * Verifies long-column + long-scalar addition over a batch with no nulls.
 *
 * @param checked when true, exercises the overflow-checked variant of the
 *                expression (which requires an explicit output type);
 *                otherwise the plain unchecked variant.
 */
private void longColAddLongScalarNoNulls(boolean checked) throws HiveException {
    VectorizedRowBatch batch = getVectorizedRowBatchSingleLongVector(VectorizedRowBatch.DEFAULT_SIZE);

    // Build the requested variant: add scalar 23 to column 0, writing column 1.
    final VectorExpression expr;
    if (checked) {
        expr = new LongColAddLongScalarChecked(0, 23, 1);
        // Checked expressions need the output type to detect overflow bounds.
        expr.setOutputTypeInfo(TypeInfoFactory.getPrimitiveTypeInfo("bigint"));
    } else {
        expr = new LongColAddLongScalar(0, 23, 1);
    }

    expr.evaluate(batch);

    // Input row i holds i * 37 (per the fixture), so output row i must be i * 37 + 23.
    LongColumnVector out = (LongColumnVector) batch.cols[1];
    for (int row = 0; row < VectorizedRowBatch.DEFAULT_SIZE; row++) {
        Assert.assertEquals(row * 37 + 23, out.vector[row]);
    }
    Assert.assertTrue(out.noNulls);
    Assert.assertFalse(out.isRepeating);
}
Example usage of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in the Apache Hive project.
Taken from the class TestVectorArithmeticExpressions, method testDecimalColSubtractDecimalColumn.
// Spot check decimal column-column subtract
@Test
// Spot check decimal column-column subtract
@Test
public void testDecimalColSubtractDecimalColumn() throws HiveException {
    VectorizedRowBatch batch = getVectorizedRowBatch3DecimalCols();
    VectorExpression subtract = new DecimalColSubtractDecimalColumn(0, 1, 2);
    DecimalColumnVector result = (DecimalColumnVector) batch.cols[2];

    // Happy path: no nulls anywhere, col0 - col1 lands in col2.
    subtract.evaluate(batch);
    assertTrue(HiveDecimal.create("0.20").equals(result.vector[0].getHiveDecimal()));
    assertTrue(HiveDecimal.create("-4.30").equals(result.vector[1].getHiveDecimal()));
    assertTrue(HiveDecimal.create("-1.00").equals(result.vector[2].getHiveDecimal()));

    // Underflow path: rebuild the fixture and force col0 row 0 to the minimum
    // representable value so the subtraction underflows.
    batch = getVectorizedRowBatch3DecimalCols();
    DecimalColumnVector leftCol = (DecimalColumnVector) batch.cols[0];
    leftCol.vector[0].set(HiveDecimal.create("-9999999999999999.99"));
    result = (DecimalColumnVector) batch.cols[2];

    // Underflow at row 0 must produce a NULL output, not a wrapped value.
    subtract.evaluate(batch);
    assertTrue(!result.noNulls && result.isNull[0]);
}
Example usage of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in the Apache Hive project.
Taken from the class TestVectorArithmeticExpressions, method getVectorizedRowBatch2LongInLongOut.
/**
 * Builds a 3-column batch fixture: two long input columns (cols 0 and 1), each
 * with row i holding i * 37, and an empty long output column (col 2).
 *
 * @return a full-size batch ready for two-input/one-output long expressions
 */
private VectorizedRowBatch getVectorizedRowBatch2LongInLongOut() {
    VectorizedRowBatch batch = new VectorizedRowBatch(3);
    LongColumnVector firstInput = new LongColumnVector();
    LongColumnVector secondInput = new LongColumnVector();

    // Both inputs carry the same deterministic pattern so tests can predict results.
    for (int i = 0; i < VectorizedRowBatch.DEFAULT_SIZE; i++) {
        firstInput.vector[i] = i * 37;
        secondInput.vector[i] = i * 37;
    }

    batch.cols[0] = firstInput;
    batch.cols[1] = secondInput;
    batch.cols[2] = new LongColumnVector(); // output column, left for the expression to fill
    batch.size = VectorizedRowBatch.DEFAULT_SIZE;
    return batch;
}
Example usage of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in the Apache Hive project.
Taken from the class TestVectorArithmeticExpressions, method testDecimalScalarModuloDecimalColumn.
// Spot check decimal scalar-column modulo
@Test
// Spot check decimal scalar-column modulo
@Test
public void testDecimalScalarModuloDecimalColumn() throws HiveException {
    VectorizedRowBatch batch = getVectorizedRowBatch3DecimalCols();
    HiveDecimal scalar = HiveDecimal.create("2.00");
    VectorExpression modulo = new DecimalScalarModuloDecimalColumn(scalar, 0, 2);

    // First pass: fixture values, no nulls on input.
    modulo.evaluate(batch);
    DecimalColumnVector result = (DecimalColumnVector) batch.cols[2];
    assertTrue(HiveDecimal.create("0.80").equals(result.vector[0].getHiveDecimal()));
    assertTrue(HiveDecimal.create("2.00").equals(result.vector[1].getHiveDecimal()));
    // Row 2 divides by zero, so the output must be NULL there.
    assertFalse(result.noNulls);
    assertTrue(result.isNull[2]);

    // Second pass: overwrite the divisor column with fresh values and re-evaluate
    // using a newly constructed expression.
    DecimalColumnVector divisors = (DecimalColumnVector) batch.cols[0];
    modulo = new DecimalScalarModuloDecimalColumn(scalar, 0, 2);
    divisors.vector[0].set(HiveDecimal.create("0.50"));
    divisors.vector[1].set(HiveDecimal.create("0.80"));
    divisors.vector[2].set(HiveDecimal.create("0.70"));
    modulo.evaluate(batch);
    assertTrue(HiveDecimal.create("0.00").equals(result.vector[0].getHiveDecimal()));
    assertTrue(HiveDecimal.create("0.40").equals(result.vector[1].getHiveDecimal()));
    assertTrue(HiveDecimal.create("0.60").equals(result.vector[2].getHiveDecimal()));
}
Example usage of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in the Apache Hive project.
Taken from the class TestVectorArithmeticExpressions, method testLongColDivideLongColumn.
@Test
@Test
public void testLongColDivideLongColumn() throws HiveException {
    // Comparing doubles for exact equality after arithmetic is unreliable,
    // so bound the comparison with a small tolerance instead.
    final double tolerance = 1e-7d;

    VectorizedRowBatch batch = getVectorizedRowBatch2LongInDoubleOut();
    LongColDivideLongColumn divide = new LongColDivideLongColumn(0, 1, 2);

    // Inject a NULL into the numerator at row 1 and mark both inputs nullable.
    batch.cols[0].isNull[1] = true;
    batch.cols[0].noNulls = false;
    batch.cols[1].noNulls = false;

    // Pre-set the output flags so we can confirm evaluate() resets them.
    DoubleColumnVector quotients = (DoubleColumnVector) batch.cols[2];
    quotients.noNulls = true;
    quotients.isRepeating = true;

    divide.evaluate(batch);

    // Row 0 is 0/0: evaluation succeeds but the entry is NULL with a NaN payload.
    assertFalse(quotients.noNulls);
    assertTrue(quotients.isNull[0]);
    assertTrue(Double.isNaN(quotients.vector[0]));

    // Row 1 had a NULL input, so the output must be NULL too.
    assertTrue(quotients.isNull[1]);
    assertTrue(Double.isNaN(quotients.vector[1]));

    // Remaining rows divide identical values, so every quotient is ~1.0.
    for (int row = 2; row != batch.size; row++) {
        double q = quotients.vector[row];
        assertTrue(q > 1.0d - tolerance && q < 1.0d + tolerance);
    }

    // The pre-set flags must have been cleared by the evaluation.
    assertFalse(quotients.noNulls);
    assertFalse(quotients.isRepeating);
}
Aggregations