Example usage of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch from the Apache Hive project.
From class TestVectorFilterExpressions, method testFilterStringIn:
/**
 * Exercises FilterStringColumnInList on a bytes column across four batch
 * states: plain values, values with a null, a repeating value, and a
 * repeating null.
 */
@Test
public void testFilterStringIn() throws HiveException {
  int seed = 17;
  VectorizedRowBatch batch = VectorizedRowGroupGenUtil.getVectorizedRowBatch(3, 2, seed);
  batch.cols[0] = new BytesColumnVector();
  BytesColumnVector strCol = (BytesColumnVector) batch.cols[0];
  strCol.initBuffer();
  strCol.setVal(0, a, 0, 1);
  strCol.setVal(1, b, 0, 1);
  strCol.setVal(2, c, 0, 1);

  FilterStringColumnInList inFilter = new FilterStringColumnInList(0);
  inFilter.setInListValues(new byte[][] { b, c });
  VectorExpression expr = inFilter;

  // Basic case: rows 1 and 2 hold values from the IN list; row 0 does not.
  expr.evaluate(batch);
  assertEquals(2, batch.size);
  assertTrue(batch.selectedInUse);
  assertEquals(1, batch.selected[0]);
  assertEquals(2, batch.selected[1]);

  // With a null: a null entry must never match, whatever bytes it holds.
  batch.selectedInUse = false;
  batch.size = 3;
  strCol.noNulls = false;
  strCol.isNull[2] = true;
  expr.evaluate(batch);
  assertEquals(1, batch.size);
  assertEquals(1, batch.selected[0]);
  assertTrue(batch.selectedInUse);

  // Repeating non-null: row 0's value repeats for every row and is not in
  // the list, so nothing is selected.
  batch.selectedInUse = false;
  batch.size = 3;
  strCol.noNulls = true;
  strCol.isRepeating = true;
  expr.evaluate(batch);
  assertEquals(0, batch.size);

  // Repeating null: the null flag wins even though the stored bytes would
  // match the IN list.
  batch.selectedInUse = false;
  batch.size = 3;
  strCol.noNulls = false;
  strCol.isRepeating = true;
  strCol.isNull[0] = true;
  strCol.setVal(0, b, 0, 1);
  expr.evaluate(batch);
  assertEquals(0, batch.size);
}
Example usage of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch from the Apache Hive project.
From class TestVectorFilterExpressions, method testFilterDecimalScalarGreaterThanColumn:
/**
 * Spot check scalar &gt; col for decimal.
 */
@Test
public void testFilterDecimalScalarGreaterThanColumn() throws HiveException {
  VectorizedRowBatch batch = getVectorizedRowBatch1DecimalCol();
  HiveDecimal zero = HiveDecimal.create("0");
  VectorExpression filter = new FilterDecimalScalarGreaterDecimalColumn(zero, 0);
  filter.evaluate(batch);
  // Exactly one row should satisfy 0 > col, and it should be row 1.
  assertTrue(batch.selectedInUse);
  assertEquals(1, batch.selected[0]);
  assertEquals(1, batch.size);
}
Example usage of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch from the Apache Hive project.
From class TestVectorFilterExpressions, method testFilterDecimalColGreaterEqualCol:
/**
 * Spot check col &gt;= col for decimal.
 */
@Test
public void testFilterDecimalColGreaterEqualCol() throws HiveException {
  VectorizedRowBatch batch = getVectorizedRowBatch2DecimalCol();
  VectorExpression filter = new FilterDecimalColGreaterEqualDecimalColumn(0, 1);
  filter.evaluate(batch);
  // Rows 0 and 1 satisfy col0 >= col1; the batch shrinks to those two.
  assertTrue(batch.selectedInUse);
  assertEquals(0, batch.selected[0]);
  assertEquals(1, batch.selected[1]);
  assertEquals(2, batch.size);
}
Example usage of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch from the Apache Hive project.
From class TestVectorFilterExpressions, method testFilterLongBetween:
/**
 * Exercises FilterLongColumnBetween(0, 15, 17) over long columns in four
 * states: plain values, values with nulls, nulls combined with a selection
 * vector, and repeating values (non-null, then null).
 */
@Test
public void testFilterLongBetween() throws HiveException {
  final int seed = 17;
  VectorizedRowBatch batch = VectorizedRowGroupGenUtil.getVectorizedRowBatch(5, 2, seed);
  LongColumnVector col = (LongColumnVector) batch.cols[0];
  VectorExpression between = new FilterLongColumnBetween(0, 15, 17);

  // Basic case: only 17 (row 2) and 15 (row 3) fall inside [15, 17].
  long[] values = {5, 20, 17, 15, 10};
  System.arraycopy(values, 0, col.vector, 0, values.length);
  between.evaluate(batch);
  assertEquals(2, batch.size);
  assertTrue(batch.selectedInUse);
  assertEquals(2, batch.selected[0]);
  assertEquals(3, batch.selected[1]);

  // With nulls: rows 0 and 2 are null, leaving only row 3 (value 15).
  VectorizedRowBatch batchWithNulls =
      VectorizedRowGroupGenUtil.getVectorizedRowBatch(5, 2, seed);
  col = (LongColumnVector) batchWithNulls.cols[0];
  System.arraycopy(values, 0, col.vector, 0, values.length);
  col.noNulls = false;
  col.isNull[0] = true;
  col.isNull[2] = true;
  between.evaluate(batchWithNulls);
  assertEquals(1, batchWithNulls.size);
  assertTrue(batchWithNulls.selectedInUse);
  assertEquals(3, batchWithNulls.selected[0]);

  // With nulls and a pre-existing selection vector: of the selected rows
  // {1, 2, 4}, row 2 is null and rows 1 and 4 are out of range.
  VectorizedRowBatch batchSelected =
      VectorizedRowGroupGenUtil.getVectorizedRowBatch(7, 2, seed);
  batchSelected.selectedInUse = true;
  batchSelected.selected[0] = 1;
  batchSelected.selected[1] = 2;
  batchSelected.selected[2] = 4;
  batchSelected.size = 3;
  col = (LongColumnVector) batchSelected.cols[0];
  long[] wideValues = {5, 20, 17, 15, 10, 19, 21};
  System.arraycopy(wideValues, 0, col.vector, 0, wideValues.length);
  col.noNulls = false;
  col.isNull[0] = true;
  col.isNull[2] = true;
  col.isNull[5] = true;
  between.evaluate(batchSelected);
  assertEquals(0, batchSelected.size);

  // Repeating non-null: the repeated value 17 is in range, so every row
  // passes and no selection vector is needed.
  VectorizedRowBatch batchRepeating =
      VectorizedRowGroupGenUtil.getVectorizedRowBatch(7, 2, seed);
  col = (LongColumnVector) batchRepeating.cols[0];
  col.isRepeating = true;
  long[] repeatValues = {17, 20, 17, 15, 10};
  System.arraycopy(repeatValues, 0, col.vector, 0, repeatValues.length);
  between.evaluate(batchRepeating);
  assertEquals(7, batchRepeating.size);
  assertFalse(batchRepeating.selectedInUse);
  assertTrue(col.isRepeating);

  // Repeating null: the null flag filters out every row.
  col.noNulls = false;
  col.vector[0] = 17;
  col.isNull[0] = true;
  between.evaluate(batchRepeating);
  assertEquals(0, batchRepeating.size);
}
Example usage of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch from the Apache Hive project.
From class TestVectorArithmeticExpressions, method testDecimalColAddDecimalColumn:
/**
 * Exercises DecimalColAddDecimalColumn: plain addition, null propagation,
 * overflow-to-NULL, and the repeating-input fast paths.
 */
@Test
public void testDecimalColAddDecimalColumn() throws HiveException {
  VectorizedRowBatch batch = getVectorizedRowBatch3DecimalCols();
  VectorExpression addExpr = new DecimalColAddDecimalColumn(0, 1, 2);
  DecimalColumnVector result = (DecimalColumnVector) batch.cols[2];

  // Without nulls, each output row is the elementwise sum of cols 0 and 1.
  addExpr.evaluate(batch);
  assertEquals(HiveDecimal.create("2.20"), result.vector[0].getHiveDecimal());
  assertEquals(HiveDecimal.create("-2.30"), result.vector[1].getHiveDecimal());
  assertEquals(HiveDecimal.create("1.00"), result.vector[2].getHiveDecimal());

  // A null input must propagate to a null output.
  batch = getVectorizedRowBatch3DecimalCols();
  DecimalColumnVector left = (DecimalColumnVector) batch.cols[0];
  left.noNulls = false;
  left.isNull[0] = true;
  result = (DecimalColumnVector) batch.cols[2];
  addExpr.evaluate(batch);
  assertFalse(result.noNulls);
  assertTrue(result.isNull[0]);
  // Null output slots hold the designated placeholder — the minimum
  // non-zero value, 0.01 at this scale — rather than 0.
  assertEquals(HiveDecimal.create("0.01"), result.vector[0].getHiveDecimal());

  // Overflow must also yield NULL with the same placeholder value.
  batch = getVectorizedRowBatch3DecimalCols();
  left = (DecimalColumnVector) batch.cols[0];
  left.vector[0].set(HiveDecimal.create("9999999999999999.99")); // max value
  result = (DecimalColumnVector) batch.cols[2];
  addExpr.evaluate(batch); // row 0 overflows and becomes NULL
  assertFalse(result.noNulls);
  assertTrue(result.isNull[0]);
  assertEquals(HiveDecimal.create("0.01"), result.vector[0].getHiveDecimal());

  // Left input repeating: row 0's left value is reused for every row.
  batch = getVectorizedRowBatch3DecimalCols();
  left = (DecimalColumnVector) batch.cols[0];
  left.isRepeating = true;
  result = (DecimalColumnVector) batch.cols[2];
  addExpr.evaluate(batch);
  assertEquals(HiveDecimal.create("2.20"), result.vector[0].getHiveDecimal());
  assertEquals(HiveDecimal.create("2.20"), result.vector[1].getHiveDecimal());
  assertEquals(HiveDecimal.create("2.20"), result.vector[2].getHiveDecimal());

  // Both inputs repeating: the output itself may be marked repeating.
  DecimalColumnVector right = (DecimalColumnVector) batch.cols[1];
  right.isRepeating = true;
  addExpr.evaluate(batch);
  assertTrue(result.isRepeating);
  assertEquals(HiveDecimal.create("2.20"), result.vector[0].getHiveDecimal());

  // Right input repeating: row 0's right value is reused for every row.
  batch = getVectorizedRowBatch3DecimalCols();
  right = (DecimalColumnVector) batch.cols[1];
  right.isRepeating = true;
  right.vector[0].set(HiveDecimal.create("2.00"));
  result = (DecimalColumnVector) batch.cols[2];
  addExpr.evaluate(batch);
  assertEquals(HiveDecimal.create("2.00"), result.vector[2].getHiveDecimal());
}
Aggregations