Use of org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColGreaterLongScalar in project hive by apache.
From the class TestVectorFilterExpressions, method testColOpScalarNumericFilterNullAndRepeatingLogic:
@Test
public void testColOpScalarNumericFilterNullAndRepeatingLogic() throws HiveException {
  // No nulls, not repeating
  FilterLongColGreaterLongScalar f = new FilterLongColGreaterLongScalar(0, 1);
  VectorizedRowBatch batch = this.getSimpleLongBatch();
  batch.cols[0].noNulls = true;
  batch.cols[0].isRepeating = false;
  f.evaluate(batch);
  // only the last 2 rows qualify
  Assert.assertEquals(2, batch.size);
  // show that their positions are recorded
  Assert.assertTrue(batch.selectedInUse);
  Assert.assertEquals(2, batch.selected[0]);
  Assert.assertEquals(3, batch.selected[1]);

  // make everything qualify and ensure selected is not in use
  // col > -1
  f = new FilterLongColGreaterLongScalar(0, -1);
  batch = getSimpleLongBatch();
  f.evaluate(batch);
  Assert.assertFalse(batch.selectedInUse);
  Assert.assertEquals(4, batch.size);

  // has nulls, not repeating
  batch = getSimpleLongBatch();
  // col > 1
  f = new FilterLongColGreaterLongScalar(0, 1);
  batch.cols[0].noNulls = false;
  batch.cols[0].isRepeating = false;
  batch.cols[0].isNull[3] = true;
  f.evaluate(batch);
  Assert.assertTrue(batch.selectedInUse);
  Assert.assertEquals(1, batch.size);
  Assert.assertEquals(2, batch.selected[0]);

  // no nulls, is repeating
  batch = getSimpleLongBatch();
  // col > -1
  f = new FilterLongColGreaterLongScalar(0, -1);
  batch.cols[0].noNulls = true;
  batch.cols[0].isRepeating = true;
  f.evaluate(batch);
  Assert.assertFalse(batch.selectedInUse);
  // everything qualifies (4 rows, all taking the repeated value in vector[0], which is > -1)
  Assert.assertEquals(4, batch.size);

  // has nulls, is repeating
  batch = getSimpleLongBatch();
  batch.cols[0].noNulls = false;
  batch.cols[0].isRepeating = true;
  batch.cols[0].isNull[0] = true;
  f.evaluate(batch);
  // all values are null so none qualify
  Assert.assertEquals(0, batch.size);
}
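The test depends on a getSimpleLongBatch() helper that is not shown here. A minimal sketch of such a helper, consistent with the assertions above (only rows 2 and 3 hold values greater than 1, and the value at index 0 passes the col > -1 check), could look like the following; the concrete values 0..3 and the batch construction details are assumptions, not the project's actual implementation.

  // Sketch only: builds a single-column batch of 4 long values.
  // The values are assumed; they merely satisfy the assertions in the test above.
  private VectorizedRowBatch getSimpleLongBatch() {
    VectorizedRowBatch batch = new VectorizedRowBatch(1, 4); // 1 column, capacity for 4 rows
    LongColumnVector lcv = new LongColumnVector(4);
    lcv.vector[0] = 0;
    lcv.vector[1] = 1;
    lcv.vector[2] = 2;
    lcv.vector[3] = 3;
    batch.cols[0] = lcv;
    batch.size = 4; // number of populated rows
    return batch;
  }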
Use of org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColGreaterLongScalar in project hive by apache.
From the class TestVectorizationContext, method testFilterWithNegativeScalar:
@Test
public void testFilterWithNegativeScalar() throws HiveException {
  ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false);
  ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(Integer.valueOf(-10));
  GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan();
  ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc();
  exprDesc.setGenericUDF(udf);
  List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>(2);
  children1.add(col1Expr);
  children1.add(constDesc);
  exprDesc.setChildren(children1);

  List<String> columns = new ArrayList<String>();
  columns.add("col0");
  columns.add("col1");
  columns.add("col2");
  VectorizationContext vc = new VectorizationContext("name", columns);

  VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
  assertTrue(ve instanceof FilterLongColGreaterLongScalar);
}
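The VectorExpression returned by getVectorExpression can then be applied directly to a VectorizedRowBatch, the same way the first test applies its filter. A hedged sketch follows, assuming "col1" resolves to column index 1 in the batch (matching its position in the columns list) and using made-up row values; it is not part of the test above.

  // Sketch only: applying the vectorized filter produced above to a batch.
  // The batch layout (3 long columns, "col1" at index 1) and the values are assumptions.
  VectorizedRowBatch batch = new VectorizedRowBatch(3, 2);
  LongColumnVector col1 = new LongColumnVector(2);
  col1.vector[0] = -20; // fails col1 > -10
  col1.vector[1] = 5;   // passes col1 > -10
  batch.cols[0] = new LongColumnVector(2);
  batch.cols[1] = col1;
  batch.cols[2] = new LongColumnVector(2);
  batch.size = 2;

  ve.evaluate(batch); // the filter records qualifying rows via the selected vector
  // Expected outcome, by analogy with the first test:
  // batch.size == 1, batch.selectedInUse == true, batch.selected[0] == 1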