
Example 86 with VectorizedRowBatch

Use of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in project hive by apache, from the class TestVectorFilterExpressions, method getSimpleLongBatch.

private VectorizedRowBatch getSimpleLongBatch() {
    VectorizedRowBatch batch = VectorizedRowGroupGenUtil.getVectorizedRowBatch(4, 1, 1);
    LongColumnVector lcv0 = (LongColumnVector) batch.cols[0];
    lcv0.vector[0] = 0;
    lcv0.vector[1] = 1;
    lcv0.vector[2] = 2;
    lcv0.vector[3] = 3;
    return batch;
}
Also used: VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), LongColumnVector (org.apache.hadoop.hive.ql.exec.vector.LongColumnVector)
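
The helper above only builds the batch. As a minimal usage sketch (not part of the original test class), the batch can be run through the FilterLongColEqualLongScalar expression shown in Example 88; with the values 0 to 3 in column 0, filtering on the scalar 2 should leave exactly one selected row:

@Test
public void testSimpleLongBatchFilterSketch() throws HiveException {
    // Column 0 holds the long values 0, 1, 2, 3.
    VectorizedRowBatch batch = getSimpleLongBatch();
    // Keep only rows where column 0 equals 2.
    FilterLongColEqualLongScalar expr = new FilterLongColEqualLongScalar(0, 2);
    expr.evaluate(batch);
    // One row survives, and the selected array points at original row index 2.
    assertEquals(1, batch.size);
    assertTrue(batch.selectedInUse);
    assertEquals(2, batch.selected[0]);
}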

Example 87 with VectorizedRowBatch

Use of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in project hive by apache, from the class TestVectorFilterExpressions, method testFilterDoubleNotBetween.

@Test
public void testFilterDoubleNotBetween() throws HiveException {
    // Spot check only. null & repeating behavior are checked elsewhere for the same template.
    int seed = 17;
    VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch(5, 2, seed);
    vrb.cols[0] = new DoubleColumnVector();
    DoubleColumnVector dcv = (DoubleColumnVector) vrb.cols[0];
    // Basic case
    dcv.vector[0] = 5;
    dcv.vector[1] = 20;
    dcv.vector[2] = 17;
    dcv.vector[3] = 15;
    dcv.vector[4] = 10;
    VectorExpression expr = new FilterDoubleColumnNotBetween(0, 10, 20);
    expr.evaluate(vrb);
    assertEquals(1, vrb.size);
    assertTrue(vrb.selectedInUse);
    assertEquals(0, vrb.selected[0]);
}
Also used: VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), FilterDoubleColumnNotBetween (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColumnNotBetween), DoubleColumnVector (org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector), Test (org.junit.Test)
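
For contrast, a hedged sketch of the complementary case: assuming a generated class FilterDoubleColumnBetween exists alongside FilterDoubleColumnNotBetween with the same constructor shape (column index, low bound, high bound), an inclusive BETWEEN filter on the same data should keep the four in-range rows and drop only the value 5:

@Test
public void testFilterDoubleBetweenSketch() throws HiveException {
    int seed = 17;
    VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch(5, 2, seed);
    vrb.cols[0] = new DoubleColumnVector();
    DoubleColumnVector dcv = (DoubleColumnVector) vrb.cols[0];
    // Same input as the NOT BETWEEN test: only 5 lies outside [10, 20].
    dcv.vector[0] = 5;
    dcv.vector[1] = 20;
    dcv.vector[2] = 17;
    dcv.vector[3] = 15;
    dcv.vector[4] = 10;
    // FilterDoubleColumnBetween is assumed here; bounds are inclusive, as in SQL BETWEEN.
    VectorExpression expr = new FilterDoubleColumnBetween(0, 10, 20);
    expr.evaluate(vrb);
    assertEquals(4, vrb.size);
    assertTrue(vrb.selectedInUse);
    assertEquals(1, vrb.selected[0]);
}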

Example 88 with VectorizedRowBatch

Use of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in project hive by apache, from the class TestVectorFilterExpressions, method testFilterLongColEqualLongScalar.

@Test
public void testFilterLongColEqualLongScalar() throws HiveException {
    VectorizedRowBatch vrg = VectorizedRowGroupGenUtil.getVectorizedRowBatch(1024, 1, 23);
    FilterLongColEqualLongScalar expr = new FilterLongColEqualLongScalar(0, 46);
    expr.evaluate(vrg);
    assertEquals(1, vrg.size);
    assertEquals(1, vrg.selected[0]);
}
Also used: FilterLongColEqualLongScalar (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColEqualLongScalar), VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), Test (org.junit.Test)
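
The expected match here depends on how VectorizedRowGroupGenUtil fills the column for seed 23, which is not shown on this page. A hand-built variant (a sketch, not from the Hive test class) makes the matching row explicit:

@Test
public void testFilterLongColEqualLongScalarExplicitSketch() throws HiveException {
    // Build the column by hand so the row holding 46 is explicit.
    VectorizedRowBatch vrg = new VectorizedRowBatch(1, 3);
    LongColumnVector lcv = new LongColumnVector(3);
    lcv.vector[0] = 23;
    lcv.vector[1] = 46;
    lcv.vector[2] = 69;
    vrg.cols[0] = lcv;
    vrg.size = 3;
    FilterLongColEqualLongScalar expr = new FilterLongColEqualLongScalar(0, 46);
    expr.evaluate(vrg);
    // Only the row holding 46 survives, at original index 1.
    assertEquals(1, vrg.size);
    assertTrue(vrg.selectedInUse);
    assertEquals(1, vrg.selected[0]);
}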

Example 89 with VectorizedRowBatch

Use of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in project hive by apache, from the class TestVectorFilterExpressions, method testFilterLongColGreaterLongColumn.

@Test
public void testFilterLongColGreaterLongColumn() throws HiveException {
    int seed = 17;
    VectorizedRowBatch b = VectorizedRowGroupGenUtil.getVectorizedRowBatch(VectorizedRowBatch.DEFAULT_SIZE, 2, seed);
    LongColumnVector lcv0 = (LongColumnVector) b.cols[0];
    LongColumnVector lcv1 = (LongColumnVector) b.cols[1];
    b.size = 3;
    FilterLongColGreaterLongColumn expr = new FilterLongColGreaterLongColumn(0, 1);
    // Basic case
    lcv0.vector[0] = 10;
    lcv0.vector[1] = 10;
    lcv0.vector[2] = 10;
    lcv1.vector[0] = 20;
    lcv1.vector[1] = 1;
    lcv1.vector[2] = 7;
    expr.evaluate(b);
    assertEquals(2, b.size);
    assertEquals(1, b.selected[0]);
    assertEquals(2, b.selected[1]);
    // handle null with selected in use
    lcv0.noNulls = false;
    lcv0.isNull[1] = true;
    expr.evaluate(b);
    assertEquals(1, b.size);
    assertEquals(2, b.selected[0]);
    // handle repeating
    b.size = 3;
    b.selectedInUse = false;
    lcv0.isRepeating = true;
    lcv0.noNulls = true;
    expr.evaluate(b);
    assertEquals(2, b.size);
    // handle repeating null
    b.size = 3;
    b.selectedInUse = false;
    lcv0.isNull[0] = true;
    lcv0.noNulls = false;
    expr.evaluate(b);
    assertEquals(0, b.size);
    // handle null on both sides (not repeating)
    b.size = 3;
    b.selectedInUse = false;
    lcv0.isRepeating = false;
    lcv1.noNulls = false;
    lcv1.isNull[2] = true;
    expr.evaluate(b);
    assertEquals(0, b.size);
}
Also used: VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), LongColumnVector (org.apache.hadoop.hive.ql.exec.vector.LongColumnVector), FilterLongColGreaterLongColumn (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColGreaterLongColumn), Test (org.junit.Test)
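
The assertions above read b.selected directly. A consumer of a filtered batch normally walks the surviving rows through the selected array only when selectedInUse is set; a general sketch of that pattern (not code from the test class):

private static long sumColumn0(VectorizedRowBatch b) {
    LongColumnVector col = (LongColumnVector) b.cols[0];
    long total = 0;
    for (int j = 0; j < b.size; j++) {
        // With selectedInUse, size counts entries of selected, which hold
        // the original row indexes that passed the filter.
        int i = b.selectedInUse ? b.selected[j] : j;
        total += col.isRepeating ? col.vector[0] : col.vector[i];
    }
    return total;
}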

Example 90 with VectorizedRowBatch

Use of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in project hive by apache, from the class TestVectorFilterExpressions, method testFilterDecimalColEqualDecimalScalar.

/**
 * This tests the template for Decimal Column-Scalar comparison filters,
 * called FilterDecimalColumnCompareScalar.txt. Only equal is tested for
 * multiple cases because the logic is the same for <, >, <=, >=, == and !=.
 */
@Test
public void testFilterDecimalColEqualDecimalScalar() throws HiveException {
    VectorizedRowBatch b = getVectorizedRowBatch1DecimalCol();
    HiveDecimal scalar = HiveDecimal.create("-3.30");
    VectorExpression expr = new FilterDecimalColEqualDecimalScalar(0, scalar);
    expr.evaluate(b);
    // check that right row(s) are selected
    assertTrue(b.selectedInUse);
    assertEquals(1, b.selected[0]);
    assertEquals(1, b.size);
    // try again with a null value
    b = getVectorizedRowBatch1DecimalCol();
    b.cols[0].noNulls = false;
    b.cols[0].isNull[1] = true;
    expr.evaluate(b);
    // verify that no rows were selected
    assertEquals(0, b.size);
    // try the repeating case
    b = getVectorizedRowBatch1DecimalCol();
    b.cols[0].isRepeating = true;
    expr.evaluate(b);
    // verify that no rows were selected
    assertEquals(0, b.size);
    // try the repeating null case
    b = getVectorizedRowBatch1DecimalCol();
    b.cols[0].isRepeating = true;
    b.cols[0].noNulls = false;
    b.cols[0].isNull[0] = true;
    expr.evaluate(b);
    // verify that no rows were selected
    assertEquals(0, b.size);
}
Also used: VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), FilterDecimalColEqualDecimalScalar (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalColEqualDecimalScalar), Test (org.junit.Test)
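
The helper getVectorizedRowBatch1DecimalCol is not shown on this page. A hypothetical reconstruction consistent with the assertions above (one decimal column, three rows, only row 1 equal to -3.30, and row 0 holding something else so the repeating cases select nothing) could look like this; the real helper may use different sizes or values:

private VectorizedRowBatch getVectorizedRowBatch1DecimalCol() {
    // Hypothetical values, chosen only to satisfy the assertions in Example 90.
    VectorizedRowBatch b = new VectorizedRowBatch(1, 3);
    DecimalColumnVector dcv = new DecimalColumnVector(3, 18, 2);
    dcv.set(0, HiveDecimal.create("1.20"));
    dcv.set(1, HiveDecimal.create("-3.30"));
    dcv.set(2, HiveDecimal.create("0"));
    b.cols[0] = dcv;
    b.size = 3;
    return b;
}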

Aggregations

VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch): 401
Test (org.junit.Test): 214
LongColumnVector (org.apache.hadoop.hive.ql.exec.vector.LongColumnVector): 157
BytesColumnVector (org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector): 98
TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch): 83
DoubleColumnVector (org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector): 64
DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector): 40
TimestampColumnVector (org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector): 32
VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression): 30
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 28
VectorizedParquetRecordReader (org.apache.hadoop.hive.ql.io.parquet.vector.VectorizedParquetRecordReader): 26
Configuration (org.apache.hadoop.conf.Configuration): 23
IOException (java.io.IOException): 20
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 20
VectorExtractRow (org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow): 19
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 18
VectorizationContext (org.apache.hadoop.hive.ql.exec.vector.VectorizationContext): 18
Timestamp (java.sql.Timestamp): 17
VectorUDFAdaptor (org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor): 16
VectorizedRowBatchCtx (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx): 15