Use of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in project hive by apache: class TestVectorConditionalExpressions, method getBatch1Long3DoubleVectors.
private VectorizedRowBatch getBatch1Long3DoubleVectors() {
  VectorizedRowBatch batch = new VectorizedRowBatch(4);
  LongColumnVector lv = new LongColumnVector();
  // set first argument to IF -- boolean flag
  lv.vector[0] = 0;
  lv.vector[1] = 0;
  lv.vector[2] = 1;
  lv.vector[3] = 1;
  batch.cols[0] = lv;
  // set second argument to IF
  DoubleColumnVector v = new DoubleColumnVector();
  v.vector[0] = -1;
  v.vector[1] = -2;
  v.vector[2] = -3;
  v.vector[3] = -4;
  batch.cols[1] = v;
  // set third argument to IF
  v = new DoubleColumnVector();
  v.vector[0] = 1;
  v.vector[1] = 2;
  v.vector[2] = 3;
  v.vector[3] = 4;
  batch.cols[2] = v;
  // set output column
  batch.cols[3] = new DoubleColumnVector();
  batch.size = 4;
  return batch;
}
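The default LongColumnVector and DoubleColumnVector constructors allocate VectorizedRowBatch.DEFAULT_SIZE (1024) entries, so indexes 0..3 are safe here. As a minimal sketch (not part of the Hive test), the same batch could also carry NULL flags through the standard ColumnVector fields noNulls and isNull:

  VectorizedRowBatch batch = getBatch1Long3DoubleVectors();
  LongColumnVector flags = (LongColumnVector) batch.cols[0];
  flags.noNulls = false;    // the column may now contain nulls
  flags.isNull[1] = true;   // row 1's IF condition becomes NULL
  // vectorized expressions read isNull/noNulls together with vector[]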
Use of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in project hive by apache: class TestVectorConditionalExpressions, method testDoubleScalarColumnIfExpr.
@Test
public void testDoubleScalarColumnIfExpr() {
  // IF(flag in col 0, scalar 100.0, col 2) written into col 3:
  // rows 0 and 1 have a false flag and take column 2 (1, 2);
  // rows 2 and 3 have a true flag and take the scalar (100).
  VectorizedRowBatch batch = getBatch1Long3DoubleVectors();
  VectorExpression expr = new IfExprDoubleScalarDoubleColumn(0, 100.0d, 2, 3);
  DoubleColumnVector r = (DoubleColumnVector) batch.cols[3];
  expr.evaluate(batch);
  assertEquals(true, 1d == r.vector[0]);
  assertEquals(true, 2d == r.vector[1]);
  assertEquals(true, 100d == r.vector[2]);
  assertEquals(true, 100d == r.vector[3]);
}
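The original test does not exercise a repeating flag for this expression. A minimal sketch of that case, assuming standard IF semantics (a repeated true flag means every row takes the scalar branch):

  VectorizedRowBatch batch = getBatch1Long3DoubleVectors();
  LongColumnVector flags = (LongColumnVector) batch.cols[0];
  flags.isRepeating = true;
  flags.vector[0] = 1;   // repeated value: true for every row
  new IfExprDoubleScalarDoubleColumn(0, 100.0d, 2, 3).evaluate(batch);
  DoubleColumnVector r = (DoubleColumnVector) batch.cols[3];
  assertEquals(true, 100d == r.vector[0]);   // all rows resolve to the scalar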
Use of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in project hive by apache: class TestVectorConditionalExpressions, method getBatch1Long3BytesVectors.
private VectorizedRowBatch getBatch1Long3BytesVectors() {
  VectorizedRowBatch batch = new VectorizedRowBatch(4);
  LongColumnVector lv = new LongColumnVector();
  // set first argument to IF -- boolean flag
  lv.vector[0] = 0;
  lv.vector[1] = 0;
  lv.vector[2] = 1;
  lv.vector[3] = 1;
  batch.cols[0] = lv;
  // set second argument to IF
  BytesColumnVector v = new BytesColumnVector();
  v.initBuffer();
  setString(v, 0, "arg2_0");
  setString(v, 1, "arg2_1");
  setString(v, 2, "arg2_2");
  setString(v, 3, "arg2_3");
  batch.cols[1] = v;
  // set third argument to IF
  v = new BytesColumnVector();
  v.initBuffer();
  setString(v, 0, "arg3_0");
  setString(v, 1, "arg3_1");
  setString(v, 2, "arg3_2");
  setString(v, 3, "arg3_3");
  batch.cols[2] = v;
  // set output column
  v = new BytesColumnVector();
  v.initBuffer();
  batch.cols[3] = v;
  batch.size = 4;
  return batch;
}
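The setString, getString, and getUTF8Bytes helpers are defined elsewhere in the test class and are not shown here. A plausible sketch of what they do, assuming UTF-8 encoding and the standard BytesColumnVector layout (vector, start, length):

  private static byte[] getUTF8Bytes(String s) {
    return s.getBytes(java.nio.charset.StandardCharsets.UTF_8);
  }

  private static void setString(BytesColumnVector v, int i, String s) {
    byte[] b = getUTF8Bytes(s);
    v.setVal(i, b, 0, b.length);   // copies the bytes into the vector's buffer
  }

  private static String getString(BytesColumnVector v, int i) {
    return new String(v.vector[i], v.start[i], v.length[i],
        java.nio.charset.StandardCharsets.UTF_8);
  }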
Use of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in project hive by apache: class TestVectorConditionalExpressions, method testIfExprStringScalarStringScalar.
@Test
public void testIfExprStringScalarStringScalar() {
  // standard case
  VectorizedRowBatch batch = getBatch1Long3BytesVectors();
  byte[] scalar1 = getUTF8Bytes("scalar1");
  byte[] scalar2 = getUTF8Bytes("scalar2");
  VectorExpression expr = new IfExprStringScalarStringScalar(0, scalar1, scalar2, 3);
  BytesColumnVector r = (BytesColumnVector) batch.cols[3];
  expr.evaluate(batch);
  assertTrue(getString(r, 0).equals("scalar2"));
  assertTrue(getString(r, 1).equals("scalar2"));
  assertTrue(getString(r, 2).equals("scalar1"));
  assertTrue(getString(r, 3).equals("scalar1"));
  assertFalse(r.isRepeating);

  // repeating case for first (boolean flag) argument to IF
  batch = getBatch1Long3BytesVectors();
  batch.cols[0].isRepeating = true;
  expr.evaluate(batch);
  r = (BytesColumnVector) batch.cols[3];
  assertTrue(r.isRepeating);
  assertTrue(getString(r, 0).equals("scalar2"));
}
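VectorizedRowBatch can also restrict evaluation to a subset of rows through its selected array. A minimal sketch (not in the original test) of setting that up on the same batch:

  VectorizedRowBatch batch = getBatch1Long3BytesVectors();
  batch.selectedInUse = true;
  batch.selected[0] = 0;
  batch.selected[1] = 3;
  batch.size = 2;   // only rows 0 and 3 are live
  VectorExpression expr =
      new IfExprStringScalarStringScalar(0, getUTF8Bytes("scalar1"), getUTF8Bytes("scalar2"), 3);
  expr.evaluate(batch);   // results are produced only for the selected rows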
Use of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in project hive by apache: class TestVectorDateExpressions, method getVectorizedRowBatch.
/*
 * The input array is cycled (round-robin) to fill the batch up to the requested size.
 */
private VectorizedRowBatch getVectorizedRowBatch(int[] inputs, int size) {
  VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
  LongColumnVector lcv = new LongColumnVector(size);
  for (int i = 0; i < size; i++) {
    lcv.vector[i] = inputs[i % inputs.length];
  }
  batch.cols[0] = lcv;
  batch.cols[1] = new LongColumnVector(size);
  batch.size = size;
  return batch;
}
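A hypothetical call, only to illustrate how the helper cycles its inputs (the values below are made up, not from the Hive test):

  int[] inputs = {0, 1, 18000};   // date-like long values for column 0
  VectorizedRowBatch batch = getVectorizedRowBatch(inputs, VectorizedRowBatch.DEFAULT_SIZE);
  // batch.cols[0] now holds 0, 1, 18000, 0, 1, 18000, ... for DEFAULT_SIZE rows,
  // and batch.cols[1] is an empty output column of the same size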