Use of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in the Apache Hive project: class TestVectorArithmeticExpressions, method getVectorizedRowBatchSingleLongVector.
/**
 * Builds a two-column batch of the given size: column 0 is a long vector
 * filled with multiples of 37 (row i holds i * 37), and column 1 is an
 * untouched long vector available as an output target.
 */
private VectorizedRowBatch getVectorizedRowBatchSingleLongVector(int size) {
  VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
  LongColumnVector inputColumn = new LongColumnVector(size);
  int row = 0;
  while (row < size) {
    inputColumn.vector[row] = row * 37;
    row++;
  }
  batch.cols[0] = inputColumn;
  batch.cols[1] = new LongColumnVector(size);
  batch.size = size;
  return batch;
}
Use of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in the Apache Hive project: class TestVectorArithmeticExpressions, method testDecimalColMultiplyDecimalColumn.
// Spot-checks DecimalColMultiplyDecimalColumn: the plain (no-null) path,
// then verifies that a result too large for the output precision yields NULL.
@Test
public void testDecimalColMultiplyDecimalColumn() throws HiveException {
  VectorizedRowBatch batch = getVectorizedRowBatch3DecimalCols();
  VectorExpression multiply = new DecimalColMultiplyDecimalColumn(0, 1, 2);
  DecimalColumnVector result = (DecimalColumnVector) batch.cols[2];

  // No nulls: col2 = col0 * col1 with the fixture's values (col1 is all 1.00).
  multiply.evaluate(batch);
  assertTrue(HiveDecimal.create("1.20").equals(result.vector[0].getHiveDecimal()));
  assertTrue(HiveDecimal.create("-3.30").equals(result.vector[1].getHiveDecimal()));
  assertTrue(HiveDecimal.create("0.00").equals(result.vector[2].getHiveDecimal()));

  // Overflow path: start from a fresh batch and force row 0 past the
  // representable range of the (18, 2) result column.
  batch = getVectorizedRowBatch3DecimalCols();
  DecimalColumnVector left = (DecimalColumnVector) batch.cols[0];
  // Largest value representable with precision 18, scale 2.
  left.vector[0].set(HiveDecimal.create("9999999999999999.99"));
  DecimalColumnVector right = (DecimalColumnVector) batch.cols[1];
  right.vector[0].set(HiveDecimal.create("2.00"));
  result = (DecimalColumnVector) batch.cols[2];

  // The product at row 0 overflows, so it must be marked NULL.
  multiply.evaluate(batch);
  assertTrue(!result.noNulls && result.isNull[0]);
}
Use of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in the Apache Hive project: class TestVectorArithmeticExpressions, method getVectorizedRowBatch3DecimalCols.
/**
 * Builds a three-row decimal batch with three (18, 2) columns: columns 0 and 1
 * carry the test inputs (column 1 is all 1.00), and column 2 is reserved for
 * the result of the expression under test.
 */
private VectorizedRowBatch getVectorizedRowBatch3DecimalCols() {
  VectorizedRowBatch batch = new VectorizedRowBatch(3);
  for (int col = 0; col < 3; col++) {
    batch.cols[col] = new DecimalColumnVector(18, 2);
  }
  DecimalColumnVector first = (DecimalColumnVector) batch.cols[0];
  DecimalColumnVector second = (DecimalColumnVector) batch.cols[1];
  first.vector[0].set(HiveDecimal.create("1.20"));
  first.vector[1].set(HiveDecimal.create("-3.30"));
  first.vector[2].set(HiveDecimal.create("0"));
  for (int row = 0; row < 3; row++) {
    second.vector[row].set(HiveDecimal.create("1.00"));
  }
  batch.size = 3;
  return batch;
}
Use of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in the Apache Hive project: class TestVectorDateExpressions, method getVectorizedRowBatch.
/**
 * Builds a two-column batch of the given size. Column 0 is filled by cycling
 * through {@code inputs} (so the inputs cover the entire batch regardless of
 * their count); column 1 is an untouched long vector available as an output
 * target.
 *
 * @param inputs values to cycle into column 0; must be non-null and non-empty
 * @param size   number of rows in the batch
 * @throws IllegalArgumentException if {@code inputs} is null or empty
 *         (previously this surfaced as a bare ArithmeticException from the
 *         modulo by zero)
 */
private VectorizedRowBatch getVectorizedRowBatch(int[] inputs, int size) {
  if (inputs == null || inputs.length == 0) {
    throw new IllegalArgumentException("inputs must contain at least one value");
  }
  VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
  LongColumnVector lcv = new LongColumnVector(size);
  for (int i = 0; i < size; i++) {
    // Wrap around so a short input array still populates every row.
    lcv.vector[i] = inputs[i % inputs.length];
  }
  batch.cols[0] = lcv;
  batch.cols[1] = new LongColumnVector(size);
  batch.size = size;
  return batch;
}
Use of org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch in the Apache Hive project: class TestVectorDateExpressions, method testVectorUDFUnixTimeStamp.
// Exercises the unix_timestamp vector UDF across several batch shapes:
// a full repeated-value batch, boundary date values, a repeating single-row
// batch (with and without a NULL), and randomized batches — each verified
// with and without randomly injected nulls.
@Test
public void testVectorUDFUnixTimeStamp() throws HiveException {
// Full-size batch where every input row is date value 0; confirm the output
// column starts in its pristine state (no nulls, not repeating).
VectorizedRowBatch batch = getVectorizedRowBatch(new int[] { 0 }, VectorizedRowBatch.DEFAULT_SIZE);
Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls);
Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating);
verifyUDFUnixTimeStamp(batch);
// Re-verify after scattering nulls through the input column.
TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
verifyUDFUnixTimeStamp(batch);
// Boundary date values (presumably epoch/range edges from getAllBoundaries —
// that helper is not visible here).
int[] boundaries = getAllBoundaries();
batch = getVectorizedRowBatch(boundaries, boundaries.length);
verifyUDFUnixTimeStamp(batch);
// Nulls in the input column, then additionally in the output column: the
// UDF must produce correct results regardless of the output column's
// pre-existing null state.
TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
verifyUDFUnixTimeStamp(batch);
TestVectorizedRowBatch.addRandomNulls(batch.cols[1]);
verifyUDFUnixTimeStamp(batch);
// Repeating single-row batch, first non-null then with the repeated value
// marked NULL.
batch = getVectorizedRowBatch(new int[] { 0 }, 1);
batch.cols[0].isRepeating = true;
verifyUDFUnixTimeStamp(batch);
batch.cols[0].noNulls = false;
batch.cols[0].isNull[0] = true;
verifyUDFUnixTimeStamp(batch);
// Randomized inputs (getVectorizedRandomRowBatch is defined elsewhere in
// this test class), again with nulls layered into each column.
batch = getVectorizedRandomRowBatch(200, VectorizedRowBatch.DEFAULT_SIZE);
verifyUDFUnixTimeStamp(batch);
TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
verifyUDFUnixTimeStamp(batch);
TestVectorizedRowBatch.addRandomNulls(batch.cols[1]);
verifyUDFUnixTimeStamp(batch);
}
Aggregations