Usage example of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector from the Apache Hive project: class TestVectorUDFAdaptor, method getBatchLongInLongOut.
/**
 * Builds a 3-row batch with a long input column (col 0) and an empty long
 * output column (col 1) for the UDF-adaptor tests.
 *
 * NOTE(review): isNull[2] is set while noNulls is still true, so readers will
 * ignore the null flag as-is — presumably the caller flips noNulls when it
 * wants to exercise the null path; confirm against the call sites.
 */
private VectorizedRowBatch getBatchLongInLongOut() {
  VectorizedRowBatch batch = new VectorizedRowBatch(2);
  LongColumnVector inputCol = new LongColumnVector();
  LongColumnVector outputCol = new LongColumnVector();
  batch.cols[0] = inputCol;
  batch.cols[1] = outputCol;
  // Rows 0..2 hold the values 0, 1, 2.
  for (int row = 0; row < 3; row++) {
    inputCol.vector[row] = row;
  }
  inputCol.isNull[2] = true;
  inputCol.noNulls = true;
  batch.size = 3;
  return batch;
}
Usage example of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector from the Apache Hive project: class TestVectorDateExpressions, method getVectorizedRandomRowBatch.
/**
 * Creates a two-column batch of {@code size} rows whose first column is
 * filled with pseudo-random ints (fixed seed for reproducibility) and whose
 * second column is an empty long column used as scratch output.
 *
 * @param seed seed for the random generator, so runs are repeatable
 * @param size number of rows in the batch
 * @return the populated batch
 */
private VectorizedRowBatch getVectorizedRandomRowBatch(int seed, int size) {
  VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
  Random random = new Random(seed);
  LongColumnVector inputCol = new LongColumnVector(size);
  for (int row = 0; row < size; row++) {
    inputCol.vector[row] = random.nextInt();
  }
  batch.cols[0] = inputCol;
  batch.cols[1] = new LongColumnVector(size);
  batch.size = size;
  return batch;
}
Usage example of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector from the Apache Hive project: class TestVectorExpressionWriters, method testStructLong.
/**
 * Verifies that struct-field VectorExpressionWriters round-trip long and
 * boolean column values (including null cells) into a per-row Object list:
 * field 0 must come back as an IntWritable matching the long column, field 1
 * as a BooleanWritable (0 encodes false, non-zero true).
 *
 * @param type unused here; kept for signature parity with sibling tests
 * @throws HiveException if a writer fails to set a value
 */
private void testStructLong(TypeInfo type) throws HiveException {
  LongColumnVector icv = VectorizedRowGroupGenUtil.generateLongColumnVector(true, false, vectorSize, new Random(10));
  icv.isNull[3] = true;
  LongColumnVector bcv = VectorizedRowGroupGenUtil.generateLongColumnVector(true, false, vectorSize, new Random(10));
  bcv.isNull[2] = true;
  // Generic array creation requires a raw-typed cast; safe because only
  // ArrayList<Object> instances are ever stored in it.
  @SuppressWarnings("unchecked")
  ArrayList<Object>[] values = (ArrayList<Object>[]) new ArrayList[this.vectorSize];
  StructObjectInspector soi = genStructOI();
  VectorExpressionWriter[] vew = VectorExpressionWriterFactory.getExpressionWriters(soi);
  for (int i = 0; i < vectorSize; i++) {
    values[i] = new ArrayList<Object>(2);
    values[i].add(null);
    values[i].add(null);
    vew[0].setValue(values[i], icv, i);
    vew[1].setValue(values[i], bcv, i);
    Object theInt = values[i].get(0);
    if (theInt == null) {
      // A null writer output is only legal for a null input cell.
      Assert.assertTrue(icv.isNull[i]);
    } else {
      IntWritable w = (IntWritable) theInt;
      Assert.assertEquals((int) icv.vector[i], w.get());
    }
    Object theBool = values[i].get(1);
    if (theBool == null) {
      Assert.assertTrue(bcv.isNull[i]);
    } else {
      BooleanWritable w = (BooleanWritable) theBool;
      // Simplified from the redundant "== 0 ? false : true" ternary.
      Assert.assertEquals(bcv.vector[i] != 0, w.get());
    }
  }
}
Usage example of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector from the Apache Hive project: class TestVectorExpressionWriters, method testSetterLong.
/**
 * Checks that a scalar VectorExpressionWriter produces the expected Writable
 * for every row of a long column: null for null cells, otherwise the value
 * obtained from getWritableValue for the row's long.
 *
 * @param type the Hive type being written; selects the writer under test
 * @throws HiveException if the writer fails to set a value
 */
private void testSetterLong(TypeInfo type) throws HiveException {
  LongColumnVector column = VectorizedRowGroupGenUtil.generateLongColumnVector(true, false, vectorSize, new Random(10));
  column.isNull[3] = true;
  Object[] results = new Object[this.vectorSize];
  VectorExpressionWriter writer = getWriter(type);
  for (int row = 0; row < vectorSize; row++) {
    // setValue() must tolerate a null previous value.
    results[row] = null;
    results[row] = writer.setValue(results[row], column, row);
    if (results[row] == null) {
      // Null output is only legal for a null input cell.
      Assert.assertTrue(column.isNull[row]);
    } else {
      Writable expected = getWritableValue(type, column.vector[row]);
      Assert.assertEquals(expected, results[row]);
    }
  }
}
Usage example of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector from the Apache Hive project: class TestVectorStringExpressions, method makeStringBatchForColColCompare.
/**
 * Builds a 4-row batch for column-vs-column string comparison tests:
 * cols 0 and 1 are the two string columns being compared, col 2 is a scratch
 * string column, col 3 a long output column. Row 3 of column 0 is a null
 * cell (its setRef bytes are placeholders).
 */
private VectorizedRowBatch makeStringBatchForColColCompare() {
  VectorizedRowBatch batch = new VectorizedRowBatch(4);
  BytesColumnVector left = new BytesColumnVector();
  BytesColumnVector right = new BytesColumnVector();
  batch.cols[0] = left;
  batch.cols[1] = right;
  batch.cols[2] = new BytesColumnVector();
  batch.cols[3] = new LongColumnVector();

  // Per-row byte values for the two compared columns.
  byte[][] leftValues = { blue, green, red, emptyString };
  byte[][] rightValues = { red, green, blue, red };
  for (int row = 0; row < 4; row++) {
    left.setRef(row, leftValues[row], 0, leftValues[row].length);
    left.isNull[row] = false;
    right.setRef(row, rightValues[row], 0, rightValues[row].length);
    right.isNull[row] = false;
  }
  // Only the left column actually contains a null, in row 3.
  left.isNull[3] = true;
  left.noNulls = false;
  right.noNulls = false;
  batch.size = 4;
  return batch;
}
Aggregations