Use of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector in project hive by apache.
The class TestVectorArithmeticExpressions, method getVectorizedRowBatchSingleLongVector:
private VectorizedRowBatch getVectorizedRowBatchSingleLongVector(int size) {
  VectorizedRowBatch vrg = new VectorizedRowBatch(2, size);
  LongColumnVector lcv = new LongColumnVector(size);
  // Fill the first column with deterministic test values.
  for (int i = 0; i < size; i++) {
    lcv.vector[i] = i * 37;
  }
  vrg.cols[0] = lcv;
  vrg.cols[1] = new LongColumnVector(size); // second column starts zero-filled
  vrg.size = size;
  return vrg;
}
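For reference, reading a batch built this way only requires casting the column back and walking vrg.size entries. The sketch below is illustrative rather than taken from the Hive tests; sumFirstColumn is a hypothetical helper and assumes the same org.apache.hadoop.hive.ql.exec.vector classes used above.
// Hypothetical helper: sum the first column of a batch produced as above.
private static long sumFirstColumn(VectorizedRowBatch vrg) {
  LongColumnVector lcv = (LongColumnVector) vrg.cols[0];
  long sum = 0;
  for (int i = 0; i < vrg.size; i++) {
    sum += lcv.vector[i]; // entries were filled with i * 37
  }
  return sum;
}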
Use of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector in project hive by apache.
The class FakeVectorRowBatchFromRepeats, method produceNextBatch:
@Override
public VectorizedRowBatch produceNextBatch() {
  vrg.size = 0;
  vrg.selectedInUse = false;
  if (count > 0) {
    // Emit at most batchSize rows, drawing down the remaining count.
    vrg.size = batchSize < count ? batchSize : count;
    count -= vrg.size;
    for (int i = 0; i < numCols; ++i) {
      // Each column repeats a single value (or a single null) across the batch.
      LongColumnVector col = (LongColumnVector) vrg.cols[i];
      col.isRepeating = true;
      Long value = values[i];
      if (value == null) {
        col.isNull[0] = true;
        col.noNulls = false;
      } else {
        col.noNulls = true;
        col.vector[0] = value;
      }
    }
  }
  return vrg;
}
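For context, a repeating column is meant to be read through entry 0 only, and the isNull array is consulted only when noNulls is false. The reader below is a hypothetical illustration of that contract, not code from the Hive project.
// Hypothetical reader: when isRepeating is set, row 0 describes every row.
private static Long readLong(LongColumnVector col, int row) {
  int idx = col.isRepeating ? 0 : row;
  if (!col.noNulls && col.isNull[idx]) {
    return null; // entry is NULL
  }
  return col.vector[idx];
}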
Use of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector in project hive by apache.
The class VectorizedRowGroupGenUtil, method generateLongColumnVector:
public static LongColumnVector generateLongColumnVector(boolean nulls, boolean repeating, int size, Random rand) {
  LongColumnVector lcv = new LongColumnVector(size);
  lcv.noNulls = !nulls;
  lcv.isRepeating = repeating;
  // Pick a non-zero value to use when the column repeats.
  long repeatingValue;
  do {
    repeatingValue = rand.nextLong();
  } while (repeatingValue == 0);
  int nullFrequency = generateNullFrequency(rand);
  for (int i = 0; i < size; i++) {
    if (nulls && (repeating || i % nullFrequency == 0)) {
      lcv.isNull[i] = true;
      lcv.vector[i] = LONG_VECTOR_NULL_VALUE;
    } else {
      lcv.isNull[i] = false;
      lcv.vector[i] = repeating ? repeatingValue : rand.nextLong();
      if (lcv.vector[i] == 0) {
        // Avoid zero values: redo this entry.
        i--;
      }
    }
  }
  return lcv;
}
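A vector generated this way is typically dropped straight into a VectorizedRowBatch. The call below is a hedged usage sketch; the seed, sizes, and flag values are arbitrary choices for illustration, not taken from the Hive tests.
// Illustrative call: a 1024-entry, non-repeating column with nulls enabled,
// using a fixed-seed java.util.Random for reproducibility.
Random rand = new Random(12345L);
LongColumnVector lcv =
    VectorizedRowGroupGenUtil.generateLongColumnVector(true, false, 1024, rand);
VectorizedRowBatch batch = new VectorizedRowBatch(1, 1024);
batch.cols[0] = lcv;
batch.size = 1024;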
Use of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector in project hive by apache.
The class TestVectorUDFAdaptor, method getBatchStrDblLongWithStrOut:
private VectorizedRowBatch getBatchStrDblLongWithStrOut() {
  VectorizedRowBatch b = new VectorizedRowBatch(4);
  BytesColumnVector strCol = new BytesColumnVector();
  LongColumnVector longCol = new LongColumnVector();
  DoubleColumnVector dblCol = new DoubleColumnVector();
  BytesColumnVector outCol = new BytesColumnVector();
  b.cols[0] = strCol;
  b.cols[1] = longCol;
  b.cols[2] = dblCol;
  b.cols[3] = outCol;
  strCol.initBuffer();
  strCol.setVal(0, blue, 0, blue.length);
  strCol.setVal(1, red, 0, red.length);
  longCol.vector[0] = 0;
  longCol.vector[1] = 1;
  dblCol.vector[0] = 0.0;
  dblCol.vector[1] = 1.0;

  // Set one null value for possible later use,
  // but have no nulls initially.
  longCol.isNull[1] = true;
  longCol.noNulls = true;
  strCol.noNulls = true;
  dblCol.noNulls = true;
  outCol.initBuffer();
  b.size = 2;
  return b;
}
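Note that the null staged at longCol.isNull[1] has no effect while noNulls stays true; a test exercising the null path would clear the flag first. The sketch below is an assumption about how a caller might do that, not code from TestVectorUDFAdaptor.
// Hedged sketch: activate the staged null so readers consult isNull[].
VectorizedRowBatch b = getBatchStrDblLongWithStrOut();
LongColumnVector longCol = (LongColumnVector) b.cols[1];
longCol.noNulls = false; // row 1 of the long column now reads as NULL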
Use of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector in project hive by apache.
The class TestVectorUDFAdaptor, method testLongUDF:
@Test
public void testLongUDF() {

  // create a syntax tree for a simple function call "longudf(col0)"
  ExprNodeGenericFuncDesc funcDesc;
  TypeInfo typeInfo = TypeInfoFactory.longTypeInfo;
  GenericUDFBridge genericUDFBridge = new GenericUDFBridge("longudf", false,
      LongUDF.class.getName());
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  ExprNodeColumnDesc colDesc = new ExprNodeColumnDesc(typeInfo, "col0", "tablename", false);
  children.add(colDesc);
  VectorUDFArgDesc[] argDescs = new VectorUDFArgDesc[1];
  argDescs[0] = new VectorUDFArgDesc();
  argDescs[0].setVariable(0);
  funcDesc = new ExprNodeGenericFuncDesc(typeInfo, genericUDFBridge,
      genericUDFBridge.getUdfName(), children);

  // create the adaptor for this function call to work in vector mode
  VectorUDFAdaptor vudf = null;
  try {
    vudf = new VectorUDFAdaptor(funcDesc, 1, "Long", argDescs);
  } catch (HiveException e) {
    // We should never get here.
    assertTrue(false);
  }
  VectorizedRowBatch b = getBatchLongInLongOut();
  vudf.evaluate(b);

  // verify output
  LongColumnVector out = (LongColumnVector) b.cols[1];
  assertEquals(1000, out.vector[0]);
  assertEquals(1001, out.vector[1]);
  assertEquals(1002, out.vector[2]);
  assertTrue(out.noNulls);
  assertFalse(out.isRepeating);

  // with nulls
  b = getBatchLongInLongOut();
  out = (LongColumnVector) b.cols[1];
  b.cols[0].noNulls = false;
  vudf.evaluate(b);
  assertFalse(out.noNulls);
  assertEquals(1000, out.vector[0]);
  assertEquals(1001, out.vector[1]);
  assertTrue(out.isNull[2]);
  assertFalse(out.isRepeating);

  // with repeating
  b = getBatchLongInLongOut();
  out = (LongColumnVector) b.cols[1];
  b.cols[0].isRepeating = true;
  vudf.evaluate(b);
  // The implementation may or may not set the output's isRepeating flag;
  // that is implementation-defined.
  assertTrue(b.cols[1].isRepeating && out.vector[0] == 1000
      || !b.cols[1].isRepeating && out.vector[2] == 1000);
  assertEquals(3, b.size);
}
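The body of LongUDF is not shown in this section. For orientation, a bridged UDF with the one-long-in, one-long-out shape the test implies could look like the hypothetical sketch below; the class name ExampleLongUDF and the +1000 arithmetic are assumptions inferred from the asserted outputs, not the real test class.
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.LongWritable;

// Hypothetical UDF of the shape GenericUDFBridge wraps in the test above.
public class ExampleLongUDF extends UDF {
  public LongWritable evaluate(LongWritable i) {
    if (i == null) {
      return null; // propagate NULL input
    }
    // The +1000 offset is an assumption drawn from the test's assertions.
    return new LongWritable(i.get() + 1000L);
  }
}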