Use of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector in project hive by apache: class TestVectorArithmeticExpressions, method testLongColAddLongScalarWithNulls.
@Test
public void testLongColAddLongScalarWithNulls() {
  // Build a batch whose first column holds i * 37 at row i, then inject random nulls.
  VectorizedRowBatch batch = getVectorizedRowBatchSingleLongVector(VectorizedRowBatch.DEFAULT_SIZE);
  LongColumnVector inputCol = (LongColumnVector) batch.cols[0];
  LongColumnVector outputCol = (LongColumnVector) batch.cols[1];
  TestVectorizedRowBatch.addRandomNulls(inputCol);

  // Evaluate: output column 1 = input column 0 + scalar 23.
  LongColAddLongScalar expr = new LongColAddLongScalar(0, 23, 1);
  expr.evaluate(batch);

  // Non-null rows must hold the sum; null rows must stay null in the output.
  for (int row = 0; row < VectorizedRowBatch.DEFAULT_SIZE; row++) {
    if (inputCol.isNull[row]) {
      Assert.assertTrue(outputCol.isNull[row]);
    } else {
      Assert.assertEquals(row * 37 + 23, outputCol.vector[row]);
    }
  }

  // Null-propagation bookkeeping must be set, and output must not collapse to repeating.
  Assert.assertFalse(outputCol.noNulls);
  Assert.assertFalse(outputCol.isRepeating);
  verifyLongNullDataVectorEntries(outputCol, batch.selected, batch.selectedInUse, batch.size);
}
Use of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector in project hive by apache: class TestDecimalUtil, method testSign.
@Test
public void testSign() {
  // Output vector that receives the sign of each decimal input at index 0.
  LongColumnVector output = new LongColumnVector(4);

  // Positive value -> sign 1.
  HiveDecimal positive = HiveDecimal.create("19.56778");
  DecimalUtil.sign(0, positive, output);
  Assert.assertEquals(1, output.vector[0]);

  // Negative value -> sign -1.
  HiveDecimal negative = HiveDecimal.create("-25.34567");
  DecimalUtil.sign(0, negative, output);
  Assert.assertEquals(-1, output.vector[0]);

  // Zero -> sign 0; HiveDecimal normalizes "0.00000" down to scale 0 first.
  HiveDecimal zero = HiveDecimal.create("0.00000");
  Assert.assertEquals(0, zero.scale());
  DecimalUtil.sign(0, zero, output);
  Assert.assertEquals(0, output.vector[0]);
}
Use of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector in project hive by apache: class TestVectorArithmeticExpressions, method getVectorizedRowBatchSingleLongVector.
private VectorizedRowBatch getVectorizedRowBatchSingleLongVector(int size) {
  // Two-column batch: column 0 is populated with i * 37, column 1 is a scratch output.
  VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
  LongColumnVector input = new LongColumnVector(size);
  for (int row = 0; row < size; ++row) {
    input.vector[row] = row * 37;
  }
  batch.cols[0] = input;
  batch.cols[1] = new LongColumnVector(size);
  batch.size = size;
  return batch;
}
Use of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector in project hive by apache: class FakeVectorRowBatchFromRepeats, method produceNextBatch.
@Override
public VectorizedRowBatch produceNextBatch() {
  // Reset the reused batch; an exhausted source yields an empty batch.
  vrg.size = 0;
  vrg.selectedInUse = false;
  if (count > 0) {
    // Emit up to batchSize rows, drawing down the remaining row budget.
    vrg.size = Math.min(batchSize, count);
    count -= vrg.size;
    // Every column repeats one value (or one null) across the whole batch.
    for (int c = 0; c < numCols; ++c) {
      LongColumnVector column = (LongColumnVector) vrg.cols[c];
      column.isRepeating = true;
      Long repeated = values[c];
      if (repeated != null) {
        column.noNulls = true;
        column.vector[0] = repeated;
      } else {
        column.noNulls = false;
        column.isNull[0] = true;
      }
    }
  }
  return vrg;
}
Use of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector in project hive by apache: class VectorizedRowGroupGenUtil, method generateLongColumnVector.
public static LongColumnVector generateLongColumnVector(boolean nulls, boolean repeating, int size, Random rand) {
  LongColumnVector lcv = new LongColumnVector(size);
  lcv.noNulls = !nulls;
  lcv.isRepeating = repeating;

  // Draw a non-zero value for the repeating case; zero is reserved as a
  // "never generated" sentinel. (The draw happens even when not repeating,
  // keeping the Random call sequence stable for seeded tests.)
  long repeatedValue = rand.nextLong();
  while (repeatedValue == 0) {
    repeatedValue = rand.nextLong();
  }

  int nullFrequency = generateNullFrequency(rand);
  for (int i = 0; i < size; i++) {
    // Repeating-with-nulls marks every slot null; otherwise nulls land on a
    // fixed stride (index 0 is always null when nulls are requested).
    if (nulls && (repeating || i % nullFrequency == 0)) {
      lcv.isNull[i] = true;
      lcv.vector[i] = LONG_VECTOR_NULL_VALUE;
    } else {
      lcv.isNull[i] = false;
      lcv.vector[i] = repeating ? repeatedValue : rand.nextLong();
      if (lcv.vector[i] == 0) {
        // Re-draw this slot: zero must never appear as generated data.
        i--;
      }
    }
  }
  return lcv;
}
Aggregations