Use of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector in project hive by apache.
The class TestVectorGenericDateExpressions, method validateDateDiff.
/**
 * Evaluates VectorUDFDateDiffColCol over columns 0 and 1 of {@code batch} (output in
 * column 2) and checks each row: a null on either input must propagate to the output,
 * otherwise the output must equal the difference of the two underlying long values.
 */
private void validateDateDiff(VectorizedRowBatch batch, LongColumnVector date1, LongColumnVector date2, PrimitiveCategory colType1, PrimitiveCategory colType2) throws HiveException {
  VectorExpression udf = new VectorUDFDateDiffColCol(0, 1, 2);
  udf.setInputTypeInfos(new TypeInfo[] {
      primitiveCategoryToTypeInfo(colType1), primitiveCategoryToTypeInfo(colType2) });
  udf.transientInit();
  udf.evaluate(batch);

  LongColumnVector result = (LongColumnVector) batch.cols[2];
  int rowCount = date1.vector.length;
  for (int row = 0; row < rowCount; row++) {
    boolean eitherNull = date1.isNull[row] || date2.isNull[row];
    if (eitherNull) {
      // Null inputs must produce a null output.
      Assert.assertTrue(result.isNull[row]);
    } else {
      // datediff result is the plain difference of the two column values.
      Assert.assertEquals(date1.vector[row] - date2.vector[row], result.vector[row]);
    }
  }
}
Use of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector in project hive by apache.
The class TestVectorGenericDateExpressions, method testDateAddColScalar.
/**
 * Exercises date_add with a column of random dates (cast to {@code colType1}) and a
 * random scalar offset, first on a fully populated batch and then again after random
 * nulls are injected into the input column.
 */
private void testDateAddColScalar(PrimitiveCategory colType1, boolean isPositive) throws HiveException {
  LongColumnVector dates = newRandomLongColumnVector(10000, size);
  long offset = newRandom(1000);

  VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
  batch.cols[0] = castTo(dates, colType1);
  batch.cols[1] = new LongColumnVector(size);

  // Pass 1: no nulls in the input.
  validateDateAdd(batch, colType1, offset, isPositive, dates);
  // Pass 2: same batch with random nulls in the input column.
  TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
  validateDateAdd(batch, colType1, offset, isPositive, dates);
}
Use of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector in project hive by apache.
The class TestVectorGenericDateExpressions, method testToDate.
/**
 * Tests the to_date cast for timestamp and string input columns: valid random inputs
 * (with and without nulls) are checked via {@code validateToDate}, and an unparseable
 * date string must yield a null output rather than throwing.
 */
@Test
public void testToDate() throws HiveException {
  for (PrimitiveCategory type : Arrays.asList(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.STRING)) {
    LongColumnVector date = newRandomLongColumnVector(10000, size);
    LongColumnVector output = new LongColumnVector(size);
    VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
    batch.cols[0] = castTo(date, type);
    batch.cols[1] = output;
    validateToDate(batch, type, date);
    // Re-run with random nulls to verify null propagation.
    TestVectorizedRowBatch.addRandomNulls(date);
    batch.cols[0] = castTo(date, type);
    validateToDate(batch, type, date);
  }
  // Invalid input: a string that cannot parse as a date must produce a null output row.
  VectorExpression udf = new CastStringToDate(0, 1);
  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo });
  udf.transientInit();
  VectorizedRowBatch batch = new VectorizedRowBatch(2, 1);
  batch.cols[0] = new BytesColumnVector(1);
  batch.cols[1] = new LongColumnVector(1);
  BytesColumnVector bcv = (BytesColumnVector) batch.cols[0];
  byte[] bytes = "error".getBytes(utf8);
  bcv.vector[0] = bytes;
  bcv.start[0] = 0;
  bcv.length[0] = bytes.length;
  udf.evaluate(batch);
  // Fix: the original used assertEquals(actual, true), which both swaps JUnit's
  // (expected, actual) order and obscures intent; assertTrue states it directly.
  Assert.assertTrue(batch.cols[1].isNull[0]);
}
Use of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector in project hive by apache.
The class TestVectorGenericDateExpressions, method testDateAddScalarCol.
/**
 * Tests date_add with a scalar first argument over each supported column type, then
 * verifies that an unparseable scalar date string produces a null output row.
 */
@Test
public void testDateAddScalarCol() throws HiveException {
  for (PrimitiveCategory scalarType1 : dateTimestampStringTypes) {
    testDateAddScalarCol(scalarType1, true);
  }
  // Invalid scalar: "error" cannot parse as a date, so every output row must be null.
  VectorExpression udf = new VectorUDFDateAddScalarCol("error".getBytes(utf8), 0, 1);
  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
  udf.transientInit();
  VectorizedRowBatch batch = new VectorizedRowBatch(2, 1);
  batch.cols[0] = new LongColumnVector(1);
  batch.cols[1] = new LongColumnVector(1);
  udf.evaluate(batch);
  // Fix: assertTrue replaces assertEquals(actual, true) — correct argument order
  // and clearer failure semantics.
  Assert.assertTrue(batch.cols[1].isNull[0]);
}
Use of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector in project hive by apache.
The class TestVectorGenericDateExpressions, method testDateDiffScalarCol.
/**
 * Tests datediff with a scalar first argument: every (scalar type, column type) pairing
 * is validated with random data, with and without nulls, and then two invalid-input
 * cases (unparseable column string, unparseable scalar string) must yield null outputs.
 */
@Test
public void testDateDiffScalarCol() throws HiveException {
  for (PrimitiveCategory scalarType1 : dateTimestampStringTypes) {
    for (PrimitiveCategory colType2 : dateTimestampStringTypes) {
      LongColumnVector date2 = newRandomLongColumnVector(10000, size);
      LongColumnVector output = new LongColumnVector(size);
      ColumnVector col2 = castTo(date2, colType2);
      VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
      batch.cols[0] = col2;
      batch.cols[1] = output;
      long scalar1 = newRandom(1000);
      validateDateDiff(batch, scalar1, scalarType1, colType2, date2);
      // Re-run with random nulls to verify null propagation.
      TestVectorizedRowBatch.addRandomNulls(date2);
      batch.cols[0] = castTo(date2, colType2);
      validateDateDiff(batch, scalar1, scalarType1, colType2, date2);
    }
  }
  VectorExpression udf;
  byte[] bytes = "error".getBytes(utf8);
  VectorizedRowBatch batch = new VectorizedRowBatch(2, 1);
  // Case 1: valid timestamp scalar, unparseable string column -> null output.
  udf = new VectorUDFDateDiffScalarCol(new Timestamp(0), 0, 1);
  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.timestampTypeInfo, TypeInfoFactory.stringTypeInfo });
  udf.transientInit();
  batch.cols[0] = new BytesColumnVector(1);
  batch.cols[1] = new LongColumnVector(1);
  BytesColumnVector bcv = (BytesColumnVector) batch.cols[0];
  bcv.vector[0] = bytes;
  bcv.start[0] = 0;
  bcv.length[0] = bytes.length;
  udf.evaluate(batch);
  // Fix: assertTrue replaces assertEquals(actual, true), which swapped JUnit's
  // (expected, actual) argument order.
  Assert.assertTrue(batch.cols[1].isNull[0]);
  // Case 2: unparseable string scalar, valid long column -> null output.
  udf = new VectorUDFDateDiffScalarCol(bytes, 0, 1);
  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
  udf.transientInit();
  batch.cols[0] = new LongColumnVector(1);
  batch.cols[1] = new LongColumnVector(1);
  udf.evaluate(batch);
  Assert.assertTrue(batch.cols[1].isNull[0]);
}
Aggregations