Example usage of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory in the Apache Hive project.
From the class TestVectorGenericDateExpressions, method testDate().
/**
 * Exercises the vectorized date() UDF for every supported input column type
 * (date, timestamp, string), both with fully-populated vectors and with
 * randomly injected nulls, then checks that an unparseable date string
 * produces a NULL output instead of raising an error.
 */
@Test
public void testDate() throws HiveException {
  for (PrimitiveCategory colType : dateTimestampStringTypes) {
    LongColumnVector date = newRandomLongColumnVector(10000, size);
    LongColumnVector output = new LongColumnVector(size);

    VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
    batch.cols[0] = castTo(date, colType);
    batch.cols[1] = output;
    validateDate(batch, colType, date);

    // Re-run with random nulls to verify null propagation.
    TestVectorizedRowBatch.addRandomNulls(date);
    batch.cols[0] = castTo(date, colType);
    validateDate(batch, colType, date);
  }

  // A string that cannot be parsed as a date must yield NULL, not throw.
  VectorExpression udf = new VectorUDFDateString(0, 1);
  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo });
  udf.transientInit();
  VectorizedRowBatch batch = new VectorizedRowBatch(2, 1);
  batch.cols[0] = new BytesColumnVector(1);
  batch.cols[1] = new LongColumnVector(1);
  BytesColumnVector bcv = (BytesColumnVector) batch.cols[0];
  byte[] bytes = "error".getBytes(utf8);
  bcv.vector[0] = bytes;
  bcv.start[0] = 0;
  bcv.length[0] = bytes.length;
  udf.evaluate(batch);
  // Fix: use assertTrue instead of assertEquals(actual, true) — the original
  // reversed JUnit's (expected, actual) order, yielding misleading failures.
  Assert.assertTrue(batch.cols[1].isNull[0]);
}
Example usage of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory in the Apache Hive project.
From the class TestVectorGenericDateExpressions, method testDateSubColCol().
/**
 * Exercises date_sub(col, col) for every supported first-argument column type
 * (delegating to the shared add/sub helper with subtraction selected), then
 * verifies that an unparseable date string input produces a NULL output.
 */
@Test
public void testDateSubColCol() throws HiveException {
  for (PrimitiveCategory colType1 : dateTimestampStringTypes) {
    testDateAddColCol(colType1, false);  // false selects subtraction
  }

  // A string that cannot be parsed as a date must yield NULL, not throw.
  VectorExpression udf = new VectorUDFDateSubColCol(0, 1, 2);
  VectorizedRowBatch batch = new VectorizedRowBatch(3, 1);
  BytesColumnVector bcv;
  byte[] bytes = "error".getBytes(utf8);
  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
  udf.transientInit();
  batch.cols[0] = new BytesColumnVector(1);
  batch.cols[1] = new LongColumnVector(1);
  batch.cols[2] = new LongColumnVector(1);
  bcv = (BytesColumnVector) batch.cols[0];
  bcv.vector[0] = bytes;
  bcv.start[0] = 0;
  bcv.length[0] = bytes.length;
  udf.evaluate(batch);
  // Fix: use assertTrue instead of assertEquals(actual, true) — the original
  // reversed JUnit's (expected, actual) order, yielding misleading failures.
  Assert.assertTrue(batch.cols[2].isNull[0]);
}
Example usage of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory in the Apache Hive project.
From the class TestVectorGenericDateExpressions, method testDateDiffColCol().
/**
 * Exercises datediff(col, col) across the cross product of supported column
 * types for both arguments, with and without randomly injected nulls, then
 * verifies that an unparseable date string in either argument position
 * produces a NULL output instead of raising an error.
 */
@Test
public void testDateDiffColCol() throws HiveException {
  for (PrimitiveCategory colType1 : dateTimestampStringTypes) {
    for (PrimitiveCategory colType2 : dateTimestampStringTypes) {
      LongColumnVector date1 = newRandomLongColumnVector(10000, size);
      LongColumnVector date2 = newRandomLongColumnVector(10000, size);
      LongColumnVector output = new LongColumnVector(size);

      VectorizedRowBatch batch = new VectorizedRowBatch(3, size);
      batch.cols[0] = castTo(date1, colType1);
      batch.cols[1] = castTo(date2, colType2);
      batch.cols[2] = output;
      validateDateDiff(batch, date1, date2, colType1, colType2);

      // Null propagation: first from the left argument, then both.
      TestVectorizedRowBatch.addRandomNulls(date1);
      batch.cols[0] = castTo(date1, colType1);
      validateDateDiff(batch, date1, date2, colType1, colType2);
      TestVectorizedRowBatch.addRandomNulls(date2);
      batch.cols[1] = castTo(date2, colType2);
      validateDateDiff(batch, date1, date2, colType1, colType2);
    }
  }

  // Unparseable string as the FIRST argument must yield NULL, not throw.
  VectorExpression udf = new VectorUDFDateDiffColCol(0, 1, 2);
  VectorizedRowBatch batch = new VectorizedRowBatch(3, 1);
  BytesColumnVector bcv;
  byte[] bytes = "error".getBytes(utf8);
  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
  udf.transientInit();
  batch.cols[0] = new BytesColumnVector(1);
  batch.cols[1] = new TimestampColumnVector(1);
  batch.cols[2] = new LongColumnVector(1);
  bcv = (BytesColumnVector) batch.cols[0];
  bcv.vector[0] = bytes;
  bcv.start[0] = 0;
  bcv.length[0] = bytes.length;
  udf.evaluate(batch);
  // Fix: use assertTrue instead of assertEquals(actual, true) — the original
  // reversed JUnit's (expected, actual) order, yielding misleading failures.
  Assert.assertTrue(batch.cols[2].isNull[0]);

  // Unparseable string as the SECOND argument must also yield NULL.
  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.timestampTypeInfo, TypeInfoFactory.stringTypeInfo });
  udf.transientInit();
  batch.cols[0] = new TimestampColumnVector(1);
  batch.cols[1] = new BytesColumnVector(1);
  batch.cols[2] = new LongColumnVector(1);
  bcv = (BytesColumnVector) batch.cols[1];
  bcv.vector[0] = bytes;
  bcv.start[0] = 0;
  bcv.length[0] = bytes.length;
  udf.evaluate(batch);
  Assert.assertTrue(batch.cols[2].isNull[0]);
}
Example usage of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory in the Apache Hive project.
From the class TestVectorGenericDateExpressions, method testDateAddColCol().
/**
 * Exercises date_add(col, col) for every supported first-argument column type
 * (delegating to the shared add/sub helper with addition selected), then
 * verifies that an unparseable date string input produces a NULL output.
 */
@Test
public void testDateAddColCol() throws HiveException {
  for (PrimitiveCategory colType1 : dateTimestampStringTypes) {
    testDateAddColCol(colType1, true);  // true selects addition
  }

  // A string that cannot be parsed as a date must yield NULL, not throw.
  VectorExpression udf = new VectorUDFDateAddColCol(0, 1, 2);
  VectorizedRowBatch batch = new VectorizedRowBatch(3, 1);
  BytesColumnVector bcv;
  byte[] bytes = "error".getBytes(utf8);
  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
  udf.transientInit();
  batch.cols[0] = new BytesColumnVector(1);
  batch.cols[1] = new LongColumnVector(1);
  batch.cols[2] = new LongColumnVector(1);
  bcv = (BytesColumnVector) batch.cols[0];
  bcv.vector[0] = bytes;
  bcv.start[0] = 0;
  bcv.length[0] = bytes.length;
  udf.evaluate(batch);
  // Fix: use assertTrue instead of assertEquals(actual, true) — the original
  // reversed JUnit's (expected, actual) order, yielding misleading failures.
  Assert.assertTrue(batch.cols[2].isNull[0]);
}
Example usage of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory in the Apache Hive project.
From the class GenericUDF, method obtainDateConverter().
/**
 * Sets up a converter for argument {@code i} so it can be consumed as a date.
 * String-group inputs (STRING/VARCHAR/CHAR) are routed through the writable
 * string inspector; date-group inputs (TIMESTAMP/DATE/VOID/TIMESTAMPLOCALTZ)
 * through the writable date inspector. The resolved converter and the raw
 * primitive category are recorded into the supplied arrays at index {@code i}.
 *
 * @throws UDFArgumentTypeException if the argument is neither a string-group
 *         nor a date-group primitive type
 */
protected void obtainDateConverter(ObjectInspector[] arguments, int i, PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
  PrimitiveObjectInspector argOi = (PrimitiveObjectInspector) arguments[i];
  PrimitiveCategory category = argOi.getPrimitiveCategory();

  ObjectInspector targetOi;
  if (category == PrimitiveCategory.STRING
      || category == PrimitiveCategory.VARCHAR
      || category == PrimitiveCategory.CHAR) {
    targetOi = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  } else if (category == PrimitiveCategory.TIMESTAMP
      || category == PrimitiveCategory.DATE
      || category == PrimitiveCategory.VOID
      || category == PrimitiveCategory.TIMESTAMPLOCALTZ) {
    targetOi = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  } else {
    throw new UDFArgumentTypeException(i, getFuncName() + " only takes STRING_GROUP or DATE_GROUP types as " + getArgOrder(i) + " argument, got " + category);
  }

  converters[i] = ObjectInspectorConverters.getConverter(argOi, targetOi);
  inputTypes[i] = category;
}
Aggregations