Usage of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in the Apache Hive project:
class TestVectorUDFAdaptor, method testGenericUDF.
// Test the UDF adaptor for a generic UDF (as opposed to a legacy UDF).
// Builds the expression tree for 'myisnull(col0, "UNKNOWN")', wraps it in a
// VectorUDFAdaptor, evaluates a batch with one null entry, and verifies that
// the null input maps to "UNKNOWN" while the non-null input passes through.
@Test
public void testGenericUDF() {

  // create a syntax tree for a function call 'myisnull(col0, "UNKNOWN")'
  ExprNodeGenericFuncDesc funcDesc;
  GenericUDF genericUDF = new GenericUDFIsNull();
  TypeInfo typeInfoStr = TypeInfoFactory.stringTypeInfo;

  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(new ExprNodeColumnDesc(typeInfoStr, "col0", "tablename", false));
  children.add(new ExprNodeConstantDesc(typeInfoStr, "UNKNOWN"));

  VectorUDFArgDesc[] argDescs = new VectorUDFArgDesc[2];
  for (int i = 0; i < argDescs.length; i++) {
    argDescs[i] = new VectorUDFArgDesc();
  }
  // argument 0 is the batch column, argument 1 the constant fallback value
  argDescs[0].setVariable(0);
  argDescs[1].setConstant((ExprNodeConstantDesc) children.get(1));
  funcDesc = new ExprNodeGenericFuncDesc(typeInfoStr, genericUDF, "myisnull", children);

  // create the adaptor for this function call to work in vector mode;
  // output goes to column 3 of the batch
  VectorUDFAdaptor vudf = null;
  try {
    vudf = new VectorUDFAdaptor(funcDesc, 3, "String", argDescs);
  } catch (HiveException e) {
    // We should never get here; fail the test but keep the cause so the
    // stack trace is not lost (previously this was a bare assertTrue(false)).
    throw new AssertionError("VectorUDFAdaptor construction failed", e);
  }

  VectorizedRowBatch b;
  // StandardCharsets.UTF_8 cannot throw UnsupportedEncodingException, so the
  // old try/catch that silently swallowed the exception is unnecessary.
  byte[] red = "red".getBytes(java.nio.charset.StandardCharsets.UTF_8);
  byte[] unknown = "UNKNOWN".getBytes(java.nio.charset.StandardCharsets.UTF_8);

  BytesColumnVector out;

  // with nulls
  b = getBatchStrDblLongWithStrOut();
  b.cols[0].noNulls = false;

  // set 1st entry to null
  b.cols[0].isNull[0] = true;
  vudf.evaluate(b);
  out = (BytesColumnVector) b.cols[3];

  // verify outputs: row 1 ("red") passes through, row 0 (null) -> "UNKNOWN"
  int cmp = StringExpr.compare(red, 0, red.length, out.vector[1], out.start[1], out.length[1]);
  assertEquals(0, cmp);

  cmp = StringExpr.compare(unknown, 0, unknown.length, out.vector[0], out.start[0], out.length[0]);
  assertEquals(0, cmp);

  // output entry should not be null for null input for this particular generic UDF
  assertTrue(out.noNulls || !out.isNull[0]);
}
Usage of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in the Apache Hive project:
class TestGenericUDFDateFormat, method testDateFormatStr.
// Verifies date_format(str, 'EEEE') over plain dates, malformed dates, and
// timestamp-style strings; unparseable inputs are expected to yield null.
public void testDateFormatStr() throws HiveException {
  GenericUDFDateFormat udf = new GenericUDFDateFormat();
  ObjectInspector strOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  Text pattern = new Text("EEEE");
  ObjectInspector patternOI = PrimitiveObjectInspectorFactory
      .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, pattern);

  udf.initialize(new ObjectInspector[] { strOI, patternOI });

  // date str: 2015-04-05 through 2015-04-12 is a full Sunday-to-Sunday week
  String[] dates = {
      "2015-04-05", "2015-04-06", "2015-04-07", "2015-04-08",
      "2015-04-09", "2015-04-10", "2015-04-11", "2015-04-12" };
  String[] weekdays = {
      "Sunday", "Monday", "Tuesday", "Wednesday",
      "Thursday", "Friday", "Saturday", "Sunday" };
  for (int i = 0; i < dates.length; i++) {
    runAndVerifyStr(dates[i], pattern, weekdays[i], udf);
  }

  // wrong date str
  runAndVerifyStr("2016-02-30", pattern, "Tuesday", udf);
  runAndVerifyStr("2014-01-32", pattern, "Saturday", udf);
  runAndVerifyStr("01/14/2014", pattern, null, udf);
  runAndVerifyStr(null, pattern, null, udf);

  // ts str
  runAndVerifyStr("2015-04-05 10:30:45", pattern, "Sunday", udf);
  runAndVerifyStr("2015-04-06 10:30:45", pattern, "Monday", udf);
  runAndVerifyStr("2015-04-07 10:30:45", pattern, "Tuesday", udf);
  runAndVerifyStr("2015-04-08 10:30:45", pattern, "Wednesday", udf);
  runAndVerifyStr("2015-04-09 10:30", pattern, "Thursday", udf);
  runAndVerifyStr("2015-04-10 10:30:45.123", pattern, "Friday", udf);
  runAndVerifyStr("2015-04-11T10:30:45", pattern, "Saturday", udf);
  runAndVerifyStr("2015-04-12 10", pattern, "Sunday", udf);

  // wrong ts str
  runAndVerifyStr("2016-02-30 10:30:45", pattern, "Tuesday", udf);
  runAndVerifyStr("2014-01-32 10:30:45", pattern, "Saturday", udf);
  runAndVerifyStr("01/14/2014 10:30:45", pattern, null, udf);
  runAndVerifyStr("2016-02-28T10:30:45", pattern, "Sunday", udf);
}
Usage of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in the SQLWindowing project (hbutani):
class WindowingKeyWrapperFactory, method getWindowingKeyWrapper.
// Chooses a key-wrapper implementation: a single string-typed key gets the
// specialized Text-based wrapper; every other key shape falls back to the
// generic list-based wrapper backed by struct equality comparers.
public WindowingKeyWrapper getWindowingKeyWrapper() {
  boolean singleStringKey = keyFields.length == 1
      && TypeInfoFactory.stringTypeInfo.equals(
          TypeInfoUtils.getTypeInfoFromObjectInspector(keyObjectInspectors[0]));

  if (!singleStringKey) {
    // General case: compare keys field-by-field via equality comparers.
    currentStructEqualComparer =
        new ListObjectsEqualComparer(currentKeyObjectInspectors, currentKeyObjectInspectors);
    newKeyStructEqualComparer =
        new ListObjectsEqualComparer(currentKeyObjectInspectors, keyObjectInspectors);
    return new ListWindowingKeyWrapper(false);
  }

  // Fast path: both the new-key and copied-key inspectors must be string-typed.
  assert TypeInfoFactory.stringTypeInfo.equals(
      TypeInfoUtils.getTypeInfoFromObjectInspector(currentKeyObjectInspectors[0]));
  soi_new = (StringObjectInspector) keyObjectInspectors[0];
  soi_copy = (StringObjectInspector) currentKeyObjectInspectors[0];
  return new TextWindowingKeyWrapper(false);
}
Aggregations