Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From the class TestGenericUDFRegexp, method testConstant.
@Test
public void testConstant() throws HiveException {
  GenericUDFRegExp udf = new GenericUDFRegExp();
  // First argument: a regular (non-constant) writable string column.
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  // Second argument: the pattern "^fo" as a constant object inspector, so the
  // UDF can compile the regex once at initialization time.
  Text regexText = new Text("^fo");
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      TypeInfoFactory.stringTypeInfo, regexText);
  ObjectInspector[] arguments = { valueOI0, valueOI1 };
  udf.initialize(arguments);
  runAndVerifyConst("fofo", regexText, true, udf);
  runAndVerifyConst("fofofo", regexText, true, udf);
  runAndVerifyConst("fobar", regexText, true, udf);
  runAndVerifyConst("barfobar", regexText, false, udf);
  // A null input value should produce a null result.
  runAndVerifyConst(null, regexText, null, udf);
}
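The runAndVerifyConst helper is defined elsewhere in the test class and does not appear in this snippet. A minimal sketch, assuming the usual DeferredJavaObject pattern for evaluating a GenericUDF in tests (names and structure are an assumption, not the verbatim Hive helper):

private void runAndVerifyConst(String str, Text regexText, Boolean expResult, GenericUDF udf)
    throws HiveException {
  // Wrap the row value and the constant pattern as deferred objects.
  GenericUDF.DeferredObject valueObj0 =
      new GenericUDF.DeferredJavaObject(str != null ? new Text(str) : null);
  GenericUDF.DeferredObject valueObj1 = new GenericUDF.DeferredJavaObject(regexText);
  BooleanWritable output =
      (BooleanWritable) udf.evaluate(new GenericUDF.DeferredObject[] { valueObj0, valueObj1 });
  if (expResult == null) {
    assertNull(output);
  } else {
    assertNotNull(output);
    assertEquals(expResult.booleanValue(), output.get());
  }
}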
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From the class TestVectorUDFAdaptor, method testMultiArgumentUDF.
@Test
public void testMultiArgumentUDF() throws HiveException {
  // Create a syntax tree for the function call "testudf(col0, col1, col2)".
  ExprNodeGenericFuncDesc funcDesc;
  TypeInfo typeInfoStr = TypeInfoFactory.stringTypeInfo;
  TypeInfo typeInfoLong = TypeInfoFactory.longTypeInfo;
  TypeInfo typeInfoDbl = TypeInfoFactory.doubleTypeInfo;
  GenericUDFBridge genericUDFBridge =
      new GenericUDFBridge("testudf", false, ConcatTextLongDoubleUDF.class.getName());
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(new ExprNodeColumnDesc(typeInfoStr, "col0", "tablename", false));
  children.add(new ExprNodeColumnDesc(typeInfoLong, "col1", "tablename", false));
  children.add(new ExprNodeColumnDesc(typeInfoDbl, "col2", "tablename", false));
  VectorUDFArgDesc[] argDescs = new VectorUDFArgDesc[3];
  for (int i = 0; i < 3; i++) {
    argDescs[i] = new VectorUDFArgDesc();
    argDescs[i].setVariable(i);
  }
  funcDesc = new ExprNodeGenericFuncDesc(typeInfoStr, genericUDFBridge,
      genericUDFBridge.getUdfName(), children);

  // Create the adaptor that lets this function call run in vectorized mode.
  VectorUDFAdaptor vudf = null;
  try {
    vudf = new VectorUDFAdaptor(funcDesc, 3, "String", argDescs);
  } catch (HiveException e) {
    // We should never get here.
    assertTrue(false);
    throw new RuntimeException(e);
  }

  // Case 1: no nulls.
  VectorizedRowBatch b = getBatchStrDblLongWithStrOut();
  vudf.evaluate(b);
  byte[] result = null;
  byte[] result2 = null;
  try {
    result = "red:1:1.0".getBytes("UTF-8");
    result2 = "blue:0:0.0".getBytes("UTF-8");
  } catch (Exception e) {
    // Ignore: UTF-8 is always supported.
  }
  BytesColumnVector out = (BytesColumnVector) b.cols[3];
  int cmp = StringExpr.compare(result, 0, result.length, out.vector[1], out.start[1], out.length[1]);
  assertEquals(0, cmp);
  assertTrue(out.noNulls);

  // Case 2: with nulls in an input column.
  b = getBatchStrDblLongWithStrOut();
  b.cols[1].noNulls = false;
  vudf.evaluate(b);
  out = (BytesColumnVector) b.cols[3];
  assertFalse(out.noNulls);
  assertTrue(out.isNull[1]);

  // Case 3: all input columns repeating, so the output repeats too.
  b = getBatchStrDblLongWithStrOut();
  b.cols[0].isRepeating = true;
  b.cols[1].isRepeating = true;
  b.cols[2].isRepeating = true;
  vudf.evaluate(b);
  out = (BytesColumnVector) b.cols[3];
  assertTrue(out.isRepeating);
  cmp = StringExpr.compare(result2, 0, result2.length, out.vector[0], out.start[0], out.length[0]);
  assertEquals(0, cmp);
  assertTrue(out.noNulls);
}
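The getBatchStrDblLongWithStrOut helper is not shown. For the assertions above to hold, it must build a two-row batch whose columns concatenate to "blue:0:0.0" in row 0 and "red:1:1.0" in row 1, with an output BytesColumnVector in slot 3. A sketch consistent with that (the preset isNull flag on row 1 of the long column is inferred from the null-handling assertions; this is an assumption, not the verbatim Hive helper):

private VectorizedRowBatch getBatchStrDblLongWithStrOut() {
  VectorizedRowBatch b = new VectorizedRowBatch(4);
  BytesColumnVector strCol = new BytesColumnVector();
  LongColumnVector longCol = new LongColumnVector();
  DoubleColumnVector dblCol = new DoubleColumnVector();
  b.cols[0] = strCol;
  b.cols[1] = longCol;
  b.cols[2] = dblCol;
  b.cols[3] = new BytesColumnVector(); // output column
  byte[] blue = "blue".getBytes(StandardCharsets.UTF_8);
  byte[] red = "red".getBytes(StandardCharsets.UTF_8);
  strCol.initBuffer();
  strCol.setVal(0, blue, 0, blue.length);
  strCol.setVal(1, red, 0, red.length);
  longCol.vector[0] = 0;
  longCol.vector[1] = 1;
  // Pre-mark row 1 as null; this only takes effect once the test clears noNulls.
  longCol.isNull[1] = true;
  dblCol.vector[0] = 0.0;
  dblCol.vector[1] = 1.0;
  b.size = 2;
  return b;
}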
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From the class TestGenericUDFDateFormat, method testDateFormatDate.
@Test
public void testDateFormatDate() throws HiveException {
  GenericUDFDateFormat udf = new GenericUDFDateFormat();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  // "EEEE" formats a date as its full day-of-week name.
  Text fmtText = new Text("EEEE");
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      TypeInfoFactory.stringTypeInfo, fmtText);
  ObjectInspector[] arguments = { valueOI0, valueOI1 };
  udf.initialize(arguments);
  runAndVerifyDate("2015-04-05", fmtText, "Sunday", udf);
  runAndVerifyDate("2015-04-06", fmtText, "Monday", udf);
  runAndVerifyDate("2015-04-07", fmtText, "Tuesday", udf);
  runAndVerifyDate("2015-04-08", fmtText, "Wednesday", udf);
  runAndVerifyDate("2015-04-09", fmtText, "Thursday", udf);
  runAndVerifyDate("2015-04-10", fmtText, "Friday", udf);
  runAndVerifyDate("2015-04-11", fmtText, "Saturday", udf);
  runAndVerifyDate("2015-04-12", fmtText, "Sunday", udf);
}
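Like runAndVerifyConst above, runAndVerifyDate lives elsewhere in the test class. A plausible sketch, assuming a DateWritableV2 wrapper to match the writableDateObjectInspector used above (details are an assumption; older Hive versions use DateWritable and java.sql.Date instead):

private void runAndVerifyDate(String str, Text fmtText, String expResult, GenericUDF udf)
    throws HiveException {
  GenericUDF.DeferredObject valueObj0 = new GenericUDF.DeferredJavaObject(
      str != null ? new DateWritableV2(Date.valueOf(str)) : null);
  GenericUDF.DeferredObject valueObj1 = new GenericUDF.DeferredJavaObject(fmtText);
  Text output = (Text) udf.evaluate(new GenericUDF.DeferredObject[] { valueObj0, valueObj1 });
  assertEquals(expResult, output != null ? output.toString() : null);
}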
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From the class TestGenericUDFAddMonths, method testAddMonthsTimestamp.
@Test
public void testAddMonthsTimestamp() throws HiveException {
  GenericUDFAddMonths udf = new GenericUDFAddMonths();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
  // fmtTextWithTime is a field of the test class holding the output format pattern.
  ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      TypeInfoFactory.stringTypeInfo, fmtTextWithTime);
  ObjectInspector[] arguments = { valueOI0, valueOI1, valueOI2 };
  udf.initialize(arguments);
  runAndVerify(Timestamp.valueOf("2018-05-10 08:15:12"), 1, fmtTextWithTime, "2018-06-10 08:15:12", udf);
  // Adding 2 months to Dec 31 clamps to the last day of February.
  runAndVerify(Timestamp.valueOf("2017-12-31 14:15:16"), 2, fmtTextWithTime, "2018-02-28 14:15:16", udf);
}
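Both fmtTextWithTime and runAndVerify are defined on the test class and omitted from this snippet. Given the expected output strings, the field is presumably a pattern like "yyyy-MM-dd HH:mm:ss"; a hypothetical sketch of both (the pattern and the helper body are assumptions):

private final Text fmtTextWithTime = new Text("yyyy-MM-dd HH:mm:ss"); // assumed pattern

private void runAndVerify(Timestamp ts, int months, Text fmtText, String expResult, GenericUDF udf)
    throws HiveException {
  GenericUDF.DeferredObject valueObj0 =
      new GenericUDF.DeferredJavaObject(new TimestampWritableV2(ts));
  GenericUDF.DeferredObject valueObj1 = new GenericUDF.DeferredJavaObject(new IntWritable(months));
  GenericUDF.DeferredObject valueObj2 = new GenericUDF.DeferredJavaObject(fmtText);
  Text output = (Text) udf.evaluate(
      new GenericUDF.DeferredObject[] { valueObj0, valueObj1, valueObj2 });
  assertEquals(expResult, output.toString());
}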
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From the class TestGenericUDFAddMonths, method testAddMonthsInvalidFormatter.
@Test
public void testAddMonthsInvalidFormatter() throws HiveException {
  GenericUDFAddMonths udf = new GenericUDFAddMonths();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
  // fmtTextInvalid is a field of the test class holding a malformed format pattern.
  ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      TypeInfoFactory.stringTypeInfo, fmtTextInvalid);
  ObjectInspector[] arguments = { valueOI0, valueOI1, valueOI2 };
  try {
    udf.initialize(arguments);
    fail("Expected to throw an exception for an invalid date format pattern");
  } catch (IllegalArgumentException e) {
    // Test succeeds if the exception is caught.
  }
}
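fmtTextInvalid is likewise a field of the test class that is not shown. Any pattern the underlying date formatter rejects would trigger the expected IllegalArgumentException; a hypothetical declaration:

// Hypothetical value: any pattern the formatter rejects will do.
private final Text fmtTextInvalid = new Text("yyyy-abcdz");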