Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From class TestGenericUDFDateFormat, method testWrongDateStr.
public void testWrongDateStr() throws HiveException {
  boolean caught = false;
  try {
    GenericUDFDateFormat udf = new GenericUDFDateFormat();
    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    Text fmtText = new Text("EEEE");
    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, fmtText);
    ObjectInspector[] arguments = { valueOI0, valueOI1 };
    udf.initialize(arguments);
    // Malformed date/timestamp strings ("2016-02-30", "2014-01-32") and an
    // unsupported layout ("01/14/2014"): the test only passes if evaluating
    // one of these inputs raises a HiveException.
    runAndVerifyStr("2016-02-30 10:30:45", fmtText, "Tuesday", udf);
    runAndVerifyStr("2014-01-32", fmtText, "Saturday", udf);
    runAndVerifyStr("01/14/2014", fmtText, null, udf);
    runAndVerifyStr(null, fmtText, null, udf);
  } catch (HiveException e) {
    caught = true;
  }
  assertTrue(caught);
}
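Both tests call a private helper, runAndVerifyStr, that the excerpt omits. Below is a minimal sketch of what such a helper typically looks like, assuming the usual GenericUDF deferred-object evaluation pattern; the actual body in TestGenericUDFDateFormat may differ in details.

// Hypothetical reconstruction of the helper invoked above; intended to live
// inside TestGenericUDFDateFormat.
// Requires: org.apache.hadoop.hive.ql.udf.generic.GenericUDF,
//           GenericUDF.DeferredJavaObject, GenericUDF.DeferredObject,
//           org.apache.hadoop.io.Text, org.junit.Assert.assertEquals.
private void runAndVerifyStr(String str, Text fmtText, String expResult, GenericUDF udf)
    throws HiveException {
  DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new Text(str) : null);
  DeferredObject valueObj1 = new DeferredJavaObject(fmtText);
  DeferredObject[] args = { valueObj0, valueObj1 };
  Text output = (Text) udf.evaluate(args);
  assertEquals("date_format() test", expResult, output != null ? output.toString() : null);
}

With the "EEEE" pattern, a parseable input produces the day-of-week name; for unparseable input, testWrongDateStr above expects evaluation to raise a HiveException.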
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From class TestGenericUDFDateFormat, method testDateFormatStr.
public void testDateFormatStr() throws HiveException {
  GenericUDFDateFormat udf = new GenericUDFDateFormat();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  Text fmtText = new Text("EEEE");
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory
      .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, fmtText);
  ObjectInspector[] arguments = { valueOI0, valueOI1 };
  udf.initialize(arguments);
  // date str
  runAndVerifyStr("2015-04-05", fmtText, "Sunday", udf);
  runAndVerifyStr("2015-04-06", fmtText, "Monday", udf);
  runAndVerifyStr("2015-04-07", fmtText, "Tuesday", udf);
  runAndVerifyStr("2015-04-08", fmtText, "Wednesday", udf);
  runAndVerifyStr("2015-04-09", fmtText, "Thursday", udf);
  runAndVerifyStr("2015-04-10", fmtText, "Friday", udf);
  runAndVerifyStr("2015-04-11", fmtText, "Saturday", udf);
  runAndVerifyStr("2015-04-12", fmtText, "Sunday", udf);
  // ts str
  runAndVerifyStr("2015-04-05 10:30:45", fmtText, "Sunday", udf);
  runAndVerifyStr("2015-04-06 10:30:45", fmtText, "Monday", udf);
  runAndVerifyStr("2015-04-07 10:30:45", fmtText, "Tuesday", udf);
  runAndVerifyStr("2015-04-08 10:30:45", fmtText, "Wednesday", udf);
  runAndVerifyStr("2015-04-09 10:30", fmtText, "Thursday", udf);
  runAndVerifyStr("2015-04-10 10:30:45.123", fmtText, "Friday", udf);
  runAndVerifyStr("2015-04-11T10:30:45", fmtText, "Saturday", udf);
  runAndVerifyStr("2015-04-12 10", fmtText, "Sunday", udf);
}
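For reference, TypeInfoFactory.stringTypeInfo is the cached singleton TypeInfo for Hive's string type, and the constant ObjectInspector built from it above carries both that type and the literal "EEEE" pattern. A minimal standalone sketch of the same TypeInfo follows; it uses only standard serde2 factory calls, and the printed results are what those factories should return.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class StringTypeInfoSketch {
  public static void main(String[] args) {
    // Lookup by type name should return the same cached instance as the static field.
    System.out.println(
        TypeInfoFactory.stringTypeInfo == TypeInfoFactory.getPrimitiveTypeInfo("string")); // true

    // A writable ObjectInspector derived from the TypeInfo; as a UDF argument this is
    // equivalent to the writableStringObjectInspector used in the tests above.
    ObjectInspector stringOI =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(TypeInfoFactory.stringTypeInfo);
    System.out.println(stringOI.getTypeName()); // "string"
  }
}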
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From class TestMapJoinOperator, method doTestMultiKey.
public void doTestMultiKey(long seed, VectorMapJoinVariation vectorMapJoinVariation) throws Exception {
  int rowCount = 10000;
  HiveConf hiveConf = new HiveConf();

  String[] bigTableColumnNames = new String[] { "b1", "b2", "b3" };
  TypeInfo[] bigTableTypeInfos = new TypeInfo[] {
      TypeInfoFactory.intTypeInfo,
      TypeInfoFactory.longTypeInfo,
      TypeInfoFactory.stringTypeInfo };
  int[] bigTableKeyColumnNums = new int[] { 0, 1, 2 };

  String[] smallTableValueColumnNames = new String[] { "sv1" };
  TypeInfo[] smallTableValueTypeInfos = new TypeInfo[] { TypeInfoFactory.stringTypeInfo };

  int[] bigTableRetainColumnNums = new int[] { 0, 1, 2 };
  int[] smallTableRetainKeyColumnNums = new int[] {};
  int[] smallTableRetainValueColumnNums = new int[] { 0 };

  SmallTableGenerationParameters smallTableGenerationParameters = new SmallTableGenerationParameters();

  // ----------------------------------------------------------------------------------------------
  MapJoinTestDescription testDesc = new MapJoinTestDescription(
      hiveConf, vectorMapJoinVariation,
      bigTableColumnNames, bigTableTypeInfos, bigTableKeyColumnNums,
      smallTableValueColumnNames, smallTableValueTypeInfos,
      bigTableRetainColumnNums, smallTableRetainKeyColumnNums, smallTableRetainValueColumnNums,
      smallTableGenerationParameters);

  // Prepare data. Good for ANY implementation variation.
  MapJoinTestData testData = new MapJoinTestData(rowCount, testDesc, seed, seed * 10);
  executeTest(testDesc, testData);
}
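doTestMultiKey declares the big-table schema only as parallel name and TypeInfo arrays; MapJoinTestDescription derives the inspectors it needs internally. As an illustration of how such a schema typically maps to a row ObjectInspector, here is a sketch built with the standard serde2 factories; it is not the code of MapJoinTestDescription itself.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class BigTableSchemaSketch {
  public static void main(String[] args) {
    // Same column schema as doTestMultiKey: b1 int, b2 bigint, b3 string.
    List<String> columnNames = Arrays.asList("b1", "b2", "b3");
    List<TypeInfo> typeInfos = Arrays.asList(
        TypeInfoFactory.intTypeInfo,
        TypeInfoFactory.longTypeInfo,
        TypeInfoFactory.stringTypeInfo);

    // One writable ObjectInspector per column, derived from its TypeInfo.
    List<ObjectInspector> fieldOIs = new ArrayList<>();
    for (TypeInfo typeInfo : typeInfos) {
      fieldOIs.add(TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo));
    }

    // Struct inspector describing one big-table row.
    StructObjectInspector rowOI =
        ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, fieldOIs);
    System.out.println(rowOI.getTypeName()); // struct<b1:int,b2:bigint,b3:string>
  }
}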