Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableStringObjectInspector in project hive by apache.
From class TestGenericUDFSortArray, method testSortList:
@Test
public void testSortList() throws HiveException {
// argument type: array<array<string>>
ObjectInspector[] inputOIs = { ObjectInspectorFactory.getStandardListObjectInspector(
    ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.writableStringObjectInspector)) };
udf.initialize(inputOIs);
Object i1 = asList(new Text("aa"), new Text("dd"), new Text("cc"), new Text("bb"));
Object i2 = asList(new Text("aa"), new Text("cc"), new Text("ba"), new Text("dd"));
Object i3 = asList(new Text("aa"), new Text("cc"), new Text("dd"), new Text("ee"), new Text("bb"));
Object i4 = asList(new Text("aa"), new Text("cc"), new Text("ddd"), new Text("bb"));
// nested lists compare element by element, so the expected order is i2 < i3 < i4 < i1
runAndVerify(asList(i1, i2, i3, i4), asList(i2, i3, i4, i1));
}
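These tests rely on a udf field and a runAndVerify helper that the excerpt does not show. A minimal sketch, assuming the field is a GenericUDFSortArray instance and that the JUnit asserts and GenericUDF classes are imported (the exact helper in the Hive test may differ):

// Assumed fixture and helper; not part of the excerpt above.
private final GenericUDFSortArray udf = new GenericUDFSortArray();

private void runAndVerify(List<Object> actual, List<Object> expected) throws HiveException {
  // sort_array takes a single deferred argument: the list to sort
  GenericUDF.DeferredObject[] args = { new GenericUDF.DeferredJavaObject(actual) };
  List<Object> result = (List<Object>) udf.evaluate(args);
  assertEquals("sort_array() test", expected, result);
}

The same field and helper are assumed by the testSortMap and testSortStruct examples below.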
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableStringObjectInspector in project hive by apache.
From class TestGenericUDFSortArray, method testSortMap:
@Test
public void testSortMap() throws HiveException {
// argument type: array<map<string,int>>
ObjectInspector[] inputOIs = { ObjectInspectorFactory.getStandardListObjectInspector(
    ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        PrimitiveObjectInspectorFactory.writableIntObjectInspector)) };
udf.initialize(inputOIs);
Map<Text, IntWritable> m1 = new HashMap<Text, IntWritable>();
m1.put(new Text("a"), new IntWritable(4));
m1.put(new Text("b"), new IntWritable(3));
m1.put(new Text("c"), new IntWritable(1));
m1.put(new Text("d"), new IntWritable(2));
Map<Text, IntWritable> m2 = new HashMap<Text, IntWritable>();
m2.put(new Text("d"), new IntWritable(4));
m2.put(new Text("b"), new IntWritable(3));
m2.put(new Text("a"), new IntWritable(1));
m2.put(new Text("c"), new IntWritable(2));
Map<Text, IntWritable> m3 = new HashMap<Text, IntWritable>();
m3.put(new Text("d"), new IntWritable(4));
m3.put(new Text("b"), new IntWritable(3));
m3.put(new Text("a"), new IntWritable(1));
runAndVerify(asList((Object) m1, m2, m3), asList((Object) m3, m2, m1));
}
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableStringObjectInspector in project hive by apache.
From class TestGenericUDFSortArray, method testSortStruct:
@Test
public void testSortStruct() throws HiveException {
// argument type: array<struct<f1:string, f2:double, f3:date, f4:array<int>>>
ObjectInspector[] inputOIs = { ObjectInspectorFactory.getStandardListObjectInspector(
    ObjectInspectorFactory.getStandardStructObjectInspector(
        asList("f1", "f2", "f3", "f4"),
        asList(PrimitiveObjectInspectorFactory.writableStringObjectInspector,
            PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
            PrimitiveObjectInspectorFactory.writableDateObjectInspector,
            ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.writableIntObjectInspector)))) };
udf.initialize(inputOIs);
Object i1 = asList(new Text("a"), new DoubleWritable(3.1415), new DateWritable(new Date(2015, 5, 26)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(4)));
Object i2 = asList(new Text("b"), new DoubleWritable(3.14), new DateWritable(new Date(2015, 5, 26)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(4)));
Object i3 = asList(new Text("a"), new DoubleWritable(3.1415), new DateWritable(new Date(2015, 5, 25)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(5)));
Object i4 = asList(new Text("a"), new DoubleWritable(3.1415), new DateWritable(new Date(2015, 5, 25)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(4)));
runAndVerify(asList(i1, i2, i3, i4), asList(i4, i3, i1, i2));
}
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableStringObjectInspector in project hive by apache.
From class TestGenericUDFSortArrayByField, method testSortPrimitiveTupleTwoFieldOrderDESC:
@Test
public void testSortPrimitiveTupleTwoFieldOrderDESC() throws HiveException {
List<ObjectInspector> tuple = new ArrayList<ObjectInspector>();
tuple.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
tuple.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
// first argument: array<struct<Company:string, Department:string>>; the remaining
// arguments are the two sort field names and the sort order
ObjectInspector[] inputOIs = {
    ObjectInspectorFactory.getStandardListObjectInspector(
        ObjectInspectorFactory.getStandardStructObjectInspector(
            asList("Company", "Department"), tuple)),
    PrimitiveObjectInspectorFactory.writableStringObjectInspector,
    PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector,
    PrimitiveObjectInspectorFactory.writableStringObjectInspector };
udf.initialize(inputOIs);
Object i1 = asList(new Text("Linkedin"), new Text("HR"));
Object i2 = asList(new Text("Linkedin"), new Text("IT"));
Object i3 = asList(new Text("Linkedin"), new Text("Finance"));
Object i4 = asList(new Text("Facebook"), new Text("IT"));
Object i5 = asList(new Text("Facebook"), new Text("Finance"));
Object i6 = asList(new Text("Facebook"), new Text("HR"));
Object i7 = asList(new Text("Google"), new Text("Logistics"));
Object i8 = asList(new Text("Google"), new Text("Finance"));
Object i9 = asList(new Text("Google"), new Text("HR"));
HiveVarchar vc = new HiveVarchar();
vc.setValue("Department");
GenericUDF.DeferredJavaObject[] args = {
    new GenericUDF.DeferredJavaObject(asList(i1, i2, i3, i4, i5, i6, i7, i8, i9)),
    new GenericUDF.DeferredJavaObject(new Text("Company")),
    new GenericUDF.DeferredJavaObject(new HiveVarcharWritable(vc)),
    new GenericUDF.DeferredJavaObject(new Text("DESC")) };
runAndVerify(args, asList(i2, i1, i3, i7, i9, i8, i4, i6, i5));
}
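Here runAndVerify receives the deferred arguments directly along with the expected order. A plausible sketch of the fixture and helper, assuming udf is a GenericUDFSortArrayByField instance (the actual test code may differ):

// Assumed fixture and helper; not shown in the excerpt.
private final GenericUDFSortArrayByField udf = new GenericUDFSortArrayByField();

private void runAndVerify(GenericUDF.DeferredObject[] args, List<Object> expected) throws HiveException {
  // sort_array_by(array, field..., order) evaluated against the prepared deferred arguments
  List<Object> result = (List<Object>) udf.evaluate(args);
  assertEquals("sort_array_by() test", expected, result);
}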
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableStringObjectInspector in project hive by apache.
From class TestGenericUDFQuarter, method testQuarterStr:
public void testQuarterStr() throws HiveException {
GenericUDFQuarter udf = new GenericUDFQuarter();
ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
ObjectInspector[] arguments = { valueOI0 };
udf.initialize(arguments);
// date str
runAndVerifyStr("2014-01-10", 1, udf);
runAndVerifyStr("2014-02-10", 1, udf);
runAndVerifyStr("2014-03-31", 1, udf);
runAndVerifyStr("2014-04-02", 2, udf);
runAndVerifyStr("2014-05-28", 2, udf);
runAndVerifyStr("2016-06-03", 2, udf);
runAndVerifyStr("2016-07-28", 3, udf);
runAndVerifyStr("2016-08-29", 3, udf);
runAndVerifyStr("2016-09-29", 3, udf);
runAndVerifyStr("2016-10-29", 4, udf);
runAndVerifyStr("2016-11-29", 4, udf);
runAndVerifyStr("2016-12-29", 4, udf);
// wrong date str: out-of-range days roll over into the next month; unparseable formats and null input return null
runAndVerifyStr("2016-03-35", 2, udf);
runAndVerifyStr("2014-01-32", 1, udf);
runAndVerifyStr("01/14/2014", null, udf);
runAndVerifyStr(null, null, udf);
// negative Unix time
runAndVerifyStr("1966-01-01", 1, udf);
runAndVerifyStr("1966-03-31", 1, udf);
runAndVerifyStr("1966-04-01", 2, udf);
runAndVerifyStr("1966-12-31", 4, udf);
// ts str
runAndVerifyStr("2014-01-01 00:00:00", 1, udf);
runAndVerifyStr("2014-02-10 15:23:00", 1, udf);
runAndVerifyStr("2014-03-31 15:23:00", 1, udf);
runAndVerifyStr("2014-04-02 15:23:00", 2, udf);
runAndVerifyStr("2014-05-28 15:23:00", 2, udf);
runAndVerifyStr("2016-06-03 15:23:00", 2, udf);
runAndVerifyStr("2016-07-28 15:23:00", 3, udf);
runAndVerifyStr("2016-08-29 15:23:00", 3, udf);
runAndVerifyStr("2016-09-29 15:23:00", 3, udf);
runAndVerifyStr("2016-10-29 15:23:00", 4, udf);
runAndVerifyStr("2016-11-29 15:23:00", 4, udf);
runAndVerifyStr("2016-12-31 23:59:59.999", 4, udf);
// wrong date str
runAndVerifyStr("2016-03-35 15:23:00", 2, udf);
runAndVerifyStr("2014-01-32 15:23:00", 1, udf);
runAndVerifyStr("01/14/2014 15:23:00", null, udf);
runAndVerifyStr(null, null, udf);
// negative Unix time
runAndVerifyStr("1966-01-01 00:00:00", 1, udf);
runAndVerifyStr("1966-03-31 23:59:59.999", 1, udf);
runAndVerifyStr("1966-04-01 00:00:00", 2, udf);
runAndVerifyStr("1966-12-31 23:59:59.999", 4, udf);
}
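The runAndVerifyStr helper is also not part of the excerpt. A minimal sketch, assuming quarter() returns an IntWritable and null for unparseable or null input (the helper in the Hive test may differ):

// Assumed helper; not shown in the excerpt.
private void runAndVerifyStr(String str, Integer expResult, GenericUDF udf) throws HiveException {
  // null input is passed through as a null Text argument
  GenericUDF.DeferredObject valueObj = new GenericUDF.DeferredJavaObject(str != null ? new Text(str) : null);
  GenericUDF.DeferredObject[] args = { valueObj };
  IntWritable output = (IntWritable) udf.evaluate(args);
  if (expResult == null) {
    assertNull("quarter() test", output);
  } else {
    assertEquals("quarter() test", expResult.intValue(), output.get());
  }
}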