Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
From class TestGenericUDFSortArrayByField, method testSortTupleArrayStructOrderDESC.
@Test
public void testSortTupleArrayStructOrderDESC() throws HiveException {
    // Element type: struct<Student:string, Scores:array<int>>
    List<ObjectInspector> tuple = new ArrayList<ObjectInspector>();
    tuple.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    tuple.add(ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableIntObjectInspector));
    // Constant arguments: sort by the "Scores" field, descending.
    ObjectInspector field = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, new Text("Scores"));
    ObjectInspector orderField = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, new Text("desc"));
    ObjectInspector[] inputOIs = {
        ObjectInspectorFactory.getStandardListObjectInspector(
            ObjectInspectorFactory.getStandardStructObjectInspector(asList("Student", "Scores"), tuple)),
        field, orderField };
    udf.initialize(inputOIs);
    Object i1 = asList(new Text("Foo"), asList(new IntWritable(4), new IntWritable(3), new IntWritable(2), new IntWritable(1)));
    Object i2 = asList(new Text("Boo"), asList(new IntWritable(2), new IntWritable(3), new IntWritable(2), new IntWritable(1)));
    Object i3 = asList(new Text("Tom"), asList(new IntWritable(10), new IntWritable(3), new IntWritable(2), new IntWritable(1)));
    GenericUDF.DeferredJavaObject[] argas = {
        new GenericUDF.DeferredJavaObject(asList(i1, i2, i3)),
        new GenericUDF.DeferredJavaObject(field),
        new GenericUDF.DeferredJavaObject(orderField) };
    // Descending on Scores: Tom (10), Foo (4), Boo (2).
    runAndVerify(argas, asList(i3, i1, i2));
}
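For context, getStandardStructObjectInspector(fieldNames, fieldOIs) simply pairs a list of field names with a list of field object inspectors; the rows themselves remain plain Java lists. The standalone sketch below (not part of the test above; it reuses the tuple list from the test, and the variable names are illustrative) shows how the struct OI built here can be used to read the Scores field back out of a row shaped like i1:

// Illustrative sketch: read the "Scores" field of one row through the struct OI.
StructObjectInspector structOI = ObjectInspectorFactory.getStandardStructObjectInspector(asList("Student", "Scores"), tuple);
Object row = asList(new Text("Foo"), asList(new IntWritable(4), new IntWritable(3), new IntWritable(2), new IntWritable(1)));
StructField scoresRef = structOI.getStructFieldRef("Scores");
Object scores = structOI.getStructFieldData(row, scoresRef); // the inner List<IntWritable> above
System.out.println(scoresRef.getFieldObjectInspector().getTypeName() + " -> " + scores);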
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
From class TestGenericUDFSortArrayByField, method testSortPrimitiveTupleOneFieldOrderASC.
@Test
public void testSortPrimitiveTupleOneFieldOrderASC() throws HiveException {
    // Element type: struct<Company:string, Salary:double>
    List<ObjectInspector> tuple = new ArrayList<ObjectInspector>();
    tuple.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    tuple.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
    // Field name and sort order arrive as (non-constant) varchar arguments.
    ObjectInspector[] inputOIs = {
        ObjectInspectorFactory.getStandardListObjectInspector(
            ObjectInspectorFactory.getStandardStructObjectInspector(asList("Company", "Salary"), tuple)),
        PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector,
        PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector };
    udf.initialize(inputOIs);
    Object i1 = asList(new Text("Facebook"), new DoubleWritable(80223.25));
    Object i2 = asList(new Text("Facebook"), new DoubleWritable(50223.25));
    Object i3 = asList(new Text("Facebook"), new DoubleWritable(40223.25));
    Object i4 = asList(new Text("Facebook"), new DoubleWritable(60223.25));
    HiveVarchar vc = new HiveVarchar();
    vc.setValue("Salary");
    HiveVarchar order = new HiveVarchar();
    order.setValue("ASC");
    GenericUDF.DeferredJavaObject[] argas = {
        new GenericUDF.DeferredJavaObject(asList(i1, i2, i3, i4)),
        new GenericUDF.DeferredJavaObject(new HiveVarcharWritable(vc)),
        new GenericUDF.DeferredJavaObject(new HiveVarcharWritable(order)) };
    // Ascending on Salary: 40223.25, 50223.25, 60223.25, 80223.25.
    runAndVerify(argas, asList(i3, i2, i4, i1));
}
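Unlike the DESC test above, the field name and sort order here reach the UDF only at evaluate() time, as HiveVarcharWritable values behind plain (non-constant) varchar object inspectors. The ordering the test expects can be reproduced by hand; the sketch below is a rough illustration of the sort-key extraction, not the UDF's actual implementation. It reuses tuple and i1..i4 from the test and assumes java.util.Collections, java.util.Comparator and ObjectInspectorUtils are available:

// Rough equivalent of sorting by the "Salary" struct field in ascending order.
final StructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(asList("Company", "Salary"), tuple);
final StructField salary = rowOI.getStructFieldRef("Salary");
List<Object> rows = new ArrayList<Object>(asList(i1, i2, i3, i4));
Collections.sort(rows, new Comparator<Object>() {
    public int compare(Object a, Object b) {
        return ObjectInspectorUtils.compare(
            rowOI.getStructFieldData(a, salary), salary.getFieldObjectInspector(),
            rowOI.getStructFieldData(b, salary), salary.getFieldObjectInspector());
    }
});
// rows is now [i3, i2, i4, i1], the expected ascending order.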
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
From class TestGenericUDFSortArrayByField, method testSortPrimitiveTupleOneField.
@Test
public void testSortPrimitiveTupleOneField() throws HiveException {
    // Element type: struct<Company:string, Salary:double>
    List<ObjectInspector> tuple = new ArrayList<ObjectInspector>();
    tuple.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    tuple.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
    // Only the field name is supplied; the expected result implies an ascending sort.
    ObjectInspector[] inputOIs = {
        ObjectInspectorFactory.getStandardListObjectInspector(
            ObjectInspectorFactory.getStandardStructObjectInspector(asList("Company", "Salary"), tuple)),
        PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector };
    udf.initialize(inputOIs);
    Object i1 = asList(new Text("Facebook"), new DoubleWritable(80223.25));
    Object i2 = asList(new Text("Facebook"), new DoubleWritable(50223.25));
    Object i3 = asList(new Text("Facebook"), new DoubleWritable(40223.25));
    Object i4 = asList(new Text("Facebook"), new DoubleWritable(60223.25));
    HiveVarchar vc = new HiveVarchar();
    vc.setValue("Salary");
    GenericUDF.DeferredJavaObject[] argas = {
        new GenericUDF.DeferredJavaObject(asList(i1, i2, i3, i4)),
        new GenericUDF.DeferredJavaObject(new HiveVarcharWritable(vc)) };
    // Ascending on Salary: 40223.25, 50223.25, 60223.25, 80223.25.
    runAndVerify(argas, asList(i3, i2, i4, i1));
}
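All three tests delegate to a runAndVerify helper defined elsewhere in TestGenericUDFSortArrayByField; its body is not included on this page. A minimal sketch of what such a helper needs to do is shown below. This is an assumed shape, not the actual helper, using JUnit's Assert.assertEquals:

// Hypothetical helper: evaluate the UDF on the deferred arguments and compare
// the returned list with the expected ordering.
private void runAndVerify(GenericUDF.DeferredJavaObject[] args, List<Object> expected) throws HiveException {
    Object result = udf.evaluate(args);
    Assert.assertEquals("sort result", expected, result);
}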
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project SQLWindowing by hbutani.
From class NPathUtils, method createSelectListInputOI.
/*
 * Add an array<struct> column (the path attribute) to the list of input columns.
 */
public static ObjectInspector createSelectListInputOI(StructObjectInspector inputOI) {
    List<? extends StructField> fields = inputOI.getAllStructFieldRefs();
    ArrayList<ObjectInspector> selectListFieldOIs = new ArrayList<ObjectInspector>();
    ArrayList<String> selectListFieldNames = new ArrayList<String>();
    // The path attribute is a list whose elements are shaped like the input struct.
    StandardListObjectInspector pathAttrOI = ObjectInspectorFactory.getStandardListObjectInspector(
        ObjectInspectorUtils.getStandardObjectInspector(inputOI));
    // Copy every input column, converted to its standard object inspector.
    for (StructField f : fields) {
        selectListFieldOIs.add(ObjectInspectorUtils.getStandardObjectInspector(f.getFieldObjectInspector()));
        selectListFieldNames.add(f.getFieldName());
    }
    // Append the array<struct> path column.
    selectListFieldOIs.add(pathAttrOI);
    selectListFieldNames.add(PATHATTR_NAME);
    return ObjectInspectorFactory.getStandardStructObjectInspector(selectListFieldNames, selectListFieldOIs);
}
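A quick way to see what createSelectListInputOI produces is to feed it a small input row shape and list the fields of the result. The snippet below is illustrative only: the id and name columns are made up, and PATHATTR_NAME is the constant defined in NPathUtils.

// Build a toy input row OI (struct<id:int, name:string>) and inspect the result.
ArrayList<ObjectInspector> cols = new ArrayList<ObjectInspector>();
cols.add(PrimitiveObjectInspectorFactory.javaIntObjectInspector);
cols.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
StructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(asList("id", "name"), cols);

StructObjectInspector selectOI = (StructObjectInspector) NPathUtils.createSelectListInputOI(rowOI);
for (StructField f : selectOI.getAllStructFieldRefs()) {
    // Prints id, name, and the appended path column (an array of the input struct).
    System.out.println(f.getFieldName() + " : " + f.getFieldObjectInspector().getTypeName());
}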
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project SQLWindowing by hbutani.
From class ResultExpressionParser, method buildSelectListEvaluators.
private void buildSelectListEvaluators() throws WindowingException {
    selectListExprEvaluators = new ArrayList<ExprNodeEvaluator>();
    selectListExprOIs = new ArrayList<ObjectInspector>();
    ArrayList<String> selectListExprNames = new ArrayList<String>();
    int i = 0;
    Iterator<Object> it = selectSpec.getColumnListAndAlias();
    while (it.hasNext()) {
        Object[] selectColDetails = (Object[]) it.next();
        String selectColName = (String) selectColDetails[1];
        ASTNode selectColumnNode = (ASTNode) selectColDetails[2];
        // Translate the select expression's AST into an expression evaluator.
        ExprNodeDesc selectColumnExprNode = TranslateUtils.buildExprNode(selectColumnNode, selectListInputTypeCheckCtx);
        ExprNodeEvaluator selectColumnExprEval = ExprNodeEvaluatorFactory.get(selectColumnExprNode);
        ObjectInspector selectColumnOI = null;
        try {
            selectColumnOI = selectColumnExprEval.initialize(selectListInputOI);
        } catch (HiveException he) {
            throw new WindowingException(he);
        }
        selectColName = getColumnName(selectColName, selectColumnExprNode, i);
        selectListExprEvaluators.add(selectColumnExprEval);
        selectListExprOIs.add(selectColumnOI);
        selectListExprNames.add(selectColName);
        i++;
    }
    // Output row shape: one struct field per select expression.
    selectListOutputOI = ObjectInspectorFactory.getStandardStructObjectInspector(selectListExprNames, selectListExprOIs);
}
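Once this method has run, producing an output row is a matter of evaluating each stored expression against an input row laid out according to selectListInputOI; the resulting Java list then lines up field-for-field with selectListOutputOI. A minimal sketch of that step follows; it is not part of ResultExpressionParser, and the method name and inputRow parameter are hypothetical:

// Hypothetical follow-up: evaluate every select expression against one input row.
List<Object> computeSelectRow(Object inputRow) throws HiveException {
    ArrayList<Object> output = new ArrayList<Object>(selectListExprEvaluators.size());
    for (ExprNodeEvaluator eval : selectListExprEvaluators) {
        output.add(eval.evaluate(inputRow));
    }
    // 'output' can be interpreted through the selectListOutputOI built above.
    return output;
}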