Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
The class VectorizedBatchUtil, method convertToStandardStructObjectInspector.
public static StandardStructObjectInspector convertToStandardStructObjectInspector(
    StructObjectInspector structObjectInspector) throws HiveException {
  List<? extends StructField> fields = structObjectInspector.getAllStructFieldRefs();
  List<ObjectInspector> oids = new ArrayList<ObjectInspector>();
  ArrayList<String> columnNames = new ArrayList<String>();
  for (StructField field : fields) {
    // Map each field's type to its standard writable object inspector.
    TypeInfo typeInfo =
        TypeInfoUtils.getTypeInfoFromTypeString(field.getFieldObjectInspector().getTypeName());
    ObjectInspector standardWritableObjectInspector =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    oids.add(standardWritableObjectInspector);
    columnNames.add(field.getFieldName());
  }
  return ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, oids);
}
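A minimal sketch of calling this helper (not taken from the Hive sources; the type string struct<id:int,name:string> is an invented example, and the same imports as the snippet above are assumed):

StructObjectInspector javaOI = (StructObjectInspector)
    TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(
        TypeInfoUtils.getTypeInfoFromTypeString("struct<id:int,name:string>"));
StandardStructObjectInspector writableOI =
    VectorizedBatchUtil.convertToStandardStructObjectInspector(javaOI);
// Each field keeps its name and type, now backed by writable object inspectors.
for (StructField field : writableOI.getAllStructFieldRefs()) {
  System.out.println(field.getFieldName() + " : " + field.getFieldObjectInspector().getTypeName());
}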
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
The class MatchPath, method createSelectListRR.
/*
* add array<struct> to the list of columns
*/
protected static RowResolver createSelectListRR(MatchPath evaluator, PTFInputDef inpDef)
    throws SemanticException {
  RowResolver rr = new RowResolver();
  RowResolver inputRR = inpDef.getOutputShape().getRr();
  evaluator.inputColumnNamesMap = new HashMap<String, String>();
  ArrayList<String> inputColumnNames = new ArrayList<String>();
  ArrayList<ObjectInspector> inpColOIs = new ArrayList<ObjectInspector>();
  for (ColumnInfo inpCInfo : inputRR.getColumnInfos()) {
    ColumnInfo cInfo = new ColumnInfo(inpCInfo);
    String colAlias = cInfo.getAlias();
    String[] tabColAlias = inputRR.reverseLookup(inpCInfo.getInternalName());
    if (tabColAlias != null) {
      colAlias = tabColAlias[1];
    }
    ASTNode inExpr = PTFTranslator.getASTNode(inpCInfo, inputRR);
    if (inExpr != null) {
      rr.putExpression(inExpr, cInfo);
      colAlias = inExpr.toStringTree().toLowerCase();
    } else {
      colAlias = colAlias == null ? cInfo.getInternalName() : colAlias;
      rr.put(cInfo.getTabAlias(), colAlias, cInfo);
    }
    evaluator.inputColumnNamesMap.put(cInfo.getInternalName(), colAlias);
    inputColumnNames.add(colAlias);
    inpColOIs.add(cInfo.getObjectInspector());
  }
  // The path attribute is an array<struct> whose struct fields mirror the input columns.
  StandardListObjectInspector pathAttrOI =
      ObjectInspectorFactory.getStandardListObjectInspector(
          ObjectInspectorFactory.getStandardStructObjectInspector(inputColumnNames, inpColOIs));
  ColumnInfo pathColumn = new ColumnInfo(PATHATTR_NAME,
      TypeInfoUtils.getTypeInfoFromObjectInspector(pathAttrOI), null, false, false);
  rr.put(null, PATHATTR_NAME, pathColumn);
  return rr;
}
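The core pattern here, nesting the two factory calls to build an array<struct> object inspector, can be shown in isolation. A hedged sketch with invented column names, assuming the usual serde2 imports:

List<String> names = Arrays.asList("company", "dept");
List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(
    PrimitiveObjectInspectorFactory.javaStringObjectInspector,
    PrimitiveObjectInspectorFactory.javaStringObjectInspector);
StandardListObjectInspector pathOI = ObjectInspectorFactory.getStandardListObjectInspector(
    ObjectInspectorFactory.getStandardStructObjectInspector(names, fieldOIs));
// Expected type name: array<struct<company:string,dept:string>>
System.out.println(pathOI.getTypeName());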
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
The class MyTestPrimitiveClass, method getRowInspector.
public StructObjectInspector getRowInspector(PrimitiveTypeInfo[] primitiveTypeInfos) {
  List<String> columnNames = new ArrayList<String>(primitiveCount);
  List<ObjectInspector> primitiveObjectInspectorList = new ArrayList<ObjectInspector>(primitiveCount);
  for (int index = 0; index < MyTestPrimitiveClass.primitiveCount; index++) {
    columnNames.add(String.format("col%d", index));
    PrimitiveTypeInfo primitiveTypeInfo = primitiveTypeInfos[index];
    PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
    primitiveObjectInspectorList.add(
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primitiveCategory));
  }
  StandardStructObjectInspector rowOI =
      ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, primitiveObjectInspectorList);
  return rowOI;
}
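Because the result is a StandardStructObjectInspector, it is also settable, so the same object can create and populate rows. A small sketch of that (field names and values are invented, assuming the usual writable imports):

StandardStructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
    Arrays.asList("col0", "col1"),
    Arrays.<ObjectInspector>asList(
        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
        PrimitiveObjectInspectorFactory.writableStringObjectInspector));
Object row = rowOI.create();
rowOI.setStructFieldData(row, rowOI.getStructFieldRef("col0"), new IntWritable(7));
rowOI.setStructFieldData(row, rowOI.getStructFieldRef("col1"), new Text("seven"));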
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
The class AvroObjectInspectorGenerator, method createObjectInspectorWorker.
private ObjectInspector createObjectInspectorWorker(TypeInfo ti) throws SerDeException {
  // Nullable unions have already been resolved to their underlying type
  // at deserialization and the object inspector will never see the actual union.
  if (!supportedCategories(ti)) {
    throw new AvroSerdeException("Don't yet support this type: " + ti);
  }
  ObjectInspector result;
  switch (ti.getCategory()) {
    case PRIMITIVE:
      PrimitiveTypeInfo pti = (PrimitiveTypeInfo) ti;
      result = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pti);
      break;
    case STRUCT:
      StructTypeInfo sti = (StructTypeInfo) ti;
      ArrayList<ObjectInspector> ois =
          new ArrayList<ObjectInspector>(sti.getAllStructFieldTypeInfos().size());
      for (TypeInfo typeInfo : sti.getAllStructFieldTypeInfos()) {
        ois.add(createObjectInspectorWorker(typeInfo));
      }
      result = ObjectInspectorFactory.getStandardStructObjectInspector(sti.getAllStructFieldNames(), ois);
      break;
    case MAP:
      MapTypeInfo mti = (MapTypeInfo) ti;
      result = ObjectInspectorFactory.getStandardMapObjectInspector(
          PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
              PrimitiveObjectInspector.PrimitiveCategory.STRING),
          createObjectInspectorWorker(mti.getMapValueTypeInfo()));
      break;
    case LIST:
      ListTypeInfo ati = (ListTypeInfo) ti;
      result = ObjectInspectorFactory.getStandardListObjectInspector(
          createObjectInspectorWorker(ati.getListElementTypeInfo()));
      break;
    case UNION:
      UnionTypeInfo uti = (UnionTypeInfo) ti;
      List<TypeInfo> allUnionObjectTypeInfos = uti.getAllUnionObjectTypeInfos();
      List<ObjectInspector> unionObjectInspectors =
          new ArrayList<ObjectInspector>(allUnionObjectTypeInfos.size());
      for (TypeInfo typeInfo : allUnionObjectTypeInfos) {
        unionObjectInspectors.add(createObjectInspectorWorker(typeInfo));
      }
      result = ObjectInspectorFactory.getStandardUnionObjectInspector(unionObjectInspectors);
      break;
    default:
      throw new AvroSerdeException("No Hive categories matched: " + ti);
  }
  return result;
}
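A hedged end-to-end sketch of this generator (the Avro record schema below is invented for illustration, and the Avro and serde2 imports are assumed): parsing a record schema and asking for its object inspector yields a struct whose fields mirror the Avro fields.

Schema schema = new Schema.Parser().parse(
    "{\"type\":\"record\",\"name\":\"person\",\"fields\":["
    + "{\"name\":\"name\",\"type\":\"string\"},"
    + "{\"name\":\"age\",\"type\":\"int\"}]}");
AvroObjectInspectorGenerator gen = new AvroObjectInspectorGenerator(schema);
StructObjectInspector oi = (StructObjectInspector) gen.getObjectInspector();
// Expect two fields: name (string) and age (int).
for (StructField f : oi.getAllStructFieldRefs()) {
  System.out.println(f.getFieldName() + " : " + f.getFieldObjectInspector().getTypeName());
}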
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
The class TestGenericUDFSortArrayByField, method testSortPrimitiveTupleTwoField.
@Test
public void testSortPrimitiveTupleTwoField() throws HiveException {
  List<ObjectInspector> tuple = new ArrayList<ObjectInspector>();
  tuple.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  tuple.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  ObjectInspector[] inputOIs = {
      ObjectInspectorFactory.getStandardListObjectInspector(
          ObjectInspectorFactory.getStandardStructObjectInspector(
              asList("Company", "Department"), tuple)),
      PrimitiveObjectInspectorFactory.writableStringObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector };
  udf.initialize(inputOIs);
  Object i1 = asList(new Text("Linkedin"), new Text("HR"));
  Object i2 = asList(new Text("Linkedin"), new Text("IT"));
  Object i3 = asList(new Text("Linkedin"), new Text("Finance"));
  Object i4 = asList(new Text("Facebook"), new Text("IT"));
  Object i5 = asList(new Text("Facebook"), new Text("Finance"));
  Object i6 = asList(new Text("Facebook"), new Text("HR"));
  Object i7 = asList(new Text("Google"), new Text("Logistics"));
  Object i8 = asList(new Text("Google"), new Text("Finance"));
  Object i9 = asList(new Text("Google"), new Text("HR"));
  HiveVarchar vc = new HiveVarchar();
  vc.setValue("Department");
  GenericUDF.DeferredJavaObject[] argas = {
      new GenericUDF.DeferredJavaObject(asList(i1, i2, i3, i4, i5, i6, i7, i8, i9)),
      new GenericUDF.DeferredJavaObject(new Text("Company")),
      new GenericUDF.DeferredJavaObject(new HiveVarcharWritable(vc)) };
  runAndVerify(argas, asList(i5, i6, i4, i8, i9, i7, i3, i1, i2));
}
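The expected order passed to runAndVerify (i5, i6, i4, i8, i9, i7, i3, i1, i2) corresponds to sorting the struct array ascending first by Company and then by Department: Facebook/Finance, Facebook/HR, Facebook/IT, Google/Finance, Google/HR, Google/Logistics, Linkedin/Finance, Linkedin/HR, Linkedin/IT.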