Use of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector in the Apache Hive project.
From the class PTFDeserializer, method initializeWindowing.
/**
 * Wires up a windowing table-function definition for execution: connects the
 * resolver to the evaluator, initializes every window function (arguments,
 * frame, evaluator), and builds the struct ObjectInspector that describes the
 * windowing output (one column per window function followed by the input
 * columns).
 *
 * @param def the windowing table-function definition to initialize
 * @throws HiveException if any sub-expression or evaluator fails to initialize
 */
public void initializeWindowing(WindowTableFunctionDef def) throws HiveException {
  ShapeDetails inputShape = def.getInput().getOutputShape();

  // 1. Set up the resolver and connect it to the table-function evaluator.
  TableFunctionEvaluator evaluator = def.getTFunction();
  WindowingTableFunctionResolver resolver =
      (WindowingTableFunctionResolver) constructResolver(def.getResolverClassName());
  resolver.initialize(ptfDesc, def, evaluator);

  // 2. Initialize each window function: its arguments, its frame (if any),
  //    and finally its evaluator (which also sets the function's output OI).
  for (WindowFunctionDef fnDef : def.getWindowFunctions()) {
    if (fnDef.getArgs() != null) {
      for (PTFExpressionDef argDef : fnDef.getArgs()) {
        initialize(argDef, inputShape);
      }
    }
    WindowFrameDef frameDef = fnDef.getWindowFrame();
    if (frameDef != null) {
      initialize(frameDef, inputShape);
    }
    setupWdwFnEvaluator(fnDef);
  }

  // 3. Collect the output columns: the window-function results first, then
  //    the input columns appended by addInputColumnsToList.
  ArrayList<String> columnAliases = new ArrayList<String>();
  ArrayList<ObjectInspector> columnOIs = new ArrayList<ObjectInspector>();
  for (WindowFunctionDef fnDef : def.getWindowFunctions()) {
    columnAliases.add(fnDef.getAlias());
    // A pivoted result is list-valued; the column type is the list's element OI.
    ObjectInspector fnOI = fnDef.getOI();
    if (fnDef.isPivotResult()) {
      fnOI = ((ListObjectInspector) fnOI).getListElementObjectInspector();
    }
    columnOIs.add(fnOI);
  }
  PTFDeserializer.addInputColumnsToList(inputShape, columnAliases, columnOIs);

  // 4. Publish the windowing output OI and initialize the output shape.
  StructObjectInspector windowOutputOI =
      ObjectInspectorFactory.getStandardStructObjectInspector(columnAliases, columnOIs);
  resolver.setWdwProcessingOutputOI(windowOutputOI);
  initialize(def.getOutputShape(), windowOutputOI);
  resolver.initializeOutputOI();
}
Use of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector in the Apache Hive project.
From the class PTFDeserializer, method getTypeMap.
/**
 * Splits a struct ObjectInspector into two parallel lists: index 0 of the
 * returned array holds the field names, index 1 holds the field TypeInfos.
 *
 * @param oi the struct ObjectInspector to decompose
 * @return a two-element array: {field names, field TypeInfos}
 */
private static ArrayList<? extends Object>[] getTypeMap(StructObjectInspector oi) {
  StructTypeInfo structType = (StructTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(oi);
  ArrayList<String> fieldNames = structType.getAllStructFieldNames();
  ArrayList<TypeInfo> fieldTypes = structType.getAllStructFieldTypeInfos();
  return new ArrayList<?>[] { fieldNames, fieldTypes };
}
Use of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector in the Apache Hive project.
From the class ColumnStatsTask, method unpackStructObject.
/**
 * Recursively walks a struct value, unpacking every primitive leaf field into
 * the given column-statistics object. Each nested struct is descended into;
 * each primitive field is handed to unpackPrimitiveObject.
 *
 * @param oi        inspector for {@code o}; must be of STRUCT category
 * @param o         the struct data (may be null; fields are then passed as null)
 * @param fName     name of the enclosing field (unused at this level)
 * @param cStatsObj accumulator receiving the unpacked statistics values
 * @throws RuntimeException if {@code oi} is not a struct inspector
 */
private void unpackStructObject(ObjectInspector oi, Object o, String fName, ColumnStatisticsObj cStatsObj) {
  if (oi.getCategory() != ObjectInspector.Category.STRUCT) {
    throw new RuntimeException("Invalid object datatype : " + oi.getCategory().toString());
  }
  StructObjectInspector structOI = (StructObjectInspector) oi;
  List<? extends StructField> structFields = structOI.getAllStructFieldRefs();
  List<Object> fieldData = structOI.getStructFieldsDataAsList(o);
  for (int idx = 0; idx < structFields.size(); idx++) {
    StructField field = structFields.get(idx);
    ObjectInspector fieldOI = field.getFieldObjectInspector();
    // A null struct yields null for every field.
    Object fieldValue = (fieldData == null) ? null : fieldData.get(idx);
    if (fieldOI.getCategory() == ObjectInspector.Category.PRIMITIVE) {
      unpackPrimitiveObject(fieldOI, fieldValue, field.getFieldName(), cStatsObj);
    } else {
      unpackStructObject(fieldOI, fieldValue, field.getFieldName(), cStatsObj);
    }
  }
}
Use of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector in the Apache Hive project.
From the class CommonJoinOperator, method getJoinOutputObjectInspector.
/**
 * Builds the struct ObjectInspector describing a join output row: the value
 * ObjectInspectors of every alias, concatenated in join order, paired with the
 * configured output column names.
 *
 * @param order                  the join aliases in output order
 * @param aliasToObjectInspectors per-alias value ObjectInspectors
 * @param conf                   the join descriptor supplying output column names
 * @return a standard struct ObjectInspector for the join output
 */
private <T extends JoinDesc> ObjectInspector getJoinOutputObjectInspector(Byte[] order, List<ObjectInspector>[] aliasToObjectInspectors, T conf) {
  List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
  for (Byte alias : order) {
    List<ObjectInspector> valueOIs = getValueObjectInspectors(alias, aliasToObjectInspectors);
    // Aliases contributing no value columns are skipped entirely.
    if (valueOIs != null && !valueOIs.isEmpty()) {
      fieldOIs.addAll(valueOIs);
    }
  }
  return ObjectInspectorFactory.getStandardStructObjectInspector(
      conf.getOutputColumnNames(), fieldOIs);
}
Use of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector in the Apache Hive project.
From the class UnionOperator, method process.
/**
 * Forwards one input row to the union output. When the parent branch's column
 * types differ from the union's output types (and the reusable output row has
 * columns), each column is converted in place into {@code outputRow} before
 * forwarding; otherwise the row is forwarded untouched with the parent's own
 * ObjectInspector.
 *
 * @param row the incoming row from parent {@code tag}
 * @param tag index of the parent branch this row arrived from
 * @throws HiveException if forwarding fails downstream
 */
@Override
public synchronized void process(Object row, int tag) throws HiveException {
  if (!needsTransform[tag] || outputRow.size() == 0) {
    // This parent already produces the output types; pass the row straight through.
    forward(row, inputObjInspectors[tag]);
    return;
  }
  StructObjectInspector parentOI = parentObjInspectors[tag];
  List<? extends StructField> parentFieldRefs = parentFields[tag];
  for (int col = 0; col < parentFieldRefs.size(); col++) {
    StructField fieldRef = parentFieldRefs.get(col);
    Object converted = columnTypeResolvers[col].convertIfNecessary(
        parentOI.getStructFieldData(row, fieldRef), fieldRef.getFieldObjectInspector());
    outputRow.set(col, converted);
  }
  forward(outputRow, outputObjInspector);
}
Aggregations