Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
The class TestArrowColumnarBatchSerDe, method initSerDe.
private StructObjectInspector initSerDe(AbstractSerDe serDe, String[][] schema) throws SerDeException {
  // newArrayList is presumably Guava's Lists.newArrayList, statically imported in this test class.
  List<String> fieldNameList = newArrayList();
  List<String> fieldTypeList = newArrayList();
  List<TypeInfo> typeInfoList = newArrayList();
  // Each schema row is a {column name, type string} pair.
  for (String[] nameAndType : schema) {
    String name = nameAndType[0];
    String type = nameAndType[1];
    fieldNameList.add(name);
    fieldTypeList.add(type);
    typeInfoList.add(TypeInfoUtils.getTypeInfoFromTypeString(type));
  }
  String fieldNames = Joiner.on(',').join(fieldNameList);
  String fieldTypes = Joiner.on(',').join(fieldTypeList);
  Properties schemaProperties = new Properties();
  schemaProperties.setProperty(serdeConstants.LIST_COLUMNS, fieldNames);
  schemaProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, fieldTypes);
  serDe.initialize(conf, schemaProperties, null);
  // Build a struct TypeInfo from the parsed columns and return its writable object inspector.
  return (StructObjectInspector) TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
      TypeInfoFactory.getStructTypeInfo(fieldNameList, typeInfoList));
}
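For reference, the core call maps a TypeInfo to an inspector over the writable object representation; for a struct TypeInfo it returns a standard struct inspector, hence the cast above. A minimal standalone sketch (the variable names and the "int" type string are illustrative, not from the test):

// Minimal sketch: writable inspector for a single primitive type.
TypeInfo intType = TypeInfoUtils.getTypeInfoFromTypeString("int");
ObjectInspector intOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(intType);
// intOI inspects values in their writable form (org.apache.hadoop.io.IntWritable)
// rather than plain java.lang.Integer.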
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
The class GenericUDFMacro, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  checkNotNull(colTypes, "colTypes");
  checkNotNull(arguments, "arguments");
  checkNotNull(bodyDesc, "bodyDesc");
  if (colTypes.size() != arguments.length) {
    throw new UDFArgumentLengthException(
        "The macro " + macroName + " accepts exactly " + colTypes.size() + " arguments.");
  }
  try {
    body = ExprNodeEvaluatorFactory.get(bodyDesc);
  } catch (HiveException ex) {
    throw new UDFArgumentException(ex);
  }
  // For each declared column type, build a writable object inspector and a converter
  // from the caller-supplied inspector to that writable form.
  converters = new ObjectInspectorConverters.Converter[arguments.length];
  ArrayList<ObjectInspector> colObjectInspectors = new ArrayList<ObjectInspector>(colTypes.size());
  for (int index = 0; index < arguments.length; ++index) {
    ObjectInspector objectInspector =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(colTypes.get(index));
    colObjectInspectors.add(objectInspector);
    converters[index] = ObjectInspectorConverters.getConverter(arguments[index], objectInspector);
  }
  evaluatedArguments = new ArrayList<Object>(arguments.length);
  // The macro body sees its arguments as fields of a standard struct.
  ObjectInspector structOI =
      ObjectInspectorFactory.getStandardStructObjectInspector(colNames, colObjectInspectors);
  try {
    return body.initialize(structOI);
  } catch (HiveException ex) {
    throw new UDFArgumentException(ex);
  }
}
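At evaluation time the stored converters bring each incoming value into the writable form declared by colTypes before the macro body runs. A hedged sketch of that step (the evaluate method is not part of this listing, so its exact shape is assumed from the initialize code above):

// Hedged sketch of the matching evaluate step (assumed, not shown in this listing).
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  evaluatedArguments.clear();
  for (int index = 0; index < arguments.length; ++index) {
    // Convert each lazily-evaluated argument to the writable form the body expects.
    evaluatedArguments.add(converters[index].convert(arguments[index].get()));
  }
  return body.evaluate(evaluatedArguments);
}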
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
The class TestKeyWrapperFactory, method setup.
@Before
public void setup() throws Exception {
  SessionState ss = new SessionState(new HiveConf());
  SessionState.setCurrentSessionState(ss);
  ArrayList<Text> col1 = new ArrayList<Text>();
  col1.add(new Text("0"));
  col1.add(new Text("1"));
  col1.add(new Text("2"));
  col1.add(new Text("3"));
  TypeInfo col1Type = TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo);
  ArrayList<Text> cola = new ArrayList<Text>();
  cola.add(new Text("a"));
  cola.add(new Text("b"));
  cola.add(new Text("c"));
  TypeInfo colaType = TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo);
  try {
    ArrayList<Object> data = new ArrayList<Object>();
    data.add(col1);
    data.add(cola);
    ArrayList<String> names = new ArrayList<String>();
    names.add("col1");
    names.add("cola");
    ArrayList<TypeInfo> typeInfos = new ArrayList<TypeInfo>();
    typeInfos.add(col1Type);
    typeInfos.add(colaType);
    TypeInfo dataType = TypeInfoFactory.getStructTypeInfo(names, typeInfos);
    InspectableObject r = new InspectableObject();
    ObjectInspector[] oi = new ObjectInspector[1];
    r.o = data;
    // Inspect the two-column row through a standard writable struct inspector.
    oi[0] = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(dataType);
    try {
      // get an evaluator for a simple field expression
      ExprNodeDesc exprDesc = new ExprNodeColumnDesc(colaType, "cola", "", false);
      ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(exprDesc);
      ExprNodeEvaluator[] evals = new ExprNodeEvaluator[1];
      evals[0] = eval;
      ObjectInspector resultOI = eval.initialize(oi[0]);
      ObjectInspector[] resultOIs = new ObjectInspector[1];
      resultOIs[0] = resultOI;
      factory = new KeyWrapperFactory(evals, oi, resultOIs);
    } catch (Throwable e) {
      e.printStackTrace();
      throw e;
    }
  } catch (Throwable e) {
    e.printStackTrace();
    throw new RuntimeException(e);
  }
}
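With the factory in place, a test can derive and compare grouping keys. A hedged usage sketch (the getNewKey/setHashKey/copyKey method names are taken from Hive's org.apache.hadoop.hive.ql.exec.KeyWrapper, and the row object and inspector locals from setup are assumed to be reachable):

// Hedged usage sketch (assumes the row object and inspector from setup are reachable).
KeyWrapper keyWrapper = factory.getKeyWrapper();
keyWrapper.getNewKey(data, oi[0]);      // evaluate the key expressions against the row
keyWrapper.setHashKey();                // cache the hash code for hash-based grouping
KeyWrapper copy = keyWrapper.copyKey(); // detached copy, safe to store in a hash table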
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
The class TestVectorStructField, method doRowStructFieldTest.
private void doRowStructFieldTest(TypeInfo typeInfo, List<String> columns, List<ExprNodeDesc> children,
    ExprNodeFieldDesc exprNodeFieldDesc, Object[][] randomRows, ObjectInspector rowInspector,
    TypeInfo outputTypeInfo, Object[] resultObjects) throws Exception {
  /*
  System.out.println(
      "*DEBUG* typeInfo " + typeInfo.toString() +
      " negativeTestMode ROW_MODE" +
      " exprDesc " + exprDesc.toString());
  */
  HiveConf hiveConf = new HiveConf();
  ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprNodeFieldDesc, hiveConf);
  evaluator.initialize(rowInspector);
  ObjectInspector objectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(outputTypeInfo);
  final int rowCount = randomRows.length;
  for (int i = 0; i < rowCount; i++) {
    Object[] row = randomRows[i];
    Object result = evaluator.evaluate(row);
    Object copyResult = null;
    try {
      copyResult = ObjectInspectorUtils.copyToStandardObject(
          result, objectInspector, ObjectInspectorCopyOption.WRITABLE);
    } catch (Exception e) {
      System.out.println("here");
    }
    resultObjects[i] = copyResult;
  }
}
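The copy option matters here: with a writable inspector, copyToStandardObject can hand back either writable or plain Java objects. An illustrative contrast (assumed values, not part of the test):

// Illustrative contrast (assumed): for a string-typed result,
// WRITABLE yields org.apache.hadoop.io.Text, JAVA a java.lang.String.
Object asWritable = ObjectInspectorUtils.copyToStandardObject(
    result, objectInspector, ObjectInspectorCopyOption.WRITABLE);
Object asJava = ObjectInspectorUtils.copyToStandardObject(
    result, objectInspector, ObjectInspectorCopyOption.JAVA);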
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
The class VectorReduceSinkObjectHashOperator, method getObjectInspectorArray.
private ObjectInspector[] getObjectInspectorArray(TypeInfo[] typeInfos) {
  final int size = typeInfos.length;
  ObjectInspector[] objectInspectors = new ObjectInspector[size];
  for (int i = 0; i < size; i++) {
    TypeInfo typeInfo = typeInfos[i];
    ObjectInspector standardWritableObjectInspector =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    objectInspectors[i] = standardWritableObjectInspector;
  }
  return objectInspectors;
}
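A short hedged usage sketch for this helper (the two-column key schema below is illustrative, not from the operator):

// Hedged usage sketch: writable inspectors for an illustrative key schema.
TypeInfo[] keyTypeInfos = new TypeInfo[] {
    TypeInfoFactory.longTypeInfo,
    TypeInfoFactory.stringTypeInfo
};
ObjectInspector[] keyInspectors = getObjectInspectorArray(keyTypeInfos);
// keyInspectors[0] inspects LongWritable values, keyInspectors[1] Text values.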