Example usage of org.apache.flink.table.functions.hive.conversion.HiveObjectConversion in the Apache Flink project: the open method of the HiveGenericUDTF class.
@Override
public void open(FunctionContext context) throws Exception {
    function = hiveFunctionWrapper.createFunction();

    // Route every record emitted by the Hive UDTF through Flink's collector,
    // converting it from the Hive representation to a Flink Row first.
    // (returnInspector is read lazily inside the lambda, so registering the
    // collector before initialize() is safe.)
    function.setCollector(
            hiveOutput -> {
                Row flinkRow = (Row) HiveInspectors.toFlinkObject(returnInspector, hiveOutput, hiveShim);
                HiveGenericUDTF.this.collect(flinkRow);
            });

    ObjectInspector[] inspectors = HiveInspectors.toInspectors(hiveShim, constantArguments, argTypes);
    returnInspector = function.initialize(inspectors);

    isArgsSingleArray = HiveFunctionUtil.isSingleBoxedArray(argTypes);

    // Pre-compute one Flink->Hive converter per argument for the eval path.
    int numArgs = inspectors.length;
    conversions = new HiveObjectConversion[numArgs];
    for (int idx = 0; idx < numArgs; idx++) {
        conversions[idx] = HiveInspectors.getConversion(inspectors[idx], argTypes[idx].getLogicalType(), hiveShim);
    }

    // If every converter is the identity, argument conversion can be skipped entirely.
    boolean allIdentity = true;
    for (HiveObjectConversion conversion : conversions) {
        if (!(conversion instanceof IdentityConversion)) {
            allIdentity = false;
            break;
        }
    }
    allIdentityConverter = allIdentity;
}
Example usage of org.apache.flink.table.functions.hive.conversion.HiveObjectConversion in the Apache Flink project: the openInternal method of the HiveSimpleUDF class.
@Override
public void openInternal() {
    LOG.info("Opening HiveSimpleUDF as '{}'", hiveFunctionWrapper.getClassName());

    function = hiveFunctionWrapper.createFunction();

    // Translate each Flink argument type into a Hive TypeInfo so the
    // resolver can select the matching evaluate() overload.
    List<TypeInfo> hiveTypeInfos = new ArrayList<>();
    for (DataType argType : argTypes) {
        hiveTypeInfos.add(HiveTypeUtil.toHiveTypeInfo(argType, false));
    }

    try {
        method = function.getResolver().getEvalMethod(hiveTypeInfos);
        returnInspector =
                ObjectInspectorFactory.getReflectionObjectInspector(
                        method.getGenericReturnType(),
                        ObjectInspectorFactory.ObjectInspectorOptions.JAVA);

        // Standard Java inspectors for the argument types; also drives the
        // conversion helper that adapts arguments to the chosen eval method.
        int numArgs = hiveTypeInfos.size();
        ObjectInspector[] inspectors = new ObjectInspector[numArgs];
        for (int idx = 0; idx < numArgs; idx++) {
            inspectors[idx] = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(hiveTypeInfos.get(idx));
        }
        conversionHelper = new GenericUDFUtils.ConversionHelper(method, inspectors);

        // One converter per argument; remember whether all of them are
        // identities so conversion can be skipped on the hot path.
        conversions = new HiveObjectConversion[numArgs];
        boolean allIdentity = true;
        for (int idx = 0; idx < numArgs; idx++) {
            conversions[idx] = HiveInspectors.getConversion(inspectors[idx], argTypes[idx].getLogicalType(), hiveShim);
            if (!(conversions[idx] instanceof IdentityConversion)) {
                allIdentity = false;
            }
        }
        allIdentityConverter = allIdentity;
    } catch (Exception e) {
        // Wrap any failure (resolver lookup, inspector creation, ...) into the
        // project's UDF exception type, preserving the cause.
        throw new FlinkHiveUDFException(
                String.format("Failed to open HiveSimpleUDF from %s", hiveFunctionWrapper.getClassName()), e);
    }
}
Aggregations