
Example 1 with IdentityConversion

Use of org.apache.flink.table.functions.hive.conversion.IdentityConversion in the apache/flink project.

The open method of the HiveGenericUDTF class:

@Override
public void open(FunctionContext context) throws Exception {
    function = hiveFunctionWrapper.createFunction();
    // Route each row emitted by the Hive UDTF back into Flink,
    // converting it from Hive's representation to a Flink Row first.
    function.setCollector(input -> {
        Row row = (Row) HiveInspectors.toFlinkObject(returnInspector, input, hiveShim);
        HiveGenericUDTF.this.collect(row);
    });
    ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(hiveShim, constantArguments, argTypes);
    returnInspector = function.initialize(argumentInspectors);
    isArgsSingleArray = HiveFunctionUtil.isSingleBoxedArray(argTypes);
    // Precompute one Flink-to-Hive conversion per argument.
    conversions = new HiveObjectConversion[argumentInspectors.length];
    for (int i = 0; i < argumentInspectors.length; i++) {
        conversions[i] = HiveInspectors.getConversion(argumentInspectors[i], argTypes[i].getLogicalType(), hiveShim);
    }
    // If every conversion is an IdentityConversion, argument conversion
    // can be skipped entirely at call time.
    allIdentityConverter = Arrays.stream(conversions).allMatch(conv -> conv instanceof IdentityConversion);
}
Also used:
TableFunction (org.apache.flink.table.functions.TableFunction)
DataType (org.apache.flink.table.types.DataType)
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException)
Arrays (java.util.Arrays)
Logger (org.slf4j.Logger)
LoggerFactory (org.slf4j.LoggerFactory)
GenericUDTF (org.apache.hadoop.hive.ql.udf.generic.GenericUDTF)
Collector (org.apache.hadoop.hive.ql.udf.generic.Collector)
HiveTypeUtil (org.apache.flink.table.catalog.hive.util.HiveTypeUtil)
TypeInfoLogicalTypeConverter (org.apache.flink.table.runtime.types.TypeInfoLogicalTypeConverter)
IdentityConversion (org.apache.flink.table.functions.hive.conversion.IdentityConversion)
HiveObjectConversion (org.apache.flink.table.functions.hive.conversion.HiveObjectConversion)
HiveInspectors (org.apache.flink.table.functions.hive.conversion.HiveInspectors)
HiveFunctionUtil (org.apache.flink.table.functions.hive.util.HiveFunctionUtil)
HiveShim (org.apache.flink.table.catalog.hive.client.HiveShim)
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)
VisibleForTesting (org.apache.flink.annotation.VisibleForTesting)
Internal (org.apache.flink.annotation.Internal)
Preconditions.checkArgument (org.apache.flink.util.Preconditions.checkArgument)
Row (org.apache.flink.types.Row)
TypeInformation (org.apache.flink.api.common.typeinfo.TypeInformation)
FunctionContext (org.apache.flink.table.functions.FunctionContext)
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)
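The allIdentityConverter flag computed above is an optimization: an IdentityConversion is a HiveObjectConversion that hands its input back unchanged, so when every argument conversion is an identity, the call path can skip the conversion loop altogether. Below is a minimal, self-contained sketch of that idea; the simplified interface, the IdentityConversion body, and the toHiveArgs helper are illustrative assumptions, not the actual Flink classes.

import java.io.Serializable;

// Simplified sketches of Flink's conversion types (the real ones live in
// org.apache.flink.table.functions.hive.conversion); details here are assumptions.
interface HiveObjectConversion extends Serializable {
    Object toHiveObject(Object o);
}

class IdentityConversion implements HiveObjectConversion {
    @Override
    public Object toHiveObject(Object o) {
        return o; // the Flink and Hive representations already match
    }
}

class ConversionSketch {
    // Hypothetical helper: convert Flink arguments to Hive objects,
    // skipping all work when every conversion is an identity.
    static Object[] toHiveArgs(Object[] args, HiveObjectConversion[] conversions,
                               boolean allIdentityConverter) {
        if (allIdentityConverter) {
            return args; // fast path enabled by the precomputed flag
        }
        Object[] converted = new Object[args.length];
        for (int i = 0; i < args.length; i++) {
            converted[i] = conversions[i].toHiveObject(args[i]);
        }
        return converted;
    }
}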

Example 2 with IdentityConversion

Use of org.apache.flink.table.functions.hive.conversion.IdentityConversion in the apache/flink project.

The openInternal method of the HiveSimpleUDF class:

@Override
public void openInternal() {
    LOG.info("Opening HiveSimpleUDF as '{}'", hiveFunctionWrapper.getClassName());
    function = hiveFunctionWrapper.createFunction();
    // Map each Flink argument type to its Hive TypeInfo equivalent.
    List<TypeInfo> typeInfos = new ArrayList<>();
    for (DataType arg : argTypes) {
        typeInfos.add(HiveTypeUtil.toHiveTypeInfo(arg, false));
    }
    try {
        // Resolve the concrete eval method of the UDF that matches the argument types.
        method = function.getResolver().getEvalMethod(typeInfos);
        returnInspector = ObjectInspectorFactory.getReflectionObjectInspector(method.getGenericReturnType(), ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
        ObjectInspector[] argInspectors = new ObjectInspector[typeInfos.size()];
        for (int i = 0; i < argTypes.length; i++) {
            argInspectors[i] = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfos.get(i));
        }
        conversionHelper = new GenericUDFUtils.ConversionHelper(method, argInspectors);
        // Precompute one Flink-to-Hive conversion per argument.
        conversions = new HiveObjectConversion[argInspectors.length];
        for (int i = 0; i < argInspectors.length; i++) {
            conversions[i] = HiveInspectors.getConversion(argInspectors[i], argTypes[i].getLogicalType(), hiveShim);
        }
        // Same optimization as in HiveGenericUDTF: remember whether all
        // conversions are identities so the call path can skip them.
        allIdentityConverter = Arrays.stream(conversions).allMatch(conv -> conv instanceof IdentityConversion);
    } catch (Exception e) {
        throw new FlinkHiveUDFException(String.format("Failed to open HiveSimpleUDF from %s", hiveFunctionWrapper.getClassName()), e);
    }
}
Also used:
DataType (org.apache.flink.table.types.DataType)
TypeInfoUtils (org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils)
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException)
Arrays (java.util.Arrays)
ArrayList (java.util.ArrayList)
List (java.util.List)
Method (java.lang.reflect.Method)
Logger (org.slf4j.Logger)
LoggerFactory (org.slf4j.LoggerFactory)
HiveTypeUtil (org.apache.flink.table.catalog.hive.util.HiveTypeUtil)
IdentityConversion (org.apache.flink.table.functions.hive.conversion.IdentityConversion)
HiveObjectConversion (org.apache.flink.table.functions.hive.conversion.HiveObjectConversion)
HiveInspectors (org.apache.flink.table.functions.hive.conversion.HiveInspectors)
HiveShim (org.apache.flink.table.catalog.hive.client.HiveShim)
GenericUDFUtils (org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils)
FunctionRegistry (org.apache.hadoop.hive.ql.exec.FunctionRegistry)
UDF (org.apache.hadoop.hive.ql.exec.UDF)
ObjectInspectorFactory (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory)
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)
Preconditions.checkArgument (org.apache.flink.util.Preconditions.checkArgument)
Internal (org.apache.flink.annotation.Internal)
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)
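openInternal only resolves and caches state: the eval Method, the inspectors, and the per-argument conversions. The payoff comes at invocation time. Below is a hedged sketch of what the call side could look like, built only from the pieces visible in the snippet; HiveSimpleUdfCallSketch and its call method are illustrative names, not Flink API, and the import assumes HiveObjectConversion exposes a single toHiveObject(Object) method. The actual Flink implementation may differ (for example, it also runs conversionHelper on the arguments and converts the result back to a Flink object).

import java.lang.reflect.Method;
import org.apache.flink.table.functions.hive.conversion.HiveObjectConversion;

// Hedged sketch of the invocation side: convert arguments (unless all
// conversions are identities), then reflectively invoke the eval method
// resolved by getResolver().getEvalMethod(typeInfos) above.
final class HiveSimpleUdfCallSketch {
    static Object call(Method method, Object udf, Object[] args,
                       HiveObjectConversion[] conversions,
                       boolean allIdentityConverter) throws Exception {
        if (!allIdentityConverter) {
            Object[] hiveArgs = new Object[args.length];
            for (int i = 0; i < args.length; i++) {
                // Each precomputed conversion turns a Flink value into the
                // Hive representation the eval method expects.
                hiveArgs[i] = conversions[i].toHiveObject(args[i]);
            }
            args = hiveArgs;
        }
        return method.invoke(udf, args); // run the Hive UDF's eval method
    }
}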

Aggregations

Arrays (java.util.Arrays): 2
Internal (org.apache.flink.annotation.Internal): 2
HiveShim (org.apache.flink.table.catalog.hive.client.HiveShim): 2
HiveTypeUtil (org.apache.flink.table.catalog.hive.util.HiveTypeUtil): 2
HiveInspectors (org.apache.flink.table.functions.hive.conversion.HiveInspectors): 2
HiveObjectConversion (org.apache.flink.table.functions.hive.conversion.HiveObjectConversion): 2
IdentityConversion (org.apache.flink.table.functions.hive.conversion.IdentityConversion): 2
DataType (org.apache.flink.table.types.DataType): 2
Preconditions.checkArgument (org.apache.flink.util.Preconditions.checkArgument): 2
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException): 2
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 2
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 2
Logger (org.slf4j.Logger): 2
LoggerFactory (org.slf4j.LoggerFactory): 2
Method (java.lang.reflect.Method): 1
ArrayList (java.util.ArrayList): 1
List (java.util.List): 1
VisibleForTesting (org.apache.flink.annotation.VisibleForTesting): 1
TypeInformation (org.apache.flink.api.common.typeinfo.TypeInformation): 1
FunctionContext (org.apache.flink.table.functions.FunctionContext): 1