Search in sources :

Example 11 with ArrayType

Use of org.apache.flink.table.types.logical.ArrayType in the Apache Flink project.

The createArrayConverter method of the AvroToRowDataConverters class.

/**
 * Builds a runtime converter that turns an Avro list into a Flink
 * {@link GenericArrayData}. Each element is converted with a nullable
 * converter derived from the array's element type, and the result is stored
 * in an array of the element type's internal conversion class.
 */
private static AvroToRowDataConverter createArrayConverter(ArrayType arrayType) {
    final LogicalType elementType = arrayType.getElementType();
    final AvroToRowDataConverter elementConverter = createNullableConverter(elementType);
    final Class<?> elementClass = LogicalTypeUtils.toInternalConversionClass(elementType);
    return avroObject -> {
        final List<?> source = (List<?>) avroObject;
        final int size = source.size();
        // Allocate an array of the exact internal element class so downstream
        // code sees correctly-typed elements.
        final Object[] converted = (Object[]) Array.newInstance(elementClass, size);
        int pos = 0;
        for (Object element : source) {
            converted[pos++] = elementConverter.convert(element);
        }
        return new GenericArrayData(converted);
    };
}
Also used : ChronoField(java.time.temporal.ChronoField) Array(java.lang.reflect.Array) GenericArrayData(org.apache.flink.table.data.GenericArrayData) HashMap(java.util.HashMap) RowType(org.apache.flink.table.types.logical.RowType) ByteBuffer(java.nio.ByteBuffer) GenericRowData(org.apache.flink.table.data.GenericRowData) DecimalType(org.apache.flink.table.types.logical.DecimalType) GenericMapData(org.apache.flink.table.data.GenericMapData) Map(java.util.Map) LocalTime(java.time.LocalTime) LogicalTypeUtils(org.apache.flink.table.types.logical.utils.LogicalTypeUtils) IndexedRecord(org.apache.avro.generic.IndexedRecord) GenericRecord(org.apache.avro.generic.GenericRecord) RowData(org.apache.flink.table.data.RowData) GenericFixed(org.apache.avro.generic.GenericFixed) TimestampData(org.apache.flink.table.data.TimestampData) DataTypes(org.apache.flink.table.api.DataTypes) DecimalData(org.apache.flink.table.data.DecimalData) ArrayType(org.apache.flink.table.types.logical.ArrayType) Instant(java.time.Instant) AvroSchemaConverter.extractValueTypeToAvroMap(org.apache.flink.formats.avro.typeutils.AvroSchemaConverter.extractValueTypeToAvroMap) Serializable(java.io.Serializable) StringData(org.apache.flink.table.data.StringData) List(java.util.List) LogicalType(org.apache.flink.table.types.logical.LogicalType) LocalDate(java.time.LocalDate) Internal(org.apache.flink.annotation.Internal) GenericArrayData(org.apache.flink.table.data.GenericArrayData) List(java.util.List)

Example 12 with ArrayType

Use of org.apache.flink.table.types.logical.ArrayType in the Apache Flink project.

The createConverter method of the RowDataToAvroConverters class.

// --------------------------------------------------------------------------------
// IMPORTANT! We use anonymous classes instead of lambdas for a reason here. It is
// necessary because the maven shade plugin cannot relocate classes in
// SerializedLambdas (MSHADE-260). On the other hand we want to relocate Avro for
// sql-client uber jars.
// --------------------------------------------------------------------------------
/**
 * Creates a runtime converter according to the given logical type that converts objects of
 * Flink Table & SQL internal data structures to corresponding Avro data structures.
 *
 * @param type the Flink logical type to create a converter for
 * @return a serializable converter from Flink internal data to the Avro representation; the
 *     returned converter maps {@code null} input to {@code null} and unwraps two-branch
 *     nullable union schemas before delegating to the type-specific conversion
 * @throws UnsupportedOperationException if the logical type has no Avro mapping (e.g. RAW)
 */
public static RowDataToAvroConverter createConverter(LogicalType type) {
    final RowDataToAvroConverter converter;
    switch(type.getTypeRoot()) {
        case NULL:
            // The NULL type carries no data; always emit null.
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return null;
                }
            };
            break;
        case TINYINT:
            // Avro has no 8-bit integer type; widen byte to int.
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return ((Byte) object).intValue();
                }
            };
            break;
        case SMALLINT:
            // Avro has no 16-bit integer type; widen short to int.
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return ((Short) object).intValue();
                }
            };
            break;
        // The following types use the same internal representation in Flink and in
        // Avro, so the value is passed through unchanged.
        // boolean
        case BOOLEAN:
        // int
        case INTEGER:
        // long
        case INTERVAL_YEAR_MONTH:
        // long
        case BIGINT:
        // long
        case INTERVAL_DAY_TIME:
        // float
        case FLOAT:
        // double
        case DOUBLE:
        // int
        case TIME_WITHOUT_TIME_ZONE:
        // int
        case DATE:
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return object;
                }
            };
            break;
        case CHAR:
        case VARCHAR:
            // Avro strings are Utf8 instances; built from the value's string form.
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return new Utf8(object.toString());
                }
            };
            break;
        case BINARY:
        case VARBINARY:
            // Avro bytes are ByteBuffers; wrap the byte array without copying.
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return ByteBuffer.wrap((byte[]) object);
                }
            };
            break;
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            // Encode as epoch milliseconds; note that sub-millisecond precision
            // of the TimestampData is lost here.
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return ((TimestampData) object).toInstant().toEpochMilli();
                }
            };
            break;
        case DECIMAL:
            // Encode the decimal as its unscaled big-endian byte representation.
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return ByteBuffer.wrap(((DecimalData) object).toUnscaledBytes());
                }
            };
            break;
        // Composite types delegate to dedicated converter factories.
        case ARRAY:
            converter = createArrayConverter((ArrayType) type);
            break;
        case ROW:
            converter = createRowConverter((RowType) type);
            break;
        case MAP:
        case MULTISET:
            converter = createMapConverter(type);
            break;
        case RAW:
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }
    // wrap into nullable converter
    return new RowDataToAvroConverter() {

        private static final long serialVersionUID = 1L;

        @Override
        public Object convert(Schema schema, Object object) {
            if (object == null) {
                return null;
            }
            // get actual schema if it is a nullable schema; only two-branch unions
            // of the form [X, null] or [null, X] are accepted here
            Schema actualSchema;
            if (schema.getType() == Schema.Type.UNION) {
                List<Schema> types = schema.getTypes();
                int size = types.size();
                if (size == 2 && types.get(1).getType() == Schema.Type.NULL) {
                    actualSchema = types.get(0);
                } else if (size == 2 && types.get(0).getType() == Schema.Type.NULL) {
                    actualSchema = types.get(1);
                } else {
                    throw new IllegalArgumentException("The Avro schema is not a nullable type: " + schema.toString());
                }
            } else {
                actualSchema = schema;
            }
            return converter.convert(actualSchema, object);
        }
    };
}
Also used : TimestampData(org.apache.flink.table.data.TimestampData) Schema(org.apache.avro.Schema) RowType(org.apache.flink.table.types.logical.RowType) DecimalData(org.apache.flink.table.data.DecimalData) ArrayType(org.apache.flink.table.types.logical.ArrayType) Utf8(org.apache.avro.util.Utf8)

Example 13 with ArrayType

Use of org.apache.flink.table.types.logical.ArrayType in the Apache Flink project.

The setColumn method of the RowDataVectorizer class.

/**
 * Writes one array-typed value from {@code row} into the given ORC list vector at
 * {@code rowId}: records the length/offset bookkeeping, grows the child vector,
 * and recursively writes every element of the array into the child vector.
 */
private static void setColumn(int rowId, ListColumnVector listColumnVector, LogicalType type, RowData row, int columnId) {
    final ArrayType arrayType = (ArrayType) type;
    final ArrayData arrayData = row.getArray(columnId);
    final int elementCount = arrayData.size();
    // This row's elements start at the current child count and span elementCount slots.
    listColumnVector.lengths[rowId] = elementCount;
    listColumnVector.offsets[rowId] = listColumnVector.childCount;
    listColumnVector.childCount += elementCount;
    // Grow the child vector, preserving already-written data when appending.
    listColumnVector.child.ensureSize(listColumnVector.childCount, listColumnVector.offsets[rowId] != 0);
    final LogicalType elementType = arrayType.getElementType();
    final RowData elementRows = convert(arrayData, elementType);
    final int base = (int) listColumnVector.offsets[rowId];
    for (int i = 0; i < elementCount; i++) {
        setColumn(base + i, listColumnVector.child, elementType, elementRows, i);
    }
}
Also used : ArrayType(org.apache.flink.table.types.logical.ArrayType) RowData(org.apache.flink.table.data.RowData) GenericRowData(org.apache.flink.table.data.GenericRowData) ArrayData(org.apache.flink.table.data.ArrayData)

Example 14 with ArrayType

Use of org.apache.flink.table.types.logical.ArrayType in the Apache Flink project.

The getConversion method of the HiveInspectors class.

/**
 * Get conversion for converting Flink object to Hive object from an ObjectInspector and the
 * corresponding Flink DataType.
 *
 * @param inspector Hive object inspector describing the target Hive representation
 * @param dataType Flink logical type of the value being converted
 * @param hiveShim shim abstracting Hive-version-specific behavior (dates, timestamps)
 * @return a conversion mapping a (possibly null) Flink value to the matching Hive object
 * @throws FlinkHiveUDFException if the inspector kind is unsupported
 */
public static HiveObjectConversion getConversion(ObjectInspector inspector, LogicalType dataType, HiveShim hiveShim) {
    if (inspector instanceof PrimitiveObjectInspector) {
        return getPrimitiveConversion((PrimitiveObjectInspector) inspector, dataType, hiveShim);
    }
    if (inspector instanceof ListObjectInspector) {
        return getListConversion((ListObjectInspector) inspector, (ArrayType) dataType, hiveShim);
    }
    if (inspector instanceof MapObjectInspector) {
        return getMapConversion((MapObjectInspector) inspector, (MapType) dataType, hiveShim);
    }
    if (inspector instanceof StructObjectInspector) {
        return getStructConversion((StructObjectInspector) inspector, (RowType) dataType, hiveShim);
    }
    throw new FlinkHiveUDFException(String.format("Flink doesn't support convert object conversion for %s yet", inspector));
}

// Conversion for primitive types: identity where Flink and Hive representations
// agree, otherwise a targeted wrapper (date/timestamp via the shim, char/varchar
// re-lengthened from the Flink type, decimal via HiveDecimal).
private static HiveObjectConversion getPrimitiveConversion(PrimitiveObjectInspector inspector, LogicalType dataType, HiveShim hiveShim) {
    HiveObjectConversion conversion;
    if (inspector instanceof BooleanObjectInspector || inspector instanceof StringObjectInspector || inspector instanceof ByteObjectInspector || inspector instanceof ShortObjectInspector || inspector instanceof IntObjectInspector || inspector instanceof LongObjectInspector || inspector instanceof FloatObjectInspector || inspector instanceof DoubleObjectInspector || inspector instanceof BinaryObjectInspector || inspector instanceof VoidObjectInspector) {
        conversion = IdentityConversion.INSTANCE;
    } else if (inspector instanceof DateObjectInspector) {
        conversion = hiveShim::toHiveDate;
    } else if (inspector instanceof TimestampObjectInspector) {
        conversion = hiveShim::toHiveTimestamp;
    } else if (inspector instanceof HiveCharObjectInspector) {
        conversion = o -> o == null ? null : new HiveChar((String) o, ((CharType) dataType).getLength());
    } else if (inspector instanceof HiveVarcharObjectInspector) {
        conversion = o -> o == null ? null : new HiveVarchar((String) o, ((VarCharType) dataType).getLength());
    } else if (inspector instanceof HiveDecimalObjectInspector) {
        conversion = o -> o == null ? null : HiveDecimal.create((BigDecimal) o);
    } else {
        throw new FlinkHiveUDFException("Unsupported primitive object inspector " + inspector.getClass().getName());
    }
    // currently this happens for constant arguments for UDFs
    if (inspector.preferWritable()) {
        conversion = new WritableHiveObjectConversion(conversion, hiveShim);
    }
    return conversion;
}

// Conversion for arrays: element-wise conversion of an Object[] into a List.
private static HiveObjectConversion getListConversion(ListObjectInspector inspector, ArrayType arrayType, HiveShim hiveShim) {
    HiveObjectConversion eleConvert = getConversion(inspector.getListElementObjectInspector(), arrayType.getElementType(), hiveShim);
    return o -> {
        if (o == null) {
            return null;
        }
        Object[] array = (Object[]) o;
        List<Object> result = new ArrayList<>(array.length);
        for (Object ele : array) {
            result.add(eleConvert.toHiveObject(ele));
        }
        return result;
    };
}

// Conversion for maps: converts keys and values independently into a new Map.
private static HiveObjectConversion getMapConversion(MapObjectInspector mapInspector, MapType kvType, HiveShim hiveShim) {
    HiveObjectConversion keyConversion = getConversion(mapInspector.getMapKeyObjectInspector(), kvType.getKeyType(), hiveShim);
    HiveObjectConversion valueConversion = getConversion(mapInspector.getMapValueObjectInspector(), kvType.getValueType(), hiveShim);
    return o -> {
        if (o == null) {
            return null;
        }
        Map<Object, Object> map = (Map) o;
        Map<Object, Object> result = new HashMap<>(map.size());
        for (Map.Entry<Object, Object> entry : map.entrySet()) {
            result.put(keyConversion.toHiveObject(entry.getKey()), valueConversion.toHiveObject(entry.getValue()));
        }
        return result;
    };
}

// Conversion for structs: per-field conversions applied positionally to a Row,
// producing a List of Hive field values.
private static HiveObjectConversion getStructConversion(StructObjectInspector structInspector, RowType rowType, HiveShim hiveShim) {
    List<? extends StructField> structFields = structInspector.getAllStructFieldRefs();
    List<RowType.RowField> rowFields = rowType.getFields();
    HiveObjectConversion[] conversions = new HiveObjectConversion[structFields.size()];
    for (int i = 0; i < structFields.size(); i++) {
        conversions[i] = getConversion(structFields.get(i).getFieldObjectInspector(), rowFields.get(i).getType(), hiveShim);
    }
    return o -> {
        if (o == null) {
            return null;
        }
        Row row = (Row) o;
        List<Object> result = new ArrayList<>(row.getArity());
        for (int i = 0; i < row.getArity(); i++) {
            result.add(conversions[i].toHiveObject(row.getField(i)));
        }
        return result;
    };
}
Also used : DataType(org.apache.flink.table.types.DataType) VoidObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector) Array(java.lang.reflect.Array) WritableConstantHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveVarcharObjectInspector) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) WritableConstantTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantTimestampObjectInspector) MapType(org.apache.flink.table.types.logical.MapType) WritableConstantDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDateObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) WritableConstantByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantByteObjectInspector) CharType(org.apache.flink.table.types.logical.CharType) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) BigDecimal(java.math.BigDecimal) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) Map(java.util.Map) WritableConstantLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantLongObjectInspector) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveTypeUtil(org.apache.flink.table.catalog.hive.util.HiveTypeUtil) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) 
WritableConstantHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveCharObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) WritableConstantBinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBinaryObjectInspector) List(java.util.List) HiveReflectionUtils(org.apache.flink.table.catalog.hive.util.HiveReflectionUtils) LogicalType(org.apache.flink.table.types.logical.LogicalType) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) WritableConstantDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDoubleObjectInspector) WritableConstantFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantFloatObjectInspector) WritableConstantBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBooleanObjectInspector) Row(org.apache.flink.types.Row) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) TypeInfoUtils(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) HashMap(java.util.HashMap) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) RowType(org.apache.flink.table.types.logical.RowType) 
WritableConstantHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveDecimalObjectInspector) Constructor(java.lang.reflect.Constructor) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim) ArrayList(java.util.ArrayList) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) Nonnull(javax.annotation.Nonnull) WritableConstantShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantShortObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) WritableConstantStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantStringObjectInspector) VarCharType(org.apache.flink.table.types.logical.VarCharType) ArrayType(org.apache.flink.table.types.logical.ArrayType) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FlinkHiveUDFException(org.apache.flink.table.functions.hive.FlinkHiveUDFException) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) WritableConstantIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector) ObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory) PrimitiveObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) Internal(org.apache.flink.annotation.Internal) 
TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) WritableConstantLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantLongObjectInspector) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) WritableConstantDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDateObjectInspector) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) WritableConstantIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) WritableConstantBinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBinaryObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) RowType(org.apache.flink.table.types.logical.RowType) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) WritableConstantStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantStringObjectInspector) MapType(org.apache.flink.table.types.logical.MapType) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) WritableConstantFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantFloatObjectInspector) WritableConstantByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantByteObjectInspector) 
ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) WritableConstantTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantTimestampObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) WritableConstantShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantShortObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) WritableConstantHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveDecimalObjectInspector) List(java.util.List) ArrayList(java.util.ArrayList) WritableConstantHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveCharObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) FlinkHiveUDFException(org.apache.flink.table.functions.hive.FlinkHiveUDFException) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) BigDecimal(java.math.BigDecimal) WritableConstantDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDoubleObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) WritableConstantHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveVarcharObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) 
PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) VoidObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector) CharType(org.apache.flink.table.types.logical.CharType) VarCharType(org.apache.flink.table.types.logical.VarCharType) Row(org.apache.flink.types.Row) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) WritableConstantBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBooleanObjectInspector) Map(java.util.Map) HashMap(java.util.HashMap) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)

Example 15 with ArrayType

Use of org.apache.flink.table.types.logical.ArrayType in the Apache Flink project.

The isPrimitiveArray method of the HiveFunctionUtil class.

/**
 * Returns whether {@code dataType} is an array type whose element type is both
 * non-nullable and primitive.
 */
private static boolean isPrimitiveArray(DataType dataType) {
    if (!isArrayType(dataType)) {
        return false;
    }
    LogicalType elementType = ((ArrayType) dataType.getLogicalType()).getElementType();
    // Equivalent to !(nullable || !primitive) via De Morgan.
    return !elementType.isNullable() && isPrimitive(elementType);
}
Also used : ArrayType(org.apache.flink.table.types.logical.ArrayType) LogicalType(org.apache.flink.table.types.logical.LogicalType)

Aggregations

ArrayType (org.apache.flink.table.types.logical.ArrayType)28 LogicalType (org.apache.flink.table.types.logical.LogicalType)18 RowType (org.apache.flink.table.types.logical.RowType)18 DecimalType (org.apache.flink.table.types.logical.DecimalType)11 MapType (org.apache.flink.table.types.logical.MapType)11 TimestampType (org.apache.flink.table.types.logical.TimestampType)10 IntType (org.apache.flink.table.types.logical.IntType)8 ArrayList (java.util.ArrayList)7 GenericRowData (org.apache.flink.table.data.GenericRowData)7 RowData (org.apache.flink.table.data.RowData)7 VarCharType (org.apache.flink.table.types.logical.VarCharType)7 LocalTime (java.time.LocalTime)6 Internal (org.apache.flink.annotation.Internal)6 LocalZonedTimestampType (org.apache.flink.table.types.logical.LocalZonedTimestampType)6 Serializable (java.io.Serializable)5 Array (java.lang.reflect.Array)5 LocalDate (java.time.LocalDate)5 LocalDateTime (java.time.LocalDateTime)5 Map (java.util.Map)5 JsonNode (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode)5