
Example 1 with CharType

Use of org.apache.flink.table.types.logical.CharType in project flink by apache.

From the class LogicalTypesTest, method testEmptyStringLiterals.

@Test
public void testEmptyStringLiterals() {
    final CharType charType = CharType.ofEmptyLiteral();
    final VarCharType varcharType = VarCharType.ofEmptyLiteral();
    final BinaryType binaryType = BinaryType.ofEmptyLiteral();
    final VarBinaryType varBinaryType = VarBinaryType.ofEmptyLiteral();
    // make the types nullable for testing
    assertThat(charType.copy(true)).satisfies(nonEqualityCheckWithOtherType(new CharType(1)));
    assertThat(varcharType.copy(true)).satisfies(nonEqualityCheckWithOtherType(new VarCharType(1)));
    assertThat(binaryType.copy(true)).satisfies(nonEqualityCheckWithOtherType(new BinaryType(1)));
    assertThat(varBinaryType.copy(true)).satisfies(nonEqualityCheckWithOtherType(new VarBinaryType(1)));
    assertThat(charType).hasSummaryString("CHAR(0) NOT NULL");
    assertThat(varcharType).hasSummaryString("VARCHAR(0) NOT NULL");
    assertThat(binaryType).hasSummaryString("BINARY(0) NOT NULL");
    assertThat(varBinaryType).hasSummaryString("VARBINARY(0) NOT NULL");
    assertThat(charType).hasNoSerializableString();
    assertThat(varcharType).hasNoSerializableString();
    assertThat(binaryType).hasNoSerializableString();
    assertThat(varBinaryType).hasNoSerializableString();
}
Also used : VarBinaryType(org.apache.flink.table.types.logical.VarBinaryType) BinaryType(org.apache.flink.table.types.logical.BinaryType) CharType(org.apache.flink.table.types.logical.CharType) VarCharType(org.apache.flink.table.types.logical.VarCharType) Test(org.junit.Test)
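
For context, a minimal standalone sketch (not part of the Flink test suite) of what the assertions above check: the empty-literal CHAR type summarizes as CHAR(0) NOT NULL but, unlike a regular CharType, has no serializable string representation.

import org.apache.flink.table.types.logical.CharType;

public class EmptyCharLiteralSketch {
    public static void main(String[] args) {
        // CHAR(0) NOT NULL, only valid for the empty string literal ''
        CharType emptyLiteral = CharType.ofEmptyLiteral();
        // regular CHAR(1), nullable by default
        CharType regular = new CharType(1);

        System.out.println(emptyLiteral.asSummaryString()); // CHAR(0) NOT NULL
        System.out.println(regular.asSummaryString());      // CHAR(1)

        // asSerializableString() is expected to fail for the empty literal,
        // which is what hasNoSerializableString() asserts above.
        try {
            emptyLiteral.asSerializableString();
        } catch (RuntimeException e) {
            System.out.println("empty literal has no serializable form: " + e.getMessage());
        }
    }
}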

Example 2 with CharType

Use of org.apache.flink.table.types.logical.CharType in project flink by apache.

From the class StringConcatTypeStrategy, method inferType.

@Override
public Optional<DataType> inferType(CallContext callContext) {
    final List<DataType> argumentDataTypes = callContext.getArgumentDataTypes();
    final LogicalType type1 = argumentDataTypes.get(0).getLogicalType();
    final LogicalType type2 = argumentDataTypes.get(1).getLogicalType();
    int length = getLength(type1) + getLength(type2);
    // handle overflow
    if (length < 0) {
        length = CharType.MAX_LENGTH;
    }
    final LogicalType minimumType;
    if (type1.is(LogicalTypeFamily.CHARACTER_STRING) || type2.is(LogicalTypeFamily.CHARACTER_STRING)) {
        minimumType = new CharType(false, length);
    } else if (type1.is(LogicalTypeFamily.BINARY_STRING) || type2.is(LogicalTypeFamily.BINARY_STRING)) {
        minimumType = new BinaryType(false, length);
    } else {
        return Optional.empty();
    }
    // deal with nullability handling and varying semantics
    return findCommonType(Arrays.asList(type1, type2, minimumType)).map(TypeConversions::fromLogicalToDataType);
}
Also used : BinaryType(org.apache.flink.table.types.logical.BinaryType) TypeConversions(org.apache.flink.table.types.utils.TypeConversions) DataType(org.apache.flink.table.types.DataType) LogicalType(org.apache.flink.table.types.logical.LogicalType) CharType(org.apache.flink.table.types.logical.CharType)
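
A hedged sketch (hypothetical helper, not Flink code) of the length arithmetic this strategy relies on: the result length is the sum of the argument lengths, and a negative sum signals int overflow, which is saturated to CharType.MAX_LENGTH.

import org.apache.flink.table.types.logical.CharType;

public class ConcatLengthSketch {
    // mirrors the overflow guard in inferType above
    static int concatLength(int length1, int length2) {
        int length = length1 + length2;
        return length < 0 ? CharType.MAX_LENGTH : length;
    }

    public static void main(String[] args) {
        System.out.println(concatLength(3, 4));                  // 7, e.g. CHAR(3) || CHAR(4)
        System.out.println(concatLength(Integer.MAX_VALUE, 10)); // overflows, saturated to CharType.MAX_LENGTH
    }
}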

Example 3 with CharType

Use of org.apache.flink.table.types.logical.CharType in project flink by apache.

From the class LogicalTypeJsonSerdeTest, method testLogicalTypeSerde.

private static List<LogicalType> testLogicalTypeSerde() {
    final List<LogicalType> types = Arrays.asList(new BooleanType(), new TinyIntType(), new SmallIntType(), new IntType(), new BigIntType(), new FloatType(), new DoubleType(), new DecimalType(10), new DecimalType(15, 5), CharType.ofEmptyLiteral(), new CharType(), new CharType(5), VarCharType.ofEmptyLiteral(), new VarCharType(), new VarCharType(5), BinaryType.ofEmptyLiteral(), new BinaryType(), new BinaryType(100), VarBinaryType.ofEmptyLiteral(), new VarBinaryType(), new VarBinaryType(100), new DateType(), new TimeType(), new TimeType(3), new TimestampType(), new TimestampType(3), new LocalZonedTimestampType(false, TimestampKind.PROCTIME, 3), new TimestampType(false, TimestampKind.ROWTIME, 3), new ZonedTimestampType(), new ZonedTimestampType(3), new ZonedTimestampType(false, TimestampKind.ROWTIME, 3), new LocalZonedTimestampType(), new LocalZonedTimestampType(3), new LocalZonedTimestampType(false, TimestampKind.PROCTIME, 3), new LocalZonedTimestampType(false, TimestampKind.ROWTIME, 3), new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.DAY_TO_HOUR), new DayTimeIntervalType(false, DayTimeIntervalType.DayTimeResolution.DAY_TO_HOUR, 3, 6), new YearMonthIntervalType(YearMonthIntervalType.YearMonthResolution.YEAR_TO_MONTH), new YearMonthIntervalType(false, YearMonthIntervalType.YearMonthResolution.MONTH, 2), new ZonedTimestampType(), new LocalZonedTimestampType(), new LocalZonedTimestampType(false, TimestampKind.PROCTIME, 3), new SymbolType<>(), new ArrayType(new IntType(false)), new ArrayType(new LocalZonedTimestampType(false, TimestampKind.ROWTIME, 3)), new ArrayType(new ZonedTimestampType(false, TimestampKind.ROWTIME, 3)), new ArrayType(new TimestampType()), new ArrayType(CharType.ofEmptyLiteral()), new ArrayType(VarCharType.ofEmptyLiteral()), new ArrayType(BinaryType.ofEmptyLiteral()), new ArrayType(VarBinaryType.ofEmptyLiteral()), new MapType(new BigIntType(), new IntType(false)), new MapType(new TimestampType(false, TimestampKind.ROWTIME, 3), new ZonedTimestampType()), new MapType(CharType.ofEmptyLiteral(), CharType.ofEmptyLiteral()), new MapType(VarCharType.ofEmptyLiteral(), VarCharType.ofEmptyLiteral()), new MapType(BinaryType.ofEmptyLiteral(), BinaryType.ofEmptyLiteral()), new MapType(VarBinaryType.ofEmptyLiteral(), VarBinaryType.ofEmptyLiteral()), new MultisetType(new IntType(false)), new MultisetType(new TimestampType()), new MultisetType(new TimestampType(true, TimestampKind.ROWTIME, 3)), new MultisetType(CharType.ofEmptyLiteral()), new MultisetType(VarCharType.ofEmptyLiteral()), new MultisetType(BinaryType.ofEmptyLiteral()), new MultisetType(VarBinaryType.ofEmptyLiteral()), RowType.of(new BigIntType(), new IntType(false), new VarCharType(200)), RowType.of(new LogicalType[] { new BigIntType(), new IntType(false), new VarCharType(200) }, new String[] { "f1", "f2", "f3" }), RowType.of(new TimestampType(false, TimestampKind.ROWTIME, 3), new TimestampType(false, TimestampKind.REGULAR, 3), new ZonedTimestampType(false, TimestampKind.ROWTIME, 3), new ZonedTimestampType(false, TimestampKind.REGULAR, 3), new LocalZonedTimestampType(false, TimestampKind.ROWTIME, 3), new LocalZonedTimestampType(false, TimestampKind.PROCTIME, 3), new LocalZonedTimestampType(false, TimestampKind.REGULAR, 3)), RowType.of(CharType.ofEmptyLiteral(), VarCharType.ofEmptyLiteral(), BinaryType.ofEmptyLiteral(), VarBinaryType.ofEmptyLiteral()), // registered structured type
    StructuredType.newBuilder(ObjectIdentifier.of("cat", "db", "structuredType"), PojoClass.class).attributes(Arrays.asList(new StructuredType.StructuredAttribute("f0", new IntType(true)), new StructuredType.StructuredAttribute("f1", new BigIntType(true)), new StructuredType.StructuredAttribute("f2", new VarCharType(200), "desc"))).comparison(StructuredType.StructuredComparison.FULL).setFinal(false).setInstantiable(false).superType(StructuredType.newBuilder(ObjectIdentifier.of("cat", "db", "structuredType2")).attributes(Collections.singletonList(new StructuredType.StructuredAttribute("f0", new BigIntType(false)))).build()).description("description for StructuredType").build(), // unregistered structured type
    StructuredType.newBuilder(PojoClass.class).attributes(Arrays.asList(new StructuredType.StructuredAttribute("f0", new IntType(true)), new StructuredType.StructuredAttribute("f1", new BigIntType(true)), new StructuredType.StructuredAttribute("f2", new VarCharType(200), "desc"))).build(), // registered distinct type
    DistinctType.newBuilder(ObjectIdentifier.of("cat", "db", "distinctType"), new VarCharType(5)).build(), DistinctType.newBuilder(ObjectIdentifier.of("cat", "db", "distinctType"), new VarCharType(false, 5)).build(), // custom RawType
    new RawType<>(LocalDateTime.class, LocalDateTimeSerializer.INSTANCE), // external RawType
    new RawType<>(Row.class, ExternalSerializer.of(DataTypes.ROW(DataTypes.INT(), DataTypes.STRING()))));
    final List<LogicalType> mutableTypes = new ArrayList<>(types);
    // RawType for MapView
    addRawTypesForMapView(mutableTypes, new VarCharType(100), new VarCharType(100));
    addRawTypesForMapView(mutableTypes, new VarCharType(100), new BigIntType());
    addRawTypesForMapView(mutableTypes, new BigIntType(), new VarCharType(100));
    addRawTypesForMapView(mutableTypes, new BigIntType(), new BigIntType());
    // RawType for ListView
    addRawTypesForListView(mutableTypes, new VarCharType(100));
    addRawTypesForListView(mutableTypes, new BigIntType());
    // RawType for custom MapView
    mutableTypes.add(DataViewUtils.adjustDataViews(MapView.newMapViewDataType(DataTypes.STRING().toInternal(), DataTypes.STRING().bridgedTo(byte[].class)), false).getLogicalType());
    final List<LogicalType> allTypes = new ArrayList<>();
    // consider nullable
    for (LogicalType type : mutableTypes) {
        allTypes.add(type.copy(true));
        allTypes.add(type.copy(false));
    }
    // ignore nullable for NullType
    allTypes.add(new NullType());
    return allTypes;
}
Also used : LocalDateTime(java.time.LocalDateTime) VarBinaryType(org.apache.flink.table.types.logical.VarBinaryType) ArrayList(java.util.ArrayList) LogicalType(org.apache.flink.table.types.logical.LogicalType) BigIntType(org.apache.flink.table.types.logical.BigIntType) MapType(org.apache.flink.table.types.logical.MapType) TinyIntType(org.apache.flink.table.types.logical.TinyIntType) IntType(org.apache.flink.table.types.logical.IntType) SmallIntType(org.apache.flink.table.types.logical.SmallIntType) FloatType(org.apache.flink.table.types.logical.FloatType) TimeType(org.apache.flink.table.types.logical.TimeType) StructuredType(org.apache.flink.table.types.logical.StructuredType) ArrayType(org.apache.flink.table.types.logical.ArrayType) PojoClass(org.apache.flink.table.planner.plan.nodes.exec.serde.DataTypeJsonSerdeTest.PojoClass) LocalZonedTimestampType(org.apache.flink.table.types.logical.LocalZonedTimestampType) TimestampType(org.apache.flink.table.types.logical.TimestampType) ZonedTimestampType(org.apache.flink.table.types.logical.ZonedTimestampType) VarCharType(org.apache.flink.table.types.logical.VarCharType) DateType(org.apache.flink.table.types.logical.DateType) MultisetType(org.apache.flink.table.types.logical.MultisetType) BinaryType(org.apache.flink.table.types.logical.BinaryType) BooleanType(org.apache.flink.table.types.logical.BooleanType) DayTimeIntervalType(org.apache.flink.table.types.logical.DayTimeIntervalType) DoubleType(org.apache.flink.table.types.logical.DoubleType) DecimalType(org.apache.flink.table.types.logical.DecimalType) CharType(org.apache.flink.table.types.logical.CharType) Row(org.apache.flink.types.Row) NullType(org.apache.flink.table.types.logical.NullType) YearMonthIntervalType(org.apache.flink.table.types.logical.YearMonthIntervalType)
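
A minimal sketch (hypothetical class name, not the actual test harness) of the nullability expansion at the end of testLogicalTypeSerde: every type in the list is exercised in both its nullable and NOT NULL variant via LogicalType#copy(boolean).

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.CharType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.VarCharType;

public class NullabilityExpansionSketch {
    public static void main(String[] args) {
        List<LogicalType> base = Arrays.asList(new CharType(5), new VarCharType(10), new BigIntType());

        List<LogicalType> expanded = new ArrayList<>();
        for (LogicalType type : base) {
            expanded.add(type.copy(true));  // nullable variant
            expanded.add(type.copy(false)); // NOT NULL variant
        }

        expanded.forEach(t -> System.out.println(t.asSummaryString()));
    }
}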

Example 4 with CharType

Use of org.apache.flink.table.types.logical.CharType in project flink by apache.

From the class CastRuleProviderTest, method testResolvePredefinedToString.

@Test
void testResolvePredefinedToString() {
    assertThat(CastRuleProvider.resolve(INT, new VarCharType(10))).isSameAs(CharVarCharTrimPadCastRule.INSTANCE);
    assertThat(CastRuleProvider.resolve(INT, new CharType(10))).isSameAs(CharVarCharTrimPadCastRule.INSTANCE);
    assertThat(CastRuleProvider.resolve(INT, STRING_TYPE)).isSameAs(NumericToStringCastRule.INSTANCE);
}
Also used : VarCharType(org.apache.flink.table.types.logical.VarCharType) CharType(org.apache.flink.table.types.logical.CharType) Test(org.junit.jupiter.api.Test)
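
The distinction the test exercises is that STRING is simply VARCHAR with the maximum length, so casting INT to it needs no trimming or padding, whereas CHAR(10) and VARCHAR(10) are length-bounded targets. A small sketch (assuming the standard Flink constant VarCharType.STRING_TYPE) illustrating the difference in declared lengths:

import org.apache.flink.table.types.logical.CharType;
import org.apache.flink.table.types.logical.VarCharType;

public class BoundedStringTargetSketch {
    public static void main(String[] args) {
        VarCharType bounded = new VarCharType(10);
        CharType fixed = new CharType(10);
        VarCharType string = VarCharType.STRING_TYPE; // VARCHAR(Integer.MAX_VALUE), a.k.a. STRING

        System.out.println(bounded.getLength()); // 10 -> cast result may need trimming
        System.out.println(fixed.getLength());   // 10 -> cast result may need padding or trimming
        System.out.println(string.getLength());  // 2147483647 -> plain numeric-to-string cast suffices
    }
}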

Example 5 with CharType

Use of org.apache.flink.table.types.logical.CharType in project flink by apache.

From the class HiveInspectors, method getConversion.

/**
 * Get conversion for converting Flink object to Hive object from an ObjectInspector and the
 * corresponding Flink DataType.
 */
public static HiveObjectConversion getConversion(ObjectInspector inspector, LogicalType dataType, HiveShim hiveShim) {
    if (inspector instanceof PrimitiveObjectInspector) {
        HiveObjectConversion conversion;
        if (inspector instanceof BooleanObjectInspector || inspector instanceof StringObjectInspector || inspector instanceof ByteObjectInspector || inspector instanceof ShortObjectInspector || inspector instanceof IntObjectInspector || inspector instanceof LongObjectInspector || inspector instanceof FloatObjectInspector || inspector instanceof DoubleObjectInspector || inspector instanceof BinaryObjectInspector || inspector instanceof VoidObjectInspector) {
            conversion = IdentityConversion.INSTANCE;
        } else if (inspector instanceof DateObjectInspector) {
            conversion = hiveShim::toHiveDate;
        } else if (inspector instanceof TimestampObjectInspector) {
            conversion = hiveShim::toHiveTimestamp;
        } else if (inspector instanceof HiveCharObjectInspector) {
            conversion = o -> o == null ? null : new HiveChar((String) o, ((CharType) dataType).getLength());
        } else if (inspector instanceof HiveVarcharObjectInspector) {
            conversion = o -> o == null ? null : new HiveVarchar((String) o, ((VarCharType) dataType).getLength());
        } else if (inspector instanceof HiveDecimalObjectInspector) {
            conversion = o -> o == null ? null : HiveDecimal.create((BigDecimal) o);
        } else {
            throw new FlinkHiveUDFException("Unsupported primitive object inspector " + inspector.getClass().getName());
        }
        // currently this happens for constant arguments for UDFs
        if (((PrimitiveObjectInspector) inspector).preferWritable()) {
            conversion = new WritableHiveObjectConversion(conversion, hiveShim);
        }
        return conversion;
    }
    if (inspector instanceof ListObjectInspector) {
        HiveObjectConversion eleConvert = getConversion(((ListObjectInspector) inspector).getListElementObjectInspector(), ((ArrayType) dataType).getElementType(), hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Object[] array = (Object[]) o;
            List<Object> result = new ArrayList<>();
            for (Object ele : array) {
                result.add(eleConvert.toHiveObject(ele));
            }
            return result;
        };
    }
    if (inspector instanceof MapObjectInspector) {
        MapObjectInspector mapInspector = (MapObjectInspector) inspector;
        MapType kvType = (MapType) dataType;
        HiveObjectConversion keyConversion = getConversion(mapInspector.getMapKeyObjectInspector(), kvType.getKeyType(), hiveShim);
        HiveObjectConversion valueConversion = getConversion(mapInspector.getMapValueObjectInspector(), kvType.getValueType(), hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Map<Object, Object> map = (Map) o;
            Map<Object, Object> result = new HashMap<>(map.size());
            for (Map.Entry<Object, Object> entry : map.entrySet()) {
                result.put(keyConversion.toHiveObject(entry.getKey()), valueConversion.toHiveObject(entry.getValue()));
            }
            return result;
        };
    }
    if (inspector instanceof StructObjectInspector) {
        StructObjectInspector structInspector = (StructObjectInspector) inspector;
        List<? extends StructField> structFields = structInspector.getAllStructFieldRefs();
        List<RowType.RowField> rowFields = ((RowType) dataType).getFields();
        HiveObjectConversion[] conversions = new HiveObjectConversion[structFields.size()];
        for (int i = 0; i < structFields.size(); i++) {
            conversions[i] = getConversion(structFields.get(i).getFieldObjectInspector(), rowFields.get(i).getType(), hiveShim);
        }
        return o -> {
            if (o == null) {
                return null;
            }
            Row row = (Row) o;
            List<Object> result = new ArrayList<>(row.getArity());
            for (int i = 0; i < row.getArity(); i++) {
                result.add(conversions[i].toHiveObject(row.getField(i)));
            }
            return result;
        };
    }
    throw new FlinkHiveUDFException(String.format("Flink doesn't support convert object conversion for %s yet", inspector));
}
Also used : DataType(org.apache.flink.table.types.DataType) VoidObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector) Array(java.lang.reflect.Array) WritableConstantHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveVarcharObjectInspector) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) WritableConstantTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantTimestampObjectInspector) MapType(org.apache.flink.table.types.logical.MapType) WritableConstantDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDateObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) WritableConstantByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantByteObjectInspector) CharType(org.apache.flink.table.types.logical.CharType) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) BigDecimal(java.math.BigDecimal) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) Map(java.util.Map) WritableConstantLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantLongObjectInspector) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveTypeUtil(org.apache.flink.table.catalog.hive.util.HiveTypeUtil) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) WritableConstantHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveCharObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) WritableConstantBinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBinaryObjectInspector) List(java.util.List) HiveReflectionUtils(org.apache.flink.table.catalog.hive.util.HiveReflectionUtils) LogicalType(org.apache.flink.table.types.logical.LogicalType) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) WritableConstantDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDoubleObjectInspector) WritableConstantFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantFloatObjectInspector) WritableConstantBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBooleanObjectInspector) Row(org.apache.flink.types.Row) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) TypeInfoUtils(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) HashMap(java.util.HashMap) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) RowType(org.apache.flink.table.types.logical.RowType) WritableConstantHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveDecimalObjectInspector) Constructor(java.lang.reflect.Constructor) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim) ArrayList(java.util.ArrayList) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) Nonnull(javax.annotation.Nonnull) WritableConstantShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantShortObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) WritableConstantStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantStringObjectInspector) VarCharType(org.apache.flink.table.types.logical.VarCharType) ArrayType(org.apache.flink.table.types.logical.ArrayType) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FlinkHiveUDFException(org.apache.flink.table.functions.hive.FlinkHiveUDFException) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) WritableConstantIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector) ObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory) PrimitiveObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) Internal(org.apache.flink.annotation.Internal) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)
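
A minimal sketch (standalone, not the HiveInspectors class) of the HiveCharObjectInspector branch above: a Flink string value is wrapped into a HiveChar bounded by the CharType's declared length, with null passed through.

import org.apache.flink.table.types.logical.CharType;
import org.apache.hadoop.hive.common.type.HiveChar;

public class CharConversionSketch {
    public static void main(String[] args) {
        CharType charType = new CharType(5);

        // mirrors: conversion = o -> o == null ? null : new HiveChar((String) o, length)
        Object flinkValue = "abc";
        HiveChar hiveValue = flinkValue == null
                ? null
                : new HiveChar((String) flinkValue, charType.getLength());

        System.out.println(hiveValue); // Hive applies CHAR(5) pad/truncate semantics to the value
    }
}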

Aggregations

CharType (org.apache.flink.table.types.logical.CharType): 5
VarCharType (org.apache.flink.table.types.logical.VarCharType): 4
BinaryType (org.apache.flink.table.types.logical.BinaryType): 3
LogicalType (org.apache.flink.table.types.logical.LogicalType): 3
ArrayList (java.util.ArrayList): 2
DataType (org.apache.flink.table.types.DataType): 2
ArrayType (org.apache.flink.table.types.logical.ArrayType): 2
MapType (org.apache.flink.table.types.logical.MapType): 2
VarBinaryType (org.apache.flink.table.types.logical.VarBinaryType): 2
Row (org.apache.flink.types.Row): 2
Array (java.lang.reflect.Array): 1
Constructor (java.lang.reflect.Constructor): 1
BigDecimal (java.math.BigDecimal): 1
LocalDateTime (java.time.LocalDateTime): 1
HashMap (java.util.HashMap): 1
List (java.util.List): 1
Map (java.util.Map): 1
Nonnull (javax.annotation.Nonnull): 1
Internal (org.apache.flink.annotation.Internal): 1
CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException): 1