
Example 6 with HiveShim

Use of org.apache.flink.table.catalog.hive.client.HiveShim in project flink by apache.

From the class HivePartitionFetcherTest, method testIgnoreNonExistPartition.

@Test
public void testIgnoreNonExistPartition() throws Exception {
    // it's possible a partition path exists but the partition is not added to HMS, e.g. the
    // partition is still being loaded, or the path is simply misplaced
    // make sure the fetch can ignore such paths
    HiveCatalog hiveCatalog = HiveTestUtils.createHiveCatalog();
    hiveCatalog.open();
    // create test table
    String[] fieldNames = new String[] { "i", "date" };
    DataType[] fieldTypes = new DataType[] { DataTypes.INT(), DataTypes.STRING() };
    TableSchema schema = TableSchema.builder().fields(fieldNames, fieldTypes).build();
    List<String> partitionKeys = Collections.singletonList("date");
    Map<String, String> options = new HashMap<>();
    options.put("connector", "hive");
    CatalogTable catalogTable = new CatalogTableImpl(schema, partitionKeys, options, null);
    ObjectPath tablePath = new ObjectPath("default", "test");
    hiveCatalog.createTable(tablePath, catalogTable, false);
    // add a valid partition path
    Table hiveTable = hiveCatalog.getHiveTable(tablePath);
    Path path = new Path(hiveTable.getSd().getLocation(), "date=2021-06-18");
    FileSystem fs = path.getFileSystem(hiveCatalog.getHiveConf());
    fs.mkdirs(path);
    // test partition-time order
    Configuration flinkConf = new Configuration();
    flinkConf.set(STREAMING_SOURCE_PARTITION_ORDER, HiveOptions.PartitionOrder.PARTITION_TIME);
    HiveShim hiveShim = HiveShimLoader.loadHiveShim(hiveCatalog.getHiveVersion());
    JobConfWrapper jobConfWrapper = new JobConfWrapper(new JobConf(hiveCatalog.getHiveConf()));
    String defaultPartName = "__HIVE_DEFAULT_PARTITION__";
    MyHivePartitionFetcherContext fetcherContext = new MyHivePartitionFetcherContext(tablePath, hiveShim, jobConfWrapper, partitionKeys, fieldTypes, fieldNames, flinkConf, defaultPartName);
    fetcherContext.open();
    assertEquals(0, fetcherContext.getComparablePartitionValueList().size());
    // test create-time order
    flinkConf.set(STREAMING_SOURCE_PARTITION_ORDER, HiveOptions.PartitionOrder.CREATE_TIME);
    fetcherContext = new MyHivePartitionFetcherContext(tablePath, hiveShim, jobConfWrapper, partitionKeys, fieldTypes, fieldNames, flinkConf, defaultPartName);
    fetcherContext.open();
    assertEquals(0, fetcherContext.getComparablePartitionValueList().size());
    // test partition-name order
    flinkConf.set(STREAMING_SOURCE_PARTITION_ORDER, HiveOptions.PartitionOrder.PARTITION_NAME);
    fetcherContext = new MyHivePartitionFetcherContext(tablePath, hiveShim, jobConfWrapper, partitionKeys, fieldTypes, fieldNames, flinkConf, defaultPartName);
    fetcherContext.open();
    assertEquals(0, fetcherContext.getComparablePartitionValueList().size());
}
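
As a hedged follow-up sketch (not part of the original test): once the partition is actually registered with HMS, the fetcher context should report it. The sketch reuses the objects from the test above; CatalogPartitionSpec and CatalogPartitionImpl are Flink catalog classes.

// Hypothetical continuation: register the partition in HMS, then re-check.
CatalogPartitionSpec spec = new CatalogPartitionSpec(Collections.singletonMap("date", "2021-06-18"));
hiveCatalog.createPartition(tablePath, spec, new CatalogPartitionImpl(new HashMap<>(), null), false);
fetcherContext.open();
// The registered partition should now be visible to the fetcher.
assertEquals(1, fetcherContext.getComparablePartitionValueList().size());
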
Also used : Path(org.apache.hadoop.fs.Path) ObjectPath(org.apache.flink.table.catalog.ObjectPath) HiveCatalog(org.apache.flink.table.catalog.hive.HiveCatalog) CatalogTable(org.apache.flink.table.catalog.CatalogTable) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) Table(org.apache.hadoop.hive.metastore.api.Table) JobConfWrapper(org.apache.flink.connectors.hive.JobConfWrapper) TableSchema(org.apache.flink.table.api.TableSchema) Configuration(org.apache.flink.configuration.Configuration) HashMap(java.util.HashMap) FileSystem(org.apache.hadoop.fs.FileSystem) DataType(org.apache.flink.table.types.DataType) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim) JobConf(org.apache.hadoop.mapred.JobConf) Test(org.junit.Test)

Example 7 with HiveShim

Use of org.apache.flink.table.catalog.hive.client.HiveShim in project flink by apache.

From the class HiveParserTypeConverter, method convert.

public static RelDataType convert(PrimitiveTypeInfo type, RelDataTypeFactory dtFactory) {
    RelDataType convertedType = null;
    HiveShim hiveShim = HiveParserUtils.getSessionHiveShim();
    switch(type.getPrimitiveCategory()) {
        case VOID:
            convertedType = dtFactory.createSqlType(SqlTypeName.NULL);
            break;
        case BOOLEAN:
            convertedType = dtFactory.createSqlType(SqlTypeName.BOOLEAN);
            break;
        case BYTE:
            convertedType = dtFactory.createSqlType(SqlTypeName.TINYINT);
            break;
        case SHORT:
            convertedType = dtFactory.createSqlType(SqlTypeName.SMALLINT);
            break;
        case INT:
            convertedType = dtFactory.createSqlType(SqlTypeName.INTEGER);
            break;
        case LONG:
            convertedType = dtFactory.createSqlType(SqlTypeName.BIGINT);
            break;
        case FLOAT:
            convertedType = dtFactory.createSqlType(SqlTypeName.FLOAT);
            break;
        case DOUBLE:
            convertedType = dtFactory.createSqlType(SqlTypeName.DOUBLE);
            break;
        case STRING:
            convertedType = dtFactory.createTypeWithCharsetAndCollation(dtFactory.createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            break;
        case DATE:
            convertedType = dtFactory.createSqlType(SqlTypeName.DATE);
            break;
        case TIMESTAMP:
            convertedType = dtFactory.createSqlType(SqlTypeName.TIMESTAMP, 9);
            break;
        case BINARY:
            convertedType = dtFactory.createSqlType(SqlTypeName.BINARY);
            break;
        case DECIMAL:
            DecimalTypeInfo dtInf = (DecimalTypeInfo) type;
            convertedType = dtFactory.createSqlType(SqlTypeName.DECIMAL, dtInf.precision(), dtInf.scale());
            break;
        case VARCHAR:
            convertedType = dtFactory.createTypeWithCharsetAndCollation(dtFactory.createSqlType(SqlTypeName.VARCHAR, ((BaseCharTypeInfo) type).getLength()), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            break;
        case CHAR:
            convertedType = dtFactory.createTypeWithCharsetAndCollation(dtFactory.createSqlType(SqlTypeName.CHAR, ((BaseCharTypeInfo) type).getLength()), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            break;
        case UNKNOWN:
            convertedType = dtFactory.createSqlType(SqlTypeName.OTHER);
            break;
        default:
            if (hiveShim.isIntervalYearMonthType(type.getPrimitiveCategory())) {
                convertedType = dtFactory.createSqlIntervalType(new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
            } else if (hiveShim.isIntervalDayTimeType(type.getPrimitiveCategory())) {
                convertedType = dtFactory.createSqlIntervalType(new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, new SqlParserPos(1, 1)));
            }
    }
    if (null == convertedType) {
        throw new RuntimeException("Unsupported Type : " + type.getTypeName());
    }
    return dtFactory.createTypeWithNullability(convertedType, true);
}
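
For a standalone feel of the DECIMAL branch, here is a minimal sketch that uses Calcite's SqlTypeFactoryImpl with the default type system instead of Flink's session-bound factory (the factory choice is an assumption for illustration):

// Assumes org.apache.calcite.sql.type.SqlTypeFactoryImpl and RelDataTypeSystem.DEFAULT.
RelDataTypeFactory factory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
// Hive decimal(10,2) carries its precision/scale in DecimalTypeInfo.
DecimalTypeInfo decimal = new DecimalTypeInfo(10, 2);
RelDataType converted = factory.createSqlType(SqlTypeName.DECIMAL, decimal.precision(), decimal.scale());
// As in convert(), the result is made nullable before use.
RelDataType nullable = factory.createTypeWithNullability(converted, true);
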
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) RelDataType(org.apache.calcite.rel.type.RelDataType) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim)

Example 8 with HiveShim

Use of org.apache.flink.table.catalog.hive.client.HiveShim in project flink by apache.

From the class HiveTableSink, method createBatchSink.

private DataStreamSink<Row> createBatchSink(DataStream<RowData> dataStream, DataStructureConverter converter, StorageDescriptor sd, HiveWriterFactory recordWriterFactory, OutputFileConfig fileNaming, final int parallelism) throws IOException {
    FileSystemOutputFormat.Builder<Row> builder = new FileSystemOutputFormat.Builder<>();
    builder.setPartitionComputer(new HiveRowPartitionComputer(hiveShim, JobConfUtils.getDefaultPartitionName(jobConf), tableSchema.getFieldNames(), tableSchema.getFieldDataTypes(), getPartitionKeyArray()));
    builder.setDynamicGrouped(dynamicGrouping);
    builder.setPartitionColumns(getPartitionKeyArray());
    builder.setFileSystemFactory(fsFactory());
    builder.setFormatFactory(new HiveOutputFormatFactory(recordWriterFactory));
    builder.setMetaStoreFactory(msFactory());
    builder.setOverwrite(overwrite);
    builder.setStaticPartitions(staticPartitionSpec);
    builder.setTempPath(new org.apache.flink.core.fs.Path(toStagingDir(sd.getLocation(), jobConf)));
    builder.setOutputFileConfig(fileNaming);
    return dataStream.map((MapFunction<RowData, Row>) value -> (Row) converter.toExternal(value)).writeUsingOutputFormat(builder.build()).setParallelism(parallelism);
}
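
toStagingDir is a private helper of HiveTableSink and is not shown here; conceptually it derives a job-local temporary directory under the table location that the batch sink writes into before the metastore commit. A purely hypothetical sketch of that idea (the naming scheme below is assumed, not Flink's actual implementation):

// Hypothetical: stage output under the table location, unique per job attempt.
String tableLocation = sd.getLocation();
String stagingDir = tableLocation + "/.staging_" + System.currentTimeMillis();
builder.setTempPath(new org.apache.flink.core.fs.Path(stagingDir));
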
Also used : FileSystem(org.apache.hadoop.fs.FileSystem) HiveMetastoreClientFactory(org.apache.flink.table.catalog.hive.client.HiveMetastoreClientFactory) HiveBulkWriterFactory(org.apache.flink.connectors.hive.write.HiveBulkWriterFactory) CatalogTable(org.apache.flink.table.catalog.CatalogTable) LoggerFactory(org.slf4j.LoggerFactory) JobConfUtils(org.apache.flink.connectors.hive.util.JobConfUtils) MapFunction(org.apache.flink.api.common.functions.MapFunction) OrcSplitReaderUtil(org.apache.flink.orc.OrcSplitReaderUtil) PartitionCommitInfo(org.apache.flink.connector.file.table.stream.PartitionCommitInfo) SupportsPartitioning(org.apache.flink.table.connector.sink.abilities.SupportsPartitioning) Configuration(org.apache.hadoop.conf.Configuration) Map(java.util.Map) SINK_ROLLING_POLICY_CHECK_INTERVAL(org.apache.flink.connector.file.table.FileSystemConnectorOptions.SINK_ROLLING_POLICY_CHECK_INTERVAL) StreamingFileSink(org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink) Path(org.apache.hadoop.fs.Path) HiveWriterFactory(org.apache.flink.connectors.hive.write.HiveWriterFactory) PartFileInfo(org.apache.flink.streaming.api.functions.sink.filesystem.PartFileInfo) CheckpointRollingPolicy(org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.CheckpointRollingPolicy) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) HiveShimLoader(org.apache.flink.table.catalog.hive.client.HiveShimLoader) HiveCatalogFactoryOptions(org.apache.flink.table.catalog.hive.factories.HiveCatalogFactoryOptions) DynamicTableSink(org.apache.flink.table.connector.sink.DynamicTableSink) SINK_ROLLING_POLICY_ROLLOVER_INTERVAL(org.apache.flink.connector.file.table.FileSystemConnectorOptions.SINK_ROLLING_POLICY_ROLLOVER_INTERVAL) TableSchema(org.apache.flink.table.api.TableSchema) CompactOperator.convertToUncompacted(org.apache.flink.connector.file.table.stream.compact.CompactOperator.convertToUncompacted) UUID(java.util.UUID) HiveOutputFormat(org.apache.hadoop.hive.ql.io.HiveOutputFormat) Preconditions(org.apache.flink.util.Preconditions) StringUtils(org.apache.flink.util.StringUtils) UncheckedIOException(java.io.UncheckedIOException) List(java.util.List) HiveReflectionUtils(org.apache.flink.table.catalog.hive.util.HiveReflectionUtils) LogicalType(org.apache.flink.table.types.logical.LogicalType) DataStreamSinkProvider(org.apache.flink.table.connector.sink.DataStreamSinkProvider) Optional(java.util.Optional) Row(org.apache.flink.types.Row) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) ChangelogMode(org.apache.flink.table.connector.ChangelogMode) RowType(org.apache.flink.table.types.logical.RowType) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim) ParquetRowDataBuilder(org.apache.flink.formats.parquet.row.ParquetRowDataBuilder) BucketsBuilder(org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink.BucketsBuilder) Utilities(org.apache.hadoop.hive.ql.exec.Utilities) LinkedHashMap(java.util.LinkedHashMap) ReadableConfig(org.apache.flink.configuration.ReadableConfig) ThreadLocalClassLoaderConfiguration(org.apache.flink.orc.writer.ThreadLocalClassLoaderConfiguration) FileSystemConnectorOptions(org.apache.flink.connector.file.table.FileSystemConnectorOptions) SINK_ROLLING_POLICY_INACTIVITY_INTERVAL(org.apache.flink.connector.file.table.FileSystemConnectorOptions.SINK_ROLLING_POLICY_INACTIVITY_INTERVAL) SupportsOverwrite(org.apache.flink.table.connector.sink.abilities.SupportsOverwrite) HiveMetastoreClientWrapper(org.apache.flink.table.catalog.hive.client.HiveMetastoreClientWrapper) Nullable(javax.annotation.Nullable) StreamingSink(org.apache.flink.connector.file.table.stream.StreamingSink) DataStreamSink(org.apache.flink.streaming.api.datastream.DataStreamSink) HiveConfUtils(org.apache.flink.connectors.hive.util.HiveConfUtils) HiveCompactReaderFactory(org.apache.flink.connectors.hive.read.HiveCompactReaderFactory) RowData(org.apache.flink.table.data.RowData) Logger(org.slf4j.Logger) Properties(java.util.Properties) ProviderContext(org.apache.flink.table.connector.ProviderContext) FlinkRuntimeException(org.apache.flink.util.FlinkRuntimeException) BulkWriter(org.apache.flink.api.common.serialization.BulkWriter) HiveConf(org.apache.hadoop.hive.conf.HiveConf) HiveOutputFormatFactory(org.apache.flink.connectors.hive.write.HiveOutputFormatFactory) TypeDescription(org.apache.orc.TypeDescription) TException(org.apache.thrift.TException) IOException(java.io.IOException) HadoopPathBasedBulkFormatBuilder(org.apache.flink.streaming.api.functions.sink.filesystem.HadoopPathBasedBulkFormatBuilder) Table(org.apache.hadoop.hive.metastore.api.Table) VisibleForTesting(org.apache.flink.annotation.VisibleForTesting) DataStream(org.apache.flink.streaming.api.datastream.DataStream) JobConf(org.apache.hadoop.mapred.JobConf) TableBucketAssigner(org.apache.flink.connector.file.table.FileSystemTableSink.TableBucketAssigner) CompactReader(org.apache.flink.connector.file.table.stream.compact.CompactReader) OutputFileConfig(org.apache.flink.streaming.api.functions.sink.filesystem.OutputFileConfig) FileSystemTableSink(org.apache.flink.connector.file.table.FileSystemTableSink) TableSchemaUtils(org.apache.flink.table.utils.TableSchemaUtils) FileSystemOutputFormat(org.apache.flink.connector.file.table.FileSystemOutputFormat) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) SINK_ROLLING_POLICY_FILE_SIZE(org.apache.flink.connector.file.table.FileSystemConnectorOptions.SINK_ROLLING_POLICY_FILE_SIZE) HiveTableUtil.checkAcidTable(org.apache.flink.table.catalog.hive.util.HiveTableUtil.checkAcidTable)

Example 9 with HiveShim

Use of org.apache.flink.table.catalog.hive.client.HiveShim in project flink by apache.

From the class SqlFunctionConverter, method visitCall.

@Override
public RexNode visitCall(RexCall call) {
    SqlOperator operator = call.getOperator();
    List<RexNode> operands = call.getOperands();
    SqlOperator convertedOp = convertOperator(operator);
    final boolean[] update = null;
    if (convertedOp instanceof SqlCastFunction) {
        RelDataType type = call.getType();
        return builder.makeCall(type, convertedOp, visitList(operands, update));
    } else {
        if (convertedOp instanceof FlinkSqlTimestampFunction) {
            // Flink's current_timestamp has a different type from Hive's; convert it to a literal
            Timestamp currentTS = ((HiveParser.HiveParserSessionState) SessionState.get()).getHiveParserCurrentTS();
            HiveShim hiveShim = HiveParserUtils.getSessionHiveShim();
            try {
                return HiveParserRexNodeConverter.convertConstant(new ExprNodeConstantDesc(hiveShim.toHiveTimestamp(currentTS)), cluster);
            } catch (SemanticException e) {
                throw new FlinkHiveException(e);
            }
        }
        return builder.makeCall(convertedOp, visitList(operands, update));
    }
}
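
The shim call at the heart of this branch can be sketched in isolation (a sketch, assuming HiveShimLoader can resolve the Hive version from the classpath; the concrete timestamp class returned depends on the shim, e.g. java.sql.Timestamp for Hive 1.x/2.x and Hive's own Timestamp type for 3.x):

HiveShim shim = HiveShimLoader.loadHiveShim(HiveShimLoader.getHiveVersion());
Timestamp flinkTs = Timestamp.valueOf("2021-06-18 12:00:00");
// Convert to whatever timestamp representation this Hive version expects.
Object hiveTs = shim.toHiveTimestamp(flinkTs);
// Wrap as a Hive constant expression, as visitCall() does for current_timestamp.
ExprNodeConstantDesc constant = new ExprNodeConstantDesc(hiveTs);
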
Also used : ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) SqlCastFunction(org.apache.calcite.sql.fun.SqlCastFunction) SqlOperator(org.apache.calcite.sql.SqlOperator) FlinkHiveException(org.apache.flink.connectors.hive.FlinkHiveException) RelDataType(org.apache.calcite.rel.type.RelDataType) Timestamp(java.sql.Timestamp) FlinkSqlTimestampFunction(org.apache.flink.table.planner.functions.sql.FlinkSqlTimestampFunction) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim) RexNode(org.apache.calcite.rex.RexNode) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 10 with HiveShim

Use of org.apache.flink.table.catalog.hive.client.HiveShim in project flink by apache.

From the class HiveInspectors, method getConversion.

/**
 * Get a conversion for converting a Flink object to a Hive object, given an
 * ObjectInspector and the corresponding Flink LogicalType.
 */
public static HiveObjectConversion getConversion(ObjectInspector inspector, LogicalType dataType, HiveShim hiveShim) {
    if (inspector instanceof PrimitiveObjectInspector) {
        HiveObjectConversion conversion;
        if (inspector instanceof BooleanObjectInspector || inspector instanceof StringObjectInspector || inspector instanceof ByteObjectInspector || inspector instanceof ShortObjectInspector || inspector instanceof IntObjectInspector || inspector instanceof LongObjectInspector || inspector instanceof FloatObjectInspector || inspector instanceof DoubleObjectInspector || inspector instanceof BinaryObjectInspector || inspector instanceof VoidObjectInspector) {
            conversion = IdentityConversion.INSTANCE;
        } else if (inspector instanceof DateObjectInspector) {
            conversion = hiveShim::toHiveDate;
        } else if (inspector instanceof TimestampObjectInspector) {
            conversion = hiveShim::toHiveTimestamp;
        } else if (inspector instanceof HiveCharObjectInspector) {
            conversion = o -> o == null ? null : new HiveChar((String) o, ((CharType) dataType).getLength());
        } else if (inspector instanceof HiveVarcharObjectInspector) {
            conversion = o -> o == null ? null : new HiveVarchar((String) o, ((VarCharType) dataType).getLength());
        } else if (inspector instanceof HiveDecimalObjectInspector) {
            conversion = o -> o == null ? null : HiveDecimal.create((BigDecimal) o);
        } else {
            throw new FlinkHiveUDFException("Unsupported primitive object inspector " + inspector.getClass().getName());
        }
        // currently this happens for constant arguments for UDFs
        if (((PrimitiveObjectInspector) inspector).preferWritable()) {
            conversion = new WritableHiveObjectConversion(conversion, hiveShim);
        }
        return conversion;
    }
    if (inspector instanceof ListObjectInspector) {
        HiveObjectConversion eleConvert = getConversion(((ListObjectInspector) inspector).getListElementObjectInspector(), ((ArrayType) dataType).getElementType(), hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Object[] array = (Object[]) o;
            List<Object> result = new ArrayList<>();
            for (Object ele : array) {
                result.add(eleConvert.toHiveObject(ele));
            }
            return result;
        };
    }
    if (inspector instanceof MapObjectInspector) {
        MapObjectInspector mapInspector = (MapObjectInspector) inspector;
        MapType kvType = (MapType) dataType;
        HiveObjectConversion keyConversion = getConversion(mapInspector.getMapKeyObjectInspector(), kvType.getKeyType(), hiveShim);
        HiveObjectConversion valueConversion = getConversion(mapInspector.getMapValueObjectInspector(), kvType.getValueType(), hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Map<Object, Object> map = (Map) o;
            Map<Object, Object> result = new HashMap<>(map.size());
            for (Map.Entry<Object, Object> entry : map.entrySet()) {
                result.put(keyConversion.toHiveObject(entry.getKey()), valueConversion.toHiveObject(entry.getValue()));
            }
            return result;
        };
    }
    if (inspector instanceof StructObjectInspector) {
        StructObjectInspector structInspector = (StructObjectInspector) inspector;
        List<? extends StructField> structFields = structInspector.getAllStructFieldRefs();
        List<RowType.RowField> rowFields = ((RowType) dataType).getFields();
        HiveObjectConversion[] conversions = new HiveObjectConversion[structFields.size()];
        for (int i = 0; i < structFields.size(); i++) {
            conversions[i] = getConversion(structFields.get(i).getFieldObjectInspector(), rowFields.get(i).getType(), hiveShim);
        }
        return o -> {
            if (o == null) {
                return null;
            }
            Row row = (Row) o;
            List<Object> result = new ArrayList<>(row.getArity());
            for (int i = 0; i < row.getArity(); i++) {
                result.add(conversions[i].toHiveObject(row.getField(i)));
            }
            return result;
        };
    }
    throw new FlinkHiveUDFException(String.format("Flink doesn't support convert object conversion for %s yet", inspector));
}
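
A minimal usage sketch for the list branch (assuming the Hive version is resolvable from the classpath; the inspector and element type below are chosen purely for illustration):

HiveShim shim = HiveShimLoader.loadHiveShim(HiveShimLoader.getHiveVersion());
ObjectInspector listInspector = ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaIntObjectInspector);
HiveObjectConversion conversion = HiveInspectors.getConversion(listInspector, new ArrayType(new IntType()), shim);
// Flink hands over an Object[]; the conversion produces a Hive-friendly List.
Object hiveList = conversion.toHiveObject(new Object[] {1, 2, 3}); // -> [1, 2, 3]
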
Also used : DataType(org.apache.flink.table.types.DataType) VoidObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector) Array(java.lang.reflect.Array) WritableConstantHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveVarcharObjectInspector) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) WritableConstantTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantTimestampObjectInspector) MapType(org.apache.flink.table.types.logical.MapType) WritableConstantDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDateObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) WritableConstantByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantByteObjectInspector) CharType(org.apache.flink.table.types.logical.CharType) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) BigDecimal(java.math.BigDecimal) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) Map(java.util.Map) WritableConstantLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantLongObjectInspector) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveTypeUtil(org.apache.flink.table.catalog.hive.util.HiveTypeUtil) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) WritableConstantHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveCharObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) WritableConstantBinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBinaryObjectInspector) List(java.util.List) HiveReflectionUtils(org.apache.flink.table.catalog.hive.util.HiveReflectionUtils) LogicalType(org.apache.flink.table.types.logical.LogicalType) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) WritableConstantDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDoubleObjectInspector) WritableConstantFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantFloatObjectInspector) WritableConstantBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBooleanObjectInspector) Row(org.apache.flink.types.Row) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) TypeInfoUtils(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) 
HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) HashMap(java.util.HashMap) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) RowType(org.apache.flink.table.types.logical.RowType) WritableConstantHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveDecimalObjectInspector) Constructor(java.lang.reflect.Constructor) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim) ArrayList(java.util.ArrayList) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) Nonnull(javax.annotation.Nonnull) WritableConstantShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantShortObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) WritableConstantStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantStringObjectInspector) VarCharType(org.apache.flink.table.types.logical.VarCharType) ArrayType(org.apache.flink.table.types.logical.ArrayType) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FlinkHiveUDFException(org.apache.flink.table.functions.hive.FlinkHiveUDFException) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) WritableConstantIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector) ObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory) PrimitiveObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) Internal(org.apache.flink.annotation.Internal) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) WritableConstantLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantLongObjectInspector) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) WritableConstantDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDateObjectInspector) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) WritableConstantIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) WritableConstantBinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBinaryObjectInspector) 
BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) RowType(org.apache.flink.table.types.logical.RowType) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) WritableConstantStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantStringObjectInspector) MapType(org.apache.flink.table.types.logical.MapType) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) WritableConstantFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantFloatObjectInspector) WritableConstantByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantByteObjectInspector) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) WritableConstantTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantTimestampObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) WritableConstantShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantShortObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) WritableConstantHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveDecimalObjectInspector) List(java.util.List) ArrayList(java.util.ArrayList) WritableConstantHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveCharObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) FlinkHiveUDFException(org.apache.flink.table.functions.hive.FlinkHiveUDFException) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) BigDecimal(java.math.BigDecimal) WritableConstantDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDoubleObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) WritableConstantHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveVarcharObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) VoidObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector) CharType(org.apache.flink.table.types.logical.CharType) VarCharType(org.apache.flink.table.types.logical.VarCharType) Row(org.apache.flink.types.Row) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) WritableConstantBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBooleanObjectInspector) Map(java.util.Map) HashMap(java.util.HashMap) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) 
StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)

Aggregations

HiveShim (org.apache.flink.table.catalog.hive.client.HiveShim): 13
DataType (org.apache.flink.table.types.DataType): 5
List (java.util.List): 4
ArrayList (java.util.ArrayList): 3
RelDataType (org.apache.calcite.rel.type.RelDataType): 3
VisibleForTesting (org.apache.flink.annotation.VisibleForTesting): 3
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 3
CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException): 3
JobConf (org.apache.hadoop.mapred.JobConf): 3
Logger (org.slf4j.Logger): 3
LoggerFactory (org.slf4j.LoggerFactory): 3
BigDecimal (java.math.BigDecimal): 2
Arrays (java.util.Arrays): 2
HashMap (java.util.HashMap): 2
Optional (java.util.Optional): 2
SqlIntervalQualifier (org.apache.calcite.sql.SqlIntervalQualifier): 2
SqlParserPos (org.apache.calcite.sql.parser.SqlParserPos): 2
Internal (org.apache.flink.annotation.Internal): 2
Configuration (org.apache.flink.configuration.Configuration): 2
ReadableConfig (org.apache.flink.configuration.ReadableConfig): 2