
Example 36 with CatalogException

use of org.apache.flink.table.catalog.exceptions.CatalogException in project flink by apache.

the class HiveShimV100 method getHiveRecordWriter.

@Override
public FileSinkOperator.RecordWriter getHiveRecordWriter(JobConf jobConf, Class outputFormatClz, Class<? extends Writable> outValClz, boolean isCompressed, Properties tableProps, Path outPath) {
    try {
        Class utilClass = HiveFileFormatUtils.class;
        HiveOutputFormat outputFormat = (HiveOutputFormat) outputFormatClz.newInstance();
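        // Resolve the utility method reflectively so the shim can bind to a signature that may differ across Hive versions.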
        Method utilMethod = utilClass.getDeclaredMethod("getRecordWriter", JobConf.class, HiveOutputFormat.class, Class.class, boolean.class, Properties.class, Path.class, Reporter.class);
        return (FileSinkOperator.RecordWriter) utilMethod.invoke(null, jobConf, outputFormat, outValClz, isCompressed, tableProps, outPath, Reporter.NULL);
    } catch (Exception e) {
        throw new CatalogException("Failed to create Hive RecordWriter", e);
    }
}
Also used : HiveFileFormatUtils(org.apache.hadoop.hive.ql.io.HiveFileFormatUtils) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) HiveOutputFormat(org.apache.hadoop.hive.ql.io.HiveOutputFormat) Method(java.lang.reflect.Method) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) FlinkHiveException(org.apache.flink.connectors.hive.FlinkHiveException) InvocationTargetException(java.lang.reflect.InvocationTargetException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException)
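
For context, a minimal, hypothetical caller of the shim method above. The output format, value class, and target path are illustrative placeholders, not taken from the Flink sources, and the sketch assumes flink-connector-hive plus the Hive and Hadoop client jars are on the classpath.

// Hypothetical usage sketch; the classes and path below are placeholders.
import java.util.Properties;
import org.apache.flink.table.catalog.hive.client.HiveShimV100;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;

public class RecordWriterDemo {
    public static void main(String[] args) throws Exception {
        // Ask the shim for a writer that emits plain text files.
        FileSinkOperator.RecordWriter writer =
                new HiveShimV100().getHiveRecordWriter(
                        new JobConf(),
                        HiveIgnoreKeyTextOutputFormat.class,
                        Text.class,
                        // isCompressed
                        false,
                        new Properties(),
                        new Path("/tmp/hive-out/part-00000"));
        writer.write(new Text("hello"));
        // false = normal close, not an abort
        writer.close(false);
    }
}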

Example 37 with CatalogException

use of org.apache.flink.table.catalog.exceptions.CatalogException in project flink by apache.

the class AbstractJdbcCatalog method getTable.

// ------ tables and views ------
@Override
public CatalogBaseTable getTable(ObjectPath tablePath) throws TableNotExistException, CatalogException {
    if (!tableExists(tablePath)) {
        throw new TableNotExistException(getName(), tablePath);
    }
    String dbUrl = baseUrl + tablePath.getDatabaseName();
    try (Connection conn = DriverManager.getConnection(dbUrl, username, pwd)) {
        DatabaseMetaData metaData = conn.getMetaData();
        Optional<UniqueConstraint> primaryKey = getPrimaryKey(metaData, getSchemaName(tablePath), getTableName(tablePath));
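        // Prepare (but never execute) a SELECT so the driver exposes the table's column metadata.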
        PreparedStatement ps = conn.prepareStatement(String.format("SELECT * FROM %s;", getSchemaTableName(tablePath)));
        ResultSetMetaData resultSetMetaData = ps.getMetaData();
        String[] columnNames = new String[resultSetMetaData.getColumnCount()];
        DataType[] types = new DataType[resultSetMetaData.getColumnCount()];
        for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
            columnNames[i - 1] = resultSetMetaData.getColumnName(i);
            types[i - 1] = fromJDBCType(tablePath, resultSetMetaData, i);
            if (resultSetMetaData.isNullable(i) == ResultSetMetaData.columnNoNulls) {
                types[i - 1] = types[i - 1].notNull();
            }
        }
        Schema.Builder schemaBuilder = Schema.newBuilder().fromFields(columnNames, types);
        primaryKey.ifPresent(pk -> schemaBuilder.primaryKeyNamed(pk.getName(), pk.getColumns()));
        Schema tableSchema = schemaBuilder.build();
        Map<String, String> props = new HashMap<>();
        props.put(CONNECTOR.key(), IDENTIFIER);
        props.put(URL.key(), dbUrl);
        props.put(USERNAME.key(), username);
        props.put(PASSWORD.key(), pwd);
        props.put(TABLE_NAME.key(), getSchemaTableName(tablePath));
        return CatalogTable.of(tableSchema, null, Lists.newArrayList(), props);
    } catch (Exception e) {
        throw new CatalogException(String.format("Failed getting table %s", tablePath.getFullName()), e);
    }
}
Also used : HashMap(java.util.HashMap) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) Schema(org.apache.flink.table.api.Schema) Connection(java.sql.Connection) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) UniqueConstraint(org.apache.flink.table.catalog.UniqueConstraint) PreparedStatement(java.sql.PreparedStatement) DatabaseMetaData(java.sql.DatabaseMetaData) FunctionAlreadyExistException(org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException) PartitionNotExistException(org.apache.flink.table.catalog.exceptions.PartitionNotExistException) PartitionSpecInvalidException(org.apache.flink.table.catalog.exceptions.PartitionSpecInvalidException) TablePartitionedException(org.apache.flink.table.catalog.exceptions.TablePartitionedException) FunctionNotExistException(org.apache.flink.table.catalog.exceptions.FunctionNotExistException) DatabaseNotEmptyException(org.apache.flink.table.catalog.exceptions.DatabaseNotEmptyException) DatabaseAlreadyExistException(org.apache.flink.table.catalog.exceptions.DatabaseAlreadyExistException) TableNotPartitionedException(org.apache.flink.table.catalog.exceptions.TableNotPartitionedException) ValidationException(org.apache.flink.table.api.ValidationException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) PartitionAlreadyExistsException(org.apache.flink.table.catalog.exceptions.PartitionAlreadyExistsException) SQLException(java.sql.SQLException) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) ResultSetMetaData(java.sql.ResultSetMetaData) DataType(org.apache.flink.table.types.DataType)
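
The noteworthy trick in this example is that the statement is only prepared, never executed: ResultSetMetaData is available from the prepared statement alone. A standalone sketch of that pattern, where the URL, credentials, and table name are placeholders:

// Minimal sketch of the metadata-probing pattern above (placeholder connection details).
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSetMetaData;

public class MetadataProbe {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                "jdbc:postgresql://localhost:5432/mydb", "user", "secret")) {
            // Preparing the statement is enough to obtain column metadata;
            // the query itself is never run.
            PreparedStatement ps = conn.prepareStatement("SELECT * FROM public.my_table");
            ResultSetMetaData md = ps.getMetaData();
            for (int i = 1; i <= md.getColumnCount(); i++) {
                System.out.printf("%s: %s (nullable=%d)%n",
                        md.getColumnName(i), md.getColumnTypeName(i), md.isNullable(i));
            }
        }
    }
}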

Example 38 with CatalogException

use of org.apache.flink.table.catalog.exceptions.CatalogException in project flink by apache.

the class MySqlCatalog method getDriverVersion.

private String getDriverVersion() {
    try (Connection conn = DriverManager.getConnection(defaultUrl, username, pwd)) {
        String driverVersion = conn.getMetaData().getDriverVersion();
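        // Pull a dotted numeric version (e.g. "8.0.28") out of the full driver version string.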
        Pattern regexp = Pattern.compile("\\d+?\\.\\d+?\\.\\d+");
        Matcher matcher = regexp.matcher(driverVersion);
        return matcher.find() ? matcher.group(0) : null;
    } catch (Exception e) {
        throw new CatalogException(String.format("Failed in getting MySQL driver version by %s.", defaultUrl), e);
    }
}
Also used : Pattern(java.util.regex.Pattern) Matcher(java.util.regex.Matcher) Connection(java.sql.Connection) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) SQLException(java.sql.SQLException)
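
The regex grabs the first dotted three-part version number from whatever the driver reports. A quick, self-contained check of that behavior; the sample driver string is made up for illustration:

// Demo of the version-extraction regex used above (illustrative input).
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class DriverVersionRegexDemo {
    public static void main(String[] args) {
        String driverVersion = "mysql-connector-java-8.0.28 (Revision: abc123)";
        Matcher matcher = Pattern.compile("\\d+?\\.\\d+?\\.\\d+").matcher(driverVersion);
        // Prints 8.0.28; prints null if no dotted version is present.
        System.out.println(matcher.find() ? matcher.group(0) : null);
    }
}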

Example 39 with CatalogException

use of org.apache.flink.table.catalog.exceptions.CatalogException in project flink by apache.

the class HiveInspectors method getObjectInspector.

public static ObjectInspector getObjectInspector(TypeInfo type) {
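    // Recursively map a Hive TypeInfo to the matching ObjectInspector tree.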
    switch(type.getCategory()) {
        case PRIMITIVE:
            PrimitiveTypeInfo primitiveType = (PrimitiveTypeInfo) type;
            return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(primitiveType);
        case LIST:
            ListTypeInfo listType = (ListTypeInfo) type;
            return ObjectInspectorFactory.getStandardListObjectInspector(getObjectInspector(listType.getListElementTypeInfo()));
        case MAP:
            MapTypeInfo mapType = (MapTypeInfo) type;
            return ObjectInspectorFactory.getStandardMapObjectInspector(getObjectInspector(mapType.getMapKeyTypeInfo()), getObjectInspector(mapType.getMapValueTypeInfo()));
        case STRUCT:
            StructTypeInfo structType = (StructTypeInfo) type;
            List<TypeInfo> fieldTypes = structType.getAllStructFieldTypeInfos();
            List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
            for (TypeInfo fieldType : fieldTypes) {
                fieldInspectors.add(getObjectInspector(fieldType));
            }
            return ObjectInspectorFactory.getStandardStructObjectInspector(structType.getAllStructFieldNames(), fieldInspectors);
        default:
            throw new CatalogException("Unsupported Hive type category " + type.getCategory());
    }
}
Also used : VoidObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector) WritableConstantHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveVarcharObjectInspector) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) WritableConstantTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantTimestampObjectInspector) WritableConstantDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDateObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) WritableConstantByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantByteObjectInspector) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) WritableConstantLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantLongObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) WritableConstantHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveCharObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) WritableConstantBinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBinaryObjectInspector) WritableConstantDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDoubleObjectInspector) WritableConstantFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantFloatObjectInspector) WritableConstantBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBooleanObjectInspector) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) WritableConstantHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveDecimalObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) WritableConstantShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantShortObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) WritableConstantStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantStringObjectInspector) WritableConstantIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) ArrayList(java.util.ArrayList) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)
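
Since getObjectInspector recurses through nested types, a single composite type string exercises the MAP, LIST, and PRIMITIVE branches at once. A minimal sketch, assuming HiveInspectors from flink-connector-hive and the Hive serde jars are on the classpath:

// Illustrative only; assumes HiveInspectors is importable as below.
import org.apache.flink.table.functions.hive.conversion.HiveInspectors;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class InspectorDemo {
    public static void main(String[] args) {
        // Parse a nested Hive type string and build the matching inspector tree.
        TypeInfo type = TypeInfoUtils.getTypeInfoFromTypeString("map<string,array<int>>");
        ObjectInspector oi = HiveInspectors.getObjectInspector(type);
        // Expected to print: map<string,array<int>>
        System.out.println(oi.getTypeName());
    }
}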

Example 40 with CatalogException

use of org.apache.flink.table.catalog.exceptions.CatalogException in project flink by apache.

the class HiveShimV100 method getFieldsFromDeserializer.

@Override
public List<FieldSchema> getFieldsFromDeserializer(Configuration conf, Table table, boolean skipConfError) {
    try {
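        // Both metastore utility methods are resolved reflectively; the class hosting them differs across Hive versions.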
        Method utilMethod = getHiveMetaStoreUtilsClass().getMethod("getDeserializer", Configuration.class, Table.class);
        Deserializer deserializer = (Deserializer) utilMethod.invoke(null, conf, table);
        utilMethod = getHiveMetaStoreUtilsClass().getMethod("getFieldsFromDeserializer", String.class, Deserializer.class);
        return (List<FieldSchema>) utilMethod.invoke(null, table.getTableName(), deserializer);
    } catch (Exception e) {
        throw new CatalogException("Failed to get table schema from deserializer", e);
    }
}
Also used : Deserializer(org.apache.hadoop.hive.serde2.Deserializer) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) List(java.util.List) ArrayList(java.util.ArrayList) Method(java.lang.reflect.Method) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) FlinkHiveException(org.apache.flink.connectors.hive.FlinkHiveException) InvocationTargetException(java.lang.reflect.InvocationTargetException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException)
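
Both HiveShimV100 examples lean on the same pattern: look up a static utility method by name and invoke it with a null receiver, so the code compiles even when the hosting class or signature moves between Hive versions. A stripped-down, generic illustration of that pattern; the names here are hypothetical:

// Generic sketch of the reflective static-invoke pattern used by the shims.
import java.lang.reflect.Method;

public class ReflectiveShimDemo {

    // Invoke a static method resolved at runtime rather than compile time.
    static Object invokeStatic(Class<?> clazz, String name, Class<?>[] signature, Object... args) {
        try {
            Method method = clazz.getMethod(name, signature);
            // First argument is the receiver; null because the method is static.
            return method.invoke(null, args);
        } catch (Exception e) {
            throw new IllegalStateException("Failed to invoke " + clazz.getName() + "#" + name, e);
        }
    }

    public static void main(String[] args) {
        // Example: Integer.parseInt resolved reflectively; prints 42.
        Object result = invokeStatic(Integer.class, "parseInt",
                new Class<?>[] { String.class }, "42");
        System.out.println(result);
    }
}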

Aggregations

CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException): 53
TException (org.apache.thrift.TException): 28
TableNotExistException (org.apache.flink.table.catalog.exceptions.TableNotExistException): 16
Table (org.apache.hadoop.hive.metastore.api.Table): 15
InvocationTargetException (java.lang.reflect.InvocationTargetException): 14
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 14
Method (java.lang.reflect.Method): 13
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 13
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable): 12
PartitionNotExistException (org.apache.flink.table.catalog.exceptions.PartitionNotExistException): 9
PartitionSpecInvalidException (org.apache.flink.table.catalog.exceptions.PartitionSpecInvalidException): 9
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 9
ArrayList (java.util.ArrayList): 8
List (java.util.List): 8
FlinkHiveException (org.apache.flink.connectors.hive.FlinkHiveException): 8
DatabaseNotExistException (org.apache.flink.table.catalog.exceptions.DatabaseNotExistException): 8
CatalogPartition (org.apache.flink.table.catalog.CatalogPartition): 7
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 7
PartitionAlreadyExistsException (org.apache.flink.table.catalog.exceptions.PartitionAlreadyExistsException): 6
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 6