Search in sources:

Example 16 with CatalogException

Use of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project.

From the class HiveShimV120, method toHiveDateColStats.

@Override
public ColumnStatisticsData toHiveDateColStats(CatalogColumnStatisticsDataDate flinkDateColStats) {
    // Converts Flink's date-column statistics into Hive's DateColumnStatsData.
    // Reflection is used because DateColumnStatsData and the metastore Date type
    // are only present in Hive 1.2.0+, and this shim must load against older Hive jars.
    try {
        Class<?> dateStatsClz = Class.forName("org.apache.hadoop.hive.metastore.api.DateColumnStatsData");
        Object dateStats = dateStatsClz.getDeclaredConstructor().newInstance();
        // Reset the thrift struct so optional fields we skip stay "unset".
        dateStatsClz.getMethod("clear").invoke(dateStats);
        if (null != flinkDateColStats.getNdv()) {
            dateStatsClz.getMethod("setNumDVs", long.class).invoke(dateStats, flinkDateColStats.getNdv());
        }
        if (null != flinkDateColStats.getNullCount()) {
            dateStatsClz.getMethod("setNumNulls", long.class).invoke(dateStats, flinkDateColStats.getNullCount());
        }
        Class<?> hmsDateClz = Class.forName("org.apache.hadoop.hive.metastore.api.Date");
        Constructor<?> hmsDateConstructor = hmsDateClz.getConstructor(long.class);
        if (null != flinkDateColStats.getMax()) {
            Method setHigh = dateStatsClz.getDeclaredMethod("setHighValue", hmsDateClz);
            setHigh.invoke(dateStats, hmsDateConstructor.newInstance(flinkDateColStats.getMax().getDaysSinceEpoch()));
        }
        if (null != flinkDateColStats.getMin()) {
            Method setLow = dateStatsClz.getDeclaredMethod("setLowValue", hmsDateClz);
            setLow.invoke(dateStats, hmsDateConstructor.newInstance(flinkDateColStats.getMin().getDaysSinceEpoch()));
        }
        // ColumnStatisticsData.dateStats(...) is a static thrift-union factory.
        Class<?> colStatsClz = ColumnStatisticsData.class;
        return (ColumnStatisticsData) colStatsClz.getDeclaredMethod("dateStats", dateStatsClz).invoke(null, dateStats);
    } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException e) {
        throw new CatalogException("Failed to create Hive statistics for date column", e);
    }
}
Also used : Constructor(java.lang.reflect.Constructor) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) Method(java.lang.reflect.Method) ColumnStatisticsData(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData) InvocationTargetException(java.lang.reflect.InvocationTargetException)

Example 17 with CatalogException

Use of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project.

From the class HiveShimV120, method toFlinkDateColStats.

@Override
public CatalogColumnStatisticsDataDate toFlinkDateColStats(ColumnStatisticsData hiveDateColStats) {
    // Converts Hive's DateColumnStatsData back into Flink's date-column statistics.
    // Reflection is used because the date-stats types only exist in Hive 1.2.0+.
    try {
        Object dateStats = ColumnStatisticsData.class.getDeclaredMethod("getDateStats").invoke(hiveDateColStats);
        Class<?> dateStatsClz = dateStats.getClass();
        boolean isSetNumDv = (boolean) dateStatsClz.getMethod("isSetNumDVs").invoke(dateStats);
        boolean isSetNumNull = (boolean) dateStatsClz.getMethod("isSetNumNulls").invoke(dateStats);
        boolean isSetHighValue = (boolean) dateStatsClz.getMethod("isSetHighValue").invoke(dateStats);
        boolean isSetLowValue = (boolean) dateStatsClz.getMethod("isSetLowValue").invoke(dateStats);
        Long numDV = isSetNumDv ? (Long) dateStatsClz.getMethod("getNumDVs").invoke(dateStats) : null;
        Long numNull = isSetNumNull ? (Long) dateStatsClz.getMethod("getNumNulls").invoke(dateStats) : null;
        Date highDateDays = null;
        Date lowDateDays = null;
        if (isSetHighValue || isSetLowValue) {
            Object hmsHighDate = dateStatsClz.getMethod("getHighValue").invoke(dateStats);
            Object hmsLowDate = dateStatsClz.getMethod("getLowValue").invoke(dateStats);
            // Look up getDaysSinceEpoch on whichever bound is actually set; the
            // original unconditionally used the high value's class, which NPEs
            // when only the low bound is present.
            Object anySetDate = isSetHighValue ? hmsHighDate : hmsLowDate;
            Method hmsDateDays = anySetDate.getClass().getMethod("getDaysSinceEpoch");
            if (isSetHighValue) {
                highDateDays = new Date((Long) hmsDateDays.invoke(hmsHighDate));
            }
            if (isSetLowValue) {
                lowDateDays = new Date((Long) hmsDateDays.invoke(hmsLowDate));
            }
        }
        return new CatalogColumnStatisticsDataDate(lowDateDays, highDateDays, numDV, numNull);
    } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
        throw new CatalogException("Failed to create Flink statistics for date column", e);
    }
}
Also used : CatalogColumnStatisticsDataDate(org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataDate) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) Method(java.lang.reflect.Method) Date(org.apache.flink.table.catalog.stats.Date) CatalogColumnStatisticsDataDate(org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataDate) InvocationTargetException(java.lang.reflect.InvocationTargetException) ColumnStatisticsData(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData)

Example 18 with CatalogException

Use of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project.

From the class HiveShimV310, method createTableWithConstraints.

@Override
public void createTableWithConstraints(IMetaStoreClient client, Table table, Configuration conf, UniqueConstraint pk, List<Byte> pkTraits, List<String> notNullCols, List<Byte> nnTraits) {
    // Creates a Hive table with primary-key and not-null constraints via the
    // reflective client API (signature varies across Hive versions).
    try {
        List<Object> primaryKeys = createHivePKs(table, pk, pkTraits);
        List<Object> notNullConstraints = createHiveNNs(table, conf, notNullCols, nnTraits);
        // The metastore's createTableWithConstraints expects, in order:
        // PK, FK, UNIQUE, NOT NULL, DEFAULT, CHECK constraint lists.
        Class[] paramTypes = new Class[] { Table.class, List.class, List.class, List.class, List.class, List.class, List.class };
        Object[] args = new Object[] { table, primaryKeys, Collections.emptyList(), Collections.emptyList(), notNullConstraints, Collections.emptyList(), Collections.emptyList() };
        HiveReflectionUtils.invokeMethod(client.getClass(), client, "createTableWithConstraints", paramTypes, args);
    } catch (Exception e) {
        throw new CatalogException("Failed to create Hive table with constraints", e);
    }
}
Also used : CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) FlinkHiveException(org.apache.flink.connectors.hive.FlinkHiveException) InvocationTargetException(java.lang.reflect.InvocationTargetException) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException)

Example 19 with CatalogException

Use of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project.

From the class HiveShimV200, method getHiveMetastoreClient.

@Override
public IMetaStoreClient getHiveMetastoreClient(HiveConf hiveConf) {
    // Builds a retrying metastore client reflectively, since getProxy's
    // signature differs between Hive versions.
    try {
        Class<?>[] ctorTypes = new Class<?>[] { HiveConf.class };
        Object[] ctorArgs = new Object[] { hiveConf };
        Method getProxy = RetryingMetaStoreClient.class.getMethod("getProxy", HiveConf.class, ctorTypes.getClass(), ctorArgs.getClass(), String.class);
        // getProxy is static, so the receiver is null.
        Object proxy = getProxy.invoke(null, hiveConf, ctorTypes, ctorArgs, HiveMetaStoreClient.class.getName());
        return (IMetaStoreClient) proxy;
    } catch (Exception ex) {
        throw new CatalogException("Failed to create Hive Metastore client", ex);
    }
}
Also used : CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) Method(java.lang.reflect.Method) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException)

Example 20 with CatalogException

Use of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project.

From the class HiveShimV210, method getPrimaryKey.

@Override
public Optional<UniqueConstraint> getPrimaryKey(IMetaStoreClient client, String dbName, String tableName, byte requiredTrait) {
    // Fetches the table's primary-key constraint via the reflective metastore
    // API (PrimaryKeysRequest/SQLPrimaryKey only exist in Hive 2.1+). Returns
    // empty when there is no PK, or when any PK column fails a required trait.
    try {
        Class<?> requestClz = Class.forName("org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest");
        Object request = requestClz.getDeclaredConstructor(String.class, String.class).newInstance(dbName, tableName);
        List<?> constraints = (List<?>) HiveReflectionUtils.invokeMethod(client.getClass(), client, "getPrimaryKeys", new Class[] { requestClz }, new Object[] { request });
        if (constraints.isEmpty()) {
            return Optional.empty();
        }
        Class<?> constraintClz = Class.forName("org.apache.hadoop.hive.metastore.api.SQLPrimaryKey");
        Method colNameMethod = constraintClz.getDeclaredMethod("getColumn_name");
        Method isEnableMethod = constraintClz.getDeclaredMethod("isEnable_cstr");
        Method isValidateMethod = constraintClz.getDeclaredMethod("isValidate_cstr");
        Method isRelyMethod = constraintClz.getDeclaredMethod("isRely_cstr");
        List<String> colNames = new ArrayList<>();
        for (Object constraint : constraints) {
            // check whether a constraint satisfies all the traits the caller specified
            boolean satisfy = !HiveTableUtil.requireEnableConstraint(requiredTrait) || (boolean) isEnableMethod.invoke(constraint);
            if (satisfy) {
                satisfy = !HiveTableUtil.requireValidateConstraint(requiredTrait) || (boolean) isValidateMethod.invoke(constraint);
            }
            if (satisfy) {
                satisfy = !HiveTableUtil.requireRelyConstraint(requiredTrait) || (boolean) isRelyMethod.invoke(constraint);
            }
            if (satisfy) {
                colNames.add((String) colNameMethod.invoke(constraint));
            } else {
                return Optional.empty();
            }
        }
        // all pk constraints should have the same name, so let's use the name of the first one
        String pkName = (String) HiveReflectionUtils.invokeMethod(constraintClz, constraints.get(0), "getPk_name", null, null);
        return Optional.of(UniqueConstraint.primaryKey(pkName, colNames));
    } catch (Throwable t) {
        // Unwrap the reflective wrapper, but keep the wrapper if it carries no
        // cause — otherwise the thrown CatalogException would lose the failure.
        if (t instanceof InvocationTargetException && t.getCause() != null) {
            t = t.getCause();
        }
        // Older metastores predate getPrimaryKeys; thrift reports the missing
        // RPC as "Invalid method name", which we treat as "no PK defined".
        if (t instanceof TApplicationException && t.getMessage() != null && t.getMessage().contains("Invalid method name")) {
            return Optional.empty();
        }
        throw new CatalogException("Failed to get PrimaryKey constraints", t);
    }
}
Also used : ArrayList(java.util.ArrayList) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) Method(java.lang.reflect.Method) InvocationTargetException(java.lang.reflect.InvocationTargetException) TApplicationException(org.apache.thrift.TApplicationException) ArrayList(java.util.ArrayList) List(java.util.List)

Aggregations

CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException)53 TException (org.apache.thrift.TException)28 TableNotExistException (org.apache.flink.table.catalog.exceptions.TableNotExistException)16 Table (org.apache.hadoop.hive.metastore.api.Table)15 InvocationTargetException (java.lang.reflect.InvocationTargetException)14 CatalogTable (org.apache.flink.table.catalog.CatalogTable)14 Method (java.lang.reflect.Method)13 CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable)13 SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable)12 PartitionNotExistException (org.apache.flink.table.catalog.exceptions.PartitionNotExistException)9 PartitionSpecInvalidException (org.apache.flink.table.catalog.exceptions.PartitionSpecInvalidException)9 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)9 ArrayList (java.util.ArrayList)8 List (java.util.List)8 FlinkHiveException (org.apache.flink.connectors.hive.FlinkHiveException)8 DatabaseNotExistException (org.apache.flink.table.catalog.exceptions.DatabaseNotExistException)8 CatalogPartition (org.apache.flink.table.catalog.CatalogPartition)7 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)7 PartitionAlreadyExistsException (org.apache.flink.table.catalog.exceptions.PartitionAlreadyExistsException)6 InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)6