Usage example of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project: class HiveShimV120, method toHiveDateColStats.
@Override
public ColumnStatisticsData toHiveDateColStats(CatalogColumnStatisticsDataDate flinkDateColStats) {
    try {
        // DateColumnStatsData and the metastore Date class are only present in Hive 1.2.0+,
        // so they are accessed reflectively to keep this shim compilable against older Hive.
        // Note: Class<?>/Constructor<?> instead of raw types (Effective Java, Item 26).
        Class<?> dateStatsClz = Class.forName("org.apache.hadoop.hive.metastore.api.DateColumnStatsData");
        Object dateStats = dateStatsClz.getDeclaredConstructor().newInstance();
        // clear() resets the Thrift struct so that only explicitly-set fields are serialized
        dateStatsClz.getMethod("clear").invoke(dateStats);
        if (null != flinkDateColStats.getNdv()) {
            dateStatsClz.getMethod("setNumDVs", long.class).invoke(dateStats, flinkDateColStats.getNdv());
        }
        if (null != flinkDateColStats.getNullCount()) {
            dateStatsClz.getMethod("setNumNulls", long.class).invoke(dateStats, flinkDateColStats.getNullCount());
        }
        Class<?> hmsDateClz = Class.forName("org.apache.hadoop.hive.metastore.api.Date");
        Constructor<?> hmsDateConstructor = hmsDateClz.getConstructor(long.class);
        if (null != flinkDateColStats.getMax()) {
            Method setHigh = dateStatsClz.getDeclaredMethod("setHighValue", hmsDateClz);
            setHigh.invoke(dateStats, hmsDateConstructor.newInstance(flinkDateColStats.getMax().getDaysSinceEpoch()));
        }
        if (null != flinkDateColStats.getMin()) {
            Method setLow = dateStatsClz.getDeclaredMethod("setLowValue", hmsDateClz);
            setLow.invoke(dateStats, hmsDateConstructor.newInstance(flinkDateColStats.getMin().getDaysSinceEpoch()));
        }
        Class<?> colStatsClz = ColumnStatisticsData.class;
        // ColumnStatisticsData.dateStats(...) is a static Thrift-union factory, hence invoke(null, ...)
        return (ColumnStatisticsData) colStatsClz.getDeclaredMethod("dateStats", dateStatsClz).invoke(null, dateStats);
    } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException e) {
        throw new CatalogException("Failed to create Hive statistics for date column", e);
    }
}
Usage example of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project: class HiveShimV120, method toFlinkDateColStats.
@Override
public CatalogColumnStatisticsDataDate toFlinkDateColStats(ColumnStatisticsData hiveDateColStats) {
    try {
        // The date-stats accessors are reflective because DateColumnStatsData only exists in Hive 1.2.0+.
        Object dateStats = ColumnStatisticsData.class.getDeclaredMethod("getDateStats").invoke(hiveDateColStats);
        Class<?> dateStatsClz = dateStats.getClass();
        boolean isSetNumDv = (boolean) dateStatsClz.getMethod("isSetNumDVs").invoke(dateStats);
        boolean isSetNumNull = (boolean) dateStatsClz.getMethod("isSetNumNulls").invoke(dateStats);
        boolean isSetHighValue = (boolean) dateStatsClz.getMethod("isSetHighValue").invoke(dateStats);
        boolean isSetLowValue = (boolean) dateStatsClz.getMethod("isSetLowValue").invoke(dateStats);
        Long numDV = isSetNumDv ? (Long) dateStatsClz.getMethod("getNumDVs").invoke(dateStats) : null;
        Long numNull = isSetNumNull ? (Long) dateStatsClz.getMethod("getNumNulls").invoke(dateStats) : null;
        // Only touch high/low values when the corresponding Thrift field is actually set;
        // otherwise getHighValue()/getLowValue() return null and calling getClass()/getDaysSinceEpoch
        // on the result would throw NullPointerException.
        Date highDateDays = null;
        if (isSetHighValue) {
            Object hmsHighDate = dateStatsClz.getMethod("getHighValue").invoke(dateStats);
            Method hmsDateDays = hmsHighDate.getClass().getMethod("getDaysSinceEpoch");
            highDateDays = new Date((Long) hmsDateDays.invoke(hmsHighDate));
        }
        Date lowDateDays = null;
        if (isSetLowValue) {
            Object hmsLowDate = dateStatsClz.getMethod("getLowValue").invoke(dateStats);
            Method hmsDateDays = hmsLowDate.getClass().getMethod("getDaysSinceEpoch");
            lowDateDays = new Date((Long) hmsDateDays.invoke(hmsLowDate));
        }
        return new CatalogColumnStatisticsDataDate(lowDateDays, highDateDays, numDV, numNull);
    } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
        throw new CatalogException("Failed to create Flink statistics for date column", e);
    }
}
Usage example of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project: class HiveShimV310, method createTableWithConstraints.
@Override
public void createTableWithConstraints(IMetaStoreClient client, Table table, Configuration conf, UniqueConstraint pk, List<Byte> pkTraits, List<String> notNullCols, List<Byte> nnTraits) {
    try {
        // Translate Flink's constraint representation into Hive metastore objects.
        List<Object> primaryKeys = createHivePKs(table, pk, pkTraits);
        List<Object> notNullConstraints = createHiveNNs(table, conf, notNullCols, nnTraits);
        // Hive's createTableWithConstraints(Table, PK, FK, UNIQUE, NN, DEFAULT, CHECK) is invoked
        // reflectively; only PK and NOT NULL constraints are supported, the rest are empty.
        Class[] paramTypes = { Table.class, List.class, List.class, List.class, List.class, List.class, List.class };
        Object[] args = { table, primaryKeys, Collections.emptyList(), Collections.emptyList(), notNullConstraints, Collections.emptyList(), Collections.emptyList() };
        HiveReflectionUtils.invokeMethod(client.getClass(), client, "createTableWithConstraints", paramTypes, args);
    } catch (Exception e) {
        throw new CatalogException("Failed to create Hive table with constraints", e);
    }
}
Usage example of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project: class HiveShimV200, method getHiveMetastoreClient.
@Override
public IMetaStoreClient getHiveMetastoreClient(HiveConf hiveConf) {
    try {
        // The 2.x signature is getProxy(HiveConf, Class[], Object[], String); look it up
        // reflectively so this shim also compiles against other Hive versions.
        Method getProxy = RetryingMetaStoreClient.class.getMethod("getProxy", HiveConf.class, Class[].class, Object[].class, String.class);
        Class<?>[] ctorArgTypes = { HiveConf.class };
        Object[] ctorArgs = { hiveConf };
        // getProxy is static, so the receiver passed to invoke() is null.
        return (IMetaStoreClient) getProxy.invoke(null, hiveConf, ctorArgTypes, ctorArgs, HiveMetaStoreClient.class.getName());
    } catch (Exception ex) {
        throw new CatalogException("Failed to create Hive Metastore client", ex);
    }
}
Usage example of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project: class HiveShimV210, method getPrimaryKey.
@Override
public Optional<UniqueConstraint> getPrimaryKey(IMetaStoreClient client, String dbName, String tableName, byte requiredTrait) {
    try {
        // PrimaryKeysRequest/SQLPrimaryKey only exist in Hive 2.1.0+, hence the reflective access.
        // Class<?> instead of raw Class (Effective Java, Item 26).
        Class<?> requestClz = Class.forName("org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest");
        Object request = requestClz.getDeclaredConstructor(String.class, String.class).newInstance(dbName, tableName);
        List<?> constraints = (List<?>) HiveReflectionUtils.invokeMethod(client.getClass(), client, "getPrimaryKeys", new Class[] { requestClz }, new Object[] { request });
        if (constraints.isEmpty()) {
            return Optional.empty();
        }
        Class<?> constraintClz = Class.forName("org.apache.hadoop.hive.metastore.api.SQLPrimaryKey");
        Method colNameMethod = constraintClz.getDeclaredMethod("getColumn_name");
        Method isEnableMethod = constraintClz.getDeclaredMethod("isEnable_cstr");
        Method isValidateMethod = constraintClz.getDeclaredMethod("isValidate_cstr");
        Method isRelyMethod = constraintClz.getDeclaredMethod("isRely_cstr");
        List<String> colNames = new ArrayList<>();
        for (Object constraint : constraints) {
            // A constraint satisfies the request only if it has every trait the caller requires.
            boolean satisfy = !HiveTableUtil.requireEnableConstraint(requiredTrait) || (boolean) isEnableMethod.invoke(constraint);
            if (satisfy) {
                satisfy = !HiveTableUtil.requireValidateConstraint(requiredTrait) || (boolean) isValidateMethod.invoke(constraint);
            }
            if (satisfy) {
                satisfy = !HiveTableUtil.requireRelyConstraint(requiredTrait) || (boolean) isRelyMethod.invoke(constraint);
            }
            if (satisfy) {
                colNames.add((String) colNameMethod.invoke(constraint));
            } else {
                // One disqualified column invalidates the whole primary key.
                return Optional.empty();
            }
        }
        // all pk constraints should have the same name, so let's use the name of the first one
        String pkName = (String) HiveReflectionUtils.invokeMethod(constraintClz, constraints.get(0), "getPk_name", null, null);
        return Optional.of(UniqueConstraint.primaryKey(pkName, colNames));
    } catch (Throwable t) {
        // Unwrap reflective failures, but keep the original throwable when there is no cause
        // (InvocationTargetException.getCause() may be null, and replacing t with null would
        // silently drop the failure).
        if (t instanceof InvocationTargetException && t.getCause() != null) {
            t = t.getCause();
        }
        // A metastore older than 2.1 does not know getPrimaryKeys; treat that as "no primary key".
        if (t instanceof TApplicationException && t.getMessage() != null && t.getMessage().contains("Invalid method name")) {
            return Optional.empty();
        }
        throw new CatalogException("Failed to get PrimaryKey constraints", t);
    }
}
Aggregations