Use of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project:
class HiveShimV120, method listBuiltInFunctions.
@Override
public Set<String> listBuiltInFunctions() {
    try {
        // FunctionRegistry.getFunctionNames() is looked up reflectively because its
        // availability differs across Hive versions; it is static, hence invoke(null).
        Method getFunctionNames = FunctionRegistry.class.getMethod("getFunctionNames");
        Set<String> functionNames = (Set<String>) getFunctionNames.invoke(null);
        // Keep only the names that resolve to an actual built-in function.
        return functionNames
                .stream()
                .filter(functionName -> getBuiltInFunctionInfo(functionName).isPresent())
                .collect(Collectors.toSet());
    } catch (Exception ex) {
        throw new CatalogException("Failed to invoke FunctionRegistry.getFunctionNames()", ex);
    }
}
Use of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project:
class HiveShimV310, method getNotNullColumns.
@Override
public Set<String> getNotNullColumns(IMetaStoreClient client, Configuration conf, String dbName, String tableName) {
    try {
        String catalogName = getHMSDefaultCatalog(conf);
        // NotNullConstraintsRequest only exists in Hive 3.x, so build it reflectively.
        Class requestClass = Class.forName("org.apache.hadoop.hive.metastore.api.NotNullConstraintsRequest");
        Object request = requestClass
                .getDeclaredConstructor(String.class, String.class, String.class)
                .newInstance(catalogName, dbName, tableName);
        List<?> constraints = (List<?>) HiveReflectionUtils.invokeMethod(
                client.getClass(),
                client,
                "getNotNullConstraints",
                new Class[] { requestClass },
                new Object[] { request });
        Class constraintClass = Class.forName("org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint");
        Method getColumnName = constraintClass.getDeclaredMethod("getColumn_name");
        Method isRelyConstraint = constraintClass.getDeclaredMethod("isRely_cstr");
        Set<String> notNullColumns = new HashSet<>();
        for (Object constraint : constraints) {
            // Skip constraints not flagged with RELY (isRely_cstr == false).
            if (!(boolean) isRelyConstraint.invoke(constraint)) {
                continue;
            }
            notNullColumns.add((String) getColumnName.invoke(constraint));
        }
        return notNullColumns;
    } catch (Exception e) {
        throw new CatalogException("Failed to get NOT NULL constraints", e);
    }
}
Use of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project:
class HiveShimV110, method getHiveRecordWriter.
/**
 * Creates a Hive {@link FileSinkOperator.RecordWriter} for the given output format and path.
 *
 * <p>HiveFileFormatUtils.getRecordWriter is resolved reflectively because its signature
 * varies across Hive versions; it is a static method, hence the null receiver on invoke.
 *
 * @throws CatalogException if instantiating the output format or invoking the util fails
 */
@Override
public FileSinkOperator.RecordWriter getHiveRecordWriter(JobConf jobConf, Class outputFormatClz, Class<? extends Writable> outValClz, boolean isCompressed, Properties tableProps, Path outPath) {
    try {
        Class utilClass = HiveFileFormatUtils.class;
        // Class.newInstance() is deprecated: it rethrows checked exceptions from the no-arg
        // constructor without wrapping them. getDeclaredConstructor().newInstance() wraps any
        // constructor failure in InvocationTargetException, which the catch below handles.
        OutputFormat outputFormat = (OutputFormat) outputFormatClz.getDeclaredConstructor().newInstance();
        Method utilMethod = utilClass.getDeclaredMethod("getRecordWriter", JobConf.class, OutputFormat.class, Class.class, boolean.class, Properties.class, Path.class, Reporter.class);
        return (FileSinkOperator.RecordWriter) utilMethod.invoke(null, jobConf, outputFormat, outValClz, isCompressed, tableProps, outPath, Reporter.NULL);
    } catch (Exception e) {
        throw new CatalogException("Failed to create Hive RecordWriter", e);
    }
}
Use of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project:
class HiveShimV210, method createTableWithConstraints.
@Override
public void createTableWithConstraints(IMetaStoreClient client, Table table, Configuration conf, UniqueConstraint pk, List<Byte> pkTraits, List<String> notNullCols, List<Byte> nnTraits) {
    // Hive 2.1 only supports PK/FK constraints; NOT NULL arrived in 3.0.0.
    if (!notNullCols.isEmpty()) {
        throw new UnsupportedOperationException("NOT NULL constraints not supported until 3.0.0");
    }
    try {
        List<Object> primaryKeys = createHivePKs(table, pk, pkTraits);
        // The reflective call takes (table, primaryKeys, foreignKeys); we pass no FKs.
        HiveReflectionUtils.invokeMethod(
                client.getClass(),
                client,
                "createTableWithConstraints",
                new Class[] { Table.class, List.class, List.class },
                new Object[] { table, primaryKeys, Collections.emptyList() });
    } catch (Exception e) {
        throw new CatalogException("Failed to create Hive table with constraints", e);
    }
}
Use of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project:
class HiveShimV210, method alterPartition.
@Override
public void alterPartition(IMetaStoreClient client, String databaseName, String tableName, Partition partition) throws InvalidOperationException, MetaException, TException {
    String errorMsg = "Failed to alter partition for table %s in database %s";
    try {
        // alter_partition gained an EnvironmentContext parameter in this Hive version;
        // resolve it reflectively and pass a null context.
        Method alterPartition = client.getClass()
                .getMethod("alter_partition", String.class, String.class, Partition.class, EnvironmentContext.class);
        alterPartition.invoke(client, databaseName, tableName, partition, null);
    } catch (InvocationTargetException ite) {
        // Unwrap the reflective wrapper: metastore TExceptions propagate to the caller
        // unchanged, anything else becomes a CatalogException.
        Throwable cause = ite.getTargetException();
        if (cause instanceof TException) {
            throw (TException) cause;
        }
        throw new CatalogException(String.format(errorMsg, tableName, databaseName), cause);
    } catch (NoSuchMethodException | IllegalAccessException e) {
        throw new CatalogException(String.format(errorMsg, tableName, databaseName), e);
    }
}
Aggregations