Use of org.apache.flink.table.api.ValidationException in project flink by apache.
Class KinesisPartitionKeyGeneratorFactory, method initializePartitioner.
/**
 * Instantiates a {@link PartitionKeyGenerator} from the given class name, validating that the
 * class implements the interface.
 */
private static <T> PartitionKeyGenerator<T> initializePartitioner(String name, ClassLoader classLoader) {
    try {
        Class<?> clazz = Class.forName(name, true, classLoader);
        if (!PartitionKeyGenerator.class.isAssignableFrom(clazz)) {
            throw new ValidationException(
                    String.format(
                            "Partitioner class '%s' should have %s in its parents chain",
                            name, PartitionKeyGenerator.class.getName()));
        }
        @SuppressWarnings("unchecked")
        final PartitionKeyGenerator<T> partitioner =
                InstantiationUtil.instantiate(name, PartitionKeyGenerator.class, classLoader);
        return partitioner;
    } catch (ClassNotFoundException | FlinkException e) {
        throw new ValidationException(
                String.format("Could not find and instantiate partitioner class '%s'", name), e);
    }
}
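For context, a custom partitioner loadable through this reflection path might look like the sketch below. It assumes PartitionKeyGenerator<T> is a functional interface mapping a record to a partition key String (with apply as its method) and that the implementation has a public no-arg constructor; the class name and the first-column choice are hypothetical.

import org.apache.flink.table.data.RowData;

// Hypothetical partitioner that initializePartitioner could load by its
// fully qualified class name. Partitions records by the first string column.
public class FirstColumnPartitionKeyGenerator implements PartitionKeyGenerator<RowData> {
    @Override
    public String apply(RowData row) {
        // the value of column 0 becomes the partition key
        return row.getString(0).toString();
    }
}

Passing the fully qualified name of such a class as the name argument would satisfy the isAssignableFrom check, after which InstantiationUtil creates the instance.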
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
Class HiveParser, method processCmd.
private List<Operation> processCmd(String cmd, HiveConf hiveConf, HiveShim hiveShim, HiveCatalog hiveCatalog) {
    try {
        final HiveParserContext context = new HiveParserContext(hiveConf);
        // parse the statement to get the AST
        final HiveParserASTNode node = HiveASTParseUtils.parse(cmd, context);
        Operation operation;
        if (DDL_NODES.contains(node.getType())) {
            HiveParserQueryState queryState = new HiveParserQueryState(hiveConf);
            HiveParserDDLSemanticAnalyzer ddlAnalyzer =
                    new HiveParserDDLSemanticAnalyzer(
                            queryState, hiveCatalog, getCatalogManager(), this, hiveShim, context, dmlHelper);
            operation = ddlAnalyzer.convertToOperation(node);
            return Collections.singletonList(operation);
        } else {
            final boolean explain = node.getType() == HiveASTParser.TOK_EXPLAIN;
            // for EXPLAIN, the first child is the statement being explained
            HiveParserASTNode input = explain ? (HiveParserASTNode) node.getChild(0) : node;
            operation = analyzeSql(context, hiveConf, hiveShim, input);
            // explaining a nop is itself considered a nop
            if (explain && !(operation instanceof NopOperation)) {
                operation = new ExplainOperation(operation);
            }
        }
        return Collections.singletonList(operation);
    } catch (HiveASTParseException e) {
        // a parse exception can happen for Flink-specific statements, e.g. catalog DDLs;
        // fall back to Flink's default parser
        try {
            return super.parse(cmd);
        } catch (SqlParserException parserException) {
            // neither parser accepted the statement: report the original Hive parse error
            throw new SqlParserException("SQL parse failed", e);
        }
    } catch (SemanticException e) {
        throw new ValidationException("HiveParser failed to parse " + cmd, e);
    }
}
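The catch block above implements a two-parser fallback: Hive-flavored parsing is tried first, Flink's default parser handles statements Hive cannot, and the original Hive error is kept as the reported cause if both fail. A condensed sketch of that shape, with hypothetical helpers hiveSpecificParse and defaultFlinkParse:

// Condensed sketch of the fallback pattern; both helper methods are hypothetical.
List<Operation> parseWithFallback(String cmd) {
    try {
        return hiveSpecificParse(cmd); // may throw HiveASTParseException
    } catch (HiveASTParseException hiveFailure) {
        try {
            // Flink-specific statements (e.g. catalog DDL) succeed here
            return defaultFlinkParse(cmd);
        } catch (SqlParserException fallbackFailure) {
            // both parsers rejected the statement: surface the Hive error as the cause
            throw new SqlParserException("SQL parse failed", hiveFailure);
        }
    }
}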
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
Class ElasticsearchDynamicSinkFactoryBaseTest, method assertValidationException.
void assertValidationException(String expectedMessage, Executable executable) {
    ValidationException thrown = Assertions.assertThrows(ValidationException.class, executable);
    Assertions.assertEquals(expectedMessage, thrown.getMessage());
}
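A hypothetical call site for this helper in a sink-factory test could read as follows; the expected message, createSink, and optionsWithoutHosts are illustrative names, not taken from the real suite:

// Illustrative usage; the message and helper names are hypothetical.
assertValidationException(
        "One or more required options are missing.",
        () -> createSink(optionsWithoutHosts));

Note that the helper compares getMessage() exactly, so the expected string must match the full message, not a substring.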
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
Class HiveCatalogFactoryTest, method testCreateHiveCatalogWithIllegalHadoopConfDir.
@Test
public void testCreateHiveCatalogWithIllegalHadoopConfDir() throws IOException {
    final String catalogName = "mycatalog";
    // an empty temp folder: it contains none of the expected Hadoop conf files
    final String hadoopConfDir = tempFolder.newFolder().getAbsolutePath();
    try {
        final Map<String, String> options = new HashMap<>();
        options.put(CommonCatalogOptions.CATALOG_TYPE.key(), HiveCatalogFactoryOptions.IDENTIFIER);
        options.put(HiveCatalogFactoryOptions.HIVE_CONF_DIR.key(), CONF_DIR.getPath());
        options.put(HiveCatalogFactoryOptions.HADOOP_CONF_DIR.key(), hadoopConfDir);
        FactoryUtil.createCatalog(
                catalogName, options, null, Thread.currentThread().getContextClassLoader());
        Assert.fail("expected a ValidationException for the illegal Hadoop conf dir");
    } catch (ValidationException e) {
        // expected
    }
}
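The fail()/empty-catch idiom can be expressed more tightly with assertThrows, available in JUnit 4.13+ (and in JUnit 5's Assertions); a sketch under that assumption:

// Equivalent expectation with assertThrows (JUnit 4.13+).
assertThrows(
        ValidationException.class,
        () -> FactoryUtil.createCatalog(
                catalogName, options, null, Thread.currentThread().getContextClassLoader()));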
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
Class HiveParserDDLSemanticAnalyzer, method convertShowTables.
private Operation convertShowTables(HiveParserASTNode ast, boolean expectView) {
    String dbName = currentDB;
    String pattern = null;
    if (ast.getChildCount() > 3) {
        throw new ValidationException("Internal error : Invalid AST " + ast.toStringTree());
    }
    switch (ast.getChildCount()) {
        case 1: // uses a pattern
            pattern = HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(0).getText());
            break;
        case 2: // specifies a DB
            assert (ast.getChild(0).getType() == HiveASTParser.TOK_FROM);
            dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(1).getText());
            break;
        case 3: // uses a pattern and specifies a DB
            assert (ast.getChild(0).getType() == HiveASTParser.TOK_FROM);
            dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(1).getText());
            pattern = HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(2).getText());
            break;
        default: // no pattern or DB
            break;
    }
    if (!dbName.equalsIgnoreCase(currentDB)) {
        handleUnsupportedOperation("SHOW TABLES/VIEWS IN DATABASE is not supported");
    }
    if (pattern != null) {
        handleUnsupportedOperation("SHOW TABLES/VIEWS LIKE is not supported");
    }
    return expectView ? new ShowViewsOperation() : new ShowTablesOperation();
}
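To make the child-count dispatch concrete, here is a self-contained model that represents the AST as a plain list of child tokens; the method, its names, and the SQL shapes in the comments are illustrative, not taken from the source:

import java.util.List;

// Models the dispatch above: the number of AST children selects the variant.
static String describeShowTables(List<String> children) {
    if (children.size() > 3) {
        throw new IllegalArgumentException("Invalid AST: " + children);
    }
    switch (children.size()) {
        case 1: // SHOW TABLES 'pattern'
            return "pattern=" + children.get(0);
        case 2: // SHOW TABLES FROM db
            return "db=" + children.get(1);
        case 3: // SHOW TABLES FROM db LIKE 'pattern'
            return "db=" + children.get(1) + ", pattern=" + children.get(2);
        default: // SHOW TABLES
            return "current database, no pattern";
    }
}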