
Example 1 with ValidationException

Use of org.apache.flink.table.api.ValidationException in project flink by apache.

Class KinesisPartitionKeyGeneratorFactory, method initializePartitioner:

/**
 * Instantiates a {@link PartitionKeyGenerator} from the given class name,
 * validating that the named class actually implements the interface.
 */
private static <T> PartitionKeyGenerator<T> initializePartitioner(String name, ClassLoader classLoader) {
    try {
        Class<?> clazz = Class.forName(name, true, classLoader);
        if (!PartitionKeyGenerator.class.isAssignableFrom(clazz)) {
            throw new ValidationException(String.format("Partitioner class '%s' should have %s in its parents chain", name, PartitionKeyGenerator.class.getName()));
        }
        @SuppressWarnings("unchecked") final PartitionKeyGenerator<T> partitioner = InstantiationUtil.instantiate(name, PartitionKeyGenerator.class, classLoader);
        return partitioner;
    } catch (ClassNotFoundException | FlinkException e) {
        throw new ValidationException(String.format("Could not find and instantiate partitioner class '%s'", name), e);
    }
}
Also used : PartitionKeyGenerator(org.apache.flink.connector.kinesis.sink.PartitionKeyGenerator) ValidationException(org.apache.flink.table.api.ValidationException) FlinkException(org.apache.flink.util.FlinkException)
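
As a hedged usage sketch: assuming PartitionKeyGenerator behaves as a serializable Function<T, String> (which matches how it is instantiated above), a custom partitioner supplied by class name could look like the following. The class below is illustrative, not part of the connector:

import org.apache.flink.connector.kinesis.sink.PartitionKeyGenerator;
import org.apache.flink.table.data.RowData;

// Hypothetical partitioner that keys records by the first (long) column.
public class UserIdPartitionKeyGenerator implements PartitionKeyGenerator<RowData> {
    @Override
    public String apply(RowData row) {
        return String.valueOf(row.getLong(0));
    }
}

The factory would then resolve it reflectively, e.g. initializePartitioner("com.example.UserIdPartitionKeyGenerator", classLoader); the isAssignableFrom check above rejects any class name that does not implement the interface before instantiation is attempted.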

Example 2 with ValidationException

Use of org.apache.flink.table.api.ValidationException in project flink by apache.

Class HiveParser, method processCmd:

private List<Operation> processCmd(String cmd, HiveConf hiveConf, HiveShim hiveShim, HiveCatalog hiveCatalog) {
    try {
        final HiveParserContext context = new HiveParserContext(hiveConf);
        // parse statement to get AST
        final HiveParserASTNode node = HiveASTParseUtils.parse(cmd, context);
        Operation operation;
        if (DDL_NODES.contains(node.getType())) {
            HiveParserQueryState queryState = new HiveParserQueryState(hiveConf);
            HiveParserDDLSemanticAnalyzer ddlAnalyzer = new HiveParserDDLSemanticAnalyzer(queryState, hiveCatalog, getCatalogManager(), this, hiveShim, context, dmlHelper);
            operation = ddlAnalyzer.convertToOperation(node);
            return Collections.singletonList(operation);
        } else {
            final boolean explain = node.getType() == HiveASTParser.TOK_EXPLAIN;
            // the first child is the underlying explicandum (the statement being explained)
            HiveParserASTNode input = explain ? (HiveParserASTNode) node.getChild(0) : node;
            operation = analyzeSql(context, hiveConf, hiveShim, input);
            // explaining a nop is also considered a nop
            if (explain && !(operation instanceof NopOperation)) {
                operation = new ExplainOperation(operation);
            }
        }
        return Collections.singletonList(operation);
    } catch (HiveASTParseException e) {
        // ParseException can happen for flink-specific statements, e.g. catalog DDLs
        try {
            return super.parse(cmd);
        } catch (SqlParserException parserException) {
            // surface the original Hive parse failure, not the fallback parser's
            throw new SqlParserException("SQL parse failed", e);
        }
    } catch (SemanticException e) {
        throw new ValidationException("HiveParser failed to parse " + cmd, e);
    }
}
Also used : HiveParserDDLSemanticAnalyzer(org.apache.flink.table.planner.delegation.hive.parse.HiveParserDDLSemanticAnalyzer) NopOperation(org.apache.flink.table.operations.NopOperation) SqlParserException(org.apache.flink.table.api.SqlParserException) ExplainOperation(org.apache.flink.table.operations.ExplainOperation) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) HiveParserContext(org.apache.flink.table.planner.delegation.hive.copy.HiveParserContext) HiveParserQueryState(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQueryState) PlannerQueryOperation(org.apache.flink.table.planner.operations.PlannerQueryOperation) Operation(org.apache.flink.table.operations.Operation) HiveASTParseException(org.apache.flink.table.planner.delegation.hive.copy.HiveASTParseException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
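
The try/fallback shape above, attempting the Hive dialect first and delegating to the default Flink parser only on a Hive parse failure, is a common pattern when layering SQL dialects. Below is a minimal, self-contained sketch of the same control flow; the parser types here are hypothetical stand-ins, not Flink classes:

import java.util.List;

// Hypothetical stand-ins; these are not Flink classes.
interface StatementParser {
    List<String> parse(String sql) throws ParseFailure;
}

class ParseFailure extends Exception {
    ParseFailure(String message, Throwable cause) {
        super(message, cause);
    }
}

class DialectAwareParser {
    private final StatementParser dialectParser;
    private final StatementParser defaultParser;

    DialectAwareParser(StatementParser dialectParser, StatementParser defaultParser) {
        this.dialectParser = dialectParser;
        this.defaultParser = defaultParser;
    }

    List<String> parse(String sql) throws ParseFailure {
        try {
            return dialectParser.parse(sql);
        } catch (ParseFailure dialectFailure) {
            try {
                // the statement may only be valid in the default dialect,
                // e.g. an engine-specific DDL the dialect grammar rejects
                return defaultParser.parse(sql);
            } catch (ParseFailure fallbackFailure) {
                // report the original dialect failure, as HiveParser does above
                throw new ParseFailure("SQL parse failed", dialectFailure);
            }
        }
    }
}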

Example 3 with ValidationException

Use of org.apache.flink.table.api.ValidationException in project flink by apache.

Class ElasticsearchDynamicSinkFactoryBaseTest, method assertValidationException:

void assertValidationException(String expectedMessage, Executable executable) {
    ValidationException thrown = Assertions.assertThrows(ValidationException.class, executable);
    Assertions.assertEquals(expectedMessage, thrown.getMessage());
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException)
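
A hedged sketch of how a test might use this helper; the expected message and the createSinkWithoutHosts helper below are hypothetical, chosen only to illustrate the shape:

// Inside a test class that inherits assertValidationException:
@Test
void rejectsMissingHosts() {
    assertValidationException(
            // hypothetical message text, for illustration only
            "One or more hosts are required",
            // hypothetical helper that builds a sink without the 'hosts' option
            () -> createSinkWithoutHosts());
}

Note that the helper uses assertEquals on the exception message, so such tests pin the complete user-facing validation text rather than matching a substring.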

Example 4 with ValidationException

Use of org.apache.flink.table.api.ValidationException in project flink by apache.

Class HiveCatalogFactoryTest, method testCreateHiveCatalogWithIllegalHadoopConfDir:

@Test
public void testCreateHiveCatalogWithIllegalHadoopConfDir() throws IOException {
    final String catalogName = "mycatalog";
    // a freshly-created empty folder: it contains none of the expected Hadoop conf files
    final String hadoopConfDir = tempFolder.newFolder().getAbsolutePath();
    try {
        final Map<String, String> options = new HashMap<>();
        options.put(CommonCatalogOptions.CATALOG_TYPE.key(), HiveCatalogFactoryOptions.IDENTIFIER);
        options.put(HiveCatalogFactoryOptions.HIVE_CONF_DIR.key(), CONF_DIR.getPath());
        options.put(HiveCatalogFactoryOptions.HADOOP_CONF_DIR.key(), hadoopConfDir);
        // creating the catalog should fail while validating the Hadoop conf dir
        FactoryUtil.createCatalog(catalogName, options, null, Thread.currentThread().getContextClassLoader());
        Assert.fail("expected a ValidationException for the illegal Hadoop conf dir");
    } catch (ValidationException e) {
        // expected: the directory exists but holds no Hadoop configuration files
    }
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) HashMap(java.util.HashMap) HiveCatalog(org.apache.flink.table.catalog.hive.HiveCatalog) Catalog(org.apache.flink.table.catalog.Catalog) Test(org.junit.Test)
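
The try/fail/catch idiom predates JUnit 4.13. Assuming that version or later is available, the same expectation reads more directly with Assert.assertThrows; this is a sketch of an equivalent test, not the actual Flink source:

@Test
public void testCreateHiveCatalogWithIllegalHadoopConfDir() throws IOException {
    final String hadoopConfDir = tempFolder.newFolder().getAbsolutePath();
    final Map<String, String> options = new HashMap<>();
    options.put(CommonCatalogOptions.CATALOG_TYPE.key(), HiveCatalogFactoryOptions.IDENTIFIER);
    options.put(HiveCatalogFactoryOptions.HIVE_CONF_DIR.key(), CONF_DIR.getPath());
    options.put(HiveCatalogFactoryOptions.HADOOP_CONF_DIR.key(), hadoopConfDir);

    // Assert.assertThrows is available from JUnit 4.13 onwards.
    Assert.assertThrows(
            ValidationException.class,
            () -> FactoryUtil.createCatalog(
                    "mycatalog", options, null, Thread.currentThread().getContextClassLoader()));
}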

Example 5 with ValidationException

Use of org.apache.flink.table.api.ValidationException in project flink by apache.

Class HiveParserDDLSemanticAnalyzer, method convertShowTables:

private Operation convertShowTables(HiveParserASTNode ast, boolean expectView) {
    String dbName = currentDB;
    String pattern = null;
    if (ast.getChildCount() > 3) {
        throw new ValidationException("Internal error : Invalid AST " + ast.toStringTree());
    }
    switch (ast.getChildCount()) {
        case 1: // uses a pattern
            pattern = HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(0).getText());
            break;
        case 2: // specifies a DB
            assert (ast.getChild(0).getType() == HiveASTParser.TOK_FROM);
            dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(1).getText());
            break;
        case 3: // uses a pattern and specifies a DB
            assert (ast.getChild(0).getType() == HiveASTParser.TOK_FROM);
            dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(1).getText());
            pattern = HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(2).getText());
            break;
        default: // no pattern or DB
            break;
    }
    if (!dbName.equalsIgnoreCase(currentDB)) {
        handleUnsupportedOperation("SHOW TABLES/VIEWS IN DATABASE is not supported");
    }
    if (pattern != null) {
        handleUnsupportedOperation("SHOW TABLES/VIEWS LIKE is not supported");
    }
    return expectView ? new ShowViewsOperation() : new ShowTablesOperation();
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) ShowTablesOperation(org.apache.flink.table.operations.ShowTablesOperation) ShowViewsOperation(org.apache.flink.table.operations.ShowViewsOperation)
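
For orientation, here is the mapping between AST child counts and the statement forms they imply, inferred from the switch above rather than quoted from Hive's grammar:

// childCount 0: SHOW TABLES                  -> current DB, no pattern
// childCount 1: SHOW TABLES 'foo*'           -> pattern only
// childCount 2: SHOW TABLES FROM db          -> TOK_FROM + database name
// childCount 3: SHOW TABLES FROM db 'foo*'   -> TOK_FROM + database + pattern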

Aggregations

ValidationException (org.apache.flink.table.api.ValidationException): 143 usages
DataType (org.apache.flink.table.types.DataType): 25 usages
Test (org.junit.Test): 23 usages
HashMap (java.util.HashMap): 21 usages
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 19 usages
LogicalType (org.apache.flink.table.types.logical.LogicalType): 18 usages
TableException (org.apache.flink.table.api.TableException): 17 usages
List (java.util.List): 14 usages
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 14 usages
QueryOperation (org.apache.flink.table.operations.QueryOperation): 14 usages
LinkedHashMap (java.util.LinkedHashMap): 13 usages
DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties): 13 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 12 usages
Expression (org.apache.flink.table.expressions.Expression): 12 usages
TableSchema (org.apache.flink.table.api.TableSchema): 11 usages
Catalog (org.apache.flink.table.catalog.Catalog): 11 usages
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 11 usages
ArrayList (java.util.ArrayList): 10 usages
Map (java.util.Map): 10 usages
Internal (org.apache.flink.annotation.Internal): 10 usages