Example 81 with ValidationException

Use of org.apache.flink.table.api.ValidationException in project flink by apache.

Class KafkaConnectorOptionsUtil, method parseSpecificOffsets:

/**
 * Parses a specific-offsets string into a map.
 *
 * <p>The expected format is:
 *
 * <pre>
 *     scan.startup.specific-offsets = partition:0,offset:42;partition:1,offset:300
 * </pre>
 *
 * @return the specific offsets as a map, keyed by partition, with the offset as value
 */
public static Map<Integer, Long> parseSpecificOffsets(String specificOffsetsStr, String optionKey) {
    final Map<Integer, Long> offsetMap = new HashMap<>();
    final String[] pairs = specificOffsetsStr.split(";");
    final String validationExceptionMessage =
            String.format(
                    "Invalid properties '%s' should follow the format "
                            + "'partition:0,offset:42;partition:1,offset:300', but is '%s'.",
                    optionKey, specificOffsetsStr);
    if (pairs.length == 0) {
        throw new ValidationException(validationExceptionMessage);
    }
    for (String pair : pairs) {
        if (null == pair || pair.length() == 0 || !pair.contains(",")) {
            throw new ValidationException(validationExceptionMessage);
        }
        final String[] kv = pair.split(",");
        if (kv.length != 2 || !kv[0].startsWith(PARTITION + ':') || !kv[1].startsWith(OFFSET + ':')) {
            throw new ValidationException(validationExceptionMessage);
        }
        String partitionValue = kv[0].substring(kv[0].indexOf(":") + 1);
        String offsetValue = kv[1].substring(kv[1].indexOf(":") + 1);
        try {
            final Integer partition = Integer.valueOf(partitionValue);
            final Long offset = Long.valueOf(offsetValue);
            offsetMap.put(partition, offset);
        } catch (NumberFormatException e) {
            throw new ValidationException(validationExceptionMessage, e);
        }
    }
    return offsetMap;
}
Also used: ValidationException (org.apache.flink.table.api.ValidationException), HashMap (java.util.HashMap)
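A minimal usage sketch (the offsets string and option key are made up for illustration; assumes the method is invoked as in the Kafka connector):

String offsetsStr = "partition:0,offset:42;partition:1,offset:300";
Map<Integer, Long> offsets = parseSpecificOffsets(offsetsStr, "scan.startup.specific-offsets");
// offsets now maps partition 0 -> 42L and partition 1 -> 300L.

// Malformed input fails fast; a pair without a comma, for example:
// parseSpecificOffsets("partition:0", "scan.startup.specific-offsets")
// throws ValidationException with the formatted message shown above.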

Example 82 with ValidationException

Use of org.apache.flink.table.api.ValidationException in project flink by apache.

Class HiveParserDDLSemanticAnalyzer, method convertAlterTable:

private Operation convertAlterTable(HiveParserASTNode input) throws SemanticException {
    Operation operation = null;
    HiveParserASTNode ast = (HiveParserASTNode) input.getChild(1);
    String[] qualified = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) input.getChild(0));
    String tableName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    HashMap<String, String> partSpec = null;
    HiveParserASTNode partSpecNode = (HiveParserASTNode) input.getChild(2);
    if (partSpecNode != null) {
        partSpec = getPartSpec(partSpecNode);
    }
    CatalogBaseTable alteredTable = getAlteredTable(tableName, false);
    switch(ast.getType()) {
        case HiveASTParser.TOK_ALTERTABLE_RENAME:
            operation = convertAlterTableRename(tableName, ast, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_ADDCOLS:
            operation = convertAlterTableModifyCols(alteredTable, tableName, ast, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_REPLACECOLS:
            operation = convertAlterTableModifyCols(alteredTable, tableName, ast, true);
            break;
        case HiveASTParser.TOK_ALTERTABLE_RENAMECOL:
            operation = convertAlterTableChangeCol(alteredTable, qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_ADDPARTS:
            operation = convertAlterTableAddParts(qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_DROPPARTS:
            operation = convertAlterTableDropParts(qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_PROPERTIES:
            operation = convertAlterTableProps(alteredTable, tableName, null, ast, false, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_DROPPROPERTIES:
            operation = convertAlterTableProps(alteredTable, tableName, null, ast, false, true);
            break;
        case HiveASTParser.TOK_ALTERTABLE_UPDATESTATS:
            operation = convertAlterTableProps(alteredTable, tableName, partSpec, ast, false, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_FILEFORMAT:
            operation = convertAlterTableFileFormat(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_LOCATION:
            operation = convertAlterTableLocation(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_SERIALIZER:
            operation = convertAlterTableSerde(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_SERDEPROPERTIES:
            operation = convertAlterTableSerdeProps(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_TOUCH:
        case HiveASTParser.TOK_ALTERTABLE_ARCHIVE:
        case HiveASTParser.TOK_ALTERTABLE_UNARCHIVE:
        case HiveASTParser.TOK_ALTERTABLE_PARTCOLTYPE:
        case HiveASTParser.TOK_ALTERTABLE_SKEWED:
        case HiveASTParser.TOK_ALTERTABLE_EXCHANGEPARTITION:
        case HiveASTParser.TOK_ALTERTABLE_MERGEFILES:
        case HiveASTParser.TOK_ALTERTABLE_RENAMEPART:
        case HiveASTParser.TOK_ALTERTABLE_SKEWED_LOCATION:
        case HiveASTParser.TOK_ALTERTABLE_BUCKETS:
        case HiveASTParser.TOK_ALTERTABLE_CLUSTER_SORT:
        case HiveASTParser.TOK_ALTERTABLE_COMPACT:
        case HiveASTParser.TOK_ALTERTABLE_UPDATECOLSTATS:
        case HiveASTParser.TOK_ALTERTABLE_DROPCONSTRAINT:
        case HiveASTParser.TOK_ALTERTABLE_ADDCONSTRAINT:
            handleUnsupportedOperation(ast);
            break;
        default:
            throw new ValidationException("Unknown AST node for ALTER TABLE: " + ast);
    }
    return operation;
}
Also used: CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode), ValidationException (org.apache.flink.table.api.ValidationException), DropDatabaseOperation (org.apache.flink.table.operations.ddl.DropDatabaseOperation), AlterTableOptionsOperation (org.apache.flink.table.operations.ddl.AlterTableOptionsOperation), UseDatabaseOperation (org.apache.flink.table.operations.UseDatabaseOperation), CreateViewOperation (org.apache.flink.table.operations.ddl.CreateViewOperation), AlterDatabaseOperation (org.apache.flink.table.operations.ddl.AlterDatabaseOperation), HiveOperation (org.apache.hadoop.hive.ql.plan.HiveOperation), QueryOperation (org.apache.flink.table.operations.QueryOperation), DropCatalogFunctionOperation (org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation), ShowTablesOperation (org.apache.flink.table.operations.ShowTablesOperation), DescribeTableOperation (org.apache.flink.table.operations.DescribeTableOperation), ShowFunctionsOperation (org.apache.flink.table.operations.ShowFunctionsOperation), CreateDatabaseOperation (org.apache.flink.table.operations.ddl.CreateDatabaseOperation), AlterPartitionPropertiesOperation (org.apache.flink.table.operations.ddl.AlterPartitionPropertiesOperation), ShowPartitionsOperation (org.apache.flink.table.operations.ShowPartitionsOperation), AlterViewPropertiesOperation (org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation), Operation (org.apache.flink.table.operations.Operation), DropTempSystemFunctionOperation (org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation), ShowViewsOperation (org.apache.flink.table.operations.ShowViewsOperation), ShowDatabasesOperation (org.apache.flink.table.operations.ShowDatabasesOperation), AlterTableSchemaOperation (org.apache.flink.table.operations.ddl.AlterTableSchemaOperation), CreateTableASOperation (org.apache.flink.table.operations.ddl.CreateTableASOperation), DropTableOperation (org.apache.flink.table.operations.ddl.DropTableOperation), AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation), CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation), DropViewOperation (org.apache.flink.table.operations.ddl.DropViewOperation), AddPartitionsOperation (org.apache.flink.table.operations.ddl.AddPartitionsOperation), DropPartitionsOperation (org.apache.flink.table.operations.ddl.DropPartitionsOperation), AlterTableRenameOperation (org.apache.flink.table.operations.ddl.AlterTableRenameOperation), AlterViewRenameOperation (org.apache.flink.table.operations.ddl.AlterViewRenameOperation), CreateCatalogFunctionOperation (org.apache.flink.table.operations.ddl.CreateCatalogFunctionOperation), CreateTempSystemFunctionOperation (org.apache.flink.table.operations.ddl.CreateTempSystemFunctionOperation)
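handleUnsupportedOperation is not shown in this snippet. A minimal sketch of its likely shape, assuming it simply rejects the statement with a ValidationException (an assumption about the helper, not the verbatim Flink method; the ALTER VIEW example below calls a sibling variant that takes a message string):

// Assumed helper shape; the real method lives in HiveParserDDLSemanticAnalyzer.
private static void handleUnsupportedOperation(HiveParserASTNode astNode) {
    throw new ValidationException(
            null, new UnsupportedOperationException("Unsupported operation: " + astNode));
}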

Example 83 with ValidationException

Use of org.apache.flink.table.api.ValidationException in project flink by apache.

Class HiveParserDDLSemanticAnalyzer, method convertAlterView:

private Operation convertAlterView(HiveParserASTNode ast) throws SemanticException {
    Operation operation = null;
    String[] qualified = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
    String tableName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    CatalogBaseTable alteredTable = getAlteredTable(tableName, true);
    if (ast.getChild(1).getType() == HiveASTParser.TOK_QUERY) {
        // alter view as
        operation = convertCreateView(ast);
    } else {
        ast = (HiveParserASTNode) ast.getChild(1);
        switch(ast.getType()) {
            case HiveASTParser.TOK_ALTERVIEW_PROPERTIES:
                operation = convertAlterTableProps(alteredTable, tableName, null, ast, true, false);
                break;
            case HiveASTParser.TOK_ALTERVIEW_DROPPROPERTIES:
                operation = convertAlterTableProps(alteredTable, tableName, null, ast, true, true);
                break;
            case HiveASTParser.TOK_ALTERVIEW_RENAME:
                operation = convertAlterTableRename(tableName, ast, true);
                break;
            case HiveASTParser.TOK_ALTERVIEW_ADDPARTS:
            case HiveASTParser.TOK_ALTERVIEW_DROPPARTS:
                handleUnsupportedOperation("ADD/DROP PARTITION for view is not supported");
                break;
            default:
                throw new ValidationException("Unknown AST node for ALTER VIEW: " + ast);
        }
    }
    return operation;
}
Also used: CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), ValidationException (org.apache.flink.table.api.ValidationException), DropDatabaseOperation (org.apache.flink.table.operations.ddl.DropDatabaseOperation), AlterTableOptionsOperation (org.apache.flink.table.operations.ddl.AlterTableOptionsOperation), UseDatabaseOperation (org.apache.flink.table.operations.UseDatabaseOperation), CreateViewOperation (org.apache.flink.table.operations.ddl.CreateViewOperation), AlterDatabaseOperation (org.apache.flink.table.operations.ddl.AlterDatabaseOperation), HiveOperation (org.apache.hadoop.hive.ql.plan.HiveOperation), QueryOperation (org.apache.flink.table.operations.QueryOperation), DropCatalogFunctionOperation (org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation), ShowTablesOperation (org.apache.flink.table.operations.ShowTablesOperation), DescribeTableOperation (org.apache.flink.table.operations.DescribeTableOperation), ShowFunctionsOperation (org.apache.flink.table.operations.ShowFunctionsOperation), CreateDatabaseOperation (org.apache.flink.table.operations.ddl.CreateDatabaseOperation), AlterPartitionPropertiesOperation (org.apache.flink.table.operations.ddl.AlterPartitionPropertiesOperation), ShowPartitionsOperation (org.apache.flink.table.operations.ShowPartitionsOperation), AlterViewPropertiesOperation (org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation), Operation (org.apache.flink.table.operations.Operation), DropTempSystemFunctionOperation (org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation), ShowViewsOperation (org.apache.flink.table.operations.ShowViewsOperation), ShowDatabasesOperation (org.apache.flink.table.operations.ShowDatabasesOperation), AlterTableSchemaOperation (org.apache.flink.table.operations.ddl.AlterTableSchemaOperation), CreateTableASOperation (org.apache.flink.table.operations.ddl.CreateTableASOperation), DropTableOperation (org.apache.flink.table.operations.ddl.DropTableOperation), AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation), CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation), DropViewOperation (org.apache.flink.table.operations.ddl.DropViewOperation), AddPartitionsOperation (org.apache.flink.table.operations.ddl.AddPartitionsOperation), DropPartitionsOperation (org.apache.flink.table.operations.ddl.DropPartitionsOperation), AlterTableRenameOperation (org.apache.flink.table.operations.ddl.AlterTableRenameOperation), AlterViewRenameOperation (org.apache.flink.table.operations.ddl.AlterViewRenameOperation), CreateCatalogFunctionOperation (org.apache.flink.table.operations.ddl.CreateCatalogFunctionOperation), CreateTempSystemFunctionOperation (org.apache.flink.table.operations.ddl.CreateTempSystemFunctionOperation)

Example 84 with ValidationException

Use of org.apache.flink.table.api.ValidationException in project flink by apache.

Class HiveParserDDLSemanticAnalyzer, method getTypeName:

public static String getTypeName(HiveParserASTNode node) throws SemanticException {
    int token = node.getType();
    String typeName;
    // datetime type isn't currently supported
    if (token == HiveASTParser.TOK_DATETIME) {
        throw new ValidationException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
    }
    switch(token) {
        case HiveASTParser.TOK_CHAR:
            CharTypeInfo charTypeInfo = HiveASTParseUtils.getCharTypeInfo(node);
            typeName = charTypeInfo.getQualifiedName();
            break;
        case HiveASTParser.TOK_VARCHAR:
            VarcharTypeInfo varcharTypeInfo = HiveASTParseUtils.getVarcharTypeInfo(node);
            typeName = varcharTypeInfo.getQualifiedName();
            break;
        case HiveASTParser.TOK_DECIMAL:
            DecimalTypeInfo decTypeInfo = HiveASTParseUtils.getDecimalTypeTypeInfo(node);
            typeName = decTypeInfo.getQualifiedName();
            break;
        default:
            typeName = TokenToTypeName.get(token);
    }
    return typeName;
}
Also used: DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), ValidationException (org.apache.flink.table.api.ValidationException), VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo), CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo), NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint)
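For the parameterized types above, the qualified name carries the type parameters. A small self-contained sketch (assuming Hive's serde2 typeinfo classes are on the classpath) of the names getTypeName ends up returning:

import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;

public class QualifiedTypeNames {
    public static void main(String[] args) {
        // Qualified names include the length/precision parameters.
        System.out.println(new CharTypeInfo(10).getQualifiedName());        // char(10)
        System.out.println(new VarcharTypeInfo(20).getQualifiedName());     // varchar(20)
        System.out.println(new DecimalTypeInfo(10, 2).getQualifiedName());  // decimal(10,2)
    }
}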

Example 85 with ValidationException

Use of org.apache.flink.table.api.ValidationException in project flink by apache.

Class HiveParserDDLSemanticAnalyzer, method convertCreateFunction:

private Operation convertCreateFunction(HiveParserASTNode ast) {
    // ^(TOK_CREATEFUNCTION identifier StringLiteral ({isTempFunction}? => TOK_TEMPORARY))
    String functionName = ast.getChild(0).getText().toLowerCase();
    boolean isTemporaryFunction = (ast.getFirstChildWithType(HiveASTParser.TOK_TEMPORARY) != null);
    String className = HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());
    // Temp functions are not allowed to have qualified names.
    if (isTemporaryFunction && FunctionUtils.isQualifiedFunctionName(functionName)) {
        // a qualified name would belong to a catalog/database
        throw new ValidationException("Temporary function cannot be created with a qualified name.");
    }
    if (isTemporaryFunction) {
        FunctionDefinition funcDefinition = funcDefFactory.createFunctionDefinition(functionName, new CatalogFunctionImpl(className, FunctionLanguage.JAVA));
        return new CreateTempSystemFunctionOperation(functionName, false, funcDefinition);
    } else {
        ObjectIdentifier identifier = parseObjectIdentifier(functionName);
        CatalogFunction catalogFunction = new CatalogFunctionImpl(className, FunctionLanguage.JAVA);
        return new CreateCatalogFunctionOperation(identifier, catalogFunction, false, false);
    }
}
Also used: CreateCatalogFunctionOperation (org.apache.flink.table.operations.ddl.CreateCatalogFunctionOperation), ValidationException (org.apache.flink.table.api.ValidationException), FunctionDefinition (org.apache.flink.table.functions.FunctionDefinition), CatalogFunction (org.apache.flink.table.catalog.CatalogFunction), CreateTempSystemFunctionOperation (org.apache.flink.table.operations.ddl.CreateTempSystemFunctionOperation), CatalogFunctionImpl (org.apache.flink.table.catalog.CatalogFunctionImpl), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
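The rejection of qualified temporary function names hinges on FunctionUtils.isQualifiedFunctionName (org.apache.hadoop.hive.ql.exec.FunctionUtils). A small sketch of the check, assuming a name counts as qualified when it contains a dot:

import org.apache.hadoop.hive.ql.exec.FunctionUtils;

public class TempFunctionNameCheck {
    public static void main(String[] args) {
        // "mydb.myfunc" would be rejected for a temporary function above; "myfunc" is accepted.
        System.out.println(FunctionUtils.isQualifiedFunctionName("mydb.myfunc")); // expected: true
        System.out.println(FunctionUtils.isQualifiedFunctionName("myfunc"));      // expected: false
    }
}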

Aggregations

Classes most frequently used together with ValidationException across these examples (with occurrence counts):

ValidationException (org.apache.flink.table.api.ValidationException): 143
DataType (org.apache.flink.table.types.DataType): 25
Test (org.junit.Test): 23
HashMap (java.util.HashMap): 21
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 19
LogicalType (org.apache.flink.table.types.logical.LogicalType): 18
TableException (org.apache.flink.table.api.TableException): 17
List (java.util.List): 14
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 14
QueryOperation (org.apache.flink.table.operations.QueryOperation): 14
LinkedHashMap (java.util.LinkedHashMap): 13
DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties): 13
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 12
Expression (org.apache.flink.table.expressions.Expression): 12
TableSchema (org.apache.flink.table.api.TableSchema): 11
Catalog (org.apache.flink.table.catalog.Catalog): 11
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 11
ArrayList (java.util.ArrayList): 10
Map (java.util.Map): 10
Internal (org.apache.flink.annotation.Internal): 10