
Example 91 with ValidationException

Use of org.apache.flink.table.api.ValidationException in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertAlterTableAddParts.

/**
 * Add one or more partitions to a table. Useful when the data has been copied to the right
 * location by some other process.
 */
private Operation convertAlterTableAddParts(String[] qualified, CommonTree ast) {
    // ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists?
    // alterStatementSuffixAddPartitionsElement+)
    boolean ifNotExists = ast.getChild(0).getType() == HiveASTParser.TOK_IFNOTEXISTS;
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    boolean isView = tab.isView();
    validateAlterTableType(tab);
    int numCh = ast.getChildCount();
    int start = ifNotExists ? 1 : 0;
    String currentLocation = null;
    Map<String, String> currentPartSpec = null;
    // Parser has done some verification, so the order of tokens doesn't need to be verified
    // here.
    List<CatalogPartitionSpec> specs = new ArrayList<>();
    List<CatalogPartition> partitions = new ArrayList<>();
    for (int num = start; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        switch(child.getToken().getType()) {
            case HiveASTParser.TOK_PARTSPEC:
                if (currentPartSpec != null) {
                    specs.add(new CatalogPartitionSpec(currentPartSpec));
                    Map<String, String> props = new HashMap<>();
                    if (currentLocation != null) {
                        props.put(TABLE_LOCATION_URI, currentLocation);
                    }
                    partitions.add(new CatalogPartitionImpl(props, null));
                    currentLocation = null;
                }
                currentPartSpec = getPartSpec(child);
                // validate reserved values
                validatePartitionValues(currentPartSpec);
                break;
            case HiveASTParser.TOK_PARTITIONLOCATION:
                // if location specified, set in partition
                if (isView) {
                    throw new ValidationException("LOCATION clause illegal for view partition");
                }
                currentLocation = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
                break;
            default:
                throw new ValidationException("Unknown child: " + child);
        }
    }
    // add the last one
    if (currentPartSpec != null) {
        specs.add(new CatalogPartitionSpec(currentPartSpec));
        Map<String, String> props = new HashMap<>();
        if (currentLocation != null) {
            props.put(TABLE_LOCATION_URI, currentLocation);
        }
        partitions.add(new CatalogPartitionImpl(props, null));
    }
    ObjectIdentifier tableIdentifier =
            tab.getDbName() == null
                    ? parseObjectIdentifier(tab.getTableName())
                    : catalogManager.qualifyIdentifier(
                            UnresolvedIdentifier.of(tab.getDbName(), tab.getTableName()));
    return new AddPartitionsOperation(tableIdentifier, ifNotExists, specs, partitions);
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) Table(org.apache.hadoop.hive.ql.metadata.Table) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CatalogPartition(org.apache.flink.table.catalog.CatalogPartition) AddPartitionsOperation(org.apache.flink.table.operations.ddl.AddPartitionsOperation) CatalogPartitionSpec(org.apache.flink.table.catalog.CatalogPartitionSpec) CatalogPartitionImpl(org.apache.flink.table.catalog.CatalogPartitionImpl) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
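
For context, here is a minimal sketch of the kind of statement this method converts. It assumes a Hive dialect TableEnvironment backed by a registered HiveCatalog named "hive"; the database, table, partition values, and paths are hypothetical.

// Sketch only: ALTER TABLE ... ADD PARTITION under the Hive dialect is parsed by HiveParser
// and converted by convertAlterTableAddParts into an AddPartitionsOperation.
// Assumes a HiveCatalog named "hive" is already registered; all names are hypothetical.
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;

public class AddPartitionsSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        tEnv.useCatalog("hive");
        tEnv.executeSql(
                "ALTER TABLE mydb.orders ADD IF NOT EXISTS"
                        + " PARTITION (dt='2022-01-01') LOCATION '/data/orders/dt=2022-01-01'");
        // Using a LOCATION clause when adding a partition to a view would hit the
        // ValidationException above: "LOCATION clause illegal for view partition".
    }
}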

Example 92 with ValidationException

Use of org.apache.flink.table.api.ValidationException in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertCreateView.

private Operation convertCreateView(HiveParserASTNode ast) throws SemanticException {
    String[] qualTabName = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
    String dbDotTable = HiveParserBaseSemanticAnalyzer.getDotName(qualTabName);
    List<FieldSchema> cols = null;
    boolean ifNotExists = false;
    boolean isAlterViewAs = false;
    String comment = null;
    HiveParserASTNode selectStmt = null;
    Map<String, String> tblProps = null;
    boolean isMaterialized = ast.getToken().getType() == HiveASTParser.TOK_CREATE_MATERIALIZED_VIEW;
    if (isMaterialized) {
        handleUnsupportedOperation("MATERIALIZED VIEW is not supported");
    }
    HiveParserStorageFormat storageFormat = new HiveParserStorageFormat(conf);
    LOG.info("Creating view " + dbDotTable + " position=" + ast.getCharPositionInLine());
    int numCh = ast.getChildCount();
    for (int num = 1; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        if (storageFormat.fillStorageFormat(child)) {
            handleUnsupportedOperation("FILE FORMAT for view is not supported");
        }
        switch(child.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.TOK_REWRITE_ENABLED:
                handleUnsupportedOperation("MATERIALIZED VIEW REWRITE is not supported");
                break;
            case HiveASTParser.TOK_ORREPLACE:
                handleUnsupportedOperation("CREATE OR REPLACE VIEW is not supported");
                break;
            case HiveASTParser.TOK_QUERY:
                selectStmt = child;
                break;
            case HiveASTParser.TOK_TABCOLNAME:
                cols = HiveParserBaseSemanticAnalyzer.getColumns(child);
                break;
            case HiveASTParser.TOK_TABLECOMMENT:
                comment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
                break;
            case HiveASTParser.TOK_TABLEPROPERTIES:
                tblProps = getProps((HiveParserASTNode) child.getChild(0));
                break;
            case HiveASTParser.TOK_TABLEROWFORMAT:
                handleUnsupportedOperation("ROW FORMAT for view is not supported");
                break;
            case HiveASTParser.TOK_TABLESERIALIZER:
                handleUnsupportedOperation("SERDE for view is not supported");
                break;
            case HiveASTParser.TOK_TABLELOCATION:
                handleUnsupportedOperation("LOCATION for view is not supported");
                break;
            case HiveASTParser.TOK_VIEWPARTCOLS:
                handleUnsupportedOperation("PARTITION COLUMN for view is not supported");
                break;
            default:
                throw new ValidationException("Unknown AST node for CREATE/ALTER VIEW: " + child);
        }
    }
    if (ast.getToken().getType() == HiveASTParser.TOK_ALTERVIEW && ast.getChild(1).getType() == HiveASTParser.TOK_QUERY) {
        isAlterViewAs = true;
    }
    queryState.setCommandType(HiveOperation.CREATEVIEW);
    HiveParserCreateViewInfo createViewInfo = new HiveParserCreateViewInfo(dbDotTable, cols, selectStmt);
    hiveParser.analyzeCreateView(createViewInfo, context, queryState, hiveShim);
    ObjectIdentifier viewIdentifier = parseObjectIdentifier(createViewInfo.getCompoundName());
    TableSchema schema = HiveTableUtil.createTableSchema(createViewInfo.getSchema(), Collections.emptyList(), Collections.emptySet(), null);
    Map<String, String> props = new HashMap<>();
    if (isAlterViewAs) {
        CatalogBaseTable baseTable = getCatalogBaseTable(viewIdentifier);
        props.putAll(baseTable.getOptions());
        comment = baseTable.getComment();
    } else {
        if (tblProps != null) {
            props.putAll(tblProps);
        }
    }
    CatalogView catalogView =
            new CatalogViewImpl(
                    createViewInfo.getOriginalText(),
                    createViewInfo.getExpandedText(),
                    schema,
                    props,
                    comment);
    if (isAlterViewAs) {
        return new AlterViewAsOperation(viewIdentifier, catalogView);
    } else {
        return new CreateViewOperation(viewIdentifier, catalogView, ifNotExists, false);
    }
}
Also used : AlterViewAsOperation(org.apache.flink.table.operations.ddl.AlterViewAsOperation) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) CatalogViewImpl(org.apache.flink.table.catalog.CatalogViewImpl) TableSchema(org.apache.flink.table.api.TableSchema) HiveParserStorageFormat(org.apache.flink.table.planner.delegation.hive.copy.HiveParserStorageFormat) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) CatalogView(org.apache.flink.table.catalog.CatalogView) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
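
Continuing with the Hive dialect tEnv from the previous sketch, the statements below illustrate what this conversion accepts and rejects; object names remain hypothetical, and the rejection messages are the ones used in the method above.

// Accepted: a plain CREATE VIEW becomes a CreateViewOperation via convertCreateView.
tEnv.executeSql(
        "CREATE VIEW IF NOT EXISTS mydb.big_orders"
                + " COMMENT 'orders above 100'"
                + " TBLPROPERTIES ('owner'='team-a')"
                + " AS SELECT * FROM mydb.orders WHERE amount > 100");

// Rejected during conversion, for example:
//   CREATE MATERIALIZED VIEW ...              -> "MATERIALIZED VIEW is not supported"
//   CREATE OR REPLACE VIEW ...                -> "CREATE OR REPLACE VIEW is not supported"
//   CREATE VIEW ... PARTITIONED ON (col) ...  -> "PARTITION COLUMN for view is not supported"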

Example 93 with ValidationException

Use of org.apache.flink.table.api.ValidationException in project flink by apache.

From the class JdbcDataTypeTest, method testDataTypeValidate.

@Test
public void testDataTypeValidate() {
    String sqlDDL = String.format(DDL_FORMAT, testItem.dataTypeExpr, testItem.dialect);
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
    tEnv.executeSql(sqlDDL);
    if (testItem.expectError != null) {
        try {
            tEnv.sqlQuery("SELECT * FROM T");
            fail();
        } catch (ValidationException ex) {
            Assert.assertEquals(testItem.expectError, ex.getCause().getMessage());
        } catch (UnsupportedOperationException ex) {
            Assert.assertEquals(testItem.expectError, ex.getMessage());
        } catch (Exception e) {
            fail(e);
        }
    } else {
        tEnv.sqlQuery("SELECT * FROM T");
    }
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) StreamTableEnvironment(org.apache.flink.table.api.bridge.java.StreamTableEnvironment) ValidationException(org.apache.flink.table.api.ValidationException) Test(org.junit.Test)
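
The DDL built from DDL_FORMAT is not shown here, but it is roughly of the following shape: a hedged reconstruction using the tEnv created in the test above, with a placeholder JDBC URL, table name, and column type. Note that the CREATE TABLE itself succeeds; the type check only fires when the table is queried, which is why the test calls sqlQuery("SELECT * FROM T").

// Hedged sketch of the DDL the test instantiates: a JDBC table whose single column uses the
// data type under test. The URL, table name, and column type below are placeholders.
tEnv.executeSql(
        "CREATE TABLE T (f ARRAY<INT>) WITH ("
                + " 'connector' = 'jdbc',"
                + " 'url' = 'jdbc:mysql://localhost:3306/test',"
                + " 'table-name' = 't')");

// Validation happens lazily when the table is used, so the ValidationException (or
// UnsupportedOperationException) surfaces from the query rather than from the DDL.
tEnv.sqlQuery("SELECT * FROM T");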

Example 94 with ValidationException

Use of org.apache.flink.table.api.ValidationException in project flink by apache.

From the class UserDefinedFunctionHelper, method validateImplementationMethod.

/**
 * Validates an implementation method such as {@code eval()} or {@code accumulate()}.
 */
private static void validateImplementationMethod(Class<? extends UserDefinedFunction> clazz, boolean rejectStatic, boolean isOptional, String... methodNameOptions) {
    final Set<String> nameSet = new HashSet<>(Arrays.asList(methodNameOptions));
    final List<Method> methods = getAllDeclaredMethods(clazz);
    boolean found = false;
    for (Method method : methods) {
        if (!nameSet.contains(method.getName())) {
            continue;
        }
        found = true;
        final int modifier = method.getModifiers();
        if (!Modifier.isPublic(modifier)) {
            throw new ValidationException(String.format("Method '%s' of function class '%s' is not public.", method.getName(), clazz.getName()));
        }
        if (Modifier.isAbstract(modifier)) {
            throw new ValidationException(String.format("Method '%s' of function class '%s' must not be abstract.", method.getName(), clazz.getName()));
        }
        if (rejectStatic && Modifier.isStatic(modifier)) {
            throw new ValidationException(String.format("Method '%s' of function class '%s' must not be static.", method.getName(), clazz.getName()));
        }
    }
    if (!found && !isOptional) {
        throw new ValidationException(String.format("Function class '%s' does not implement a method named %s.", clazz.getName(), nameSet.stream().map(s -> "'" + s + "'").collect(Collectors.joining(" or "))));
    }
}
Also used : DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) SpecializedContext(org.apache.flink.table.functions.SpecializedFunction.SpecializedContext) PipelineOptions(org.apache.flink.configuration.PipelineOptions) PythonFunctionUtils(org.apache.flink.table.functions.python.utils.PythonFunctionUtils) HashSet(java.util.HashSet) InstantiationUtil(org.apache.flink.util.InstantiationUtil) ReadableConfig(org.apache.flink.configuration.ReadableConfig) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) Method(java.lang.reflect.Method) Nullable(javax.annotation.Nullable) Preconditions.checkState(org.apache.flink.util.Preconditions.checkState) TableException(org.apache.flink.table.api.TableException) Set(java.util.Set) TypeExtractionUtils.getAllDeclaredMethods(org.apache.flink.api.java.typeutils.TypeExtractionUtils.getAllDeclaredMethods) Field(java.lang.reflect.Field) Collectors(java.util.stream.Collectors) CallContext(org.apache.flink.table.types.inference.CallContext) List(java.util.List) TypeExtractor(org.apache.flink.api.java.typeutils.TypeExtractor) CatalogFunction(org.apache.flink.table.catalog.CatalogFunction) ClosureCleanerLevel(org.apache.flink.api.common.ExecutionConfig.ClosureCleanerLevel) ValidationException(org.apache.flink.table.api.ValidationException) Modifier(java.lang.reflect.Modifier) Internal(org.apache.flink.annotation.Internal) ClosureCleaner(org.apache.flink.api.java.ClosureCleaner) ExtractionUtils(org.apache.flink.table.types.extraction.ExtractionUtils) ValidationException(org.apache.flink.table.api.ValidationException) Method(java.lang.reflect.Method) HashSet(java.util.HashSet)
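
To see the first check fire, here is a minimal sketch of a function class whose eval() method is not public; the class and function names are hypothetical. Registering it is expected to fail with the "is not public" message produced by the method above.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.functions.ScalarFunction;

public class UdfValidationSketch {

    // eval() is package-private, so validateImplementationMethod rejects the class.
    public static class BrokenUpper extends ScalarFunction {
        String eval(String s) {
            return s == null ? null : s.toUpperCase();
        }
    }

    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // Expected to throw ValidationException:
        // "Method 'eval' of function class '...BrokenUpper' is not public."
        tEnv.createTemporarySystemFunction("BROKEN_UPPER", BrokenUpper.class);
    }
}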

Example 95 with ValidationException

Use of org.apache.flink.table.api.ValidationException in project flink by apache.

From the class TimestampExtractorUtils, method mapToResolvedField.

private static ResolvedFieldReference mapToResolvedField(Function<String, String> nameRemapping, ResolvedSchema schema, String arg) {
    String remappedName = nameRemapping.apply(arg);
    int idx =
            IntStream.range(0, schema.getColumnCount())
                    .filter(i -> schema.getColumnNames().get(i).equals(remappedName))
                    .findFirst()
                    .orElseThrow(
                            () ->
                                    new ValidationException(
                                            String.format("Field %s does not exist", remappedName)));
    TypeInformation<?> dataType = TypeConversions.fromDataTypeToLegacyInfo(schema.getColumnDataTypes().get(idx));
    return new ResolvedFieldReference(remappedName, dataType, idx);
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) ResolvedFieldReference(org.apache.flink.table.expressions.ResolvedFieldReference)
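
The same lookup-or-throw pattern in isolation, as a dependency-free sketch: plain Java types and IllegalArgumentException stand in for ResolvedSchema and Flink's ValidationException so the snippet compiles on its own.

import java.util.Arrays;
import java.util.List;
import java.util.function.Function;
import java.util.stream.IntStream;

public class FieldLookupSketch {

    // Resolve a (possibly remapped) field name to a column index, or fail with a
    // descriptive exception, mirroring mapToResolvedField above.
    static int resolveIndex(
            Function<String, String> nameRemapping, List<String> columnNames, String arg) {
        String remappedName = nameRemapping.apply(arg);
        return IntStream.range(0, columnNames.size())
                .filter(i -> columnNames.get(i).equals(remappedName))
                .findFirst()
                .orElseThrow(
                        () ->
                                new IllegalArgumentException(
                                        String.format("Field %s does not exist", remappedName)));
    }

    public static void main(String[] args) {
        List<String> columns = Arrays.asList("id", "ts", "price");
        System.out.println(resolveIndex(name -> name, columns, "ts")); // prints 1
    }
}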

Aggregations

ValidationException (org.apache.flink.table.api.ValidationException): 143 usages
DataType (org.apache.flink.table.types.DataType): 25
Test (org.junit.Test): 23
HashMap (java.util.HashMap): 21
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 19
LogicalType (org.apache.flink.table.types.logical.LogicalType): 18
TableException (org.apache.flink.table.api.TableException): 17
List (java.util.List): 14
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 14
QueryOperation (org.apache.flink.table.operations.QueryOperation): 14
LinkedHashMap (java.util.LinkedHashMap): 13
DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties): 13
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 12
Expression (org.apache.flink.table.expressions.Expression): 12
TableSchema (org.apache.flink.table.api.TableSchema): 11
Catalog (org.apache.flink.table.catalog.Catalog): 11
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 11
ArrayList (java.util.ArrayList): 10
Map (java.util.Map): 10
Internal (org.apache.flink.annotation.Internal): 10