
Example 1 with TypeCheckCtx

Use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.

From class ConstraintsUtils, method validateCheckConstraint:

public static void validateCheckConstraint(List<FieldSchema> columns, List<SQLCheckConstraint> checkConstraints, Configuration conf) throws SemanticException {
    // create colinfo and then row resolver
    RowResolver rr = new RowResolver();
    for (FieldSchema column : columns) {
        ColumnInfo ci = new ColumnInfo(column.getName(), TypeInfoUtils.getTypeInfoFromTypeString(column.getType()), null, false);
        rr.put(null, column.getName(), ci);
    }
    TypeCheckCtx typeCheckCtx = new TypeCheckCtx(rr);
    // TypeCheckProcFactory expects the TypeCheckCtx to have an UnparseTranslator
    UnparseTranslator unparseTranslator = new UnparseTranslator(conf);
    typeCheckCtx.setUnparseTranslator(unparseTranslator);
    for (SQLCheckConstraint cc : checkConstraints) {
        try {
            ParseDriver parseDriver = new ParseDriver();
            ASTNode checkExprAST = parseDriver.parseExpression(cc.getCheck_expression());
            validateCheckExprAST(checkExprAST);
            Map<ASTNode, ExprNodeDesc> genExprs = ExprNodeTypeCheck.genExprNode(checkExprAST, typeCheckCtx);
            ExprNodeDesc checkExpr = genExprs.get(checkExprAST);
            if (checkExpr == null) {
                throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid type for CHECK constraint: ") + cc.getCheck_expression());
            }
            if (!serdeConstants.BOOLEAN_TYPE_NAME.equals(checkExpr.getTypeInfo().getTypeName())) {
                throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Only boolean type is supported for CHECK constraint: ") + cc.getCheck_expression() + ". Found: " + checkExpr.getTypeInfo().getTypeName());
            }
            validateCheckExpr(checkExpr);
        } catch (Exception e) {
            throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid CHECK constraint expression: ") + cc.getCheck_expression() + ". " + e.getMessage(), e);
        }
    }
}
Also used : TypeCheckCtx(org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ParseDriver(org.apache.hadoop.hive.ql.parse.ParseDriver) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) RowResolver(org.apache.hadoop.hive.ql.parse.RowResolver) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) UnparseTranslator(org.apache.hadoop.hive.ql.parse.UnparseTranslator)
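
As a usage illustration, here is a minimal, hypothetical caller sketch (not taken from the Hive sources): it validates an invented CHECK constraint, quantity > 0, on a single int column. It assumes the thrift-generated no-arg constructor and setCheck_expression setter on SQLCheckConstraint (mirroring the getCheck_expression() accessor used above) and passes a HiveConf as the Configuration.

// Hypothetical caller: the column name, type, and expression are invented.
static void checkQuantityConstraint() throws SemanticException {
    List<FieldSchema> columns = new ArrayList<>();
    columns.add(new FieldSchema("quantity", "int", null));
    SQLCheckConstraint constraint = new SQLCheckConstraint();
    constraint.setCheck_expression("quantity > 0");
    // Throws SemanticException if the expression fails to parse or is not boolean-typed.
    ConstraintsUtils.validateCheckConstraint(columns, Collections.singletonList(constraint), new HiveConf());
}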

Example 2 with TypeCheckCtx

Use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.

From class CalcitePlanner, method genRexNode:

RexNode genRexNode(ASTNode expr, RowResolver input, RowResolver outerRR, Map<ASTNode, QBSubQueryParseInfo> subqueryToRelNode, boolean useCaching, RexBuilder rexBuilder) throws SemanticException {
    TypeCheckCtx tcCtx = new TypeCheckCtx(input, rexBuilder, useCaching, false);
    tcCtx.setOuterRR(outerRR);
    tcCtx.setSubqueryToRelNode(subqueryToRelNode);
    return genRexNode(expr, input, tcCtx);
}
Also used : TypeCheckCtx(org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx) JoinTypeCheckCtx(org.apache.hadoop.hive.ql.parse.type.JoinTypeCheckCtx)
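
For context, a hedged sketch of a call site with placeholder variables (whereAst, inputRR, and cluster are invented, not names from CalcitePlanner): it type-checks a WHERE-clause AST with no outer query context, no registered subqueries, and caching enabled. Note that the snippet above passes false as the fourth TypeCheckCtx constructor argument; reading that flag as disabling constant folding is an assumption.

// Hypothetical call site inside the planner; all three variables are placeholders.
RexNode filter = genRexNode(whereAst, inputRR, null /* outerRR */,
        Collections.emptyMap() /* subqueryToRelNode */, true /* useCaching */, cluster.getRexBuilder());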

Example 3 with TypeCheckCtx

Use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.

From class ExecuteStatementAnalyzer, method getConstant:

/**
 * This method creates a constant expression to replace the given dynamic expression.
 * @param dynamicExpr expression node representing the dynamic parameter
 * @param typeInfo type info used to create the constant expression from the ASTNode
 * @param parameterMap Integer-to-AST-node map of the statement's parameters
 * @return the constant expression bound to the dynamic parameter's index
 */
private ExprNodeConstantDesc getConstant(ExprDynamicParamDesc dynamicExpr, TypeInfo typeInfo, Map<Integer, ASTNode> parameterMap) throws SemanticException {
    Preconditions.checkArgument(parameterMap.containsKey(dynamicExpr.getIndex()), "Parameter index not found");
    ASTNode paramNode = parameterMap.get(dynamicExpr.getIndex());
    TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
    ExprNodeDesc node = ExprNodeTypeCheck.genExprNode(paramNode, typeCheckCtx).get(paramNode);
    Preconditions.checkArgument(node instanceof ExprNodeConstantDesc, "Invalid expression created");
    return (ExprNodeConstantDesc) node;
}
Also used : TypeCheckCtx(org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx) ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
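
A hypothetical caller sketch (method name and literal invented): it binds prepared-statement parameter 1 to the parsed literal 42 and resolves a dynamic expression of int type against it, using the same ParseDriver and TypeInfoFactory classes that appear elsewhere on this page.

// Hypothetical helper: binds parameter index 1 to the parsed literal "42".
private ExprNodeConstantDesc bindFirstParameter(ExprDynamicParamDesc dynamicExpr) throws Exception {
    Map<Integer, ASTNode> parameterMap = new HashMap<>();
    parameterMap.put(1, new ParseDriver().parseExpression("42"));
    // getConstant() requires dynamicExpr.getIndex() to be present in the map.
    return getConstant(dynamicExpr, TypeInfoFactory.intTypeInfo, parameterMap);
}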

Example 4 with TypeCheckCtx

Use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.

From class ConstraintsUtils, method getDefaultValue:

/**
 * Validate the default value from the AST and return it.
 * @param node AST node corresponding to the default value
 * @param typeChild AST node corresponding to the column's type
 * @param tokenStream token rewrite stream used to recover the original expression text
 * @return the default value as a string
 */
private static String getDefaultValue(ASTNode node, ASTNode typeChild, TokenRewriteStream tokenStream) throws SemanticException {
    // first create expression from defaultValueAST
    TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
    ExprNodeDesc defaultValExpr = ExprNodeTypeCheck.genExprNode(node, typeCheckCtx).get(node);
    if (defaultValExpr == null) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value!"));
    }
    // get the default value to be stored in the metastore
    String defaultValueText = tokenStream.toOriginalString(node.getTokenStartIndex(), node.getTokenStopIndex());
    if (defaultValueText.length() > DEFAULT_MAX_LEN) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value: " + defaultValueText + ". Maximum character length allowed is " + DEFAULT_MAX_LEN + "."));
    }
    // Make sure the default value expression type is exactly the same as the column's type.
    TypeInfo defaultValTypeInfo = defaultValExpr.getTypeInfo();
    TypeInfo colTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(BaseSemanticAnalyzer.getTypeStringFromAST(typeChild));
    if (!defaultValTypeInfo.equals(colTypeInfo)) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid type: " + defaultValTypeInfo.getTypeName() + " for default value: " + defaultValueText + ". Please make sure that " + "the type is compatible with column type: " + colTypeInfo.getTypeName()));
    }
    // throw an error if default value isn't what hive allows
    if (!isDefaultValueAllowed(defaultValExpr)) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value: " + defaultValueText + ". DEFAULT only allows constant or function expressions"));
    }
    return defaultValueText;
}
Also used : TypeCheckCtx(org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
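
The null RowResolver in the snippet is the central design choice: with no columns to resolve, only column-free expressions (constants and function calls) type-check, which is exactly what a DEFAULT value may be. A minimal standalone sketch of the same pattern, with an invented method name and literal:

// Hypothetical helper: type-check a column-free expression with no row resolver.
private static TypeInfo typeOfDefault(String defaultText) throws Exception {
    ASTNode ast = new ParseDriver().parseExpression(defaultText);
    ExprNodeDesc expr = ExprNodeTypeCheck.genExprNode(ast, new TypeCheckCtx(null)).get(ast);
    // e.g. typeOfDefault("current_date()") should resolve to the date type.
    return expr == null ? null : expr.getTypeInfo();
}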

Example 5 with TypeCheckCtx

Use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.

From class ShowPartitionAnalyzer, method getShowPartitionsFilter:

@VisibleForTesting
ExprNodeDesc getShowPartitionsFilter(Table table, ASTNode command) throws SemanticException {
    ExprNodeDesc showFilter = null;
    for (int childIndex = 0; childIndex < command.getChildCount(); childIndex++) {
        ASTNode astChild = (ASTNode) command.getChild(childIndex);
        if (astChild.getType() == HiveParser.TOK_WHERE) {
            RowResolver rwsch = new RowResolver();
            Map<String, String> colTypes = new HashMap<String, String>();
            for (FieldSchema fs : table.getPartCols()) {
                rwsch.put(table.getTableName(), fs.getName(), new ColumnInfo(fs.getName(), TypeInfoFactory.stringTypeInfo, null, true));
                colTypes.put(fs.getName().toLowerCase(), fs.getType());
            }
            TypeCheckCtx tcCtx = new TypeCheckCtx(rwsch);
            ASTNode conds = (ASTNode) astChild.getChild(0);
            Map<ASTNode, ExprNodeDesc> nodeOutputs = ExprNodeTypeCheck.genExprNode(conds, tcCtx);
            ExprNodeDesc target = nodeOutputs.get(conds);
            if (!(target instanceof ExprNodeGenericFuncDesc) || !target.getTypeInfo().equals(TypeInfoFactory.booleanTypeInfo)) {
                String errorMsg = tcCtx.getError() != null ? ". " + tcCtx.getError() : "";
                throw new SemanticException("Not a filter expr: " + (target == null ? "null" : target.getExprString()) + errorMsg);
            }
            showFilter = replaceDefaultPartNameAndCastType(target, colTypes, HiveConf.getVar(conf, HiveConf.ConfVars.DEFAULTPARTITIONNAME));
        }
    }
    return showFilter;
}
Also used : TypeCheckCtx(org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx) HashMap(java.util.HashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) RowResolver(org.apache.hadoop.hive.ql.parse.RowResolver) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) VisibleForTesting(com.google.common.annotations.VisibleForTesting)
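
For orientation, a hedged sketch of the resolver setup performed above, with an invented table sales partitioned by ds: every partition column is registered as a string, since partition values are stored as strings in the metastore, and replaceDefaultPartNameAndCastType later casts the comparisons back to the declared column types.

// Hypothetical fragment; corresponds to a statement such as:
//   SHOW PARTITIONS sales WHERE ds >= '2021-01-01'
RowResolver rwsch = new RowResolver();
rwsch.put("sales", "ds", new ColumnInfo("ds", TypeInfoFactory.stringTypeInfo, null, true));
TypeCheckCtx tcCtx = new TypeCheckCtx(rwsch);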

Aggregations

TypeCheckCtx (org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx): 13 usages
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 7 usages
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 6 usages
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 5 usages
HashMap (java.util.HashMap): 4 usages
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 4 usages
RowResolver (org.apache.hadoop.hive.ql.parse.RowResolver): 4 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 4 usages
ArrayList (java.util.ArrayList): 3 usages
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 3 usages
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 3 usages
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 3 usages
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 3 usages
HashSet (java.util.HashSet): 2 usages
LinkedHashMap (java.util.LinkedHashMap): 2 usages
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 2 usages
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 2 usages
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 2 usages
DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint): 2 usages
CalciteSemanticException (org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException): 2 usages