Example 6 with TypeCheckCtx

use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.

the class PTFTranslator method setupShapeForNoop.

private ShapeDetails setupShapeForNoop(ShapeDetails inpShape, StructObjectInspector OI, List<String> columnNames, RowResolver rr) throws SemanticException {
    ShapeDetails shp = new ShapeDetails();
    shp.setRr(rr);
    shp.setOI(inpShape.getOI());
    shp.setSerde(inpShape.getSerde());
    shp.setSerdeClassName(inpShape.getSerde().getClass().getName());
    shp.setSerdeProps(inpShape.getSerdeProps());
    shp.setColumnNames(columnNames);
    // bind a fresh type-check context to this shape's row resolver for later expression analysis
    TypeCheckCtx tCtx = new TypeCheckCtx(rr);
    tCtx.setUnparseTranslator(unparseT);
    shp.setTypeCheckCtx(tCtx);
    return shp;
}
Also used : TypeCheckCtx(org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx) ShapeDetails(org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails)
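
For orientation, a hedged sketch of how a shape-bound context like this is typically consumed later. The astExpr argument and the resolveAgainstShape helper are hypothetical, and getTypeCheckCtx is assumed to be the bean-style getter matching the setter above; the point is that the stored TypeCheckCtx carries the shape's RowResolver, so expression analysis resolves column references against this shape's columns.

// Sketch only: type-check an AST expression against a shape's columns.
// Assumes ShapeDetails exposes getTypeCheckCtx() alongside the setter used above.
private ExprNodeDesc resolveAgainstShape(ASTNode astExpr, ShapeDetails shape) throws SemanticException {
    TypeCheckCtx ctx = shape.getTypeCheckCtx();
    // genExprNode returns a map from AST nodes to their typed expression descriptors
    return ExprNodeTypeCheck.genExprNode(astExpr, ctx).get(astExpr);
}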

Example 7 with TypeCheckCtx

use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.

the class BaseSemanticAnalyzer method getPartExprNodeDesc.

private static boolean getPartExprNodeDesc(ASTNode astNode, HiveConf conf, Map<ASTNode, ExprNodeDesc> astExprNodeMap) throws SemanticException {
    if (astNode == null) {
        return true;
    } else if ((astNode.getChildren() == null) || (astNode.getChildren().size() == 0)) {
        return astNode.getType() != HiveParser.TOK_PARTVAL;
    }
    // no RowResolver needed: partition values are constant literals
    TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
    String defaultPartitionName = HiveConf.getVar(conf, HiveConf.ConfVars.DEFAULTPARTITIONNAME);
    boolean result = true;
    for (Node childNode : astNode.getChildren()) {
        ASTNode childASTNode = (ASTNode) childNode;
        if (childASTNode.getType() != HiveParser.TOK_PARTVAL) {
            result = getPartExprNodeDesc(childASTNode, conf, astExprNodeMap) && result;
        } else {
            // a TOK_PARTVAL with only a column name (no value child) is a dynamic partition
            boolean isDynamicPart = childASTNode.getChildren().size() <= 1;
            result = !isDynamicPart && result;
            if (!isDynamicPart) {
                ASTNode partVal = (ASTNode) childASTNode.getChildren().get(1);
                if (!defaultPartitionName.equalsIgnoreCase(unescapeSQLString(partVal.getText()))) {
                    astExprNodeMap.put((ASTNode) childASTNode.getChildren().get(0), ExprNodeTypeCheck.genExprNode(partVal, typeCheckCtx).get(partVal));
                }
            }
        }
    }
    return result;
}
Also used : TypeCheckCtx(org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx) Node(org.apache.hadoop.hive.ql.lib.Node)
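
To make the recursion concrete, a hedged walk-through as it might look from within BaseSemanticAnalyzer (the partSpecAst and conf variables are illustrative only). Note that the TypeCheckCtx is built with a null RowResolver: partition values are literals, so there are no column references to resolve.

// Illustrative only: for a spec like PARTITION (ds='2012-01-02', hr),
// the ds TOK_PARTVAL has two children (name, value), so its value expression
// is generated and added to the map; the hr TOK_PARTVAL has a single child
// (name only), so isDynamicPart is true and the overall result becomes false.
Map<ASTNode, ExprNodeDesc> astExprNodeMap = new HashMap<>();
boolean allStatic = getPartExprNodeDesc(partSpecAst, conf, astExprNodeMap); // false here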

Example 8 with TypeCheckCtx

use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.

the class ConstraintsUtils method getDefaultValue.

/**
 * Validate and get the default value from the AST.
 * @param node AST node corresponding to the default value
 * @param typeChild AST node corresponding to the column's type
 * @param tokenStream token stream used to recover the original default value text
 * @return the default value as a string
 */
private static String getDefaultValue(ASTNode node, ASTNode typeChild, TokenRewriteStream tokenStream) throws SemanticException {
    // first create expression from defaultValueAST
    TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
    ExprNodeDesc defaultValExpr = ExprNodeTypeCheck.genExprNode(node, typeCheckCtx).get(node);
    if (defaultValExpr == null) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value!"));
    }
    // get the default value text to be stored in the metastore
    String defaultValueText = tokenStream.toOriginalString(node.getTokenStartIndex(), node.getTokenStopIndex());
    if (defaultValueText.length() > DEFAULT_MAX_LEN) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value: " + defaultValueText + ". Maximum character length allowed is " + DEFAULT_MAX_LEN + "."));
    }
    // Make sure the default value expression type is exactly same as column's type.
    TypeInfo defaultValTypeInfo = defaultValExpr.getTypeInfo();
    TypeInfo colTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(BaseSemanticAnalyzer.getTypeStringFromAST(typeChild));
    if (!defaultValTypeInfo.equals(colTypeInfo)) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid type: " + defaultValTypeInfo.getTypeName() + " for default value: " + defaultValueText + ". Please make sure that " + "the type is compatible with column type: " + colTypeInfo.getTypeName()));
    }
    // throw an error if the default value isn't something Hive allows
    if (!isDefaultValueAllowed(defaultValExpr)) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value: " + defaultValueText + ". DEFAULT only allows constant or function expressions"));
    }
    return defaultValueText;
}
Also used : TypeCheckCtx(org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
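
One subtlety worth calling out: the check uses exact TypeInfo equality, not coercibility. A hedged illustration (type strings only; the assert is for exposition):

// Illustrative only: a default value whose inferred type merely coerces to the
// column type is rejected, e.g. an int-typed literal such as DEFAULT 1 on a BIGINT column.
TypeInfo colType = TypeInfoUtils.getTypeInfoFromTypeString("bigint");
TypeInfo valType = TypeInfoUtils.getTypeInfoFromTypeString("int");
assert !valType.equals(colType); // getDefaultValue would throw INVALID_CSTR_SYNTAX here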

Example 9 with TypeCheckCtx

use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.

the class PTFTranslator method setupShape.

private ShapeDetails setupShape(StructObjectInspector OI, List<String> columnNames, RowResolver rr) throws SemanticException {
    Map<String, String> serdePropsMap = new LinkedHashMap<String, String>();
    AbstractSerDe serde = null;
    ShapeDetails shp = new ShapeDetails();
    try {
        serde = PTFTranslator.createLazyBinarySerDe(hCfg, OI, serdePropsMap);
        StructObjectInspector outOI = PTFPartition.setupPartitionOutputOI(serde, OI);
        shp.setOI(outOI);
    } catch (SerDeException se) {
        throw new SemanticException(se);
    }
    shp.setRr(rr);
    shp.setSerde(serde);
    shp.setSerdeClassName(serde.getClass().getName());
    shp.setSerdeProps(serdePropsMap);
    shp.setColumnNames(columnNames);
    TypeCheckCtx tCtx = new TypeCheckCtx(rr);
    tCtx.setUnparseTranslator(unparseT);
    shp.setTypeCheckCtx(tCtx);
    return shp;
}
Also used : TypeCheckCtx(org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx) ShapeDetails(org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails) AbstractSerDe(org.apache.hadoop.hive.serde2.AbstractSerDe) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) LinkedHashMap(java.util.LinkedHashMap) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
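
Design note: this is the counterpart of setupShapeForNoop in Example 6, and the only difference is the serde handling. The noop variant passes the input shape's SerDe and ObjectInspector straight through, while setupShape materializes a fresh LazyBinarySerDe and derives the output ObjectInspector from it, so the PTF partition gets its own serialization. The TypeCheckCtx wiring at the end is identical in both.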

Example 10 with TypeCheckCtx

use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.

the class ParseUtils method getFullPartitionSpecs.

/**
 * Get the partition specs from the tree. Each spec is stored in full,
 * including its comparison operator, in the output map.
 *
 * @return Map of partition expressions keyed by prefix length. Most of the time the
 *         prefix length is the same for all partition specs, so the expressions can
 *         simply be OR-ed together.
 */
public static Map<Integer, List<ExprNodeGenericFuncDesc>> getFullPartitionSpecs(CommonTree ast, Table table, Configuration conf, boolean canGroupExprs) throws SemanticException {
    String defaultPartitionName = HiveConf.getVar(conf, HiveConf.ConfVars.DEFAULTPARTITIONNAME);
    Map<String, String> colTypes = new HashMap<>();
    for (FieldSchema fs : table.getPartitionKeys()) {
        colTypes.put(fs.getName().toLowerCase(), fs.getType());
    }
    Map<Integer, List<ExprNodeGenericFuncDesc>> result = new HashMap<>();
    for (int childIndex = 0; childIndex < ast.getChildCount(); childIndex++) {
        Tree partSpecTree = ast.getChild(childIndex);
        if (partSpecTree.getType() != HiveParser.TOK_PARTSPEC) {
            continue;
        }
        ExprNodeGenericFuncDesc expr = null;
        Set<String> names = new HashSet<>(partSpecTree.getChildCount());
        for (int i = 0; i < partSpecTree.getChildCount(); ++i) {
            CommonTree partSpecSingleKey = (CommonTree) partSpecTree.getChild(i);
            assert (partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL);
            String key = stripIdentifierQuotes(partSpecSingleKey.getChild(0).getText()).toLowerCase();
            String operator = partSpecSingleKey.getChild(1).getText();
            ASTNode partValNode = (ASTNode) partSpecSingleKey.getChild(2);
            TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
            ExprNodeConstantDesc valExpr = (ExprNodeConstantDesc) ExprNodeTypeCheck.genExprNode(partValNode, typeCheckCtx).get(partValNode);
            Object val = valExpr.getValue();
            boolean isDefaultPartitionName = val.equals(defaultPartitionName);
            String type = colTypes.get(key);
            if (type == null) {
                throw new SemanticException("Column " + key + " is not a partition key");
            }
            PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type);
            // Create the corresponding Hive expression to filter on partition columns.
            if (!isDefaultPartitionName) {
                if (!valExpr.getTypeString().equals(type)) {
                    ObjectInspectorConverters.Converter converter = ObjectInspectorConverters.getConverter(TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(valExpr.getTypeInfo()), TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(pti));
                    val = converter.convert(valExpr.getValue());
                }
            }
            ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, key, null, true);
            ExprNodeGenericFuncDesc op;
            if (!isDefaultPartitionName) {
                op = PartitionUtils.makeBinaryPredicate(operator, column, new ExprNodeConstantDesc(pti, val));
            } else {
                GenericUDF originalOp = FunctionRegistry.getFunctionInfo(operator).getGenericUDF();
                String fnName;
                if (FunctionRegistry.isEq(originalOp)) {
                    fnName = "isnull";
                } else if (FunctionRegistry.isNeq(originalOp)) {
                    fnName = "isnotnull";
                } else {
                    throw new SemanticException("Cannot use " + operator + " in a default partition spec; only '=' and '!=' are allowed.");
                }
                op = PartitionUtils.makeUnaryPredicate(fnName, column);
            }
            // If it's multi-expr filter (e.g. a='5', b='2012-01-02'), AND with previous exprs.
            expr = (expr == null) ? op : PartitionUtils.makeBinaryPredicate("and", expr, op);
            names.add(key);
        }
        if (expr == null) {
            continue;
        }
        // We got the expr for one full partition spec. Determine the prefix length.
        int prefixLength = calculatePartPrefix(table, names);
        // Check whether a filter already exists for this prefix length.
        List<ExprNodeGenericFuncDesc> orExpr = result.get(prefixLength);
        // If it does and grouping is allowed, OR into it; otherwise keep a
        // separate filter. In most cases there will only be one.
        if (orExpr == null) {
            result.put(prefixLength, Lists.newArrayList(expr));
        } else if (canGroupExprs) {
            orExpr.set(0, PartitionUtils.makeBinaryPredicate("or", expr, orExpr.get(0)));
        } else {
            orExpr.add(expr);
        }
    }
    return result;
}
Also used : TypeCheckCtx(org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx) HashMap(java.util.HashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) ObjectInspectorConverters(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) CommonTree(org.antlr.runtime.tree.CommonTree) Tree(org.antlr.runtime.tree.Tree) ArrayList(java.util.ArrayList) List(java.util.List) HashSet(java.util.HashSet) ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) GenericUDF(org.apache.hadoop.hive.ql.udf.generic.GenericUDF)
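
As a companion sketch, the predicate construction in the loop above reduces to the following for a two-key spec. The key names, values, and the "=" operator text are illustrative, and error handling (makeBinaryPredicate can throw SemanticException) is elided:

// Sketch only: build (ds = '2012-01-02') and (hr = '12') the way the loop does.
PrimitiveTypeInfo stringType = TypeInfoFactory.getPrimitiveTypeInfo("string");
ExprNodeColumnDesc ds = new ExprNodeColumnDesc(stringType, "ds", null, true);
ExprNodeGenericFuncDesc dsEq = PartitionUtils.makeBinaryPredicate("=", ds, new ExprNodeConstantDesc(stringType, "2012-01-02"));
ExprNodeColumnDesc hr = new ExprNodeColumnDesc(stringType, "hr", null, true);
ExprNodeGenericFuncDesc hrEq = PartitionUtils.makeBinaryPredicate("=", hr, new ExprNodeConstantDesc(stringType, "12"));
ExprNodeGenericFuncDesc spec = PartitionUtils.makeBinaryPredicate("and", dsEq, hrEq);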

Aggregations

TypeCheckCtx (org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx): 13 usages
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 7 usages
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 6 usages
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 5 usages
HashMap (java.util.HashMap): 4 usages
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 4 usages
RowResolver (org.apache.hadoop.hive.ql.parse.RowResolver): 4 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 4 usages
ArrayList (java.util.ArrayList): 3 usages
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 3 usages
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 3 usages
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 3 usages
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 3 usages
HashSet (java.util.HashSet): 2 usages
LinkedHashMap (java.util.LinkedHashMap): 2 usages
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 2 usages
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 2 usages
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 2 usages
DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint): 2 usages
CalciteSemanticException (org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException): 2 usages