
Example 1 with ExprNodeConstantDesc

Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc in project hive by apache.

The class ReplicationSemanticAnalyzer, method genPartSpecs:

private Map<Integer, List<ExprNodeGenericFuncDesc>> genPartSpecs(Table table, List<Map<String, String>> partitions) throws SemanticException {
    Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecs = new HashMap<Integer, List<ExprNodeGenericFuncDesc>>();
    int partPrefixLength = 0;
    if ((partitions != null) && (partitions.size() > 0)) {
        // Pick the length of the first partition spec; all partitions listed are
        // expected to have the same number of key-value pairs.
        partPrefixLength = partitions.get(0).size();
    }
    List<ExprNodeGenericFuncDesc> ptnDescs = new ArrayList<ExprNodeGenericFuncDesc>();
    for (Map<String, String> ptn : partitions) {
        // convert each key-value-map to appropriate expression.
        ExprNodeGenericFuncDesc expr = null;
        for (Map.Entry<String, String> kvp : ptn.entrySet()) {
            String key = kvp.getKey();
            Object val = kvp.getValue();
            String type = table.getPartColByName(key).getType();
            PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type);
            ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, key, null, true);
            ExprNodeGenericFuncDesc op = DDLSemanticAnalyzer.makeBinaryPredicate("=", column, new ExprNodeConstantDesc(pti, val));
            expr = (expr == null) ? op : DDLSemanticAnalyzer.makeBinaryPredicate("and", expr, op);
        }
        if (expr != null) {
            ptnDescs.add(expr);
        }
    }
    if (ptnDescs.size() > 0) {
        partSpecs.put(partPrefixLength, ptnDescs);
    }
    return partSpecs;
}
Also used: ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), HashMap (java.util.HashMap), LinkedHashMap (java.util.LinkedHashMap), ArrayList (java.util.ArrayList), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), List (java.util.List), Map (java.util.Map)
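
For orientation, here is a minimal sketch of the expression tree the loop above folds together for a two-key partition map such as {ds=2021-01-01, hr=12}: one equality predicate per key-value pair, combined with "and". The class name, keys, and values are illustrative, not from the source; makeBinaryPredicate and the desc constructors are the same ones used in genPartSpecs.

import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class PartSpecSketch {
    // Builds ((ds = '2021-01-01') and (hr = '12')), mirroring one iteration
    // of the genPartSpecs loop over a two-key partition spec.
    public static ExprNodeGenericFuncDesc samplePartitionPredicate() throws SemanticException {
        ExprNodeColumnDesc ds = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "ds", null, true);
        ExprNodeGenericFuncDesc dsEq = DDLSemanticAnalyzer.makeBinaryPredicate(
                "=", ds, new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "2021-01-01"));
        ExprNodeColumnDesc hr = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "hr", null, true);
        ExprNodeGenericFuncDesc hrEq = DDLSemanticAnalyzer.makeBinaryPredicate(
                "=", hr, new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "12"));
        // fold the per-key predicates with "and", as the inner loop does
        return DDLSemanticAnalyzer.makeBinaryPredicate("and", dsEq, hrEq);
    }
}

genPartSpecs then groups these per-partition predicates by their number of key-value pairs, which is what the partPrefixLength map key records.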

Example 2 with ExprNodeConstantDesc

Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc in project phoenix by apache.

The class IndexPredicateAnalyzer, method processingInOperator:

private void processingInOperator(ExprNodeGenericFuncDesc expr, List<IndexSearchCondition> searchConditions, boolean isNot, Object... nodeOutputs) {
    ExprNodeColumnDesc columnDesc = null;
    String[] fields = null;
    if (LOG.isTraceEnabled()) {
        LOG.trace("Processing In Operator. nodeOutputs : " + Lists.newArrayList(nodeOutputs));
    }
    if (nodeOutputs[0] instanceof ExprNodeFieldDesc) {
        // rowKey field
        ExprNodeFieldDesc fieldDesc = (ExprNodeFieldDesc) nodeOutputs[0];
        fields = ExprNodeDescUtils.extractFields(fieldDesc);
        ExprNodeDesc[] extracted = ExprNodeDescUtils.extractComparePair((ExprNodeDesc) nodeOutputs[0], (ExprNodeDesc) nodeOutputs[1]);
        if (extracted == null) {
            // no compare pair could be extracted (case added for Tez); skip this predicate
            return;
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("nodeOutputs[0] : " + nodeOutputs[0] + ", nodeOutputs[1] : " + nodeOutputs[1] + " => " + Lists.newArrayList(extracted));
        }
        columnDesc = (ExprNodeColumnDesc) extracted[0];
    } else if (nodeOutputs[0] instanceof ExprNodeGenericFuncDesc) {
        columnDesc = (ExprNodeColumnDesc) ((ExprNodeGenericFuncDesc) nodeOutputs[0]).getChildren().get(0);
    } else {
        columnDesc = (ExprNodeColumnDesc) nodeOutputs[0];
    }
    String udfName = expr.getGenericUDF().getUdfName();
    ExprNodeConstantDesc[] inConstantDescs = new ExprNodeConstantDesc[nodeOutputs.length - 1];
    for (int i = 0, limit = inConstantDescs.length; i < limit; i++) {
        if (!(nodeOutputs[i + 1] instanceof ExprNodeConstantDesc)) {
            // a non-constant IN-list element (case added for Tez); skip this predicate
            return;
        }
        inConstantDescs[i] = (ExprNodeConstantDesc) nodeOutputs[i + 1];
    }
    searchConditions.add(new IndexSearchCondition(columnDesc, udfName, inConstantDescs, expr, fields, isNot));
}
Also used: ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), ExprNodeFieldDesc (org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
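
To make the operand layout concrete, the following sketch builds the nodeOutputs array that processingInOperator expects for a predicate like k IN (1, 2, 3): element 0 is the column (the simple case with no rowKey field and no wrapping function), and every following element is an IN-list constant. The class and column names are hypothetical.

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class InOperandsSketch {
    // Operands for "k IN (1, 2, 3)": the column first, then the constants.
    public static ExprNodeDesc[] sampleNodeOutputs() {
        return new ExprNodeDesc[] {
            new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "k", null, false),
            new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 1),
            new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 2),
            new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 3)
        };
    }
}

This layout is why the loop above sizes inConstantDescs as nodeOutputs.length - 1 and bails out if any element after the first is not a constant.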

Example 3 with ExprNodeConstantDesc

Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc in project phoenix by apache.

The class IndexPredicateAnalyzer, method analyzeExpr:

private ExprNodeDesc analyzeExpr(ExprNodeGenericFuncDesc expr, List<IndexSearchCondition> searchConditions, Object... nodeOutputs) throws SemanticException {
    if (FunctionRegistry.isOpAnd(expr)) {
        assert (nodeOutputs.length == 2);
        ExprNodeDesc residual1 = (ExprNodeDesc) nodeOutputs[0];
        ExprNodeDesc residual2 = (ExprNodeDesc) nodeOutputs[1];
        if (residual1 == null) {
            return residual2;
        }
        if (residual2 == null) {
            return residual1;
        }
        List<ExprNodeDesc> residuals = new ArrayList<ExprNodeDesc>();
        residuals.add(residual1);
        residuals.add(residual2);
        return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, FunctionRegistry.getGenericUDFForAnd(), residuals);
    }
    GenericUDF genericUDF = expr.getGenericUDF();
    if (!(genericUDF instanceof GenericUDFBaseCompare)) {
        // 2015-10-22 Added by JeongMin Ju : Processing Between/In Operator
        if (genericUDF instanceof GenericUDFBetween) {
            // For NOT BETWEEN, the first element of nodeOutputs is true; otherwise it is false.
            processingBetweenOperator(expr, searchConditions, nodeOutputs);
            return expr;
        } else if (genericUDF instanceof GenericUDFIn) {
            // Plain IN operator; NOT IN is handled in the branch below.
            processingInOperator(expr, searchConditions, false, nodeOutputs);
            return expr;
        } else if (genericUDF instanceof GenericUDFOPNot && ((ExprNodeGenericFuncDesc) expr.getChildren().get(0)).getGenericUDF() instanceof GenericUDFIn) {
            // For NOT IN, the IN operator appears as the child of the NOT operator.
            processingInOperator((ExprNodeGenericFuncDesc) expr.getChildren().get(0), searchConditions, true, ((ExprNodeGenericFuncDesc) nodeOutputs[0]).getChildren().toArray());
            return expr;
        } else if (genericUDF instanceof GenericUDFOPNull) {
            processingNullOperator(expr, searchConditions, nodeOutputs);
            return expr;
        } else if (genericUDF instanceof GenericUDFOPNotNull) {
            processingNotNullOperator(expr, searchConditions, nodeOutputs);
            return expr;
        } else {
            return expr;
        }
    }
    ExprNodeDesc expr1 = (ExprNodeDesc) nodeOutputs[0];
    ExprNodeDesc expr2 = (ExprNodeDesc) nodeOutputs[1];
    // We may need to peel off the GenericUDFBridge that is added by CBO or the user.
    if (expr1.getTypeInfo().equals(expr2.getTypeInfo())) {
        expr1 = getColumnExpr(expr1);
        expr2 = getColumnExpr(expr2);
    }
    ExprNodeDesc[] extracted = ExprNodeDescUtils.extractComparePair(expr1, expr2);
    if (extracted == null || (extracted.length > 2 && !acceptsFields)) {
        return expr;
    }
    ExprNodeColumnDesc columnDesc;
    ExprNodeConstantDesc constantDesc;
    if (extracted[0] instanceof ExprNodeConstantDesc) {
        genericUDF = genericUDF.flip();
        columnDesc = (ExprNodeColumnDesc) extracted[1];
        constantDesc = (ExprNodeConstantDesc) extracted[0];
    } else {
        columnDesc = (ExprNodeColumnDesc) extracted[0];
        constantDesc = (ExprNodeConstantDesc) extracted[1];
    }
    Set<String> allowed = columnToUDFs.get(columnDesc.getColumn());
    if (allowed == null) {
        return expr;
    }
    String udfName = genericUDF.getUdfName();
    if (!allowed.contains(udfName)) {
        return expr;
    }
    String[] fields = null;
    if (extracted.length > 2) {
        ExprNodeFieldDesc fieldDesc = (ExprNodeFieldDesc) extracted[2];
        if (!isValidField(fieldDesc)) {
            return expr;
        }
        fields = ExprNodeDescUtils.extractFields(fieldDesc);
    }
    // We also need to update the expr so that the index query can be generated.
    // Note that Hive does not support UDFToDouble etc. in the query text.
    List<ExprNodeDesc> list = new ArrayList<ExprNodeDesc>();
    list.add(expr1);
    list.add(expr2);
    expr = new ExprNodeGenericFuncDesc(expr.getTypeInfo(), expr.getGenericUDF(), list);
    searchConditions.add(new IndexSearchCondition(columnDesc, udfName, constantDesc, expr, fields));
    // remove it from the residual predicate
    return fields == null ? null : expr;
}
Also used: GenericUDFBetween (org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween), ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), GenericUDFOPNull (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull), ArrayList (java.util.ArrayList), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), GenericUDFOPNotNull (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull), GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF), GenericUDFBaseCompare (org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare), ExprNodeFieldDesc (org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), GenericUDFIn (org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), GenericUDFOPNot (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot)
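
One step worth isolating is the flip in the constant-on-left branch: when extractComparePair returns the constant first (the query wrote, say, 5 > key), the comparison UDF is mirrored so the condition can be recorded with the column on the left. A minimal sketch, assuming only that flip() and getUdfName() behave as they do in the method above; the class name is hypothetical:

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;

public class FlipSketch {
    public static void main(String[] args) {
        GenericUDF gt = new GenericUDFOPGreaterThan();
        // flip() returns the mirrored comparison (">" becomes "<"), so
        // "5 > key" can be recorded as the equivalent "key < 5".
        GenericUDF flipped = gt.flip();
        System.out.println(gt.getUdfName() + " flips to " + flipped.getUdfName());
    }
}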

Example 4 with ExprNodeConstantDesc

Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc in project phoenix by apache.

The class IndexPredicateAnalyzer, method processingBetweenOperator:

private void processingBetweenOperator(ExprNodeGenericFuncDesc expr, List<IndexSearchCondition> searchConditions, Object... nodeOutputs) {
    ExprNodeColumnDesc columnDesc = null;
    String[] fields = null;
    if (nodeOutputs[1] instanceof ExprNodeFieldDesc) {
        // rowKey field
        ExprNodeFieldDesc fieldDesc = (ExprNodeFieldDesc) nodeOutputs[1];
        fields = ExprNodeDescUtils.extractFields(fieldDesc);
        ExprNodeDesc[] extracted = ExprNodeDescUtils.extractComparePair((ExprNodeDesc) nodeOutputs[1], (ExprNodeDesc) nodeOutputs[2]);
        columnDesc = (ExprNodeColumnDesc) extracted[0];
    } else if (nodeOutputs[1] instanceof ExprNodeGenericFuncDesc) {
        // nodeOutputs[1] is the column operand (nodeOutputs[0] is the NOT flag),
        // so the instanceof check and the cast must both use index 1
        columnDesc = (ExprNodeColumnDesc) ((ExprNodeGenericFuncDesc) nodeOutputs[1]).getChildren().get(0);
    } else {
        columnDesc = (ExprNodeColumnDesc) nodeOutputs[1];
    }
    String udfName = expr.getGenericUDF().getUdfName();
    ExprNodeConstantDesc[] betweenConstants = new ExprNodeConstantDesc[] { (ExprNodeConstantDesc) nodeOutputs[2], (ExprNodeConstantDesc) nodeOutputs[3] };
    boolean isNot = (Boolean) ((ExprNodeConstantDesc) nodeOutputs[0]).getValue();
    searchConditions.add(new IndexSearchCondition(columnDesc, udfName, betweenConstants, expr, fields, isNot));
}
Also used: ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), ExprNodeFieldDesc (org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
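
For reference, a sketch of the operand layout processingBetweenOperator consumes for k BETWEEN 10 AND 20: element 0 is the NOT flag emitted by GenericUDFBetween (false here), element 1 the column, and elements 2 and 3 the range constants. Class and column names are illustrative.

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class BetweenOperandsSketch {
    // Operands for "k BETWEEN 10 AND 20".
    public static ExprNodeDesc[] sampleNodeOutputs() {
        return new ExprNodeDesc[] {
            new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, Boolean.FALSE), // NOT flag
            new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "k", null, false),    // column
            new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 10),                // lower bound
            new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 20)                 // upper bound
        };
    }
}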

Example 5 with ExprNodeConstantDesc

Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc in project hive by apache.

The class SemanticAnalyzer, method getColForInsertStmtSpec:

private RowResolver getColForInsertStmtSpec(Map<String, ExprNodeDesc> targetCol2Projection, final Table target, Map<String, ColumnInfo> targetCol2ColumnInfo, int colListPos, List<TypeInfo> targetTableColTypes, ArrayList<ExprNodeDesc> new_col_list, List<String> targetTableColNames) throws SemanticException {
    RowResolver newOutputRR = new RowResolver();
    Map<String, String> colNameToDefaultVal = null;
    // see if we need to fetch default constraints from metastore
    if (targetCol2Projection.size() < targetTableColNames.size()) {
        try {
            DefaultConstraint dc = Hive.get().getEnabledDefaultConstraints(target.getDbName(), target.getTableName());
            colNameToDefaultVal = dc.getColNameToDefaultValueMap();
        } catch (Exception e) {
            if (e instanceof SemanticException) {
                throw (SemanticException) e;
            } else {
                throw new RuntimeException(e);
            }
        }
    }
    for (int i = 0; i < targetTableColNames.size(); i++) {
        String f = targetTableColNames.get(i);
        if (targetCol2Projection.containsKey(f)) {
            // put existing column in new list to make sure it is in the right position
            new_col_list.add(targetCol2Projection.get(f));
            ColumnInfo ci = targetCol2ColumnInfo.get(f);
            ci.setInternalName(getColumnInternalName(colListPos));
            newOutputRR.put(ci.getTabAlias(), ci.getInternalName(), ci);
        } else {
            // add new 'synthetic' columns for projections not provided by Select
            assert (colNameToDefaultVal != null);
            ExprNodeDesc exp = null;
            if (colNameToDefaultVal.containsKey(f)) {
                // make an expression for default value
                String defaultValue = colNameToDefaultVal.get(f);
                ParseDriver parseDriver = new ParseDriver();
                try {
                    ASTNode defValAst = parseDriver.parseExpression(defaultValue);
                    exp = TypeCheckProcFactory.genExprNode(defValAst, new TypeCheckCtx(null)).get(defValAst);
                } catch (Exception e) {
                    throw new SemanticException("Error while parsing default value: " + defaultValue + ". Error message: " + e.getMessage());
                }
                LOG.debug("Added default value from metastore: " + exp);
            } else {
                exp = new ExprNodeConstantDesc(targetTableColTypes.get(i), null);
            }
            new_col_list.add(exp);
            // this column doesn't come from any table
            final String tableAlias = null;
            ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(colListPos), exp.getWritableObjectInspector(), tableAlias, false);
            newOutputRR.put(colInfo.getTabAlias(), colInfo.getInternalName(), colInfo);
        }
        colListPos++;
    }
    return newOutputRR;
}
Also used: ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo), LockException (org.apache.hadoop.hive.ql.lockmgr.LockException), IOException (java.io.IOException), CalciteSemanticException (org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), SerDeException (org.apache.hadoop.hive.serde2.SerDeException), PatternSyntaxException (java.util.regex.PatternSyntaxException), FileNotFoundException (java.io.FileNotFoundException), AccessControlException (java.security.AccessControlException), InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), CheckConstraint (org.apache.hadoop.hive.ql.metadata.CheckConstraint), NotNullConstraint (org.apache.hadoop.hive.ql.metadata.NotNullConstraint), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint), DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
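
The fallback branch above is easy to demonstrate in isolation: a target column the INSERT does not supply, and that has no DEFAULT constraint, is filled with a typed NULL constant. A minimal sketch; the class and method names are illustrative:

import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class NullDefaultSketch {
    // A typed NULL: the constant carries the target column's type; its value is null.
    public static ExprNodeConstantDesc nullDefaultFor(TypeInfo colType) {
        return new ExprNodeConstantDesc(colType, null);
    }

    public static void main(String[] args) {
        ExprNodeConstantDesc d = nullDefaultFor(TypeInfoFactory.stringTypeInfo);
        System.out.println(d.getExprString()); // the expression string of a null constant
    }
}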

Aggregations

ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 208 usages
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 178 usages
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 134 usages
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 123 usages
ArrayList (java.util.ArrayList): 97 usages
Test (org.junit.Test): 71 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 46 usages
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 39 usages
HashMap (java.util.HashMap): 32 usages
GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF): 30 usages
VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression): 27 usages
GenericUDFOPAnd (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd): 27 usages
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 27 usages
List (java.util.List): 23 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 22 usages
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 21 usages
GenericUDFOPEqualOrLessThan (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan): 21 usages
GenericUDFOPEqualOrGreaterThan (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan): 20 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 20 usages
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation): 19 usages