
Example 1 with GenericUDFOPNull

use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull in project phoenix by apache.

the class IndexPredicateAnalyzer method analyzeExpr.

private ExprNodeDesc analyzeExpr(ExprNodeGenericFuncDesc expr, List<IndexSearchCondition> searchConditions, Object... nodeOutputs) throws SemanticException {
    if (FunctionRegistry.isOpAnd(expr)) {
        assert (nodeOutputs.length == 2);
        ExprNodeDesc residual1 = (ExprNodeDesc) nodeOutputs[0];
        ExprNodeDesc residual2 = (ExprNodeDesc) nodeOutputs[1];
        if (residual1 == null) {
            return residual2;
        }
        if (residual2 == null) {
            return residual1;
        }
        List<ExprNodeDesc> residuals = new ArrayList<ExprNodeDesc>();
        residuals.add(residual1);
        residuals.add(residual2);
        return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, FunctionRegistry.getGenericUDFForAnd(), residuals);
    }
    GenericUDF genericUDF = expr.getGenericUDF();
    if (!(genericUDF instanceof GenericUDFBaseCompare)) {
        // 2015-10-22 Added by JeongMin Ju : Processing Between/In Operator
        if (genericUDF instanceof GenericUDFBetween) {
            // For NOT BETWEEN, the first element of nodeOutputs is true; otherwise it is false.
            processingBetweenOperator(expr, searchConditions, nodeOutputs);
            return expr;
        } else if (genericUDF instanceof GenericUDFIn) {
            // Plain IN operator (a standalone IN, not wrapped in NOT).
            processingInOperator(expr, searchConditions, false, nodeOutputs);
            return expr;
        } else if (genericUDF instanceof GenericUDFOPNot && ((ExprNodeGenericFuncDesc) expr.getChildren().get(0)).getGenericUDF() instanceof GenericUDFIn) {
            // NOT IN: the IN operator appears as the child of the NOT operator.
            processingInOperator((ExprNodeGenericFuncDesc) expr.getChildren().get(0), searchConditions, true, ((ExprNodeGenericFuncDesc) nodeOutputs[0]).getChildren().toArray());
            return expr;
        } else if (genericUDF instanceof GenericUDFOPNull) {
            processingNullOperator(expr, searchConditions, nodeOutputs);
            return expr;
        } else if (genericUDF instanceof GenericUDFOPNotNull) {
            processingNotNullOperator(expr, searchConditions, nodeOutputs);
            return expr;
        } else {
            return expr;
        }
    }
    ExprNodeDesc expr1 = (ExprNodeDesc) nodeOutputs[0];
    ExprNodeDesc expr2 = (ExprNodeDesc) nodeOutputs[1];
    // We may need to peel off the GenericUDFBridge that is added by CBO or user
    if (expr1.getTypeInfo().equals(expr2.getTypeInfo())) {
        expr1 = getColumnExpr(expr1);
        expr2 = getColumnExpr(expr2);
    }
    ExprNodeDesc[] extracted = ExprNodeDescUtils.extractComparePair(expr1, expr2);
    if (extracted == null || (extracted.length > 2 && !acceptsFields)) {
        return expr;
    }
    ExprNodeColumnDesc columnDesc;
    ExprNodeConstantDesc constantDesc;
    if (extracted[0] instanceof ExprNodeConstantDesc) {
        genericUDF = genericUDF.flip();
        columnDesc = (ExprNodeColumnDesc) extracted[1];
        constantDesc = (ExprNodeConstantDesc) extracted[0];
    } else {
        columnDesc = (ExprNodeColumnDesc) extracted[0];
        constantDesc = (ExprNodeConstantDesc) extracted[1];
    }
    Set<String> allowed = columnToUDFs.get(columnDesc.getColumn());
    if (allowed == null) {
        return expr;
    }
    String udfName = genericUDF.getUdfName();
    if (!allowed.contains(genericUDF.getUdfName())) {
        return expr;
    }
    String[] fields = null;
    if (extracted.length > 2) {
        ExprNodeFieldDesc fieldDesc = (ExprNodeFieldDesc) extracted[2];
        if (!isValidField(fieldDesc)) {
            return expr;
        }
        fields = ExprNodeDescUtils.extractFields(fieldDesc);
    }
    // We also need to update the expr so that the index query can be
    // generated.
    // Note that, hive does not support UDFToDouble etc in the query text.
    List<ExprNodeDesc> list = new ArrayList<ExprNodeDesc>();
    list.add(expr1);
    list.add(expr2);
    expr = new ExprNodeGenericFuncDesc(expr.getTypeInfo(), expr.getGenericUDF(), list);
    searchConditions.add(new IndexSearchCondition(columnDesc, udfName, constantDesc, expr, fields));
    // remove it from the residual predicate
    return fields == null ? null : expr;
}
Also used : GenericUDFBetween(org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween) ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) GenericUDFOPNull(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull) ArrayList(java.util.ArrayList) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) GenericUDFOPNotNull(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull) GenericUDF(org.apache.hadoop.hive.ql.udf.generic.GenericUDF) GenericUDFBaseCompare(org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare) ExprNodeFieldDesc(org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) GenericUDFIn(org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) GenericUDFOPNot(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot)
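
A minimal sketch, not taken from the Phoenix sources above, of the expression shape that reaches the GenericUDFOPNull branch of analyzeExpr. The column and table names ("col1", "table") are illustrative only.

// Hypothetical "col1 IS NULL" predicate as an ExprNodeGenericFuncDesc.
ExprNodeColumnDesc col1 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "col1", "table", false);
ExprNodeGenericFuncDesc isNullExpr = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, new GenericUDFOPNull(), Arrays.<ExprNodeDesc>asList(col1));
// GenericUDFOPNull is not a GenericUDFBaseCompare, so analyzeExpr takes the
// non-comparison path and hands the expression to processingNullOperator(...).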

Example 2 with GenericUDFOPNull

use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull in project hive by apache.

the class ConstantPropagateProcFactory method propagate.

/**
 * Propagate assignment expression, adding an entry into constant map constants.
 *
 * @param udf expression UDF, currently only 2 UDFs are supported: '=' and 'is null'.
 * @param newExprs child expressions (parameters).
 * @param rs row schema used to resolve column references.
 * @param constants map to which the propagated constant is added.
 */
private static void propagate(GenericUDF udf, List<ExprNodeDesc> newExprs, RowSchema rs, Map<ColumnInfo, ExprNodeDesc> constants) {
    if (udf instanceof GenericUDFOPEqual) {
        ExprNodeDesc lOperand = newExprs.get(0);
        ExprNodeDesc rOperand = newExprs.get(1);
        ExprNodeConstantDesc v;
        if (lOperand instanceof ExprNodeConstantDesc) {
            v = (ExprNodeConstantDesc) lOperand;
        } else if (rOperand instanceof ExprNodeConstantDesc) {
            v = (ExprNodeConstantDesc) rOperand;
        } else {
            // we need a constant on one side.
            return;
        }
        // If both sides are constants, there is nothing to propagate
        ExprNodeColumnDesc c;
        if (lOperand instanceof ExprNodeColumnDesc) {
            c = (ExprNodeColumnDesc) lOperand;
        } else if (rOperand instanceof ExprNodeColumnDesc) {
            c = (ExprNodeColumnDesc) rOperand;
        } else {
            // we need a column on one side.
            return;
        }
        ColumnInfo ci = resolveColumn(rs, c);
        if (ci != null) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Filter {} is identified as a value assignment, propagate it.", udf.getDisplayString(new String[] { lOperand.getExprString(), rOperand.getExprString() }));
            }
            if (!v.getTypeInfo().equals(ci.getType())) {
                v = typeCast(v, ci.getType(), true);
            }
            if (v != null) {
                constants.put(ci, v);
            }
        }
    } else if (udf instanceof GenericUDFOPNull) {
        ExprNodeDesc operand = newExprs.get(0);
        if (operand instanceof ExprNodeColumnDesc) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Filter {} is identified as a value assignment, propagate it.", udf.getDisplayString(new String[] { operand.getExprString() }));
            }
            ExprNodeColumnDesc c = (ExprNodeColumnDesc) operand;
            ColumnInfo ci = resolveColumn(rs, c);
            if (ci != null) {
                constants.put(ci, new ExprNodeConstantDesc(ci.getType(), null));
            }
        }
    }
}
Also used : ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) GenericUDFOPNull(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull) GenericUDFOPEqual(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
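
A minimal sketch, not part of ConstantPropagateProcFactory, of the operands that drive the GenericUDFOPEqual branch above. The column name and constant are illustrative, and rowSchema/constants stand in for the caller's RowSchema and constant map.

// Hypothetical "col1 = 'x'" operands: one column, one constant, in either order.
ExprNodeColumnDesc col1 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "col1", "table", false);
ExprNodeConstantDesc val = new ExprNodeConstantDesc("x");
List<ExprNodeDesc> newExprs = new ArrayList<ExprNodeDesc>();
newExprs.add(col1);
newExprs.add(val);
// propagate(new GenericUDFOPEqual(), newExprs, rowSchema, constants) would resolve
// col1 against rowSchema and, if found, record "x" (type-cast when necessary) for it;
// for GenericUDFOPNull the recorded value is a typed NULL constant instead.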

Example 3 with GenericUDFOPNull

use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull in project hive by apache.

the class ConstantPropagateProcFactory method foldNegative.

/**
 * Combines the logical not() operator with the child operator if possible.
 * @param desc the expression to be evaluated
 * @return  the new expression to be replaced
 * @throws UDFArgumentException
 */
private static ExprNodeDesc foldNegative(ExprNodeDesc desc) throws UDFArgumentException {
    if (desc instanceof ExprNodeGenericFuncDesc) {
        ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) desc;
        GenericUDF udf = funcDesc.getGenericUDF();
        if (udf instanceof GenericUDFOPNot) {
            ExprNodeDesc child = funcDesc.getChildren().get(0);
            if (child instanceof ExprNodeGenericFuncDesc) {
                ExprNodeGenericFuncDesc childDesc = (ExprNodeGenericFuncDesc) child;
                GenericUDF childUDF = childDesc.getGenericUDF();
                List<ExprNodeDesc> grandChildren = child.getChildren();
                if (childUDF instanceof GenericUDFBaseCompare || childUDF instanceof GenericUDFOPNull || childUDF instanceof GenericUDFOPNotNull) {
                    List<ExprNodeDesc> newGrandChildren = new ArrayList<ExprNodeDesc>();
                    for (ExprNodeDesc grandChild : grandChildren) {
                        newGrandChildren.add(foldNegative(grandChild));
                    }
                    return ExprNodeGenericFuncDesc.newInstance(childUDF.negative(), newGrandChildren);
                } else if (childUDF instanceof GenericUDFOPAnd || childUDF instanceof GenericUDFOPOr) {
                    List<ExprNodeDesc> newGrandChildren = new ArrayList<ExprNodeDesc>();
                    for (ExprNodeDesc grandChild : grandChildren) {
                        newGrandChildren.add(foldNegative(ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPNot(), Arrays.asList(grandChild))));
                    }
                    return ExprNodeGenericFuncDesc.newInstance(childUDF.negative(), newGrandChildren);
                } else if (childUDF instanceof GenericUDFOPNot) {
                    return foldNegative(child.getChildren().get(0));
                } else {
                    // For operator like if() that cannot be handled, leave not() as it
                    // is and continue processing the children
                    List<ExprNodeDesc> newGrandChildren = new ArrayList<ExprNodeDesc>();
                    for (ExprNodeDesc grandChild : grandChildren) {
                        newGrandChildren.add(foldNegative(grandChild));
                    }
                    childDesc.setChildren(newGrandChildren);
                    return funcDesc;
                }
            }
        }
    }
    return desc;
}
Also used : GenericUDFOPNull(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull) ArrayList(java.util.ArrayList) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) GenericUDFOPNotNull(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull) GenericUDF(org.apache.hadoop.hive.ql.udf.generic.GenericUDF) GenericUDFBaseCompare(org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare) List(java.util.List) ArrayList(java.util.ArrayList) GenericUDFOPNot(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) GenericUDFOPOr(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr) GenericUDFOPAnd(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd)
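
A minimal sketch, not from the Hive sources, of the fold performed by the comparison branch above. The expression NOT(col1 = 10) uses made-up names, and ExprNodeGenericFuncDesc.newInstance can throw UDFArgumentException, so this would sit inside a method declaring it.

// Hypothetical input: NOT(col1 = 10). foldNegative pushes the NOT into the comparison
// via GenericUDFOPEqual.negative(), producing the inequality over (col1, 10).
ExprNodeColumnDesc col1 = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "col1", "table", false);
ExprNodeConstantDesc ten = new ExprNodeConstantDesc(10);
ExprNodeDesc eq = ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPEqual(), Arrays.<ExprNodeDesc>asList(col1, ten));
ExprNodeDesc notEq = ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPNot(), Arrays.asList(eq));
// foldNegative(notEq) would return the negated comparison; a double negation such as
// NOT(NOT(col1 IS NULL)) would instead fold back to the inner IS NULL expression.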

Example 4 with GenericUDFOPNull

use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull in project hive by apache.

the class TestVectorNull method doIsNullOnRandomDataType.

private boolean doIsNullOnRandomDataType(Random random, String functionName, boolean isFilter) throws Exception {
    String typeName;
    if (functionName.equals("not")) {
        typeName = "boolean";
    } else {
        typeName = VectorRandomRowSource.getRandomTypeName(
            random, SupportedTypes.ALL, /* allowedTypeNameSet */ null);
        typeName = VectorRandomRowSource.getDecoratedTypeName(
            random, typeName, SupportedTypes.ALL, /* allowedTypeNameSet */ null,
            /* depth */ 0, /* maxDepth */ 2);
    }
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
    // ----------------------------------------------------------------------------------------------
    ObjectInspector objectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    // ----------------------------------------------------------------------------------------------
    GenerationSpec generationSpec = GenerationSpec.createSameType(typeInfo);
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    generationSpecList.add(generationSpec);
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(
        random, generationSpecList, /* maxComplexDepth */ 0,
        /* allowNull */ true, /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    List<String> columns = new ArrayList<String>();
    columns.add("col1");
    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(typeInfo, "col1", "table", false);
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    String[] columnNames = columns.toArray(new String[0]);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    final GenericUDF udf;
    final ObjectInspector outputObjectInspector;
    switch(functionName) {
        case "isnull":
            udf = new GenericUDFOPNull();
            break;
        case "isnotnull":
            udf = new GenericUDFOPNotNull();
            break;
        case "not":
            udf = new GenericUDFOPNot();
            break;
        default:
            throw new RuntimeException("Unexpected function name " + functionName);
    }
    ObjectInspector[] argumentOIs = new ObjectInspector[] { objectInspector };
    outputObjectInspector = udf.initialize(argumentOIs);
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf, children);
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[NullTestMode.count][];
    for (int i = 0; i < NullTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        NullTestMode nullTestMode = NullTestMode.values()[i];
        switch(nullTestMode) {
            case ROW_MODE:
                if (!doRowCastTest(typeInfo, isFilter, columns, children, udf, exprDesc, randomRows, rowSource.rowStructObjectInspector(), resultObjects)) {
                    return false;
                }
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                if (!doVectorCastTest(typeInfo, isFilter, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, udf, exprDesc, nullTestMode, batchSource, exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects)) {
                    return false;
                }
                break;
            default:
                throw new RuntimeException("Unexpected IF statement test mode " + nullTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < NullTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            NullTestMode nullTestMode = NullTestMode.values()[v];
            if (isFilter && expectedResult == null && vectorResult != null) {
                // This is OK.
                boolean vectorBoolean = ((BooleanWritable) vectorResult).get();
                if (vectorBoolean) {
                    Assert.fail("Row " + i + " typeName " + typeName + " outputTypeName " + outputTypeInfo.getTypeName() + " isFilter " + isFilter + " " + nullTestMode + " result is NOT NULL and true" + " does not match row-mode expected result is NULL which means false here" + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            } else if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " isFilter " + isFilter + " " + nullTestMode + " result is NULL " + (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) + " does not match row-mode expected result is NULL " + (expectedResult == null ? "YES" : "NO result " + expectedResult.toString()) + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " isFilter " + isFilter + " " + nullTestMode + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            }
        }
    }
    return true;
}
Also used : ArrayList(java.util.ArrayList) GenericUDFOPNotNull(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull) DataTypePhysicalVariation(org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) GenericUDFOPNull(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull) VectorRandomBatchSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) GenerationSpec(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec) GenericUDF(org.apache.hadoop.hive.ql.udf.generic.GenericUDF) BooleanWritable(org.apache.hadoop.io.BooleanWritable) GenericUDFOPNot(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
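
A minimal row-mode sketch, independent of the vectorized test harness above, showing GenericUDFOPNull initialized and evaluated directly. The string object inspector is an arbitrary choice, and initialize/evaluate throw checked exceptions, so this would live inside a method declaring them.

// Hypothetical direct evaluation of "IS NULL" on a single null value.
GenericUDFOPNull isNullUdf = new GenericUDFOPNull();
ObjectInspector[] argOIs = new ObjectInspector[] { PrimitiveObjectInspectorFactory.writableStringObjectInspector };
ObjectInspector outOI = isNullUdf.initialize(argOIs);  // a writable boolean object inspector
Object result = isNullUdf.evaluate(new GenericUDF.DeferredObject[] { new GenericUDF.DeferredJavaObject(null) });
// result is a BooleanWritable set to true because the argument value is null.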

Example 5 with GenericUDFOPNull

use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull in project hive by apache.

the class TestVectorizationContext method testNullExpressions.

@Test
public void testNullExpressions() throws HiveException {
    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false);
    ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(Integer.valueOf(10));
    GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan();
    ExprNodeGenericFuncDesc greaterExprDesc = new ExprNodeGenericFuncDesc();
    greaterExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
    greaterExprDesc.setGenericUDF(udf);
    List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>(2);
    children1.add(col1Expr);
    children1.add(constDesc);
    greaterExprDesc.setChildren(children1);
    ExprNodeGenericFuncDesc isNullExpr = new ExprNodeGenericFuncDesc();
    isNullExpr.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
    GenericUDFOPNull isNullUdf = new GenericUDFOPNull();
    isNullExpr.setGenericUDF(isNullUdf);
    List<ExprNodeDesc> childOfIsNull = new ArrayList<ExprNodeDesc>();
    childOfIsNull.add(greaterExprDesc);
    isNullExpr.setChildren(childOfIsNull);
    List<String> columns = new ArrayList<String>();
    columns.add("col1");
    columns.add("col2");
    VectorizationContext vc = new VectorizationContext("name", columns);
    VectorExpression ve = vc.getVectorExpression(isNullExpr, VectorExpressionDescriptor.Mode.FILTER);
    assertEquals(ve.getClass(), SelectColumnIsNull.class);
    assertEquals(ve.getChildExpressions()[0].getClass(), LongColGreaterLongScalar.class);
    assertEquals(2, ve.getChildExpressions()[0].getOutputColumnNum());
    ve = vc.getVectorExpression(isNullExpr, VectorExpressionDescriptor.Mode.PROJECTION);
    assertEquals(ve.getClass(), IsNull.class);
    // TODO: HIVE-20985 disabled output column reuse
    // assertEquals(3, ve.getOutputColumnNum());
    assertEquals(4, ve.getOutputColumnNum());
    assertEquals(ve.getChildExpressions()[0].getClass(), LongColGreaterLongScalar.class);
}
Also used : GenericUDFOPGreaterThan(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan) ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) GenericUDFOPNull(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ArrayList(java.util.ArrayList) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) VectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) Test(org.junit.Test)

Aggregations

ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc) 6
GenericUDFOPNull (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull) 6
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) 5
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) 5
ArrayList (java.util.ArrayList) 4
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) 4
GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF) 3
GenericUDFOPNot (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot) 3
GenericUDFOPNotNull (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull) 3
GenericUDFBaseCompare (org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare) 2
List (java.util.List) 1
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) 1
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo) 1
VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource) 1
VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource) 1
GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec) 1
VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression) 1
ExprNodeFieldDesc (org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc) 1
GenericUDFBetween (org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween) 1
GenericUDFIn (org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn) 1