Search in sources:

Example 91 with ExprNodeColumnDesc

use of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc in project hive by apache.

From the class TestOrcSplitElimination, the method createTestSarg applies the supplied generic UDF to the "userid" column and the constant 100, then stores the serialized expression under hive.io.filter.expr.serialized so ORC split elimination can pick it up:

private void createTestSarg(ObjectInspector inspector, GenericUDF udf, List<ExprNodeDesc> childExpr) {
    childExpr.add(new ExprNodeColumnDesc(Long.class, "userid", "T", false));
    childExpr.add(new ExprNodeConstantDesc(100));
    conf.set("hive.io.filter.expr.serialized", SerializationUtilities.serializeExpression(new ExprNodeGenericFuncDesc(inspector, udf, childExpr)));
}
Also used: ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc)
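The read side of this round trip is not shown in the indexed sources. A minimal sketch (assuming org.apache.hadoop.conf.Configuration and the same SerializationUtilities class used above): the serialized expression is typically fetched back from the same configuration key and rebuilt with the matching deserialize call.

// Hypothetical helper, not from the Hive sources: reads back the filter
// expression that createTestSarg serialized into the configuration.
private static ExprNodeGenericFuncDesc readSerializedFilter(Configuration conf) {
    String serialized = conf.get("hive.io.filter.expr.serialized");
    if (serialized == null) {
        // no filter expression was pushed down
        return null;
    }
    // counterpart of SerializationUtilities.serializeExpression(...) above
    return SerializationUtilities.deserializeExpression(serialized);
}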

Example 92 with ExprNodeColumnDesc

use of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc in project hive by apache.

From the class TestAccumuloPredicateHandler, the method testPushdownComparisonOptNotSupported checks that a predicate whose comparison operator has no registered Accumulo pushdown (here, field1 IS NOT NULL) surfaces as an "unexpected residual predicate" error instead of being turned into a pushdown tuple:

@Test
public void testPushdownComparisonOptNotSupported() {
    try {
        ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "field1", null, false);
        List<ExprNodeDesc> children = Lists.newArrayList();
        children.add(column);
        ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPNotNull(), children);
        assertNotNull(node);
        String filterExpr = SerializationUtilities.serializeExpression(node);
        conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
        List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf);
        assertEquals(sConditions.size(), 1);
        IndexSearchCondition sc = sConditions.get(0);
        new PushdownTuple(sc, handler.getPrimitiveComparison(sc.getColumnDesc().getTypeString(), sc), handler.getCompareOp(sc.getComparisonOp(), sc));
        fail("Should fail: compare op not registered for index analyzer. Should leave undesirable residual predicate");
    } catch (RuntimeException e) {
        assertTrue(e.getMessage().contains("Unexpected residual predicate: field1 is not null"));
    } catch (Exception e) {
        fail(StringUtils.stringifyException(e));
    }
}
Also used: IndexSearchCondition (org.apache.hadoop.hive.ql.index.IndexSearchCondition), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), TooManyAccumuloColumnsException (org.apache.hadoop.hive.accumulo.serde.TooManyAccumuloColumnsException), SerDeException (org.apache.hadoop.hive.serde2.SerDeException), GenericUDFOPNotNull (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull), Test (org.junit.Test)

Example 93 with ExprNodeColumnDesc

use of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc in project hive by apache.

From the class TestAccumuloPredicateHandler, the method testGetRowIDSearchCondition verifies that an equality predicate on the row-id column ("rid" = "hi") is extracted as exactly one search condition:

@Test
public void testGetRowIDSearchCondition() {
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "hi");
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    children.add(constant);
    ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), children);
    assertNotNull(node);
    String filterExpr = SerializationUtilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf);
    assertEquals(sConditions.size(), 1);
}
Also used: ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), IndexSearchCondition (org.apache.hadoop.hive.ql.index.IndexSearchCondition), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), GenericUDFOPEqual (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), Test (org.junit.Test)
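Read together with Example 92: for this equality condition the pushdown construction that failed there is expected to go through, because equality is a registered comparison for the handler. An illustrative sketch (not from the Hive sources, reusing the conf, handler and sConditions set up above, mirroring the calls from Example 92):

try {
    // same call sequence that threw in testPushdownComparisonOptNotSupported,
    // but "rid = 'hi'" maps to a supported compare op and primitive comparison
    IndexSearchCondition sc = sConditions.get(0);
    PushdownTuple tuple = new PushdownTuple(sc,
            handler.getPrimitiveComparison(sc.getColumnDesc().getTypeString(), sc),
            handler.getCompareOp(sc.getComparisonOp(), sc));
    assertNotNull(tuple);
} catch (Exception e) {
    fail(StringUtils.stringifyException(e));
}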

Example 94 with ExprNodeColumnDesc

use of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc in project hive by apache.

From the class TableAccessAnalyzer, the method getKeyColNames collects the column names referenced by a list of key expressions, skipping constants and returning null if any key is not a plain column or constant:

private static List<String> getKeyColNames(List<ExprNodeDesc> keys) {
    List<String> colList = new ArrayList<String>();
    for (ExprNodeDesc expr : keys) {
        if (expr instanceof ExprNodeColumnDesc) {
            ExprNodeColumnDesc colExpr = (ExprNodeColumnDesc) expr;
            colList.add(colExpr.getColumn());
        } else if (expr instanceof ExprNodeConstantDesc) {
            continue;
        } else {
            return null;
        }
    }
    return colList;
}
Also used: ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), ArrayList (java.util.ArrayList), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
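To make the contract concrete, an illustrative sketch (not from the Hive sources; getKeyColNames is private to TableAccessAnalyzer, so the expected results are only traced in comments):

// Keys that are plain columns or constants: constants are skipped and only
// the column names are collected.
List<ExprNodeDesc> keys = new ArrayList<ExprNodeDesc>();
keys.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "userid", "t", false));
keys.add(new ExprNodeConstantDesc(100));
// getKeyColNames(keys) -> ["userid"]

// Any other expression kind (here an equality function call) makes the method
// return null, signalling that the keys are not a simple column list.
keys.add(new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
        new GenericUDFOPEqual(), new ArrayList<ExprNodeDesc>()));
// getKeyColNames(keys) -> null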

Example 95 with ExprNodeColumnDesc

use of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc in project hive by apache.

From the class TestVectorUDFAdaptor, the method testGenericUDF exercises the vectorized UDF adaptor with a generic UDF, building the expression tree for myisnull(col0, "UNKNOWN") and checking the adaptor's output column:

// test the UDF adaptor for a generic UDF (as opposed to a legacy UDF)
@Test
public void testGenericUDF() {
    // create a syntax tree for a function call 'myisnull(col0, "UNKNOWN")'
    ExprNodeGenericFuncDesc funcDesc;
    GenericUDF genericUDF = new GenericUDFIsNull();
    TypeInfo typeInfoStr = TypeInfoFactory.stringTypeInfo;
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(new ExprNodeColumnDesc(typeInfoStr, "col0", "tablename", false));
    children.add(new ExprNodeConstantDesc(typeInfoStr, "UNKNOWN"));
    VectorUDFArgDesc[] argDescs = new VectorUDFArgDesc[2];
    for (int i = 0; i < 2; i++) {
        argDescs[i] = new VectorUDFArgDesc();
    }
    argDescs[0].setVariable(0);
    argDescs[1].setConstant((ExprNodeConstantDesc) children.get(1));
    funcDesc = new ExprNodeGenericFuncDesc(typeInfoStr, genericUDF, "myisnull", children);
    // create the adaptor for this function call to work in vector mode
    VectorUDFAdaptor vudf = null;
    try {
        vudf = new VectorUDFAdaptor(funcDesc, 3, "String", argDescs);
    } catch (HiveException e) {
        // We should never get here.
        assertTrue(false);
    }
    VectorizedRowBatch b;
    byte[] red = null;
    byte[] unknown = null;
    try {
        red = "red".getBytes("UTF-8");
        unknown = "UNKNOWN".getBytes("UTF-8");
    } catch (Exception e) {
        ;
    }
    BytesColumnVector out;
    // with nulls
    b = getBatchStrDblLongWithStrOut();
    b.cols[0].noNulls = false;
    // set 1st entry to null
    b.cols[0].isNull[0] = true;
    vudf.evaluate(b);
    out = (BytesColumnVector) b.cols[3];
    // verify outputs
    int cmp = StringExpr.compare(red, 0, red.length, out.vector[1], out.start[1], out.length[1]);
    assertEquals(0, cmp);
    cmp = StringExpr.compare(unknown, 0, unknown.length, out.vector[0], out.start[0], out.length[0]);
    assertEquals(0, cmp);
    // output entry should not be null for null input for this particular generic UDF
    assertTrue(out.noNulls || !out.isNull[0]);
}
Also used: ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), ArrayList (java.util.ArrayList), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), GenericUDFIsNull (org.apache.hadoop.hive.ql.exec.vector.udf.generic.GenericUDFIsNull), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), BytesColumnVector (org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), Test (org.junit.Test)

Aggregations

ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 186
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 168
ArrayList (java.util.ArrayList): 110
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 98
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 89
Test (org.junit.Test): 68
HashMap (java.util.HashMap): 53
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 49
LinkedHashMap (java.util.LinkedHashMap): 35
RowSchema (org.apache.hadoop.hive.ql.exec.RowSchema): 34
SelectOperator (org.apache.hadoop.hive.ql.exec.SelectOperator): 30
ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator): 28
VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression): 26
GroupByOperator (org.apache.hadoop.hive.ql.exec.GroupByOperator): 24
Operator (org.apache.hadoop.hive.ql.exec.Operator): 24
DynamicValueVectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.DynamicValueVectorExpression): 24
List (java.util.List): 23
JoinOperator (org.apache.hadoop.hive.ql.exec.JoinOperator): 22
TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator): 22
NotNullConstraint (org.apache.hadoop.hive.ql.metadata.NotNullConstraint): 22