Usage example of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc in the Apache Hive project.
From the class TestOrcSplitElimination, method createTestSarg.
/**
 * Builds a two-child predicate (column "userid" from table alias "T", compared against the
 * constant 100) for the given comparison UDF, then serializes the resulting expression tree
 * into the job configuration so ORC split elimination can read it back as a filter.
 *
 * @param inspector descriptor used as the return-type argument of the function expression
 * @param udf       the comparison UDF to wrap around the children
 * @param childExpr receives the column and constant children; mutated in place
 */
private void createTestSarg(ObjectInspector inspector, GenericUDF udf, List<ExprNodeDesc> childExpr) {
  ExprNodeDesc column = new ExprNodeColumnDesc(Long.class, "userid", "T", false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(100);
  childExpr.add(column);
  childExpr.add(constant);
  // Serialize the complete function-call expression into the conf under the filter key.
  ExprNodeGenericFuncDesc funcExpr = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
  String serialized = SerializationUtilities.serializeExpression(funcExpr);
  conf.set("hive.io.filter.expr.serialized", serialized);
}
Usage example of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc in the Apache Hive project.
From the class TestAccumuloPredicateHandler, method testPushdownComparisonOptNotSupported.
/**
 * Verifies that an unsupported comparison operator ("is not null") survives predicate
 * analysis as a search condition but is rejected when building the pushdown tuple,
 * surfacing as a RuntimeException about an unexpected residual predicate.
 */
@Test
public void testPushdownComparisonOptNotSupported() {
  try {
    // Build the expression "field1 is not null" and push it into the conf as a filter.
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "field1", null, false);
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPNotNull(), children);
    assertNotNull(node);
    String filterExpr = SerializationUtilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf);
    // FIX: JUnit assertEquals takes (expected, actual); the original had them swapped,
    // which produces misleading failure messages.
    assertEquals(1, sConditions.size());
    IndexSearchCondition sc = sConditions.get(0);
    // Constructing the tuple must throw: "is not null" has no registered compare op.
    new PushdownTuple(sc, handler.getPrimitiveComparison(sc.getColumnDesc().getTypeString(), sc), handler.getCompareOp(sc.getComparisonOp(), sc));
    fail("Should fail: compare op not registered for index analyzer. Should leave undesirable residual predicate");
  } catch (RuntimeException e) {
    assertTrue(e.getMessage().contains("Unexpected residual predicate: field1 is not null"));
  } catch (Exception e) {
    fail(StringUtils.stringifyException(e));
  }
}
Usage example of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc in the Apache Hive project.
From the class TestAccumuloPredicateHandler, method testGetRowIDSearchCondition.
/**
 * Verifies that an equality predicate on the row-id column ("rid" = "hi") is recognized
 * by the predicate handler and yields exactly one search condition.
 */
@Test
public void testGetRowIDSearchCondition() {
  // Build the expression "rid = 'hi'" and serialize it into the conf as the filter.
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "hi");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), children);
  assertNotNull(node);
  String filterExpr = SerializationUtilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf);
  // FIX: JUnit assertEquals takes (expected, actual); the original had them swapped,
  // which produces misleading failure messages.
  assertEquals(1, sConditions.size());
}
Usage example of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc in the Apache Hive project.
From the class TableAccessAnalyzer, method getKeyColNames.
/**
 * Extracts the column names referenced by the given key expressions.
 * Constant keys carry no column reference and are skipped. If any key is neither a
 * column reference nor a constant (e.g. a function call), the keys cannot be expressed
 * as a plain column list and {@code null} is returned as a sentinel.
 *
 * @param keys the key expressions to inspect
 * @return the referenced column names, or {@code null} if a non-column, non-constant key exists
 */
private static List<String> getKeyColNames(List<ExprNodeDesc> keys) {
  List<String> columnNames = new ArrayList<String>();
  for (ExprNodeDesc key : keys) {
    if (key instanceof ExprNodeConstantDesc) {
      // Constants contribute no column name.
      continue;
    }
    if (!(key instanceof ExprNodeColumnDesc)) {
      // Any other expression kind disqualifies the whole key list.
      return null;
    }
    columnNames.add(((ExprNodeColumnDesc) key).getColumn());
  }
  return columnNames;
}
Usage example of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc in the Apache Hive project.
From the class TestVectorUDFAdaptor, method testGenericUDF.
/**
 * Tests the UDF adaptor for a generic UDF (as opposed to a legacy UDF).
 * Builds the syntax tree for the call {@code myisnull(col0, "UNKNOWN")}, wraps it in a
 * {@link VectorUDFAdaptor}, evaluates a batch containing a null entry, and checks that
 * the null row maps to "UNKNOWN" while a non-null row keeps its value ("red").
 */
@Test
public void testGenericUDF() {
  // create a syntax tree for a function call 'myisnull(col0, "UNKNOWN")'
  ExprNodeGenericFuncDesc funcDesc;
  GenericUDF genericUDF = new GenericUDFIsNull();
  TypeInfo typeInfoStr = TypeInfoFactory.stringTypeInfo;
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(new ExprNodeColumnDesc(typeInfoStr, "col0", "tablename", false));
  children.add(new ExprNodeConstantDesc(typeInfoStr, "UNKNOWN"));
  // Argument 0 is bound to batch column 0; argument 1 is the constant "UNKNOWN".
  VectorUDFArgDesc[] argDescs = new VectorUDFArgDesc[2];
  for (int i = 0; i < 2; i++) {
    argDescs[i] = new VectorUDFArgDesc();
  }
  argDescs[0].setVariable(0);
  argDescs[1].setConstant((ExprNodeConstantDesc) children.get(1));
  funcDesc = new ExprNodeGenericFuncDesc(typeInfoStr, genericUDF, "myisnull", children);
  // create the adaptor for this function call to work in vector mode
  VectorUDFAdaptor vudf = null;
  try {
    vudf = new VectorUDFAdaptor(funcDesc, 3, "String", argDescs);
  } catch (HiveException e) {
    // FIX: carry the cause into the failure message instead of a bare assertTrue(false).
    assertTrue("Unexpected HiveException creating VectorUDFAdaptor: " + e.getMessage(), false);
  }
  // FIX: use StandardCharsets.UTF_8 instead of getBytes("UTF-8"), whose checked
  // UnsupportedEncodingException was silently swallowed by an empty catch block,
  // which would have left these arrays null and caused a confusing NPE below.
  byte[] red = "red".getBytes(java.nio.charset.StandardCharsets.UTF_8);
  byte[] unknown = "UNKNOWN".getBytes(java.nio.charset.StandardCharsets.UTF_8);
  BytesColumnVector out;
  // with nulls
  VectorizedRowBatch b = getBatchStrDblLongWithStrOut();
  b.cols[0].noNulls = false;
  // set 1st entry to null
  b.cols[0].isNull[0] = true;
  vudf.evaluate(b);
  out = (BytesColumnVector) b.cols[3];
  // verify outputs: row 1 keeps its original value "red"; null row 0 becomes "UNKNOWN"
  int cmp = StringExpr.compare(red, 0, red.length, out.vector[1], out.start[1], out.length[1]);
  assertEquals(0, cmp);
  cmp = StringExpr.compare(unknown, 0, unknown.length, out.vector[0], out.start[0], out.length[0]);
  assertEquals(0, cmp);
  // output entry should not be null for null input for this particular generic UDF
  assertTrue(out.noNulls || !out.isNull[0]);
}
Aggregations