Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual in project hive by apache:
class TestAccumuloPredicateHandler, method testPushdownColumnTypeNotSupported.
@Test(expected = NoSuchPrimitiveComparisonException.class)
public void testPushdownColumnTypeNotSupported() throws SerDeException, NoSuchPrimitiveComparisonException, NoSuchCompareOpException {
    // Build the predicate "field1 = 5.5f" over a float column. No
    // PrimitiveComparison is registered for float, so resolving one at the
    // end of this method is expected to throw (see @Test(expected = ...)).
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.floatTypeInfo, "field1", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.floatTypeInfo, 5.5f);
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    children.add(constant);
    ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), children);
    assertNotNull(node);
    // Serialize the expression into the conf exactly as the planner would.
    String filterExpr = SerializationUtilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    // The predicate itself is extractable; only the comparison lookup fails.
    List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf);
    // JUnit convention: assertEquals(expected, actual).
    assertEquals(1, sConditions.size());
    IndexSearchCondition sc = sConditions.get(0);
    // Must throw NoSuchPrimitiveComparisonException for the float type.
    handler.getPrimitiveComparison(sc.getColumnDesc().getTypeString(), sc);
}
Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual in project hive by apache:
class TestAccumuloPredicateHandler, method testPushdownTuple.
@Test
public void testPushdownTuple() throws SerDeException, NoSuchPrimitiveComparisonException, NoSuchCompareOpException {
    // Build the predicate "field1 = 5" over an int column and verify that
    // it round-trips into a PushdownTuple with the right constant bytes,
    // compare-op class and primitive-comparison class.
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field1", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    children.add(constant);
    ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), children);
    assertNotNull(node);
    // Serialize the expression into the conf exactly as the planner would.
    String filterExpr = SerializationUtilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf);
    // JUnit convention: assertEquals(expected, actual).
    assertEquals(1, sConditions.size());
    IndexSearchCondition sc = sConditions.get(0);
    PushdownTuple tuple = new PushdownTuple(sConditions.get(0), handler.getPrimitiveComparison(sc.getColumnDesc().getTypeString(), sc), handler.getCompareOp(sc.getComparisonOp(), sc));
    // The int constant 5 should be encoded as 4 big-endian bytes.
    byte[] expectedVal = new byte[4];
    ByteBuffer.wrap(expectedVal).putInt(5);
    assertArrayEquals(expectedVal, tuple.getConstVal());
    assertEquals(Equal.class, tuple.getcOpt().getClass());
    assertEquals(IntCompare.class, tuple.getpCompare().getClass());
}
Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual in project hive by apache:
class TestOrcSplitElimination, method testSplitEliminationComplexExpr.
@Test
public void testSplitEliminationComplexExpr() throws Exception {
// Writes one ORC file, then evaluates five AND-composed predicates over
// ("userid", "subtype") as serialized SARGs, checking how many splits
// survive stripe elimination each time. NOTE: the same childExpr /
// childExpr1 / childExpr2 lists are mutated in place between rounds, so
// the statement order below is load-bearing.
ObjectInspector inspector = createIO();
Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector, 100000, CompressionKind.NONE, 10000, 10000);
writeData(writer);
writer.close();
// Small min/max split sizes so each stripe can become its own split.
HiveConf.setLongVar(conf, HiveConf.ConfVars.MAPREDMINSPLITSIZE, 1000);
HiveConf.setLongVar(conf, HiveConf.ConfVars.MAPREDMAXSPLITSIZE, 150000);
InputFormat<?, ?> in = new OrcInputFormat();
FileInputFormat.setInputPaths(conf, testFilePath.toString());
// predicate expression: userid <= 100 and subtype <= 1000.0
GenericUDF udf = new GenericUDFOPEqualOrLessThan();
List<ExprNodeDesc> childExpr = Lists.newArrayList();
ExprNodeColumnDesc col = new ExprNodeColumnDesc(Long.class, "userid", "T", false);
ExprNodeConstantDesc con = new ExprNodeConstantDesc(100);
childExpr.add(col);
childExpr.add(con);
ExprNodeGenericFuncDesc en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
GenericUDF udf1 = new GenericUDFOPEqualOrLessThan();
List<ExprNodeDesc> childExpr1 = Lists.newArrayList();
ExprNodeColumnDesc col1 = new ExprNodeColumnDesc(Double.class, "subtype", "T", false);
ExprNodeConstantDesc con1 = new ExprNodeConstantDesc(1000.0);
childExpr1.add(col1);
childExpr1.add(con1);
ExprNodeGenericFuncDesc en1 = new ExprNodeGenericFuncDesc(inspector, udf1, childExpr1);
// AND the two comparisons together.
GenericUDF udf2 = new GenericUDFOPAnd();
List<ExprNodeDesc> childExpr2 = Lists.newArrayList();
childExpr2.add(en);
childExpr2.add(en1);
ExprNodeGenericFuncDesc en2 = new ExprNodeGenericFuncDesc(inspector, udf2, childExpr2);
String sargStr = SerializationUtilities.serializeExpression(en2);
conf.set("hive.io.filter.expr.serialized", sargStr);
InputSplit[] splits = in.getSplits(conf, 1);
assertEquals(2, splits.length);
// Round 2: userid <= 2 and subtype <= 0.0 — rebuild by swapping the
// constants in place and re-wrapping the same udf instances.
con = new ExprNodeConstantDesc(2);
childExpr.set(1, con);
en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
con1 = new ExprNodeConstantDesc(0.0);
childExpr1.set(1, con1);
en1 = new ExprNodeGenericFuncDesc(inspector, udf1, childExpr1);
childExpr2.set(0, en);
childExpr2.set(1, en1);
en2 = new ExprNodeGenericFuncDesc(inspector, udf2, childExpr2);
sargStr = SerializationUtilities.serializeExpression(en2);
conf.set("hive.io.filter.expr.serialized", sargStr);
splits = in.getSplits(conf, 1);
// no stripe will satisfy the predicate
assertEquals(0, splits.length);
// Round 3: userid <= 2 and subtype <= 1.0
con = new ExprNodeConstantDesc(2);
childExpr.set(1, con);
en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
con1 = new ExprNodeConstantDesc(1.0);
childExpr1.set(1, con1);
en1 = new ExprNodeGenericFuncDesc(inspector, udf1, childExpr1);
childExpr2.set(0, en);
childExpr2.set(1, en1);
en2 = new ExprNodeGenericFuncDesc(inspector, udf2, childExpr2);
sargStr = SerializationUtilities.serializeExpression(en2);
conf.set("hive.io.filter.expr.serialized", sargStr);
splits = in.getSplits(conf, 1);
// only first stripe will satisfy condition and hence single split
assertEquals(1, splits.length);
// Round 4: userid = 13 and subtype <= 80.0 (left side switches to
// equality; udf1 is still <= from above).
udf = new GenericUDFOPEqual();
con = new ExprNodeConstantDesc(13);
childExpr.set(1, con);
en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
con1 = new ExprNodeConstantDesc(80.0);
childExpr1.set(1, con1);
en1 = new ExprNodeGenericFuncDesc(inspector, udf1, childExpr1);
childExpr2.set(0, en);
childExpr2.set(1, en1);
en2 = new ExprNodeGenericFuncDesc(inspector, udf2, childExpr2);
sargStr = SerializationUtilities.serializeExpression(en2);
conf.set("hive.io.filter.expr.serialized", sargStr);
splits = in.getSplits(conf, 1);
// first two stripes will satisfy condition and hence two splits
assertEquals(2, splits.length);
// Round 5: userid = 13 and subtype = 80.0 (both sides equality now).
udf = new GenericUDFOPEqual();
con = new ExprNodeConstantDesc(13);
childExpr.set(1, con);
en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
udf1 = new GenericUDFOPEqual();
con1 = new ExprNodeConstantDesc(80.0);
childExpr1.set(1, con1);
en1 = new ExprNodeGenericFuncDesc(inspector, udf1, childExpr1);
childExpr2.set(0, en);
childExpr2.set(1, en1);
en2 = new ExprNodeGenericFuncDesc(inspector, udf2, childExpr2);
sargStr = SerializationUtilities.serializeExpression(en2);
conf.set("hive.io.filter.expr.serialized", sargStr);
splits = in.getSplits(conf, 1);
// only the second stripe will satisfy the condition and hence single split
assertEquals(1, splits.length);
}
Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual in project hive by apache:
class TestAccumuloPredicateHandler, method testGetRowIDSearchCondition.
@Test
public void testGetRowIDSearchCondition() {
    // Build the predicate "rid = 'hi'" on the row-id column and verify the
    // handler extracts exactly one search condition from the serialized
    // filter expression.
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "hi");
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    children.add(constant);
    ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), children);
    assertNotNull(node);
    // Serialize the expression into the conf exactly as the planner would.
    String filterExpr = SerializationUtilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf);
    // JUnit convention: assertEquals(expected, actual).
    assertEquals(1, sConditions.size());
}
Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual in project mongo-hadoop by mongodb:
class HiveMongoInputFormatTest, method testTranslateEqualsOp.
@Test
public void testTranslateEqualsOp() {
    // Predicate under test: WHERE i = 20.
    ExprNodeDesc colRef = new ExprNodeColumnDesc(new SimpleMockColumnInfo("i"));
    ExprNodeDesc literal = new ExprNodeConstantDesc(20);
    ExprNodeGenericFuncDesc predicate = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual(), Arrays.asList(colRef, literal));
    // Equality should translate to a direct Mongo field match on the mapped column name.
    Assert.assertEquals(new BasicDBObject("mongo_i", 20), filterForExpr(predicate));
}
Aggregations