Use of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc in project hive by apache.
Class ConvertAstToSearchArg, method findVariable.
/**
* Find the variable in the expression.
* @param expr the expression to look in
* @return the index of the variable or -1 if there is not exactly one
* variable.
*/
/**
 * Locate the single column-reference child of the given expression.
 *
 * @param expr the expression whose children are scanned
 * @return the child index of the lone {@link ExprNodeColumnDesc}, or -1 when
 *         there is no column child or more than one (not a valid sarg leaf)
 */
private int findVariable(ExprNodeDesc expr) {
  List<ExprNodeDesc> children = expr.getChildren();
  int found = -1;
  int position = 0;
  for (ExprNodeDesc child : children) {
    if (child instanceof ExprNodeColumnDesc) {
      if (found != -1) {
        // A second column reference means this can't be a sarg leaf.
        return -1;
      }
      found = position;
    }
    ++position;
  }
  return found;
}
Use of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc in project hive by apache.
Class ConvertAstToSearchArg, method parse.
/**
* Do the recursive parse of the Hive ExprNodeDesc into our ExpressionTree.
* @param expression the Hive ExprNodeDesc
*/
/**
 * Recursively translate a Hive {@link ExprNodeDesc} into the builder's
 * ExpressionTree. Each recognized UDF maps to a logical operator or a
 * predicate leaf; anything unrecognized becomes a YES_NO_NULL ("maybe")
 * literal so the sarg stays conservative.
 *
 * @param expression the Hive expression node to translate
 */
private void parse(ExprNodeDesc expression) {
  // Non-function nodes: only a bare boolean column is usable here.
  if (expression.getClass() != ExprNodeGenericFuncDesc.class) {
    if (expression instanceof ExprNodeColumnDesc) {
      ExprNodeColumnDesc columnDesc = (ExprNodeColumnDesc) expression;
      if (columnDesc.getTypeString().equals("boolean")) {
        // A boolean column reference is converted to the truth test "col = true".
        builder.equals(columnDesc.getColumn(), PredicateLeaf.Type.BOOLEAN, true);
        return;
      }
    }
    // Unknown node type; record it as a "maybe".
    builder.literal(SearchArgument.TruthValue.YES_NO_NULL);
    return;
  }
  ExprNodeGenericFuncDesc expr = (ExprNodeGenericFuncDesc) expression;
  Class<?> op = expr.getGenericUDF().getClass();
  // Logical connectives recurse into their children.
  if (op == GenericUDFOPOr.class) {
    builder.startOr();
    addChildren(expr);
    builder.end();
    return;
  }
  if (op == GenericUDFOPAnd.class) {
    builder.startAnd();
    addChildren(expr);
    builder.end();
    return;
  }
  if (op == GenericUDFOPNot.class) {
    builder.startNot();
    addChildren(expr);
    builder.end();
    return;
  }
  // Comparison operators become predicate leaves; the ones with no direct
  // leaf operator (!=, >, >=) are expressed as NOT of the complementary leaf.
  if (op == GenericUDFOPEqual.class) {
    createLeaf(PredicateLeaf.Operator.EQUALS, expr);
    return;
  }
  if (op == GenericUDFOPNotEqual.class) {
    builder.startNot();
    createLeaf(PredicateLeaf.Operator.EQUALS, expr);
    builder.end();
    return;
  }
  if (op == GenericUDFOPEqualNS.class) {
    createLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS, expr);
    return;
  }
  if (op == GenericUDFOPGreaterThan.class) {
    // a > b  ==  NOT (a <= b)
    builder.startNot();
    createLeaf(PredicateLeaf.Operator.LESS_THAN_EQUALS, expr);
    builder.end();
    return;
  }
  if (op == GenericUDFOPEqualOrGreaterThan.class) {
    // a >= b  ==  NOT (a < b)
    builder.startNot();
    createLeaf(PredicateLeaf.Operator.LESS_THAN, expr);
    builder.end();
    return;
  }
  if (op == GenericUDFOPLessThan.class) {
    createLeaf(PredicateLeaf.Operator.LESS_THAN, expr);
    return;
  }
  if (op == GenericUDFOPEqualOrLessThan.class) {
    createLeaf(PredicateLeaf.Operator.LESS_THAN_EQUALS, expr);
    return;
  }
  // Multi-argument predicates; the int argument is the index of the column.
  if (op == GenericUDFIn.class) {
    createLeaf(PredicateLeaf.Operator.IN, expr, 0);
    return;
  }
  if (op == GenericUDFBetween.class) {
    createLeaf(PredicateLeaf.Operator.BETWEEN, expr, 1);
    return;
  }
  if (op == GenericUDFOPNull.class) {
    createLeaf(PredicateLeaf.Operator.IS_NULL, expr, 0);
    return;
  }
  if (op == GenericUDFOPNotNull.class) {
    // a IS NOT NULL  ==  NOT (a IS NULL)
    builder.startNot();
    createLeaf(PredicateLeaf.Operator.IS_NULL, expr, 0);
    builder.end();
    return;
  }
  // Unrecognized UDF; mark it as a "maybe".
  builder.literal(SearchArgument.TruthValue.YES_NO_NULL);
}
Use of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc in project hive by apache.
Class TestVectorFilterOperator, method getAVectorFilterOperator.
/**
 * Build a vectorized filter operator whose predicate is a single reference
 * to the long column "col1".
 *
 * @return the vectorized filter operator under test
 * @throws HiveException if vectorization fails
 */
private VectorFilterOperator getAVectorFilterOperator() throws HiveException {
  // Predicate: just the column itself.
  ExprNodeColumnDesc predicate = new ExprNodeColumnDesc(Long.class, "col1", "table", false);
  FilterDesc filterDesc = new FilterDesc();
  filterDesc.setPredicate(predicate);
  VectorFilterDesc vectorFilterDesc = new VectorFilterDesc();
  Operator<? extends OperatorDesc> rowModeFilter =
      OperatorFactory.get(new CompilationOpContext(), filterDesc);
  // Vectorization context exposing the one projected column.
  List<String> columnNames = new ArrayList<String>();
  columnNames.add("col1");
  VectorizationContext vectorizationContext = new VectorizationContext("name", columnNames);
  return (VectorFilterOperator)
      Vectorizer.vectorizeFilterOperator(rowModeFilter, vectorizationContext, vectorFilterDesc);
}
Use of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc in project hive by apache.
Class TestKeyWrapperFactory, method setup.
@Before
public void setup() throws Exception {
  // A live session is required before evaluators can be created.
  SessionState sessionState = new SessionState(new HiveConf());
  SessionState.setCurrentSessionState(sessionState);
  // Two list<string> columns, "col1" and "cola", forming one struct row.
  ArrayList<Text> firstList = new ArrayList<Text>();
  firstList.add(new Text("0"));
  firstList.add(new Text("1"));
  firstList.add(new Text("2"));
  firstList.add(new Text("3"));
  TypeInfo firstListType = TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo);
  ArrayList<Text> secondList = new ArrayList<Text>();
  secondList.add(new Text("a"));
  secondList.add(new Text("b"));
  secondList.add(new Text("c"));
  TypeInfo secondListType = TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo);
  try {
    ArrayList<Object> rowData = new ArrayList<Object>();
    rowData.add(firstList);
    rowData.add(secondList);
    ArrayList<String> fieldNames = new ArrayList<String>();
    fieldNames.add("col1");
    fieldNames.add("cola");
    ArrayList<TypeInfo> fieldTypes = new ArrayList<TypeInfo>();
    fieldTypes.add(firstListType);
    fieldTypes.add(secondListType);
    TypeInfo rowType = TypeInfoFactory.getStructTypeInfo(fieldNames, fieldTypes);
    InspectableObject row = new InspectableObject();
    ObjectInspector[] rowInspectors = new ObjectInspector[1];
    row.o = rowData;
    rowInspectors[0] = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(rowType);
    try {
      // Evaluator for the simple column expression "cola".
      ExprNodeDesc columnExpr = new ExprNodeColumnDesc(secondListType, "cola", "", false);
      ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(columnExpr);
      ExprNodeEvaluator[] evaluators = new ExprNodeEvaluator[1];
      evaluators[0] = evaluator;
      ObjectInspector evaluatedInspector = evaluator.initialize(rowInspectors[0]);
      ObjectInspector[] evaluatedInspectors = new ObjectInspector[1];
      evaluatedInspectors[0] = evaluatedInspector;
      factory = new KeyWrapperFactory(evaluators, rowInspectors, evaluatedInspectors);
    } catch (Throwable e) {
      // Surface evaluator-construction failures before the outer wrap.
      e.printStackTrace();
      throw e;
    }
  } catch (Throwable e) {
    e.printStackTrace();
    throw new RuntimeException(e);
  }
}
Use of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc in project hive by apache.
Class TestOperators, method testScriptOperator.
/**
 * End-to-end test of ScriptOperator: SELECT(col1, concat(col0, "1")) feeds a
 * "cat" script (identity), whose output is captured by a CollectOperator and
 * checked row by row. Relies on the fixture rows in {@code r} built elsewhere
 * in this class.
 */
public void testScriptOperator() throws Throwable {
  try {
    System.out.println("Testing Script Operator");
    // First projected expression: the string column col1.
    ExprNodeDesc exprDesc1 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "col1", "", false);
    // Second projected expression: concat(col0, "1").
    ExprNodeDesc expr1 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "col0", "", false);
    ExprNodeDesc expr2 = new ExprNodeConstantDesc("1");
    ExprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", expr1, expr2);
    // Select operator projecting the two expressions as _col0 and _col1.
    ArrayList<ExprNodeDesc> earr = new ArrayList<ExprNodeDesc>();
    earr.add(exprDesc1);
    earr.add(exprDesc2);
    ArrayList<String> outputCols = new ArrayList<String>();
    for (int i = 0; i < earr.size(); i++) {
      outputCols.add("_col" + i);
    }
    SelectDesc selectCtx = new SelectDesc(earr, outputCols);
    Operator<SelectDesc> op = OperatorFactory.get(new CompilationOpContext(), SelectDesc.class);
    op.setConf(selectCtx);
    // Script operator running "cat", i.e. it echoes the select output
    // unchanged; input and output use the default tab-delimited (a, b) schema.
    TableDesc scriptOutput = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
    TableDesc scriptInput = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
    ScriptDesc sd = new ScriptDesc("cat", scriptOutput, TextRecordWriter.class, scriptInput, TextRecordReader.class, TextRecordReader.class, PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key"));
    Operator<ScriptDesc> sop = OperatorFactory.getAndMakeChild(sd, op);
    // Collect operator buffering up to 10 rows of the script's output so the
    // test can inspect them after the pipeline is closed.
    CollectDesc cd = new CollectDesc(Integer.valueOf(10));
    CollectOperator cdop = (CollectOperator) OperatorFactory.getAndMakeChild(cd, sop);
    op.initialize(new JobConf(TestOperators.class), new ObjectInspector[] { r[0].oi });
    // Push the first five fixture rows through the pipeline.
    for (int i = 0; i < 5; i++) {
      op.process(r[i].o, 0);
    }
    op.close(false);
    InspectableObject io = new InspectableObject();
    for (int i = 0; i < 5; i++) {
      cdop.retrieve(io);
      System.out.println("[" + i + "] io.o=" + io.o);
      System.out.println("[" + i + "] io.oi=" + io.oi);
      StructObjectInspector soi = (StructObjectInspector) io.oi;
      assert (soi != null);
      StructField a = soi.getStructFieldRef("a");
      StructField b = soi.getStructFieldRef("b");
      // Expected values follow from how the fixture rows are built:
      // field "a" is col1 (= i + 1 as a string) and field "b" is
      // concat(col0, "1") (= i followed by "1").
      assertEquals("" + (i + 1), ((PrimitiveObjectInspector) a.getFieldObjectInspector()).getPrimitiveJavaObject(soi.getStructFieldData(io.o, a)));
      assertEquals((i) + "1", ((PrimitiveObjectInspector) b.getFieldObjectInspector()).getPrimitiveJavaObject(soi.getStructFieldData(io.o, b)));
    }
    System.out.println("Script Operator ok");
  } catch (Throwable e) {
    e.printStackTrace();
    throw e;
  }
}
Aggregations