Use of org.apache.hadoop.hive.ql.plan.FilterDesc in the Apache Hive project.
Class HiveFilterVisitor, method visit.
/**
 * TODO: 1) isSamplingPred 2) sampleDesc 3) isSortedFilter.
 */
@Override
OpAttr visit(HiveFilter filterRel) throws SemanticException {
  // Translate the child rel first; it supplies the parent operator to attach to.
  OpAttr inputOpAf = hiveOpConverter.dispatch(filterRel.getInput());
  if (LOG.isDebugEnabled()) {
    LOG.debug("Translating operator rel#" + filterRel.getId() + ":"
        + filterRel.getRelTypeName() + " with row type: [" + filterRel.getRowType() + "]");
  }
  // Convert the Calcite filter condition into a Hive expression tree.
  ExprNodeConverter exprConverter = new ExprNodeConverter(inputOpAf.tabAlias,
      filterRel.getInput().getRowType(), inputOpAf.vcolsInCalcite,
      filterRel.getCluster().getTypeFactory(), true);
  ExprNodeDesc condExpr = filterRel.getCondition().accept(exprConverter);
  FilterDesc filterDesc = new FilterDesc(condExpr, false);
  // The filter does not change the schema; reuse the input's column infos.
  ArrayList<ColumnInfo> colInfos = HiveOpConverterUtils.createColInfos(inputOpAf.inputs.get(0));
  FilterOperator filterOp = (FilterOperator) OperatorFactory.getAndMakeChild(
      filterDesc, new RowSchema(colInfos), inputOpAf.inputs.get(0));
  if (LOG.isDebugEnabled()) {
    LOG.debug("Generated " + filterOp + " with row schema: [" + filterOp.getSchema() + "]");
  }
  // Same attribute carrier as the input, but rooted at the new filter operator.
  return inputOpAf.clone(filterOp);
}
Use of org.apache.hadoop.hive.ql.plan.FilterDesc in the Apache Hive project.
Class TestSharedWorkOptimizer, method getFilterOp.
/** Builds a FilterOperator whose predicate is simply the given integer constant. */
private Operator<? extends OperatorDesc> getFilterOp(int constVal) {
  FilterDesc desc = new FilterDesc(new ExprNodeConstantDesc(constVal), true);
  return OperatorFactory.get(cCtx, desc);
}
Use of org.apache.hadoop.hive.ql.plan.FilterDesc in the Apache Hive project.
Class TestVectorFilterOperator, method getAVectorFilterOperator.
/**
 * Builds a vectorized filter over a single long column named "col1" by
 * vectorizing a row-mode FilterOperator.
 */
private VectorFilterOperator getAVectorFilterOperator() throws HiveException {
  // One-column schema used by the vectorization context.
  List<String> columns = new ArrayList<String>();
  columns.add("col1");
  // Predicate is just the column reference itself.
  ExprNodeColumnDesc predicate = new ExprNodeColumnDesc(Long.class, "col1", "table", false);
  FilterDesc fdesc = new FilterDesc();
  fdesc.setPredicate(predicate);
  Operator<? extends OperatorDesc> rowModeFilter =
      OperatorFactory.get(new CompilationOpContext(), fdesc);
  VectorizationContext vc = new VectorizationContext("name", columns);
  return (VectorFilterOperator) Vectorizer.vectorizeFilterOperator(
      rowModeFilter, vc, new VectorFilterDesc());
}
Use of org.apache.hadoop.hive.ql.plan.FilterDesc in the Apache Hive project.
Class TestExecDriver, method populateMapRedPlan2.
@SuppressWarnings("unchecked")
// Builds a two-stage MapReduce plan over table src: the map side emits
// (key, value) through a reduce sink partitioned on "key"; the reduce side
// runs SELECT key, value -> FILTER "0" -> file sink into mapredplan2.out.
// NOTE: the reduce-side operator chain is wired bottom-up (sink first),
// since each OperatorFactory.get call attaches its child operator.
private void populateMapRedPlan2(Table src) throws Exception {
ArrayList<String> outputColumns = new ArrayList<String>();
for (int i = 0; i < 2; i++) {
outputColumns.add("_col" + i);
}
// map-side work
Operator<ReduceSinkDesc> op1 = OperatorFactory.get(ctx, PlanUtils.getReduceSinkDesc(Utilities.makeList(getStringColumn("key")), Utilities.makeList(getStringColumn("key"), getStringColumn("value")), outputColumns, false, -1, 1, -1, AcidUtils.Operation.NOT_ACID, NullOrdering.NULLS_LAST));
addMapWork(mr, src, "a", op1);
ReduceWork rWork = new ReduceWork();
rWork.setNumReduceTasks(Integer.valueOf(1));
// Key/value serialization on the reduce side must match what the map-side
// reduce sink emits, so copy both descriptors from op1's conf.
rWork.setKeyDesc(op1.getConf().getKeySerializeInfo());
rWork.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
mr.setReduceWork(rWork);
// reduce side work
Operator<FileSinkDesc> op4 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(TMPDIR + File.separator + "mapredplan2.out"), Utilities.defaultTd, false));
Operator<FilterDesc> op3 = OperatorFactory.get(getTestFilterDesc("0"), op4);
// Project the reduce-shuffle columns back to (key, value):
// KEY.reducesinkkey0 and VALUE._col1.
List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>();
cols.add(getStringColumn(Utilities.ReduceField.KEY + ".reducesinkkey" + 0));
cols.add(getStringColumn(Utilities.ReduceField.VALUE.toString() + "." + outputColumns.get(1)));
Operator<SelectDesc> op2 = OperatorFactory.get(new SelectDesc(cols, outputColumns), op3);
rWork.setReducer(op2);
}
Use of org.apache.hadoop.hive.ql.plan.FilterDesc in the Apache Hive project.
Class TestExecDriver, method populateMapPlan2.
@SuppressWarnings("unchecked")
// Builds a map-only plan over table src: FILTER "key" -> "cat" script
// pass-through -> file sink into mapplan2.out. The chain is wired bottom-up,
// since each OperatorFactory.get call attaches its child operator.
private void populateMapPlan2(Table src) throws Exception {
  // Terminal sink writing the script output.
  Operator<FileSinkDesc> fileSink = OperatorFactory.get(ctx,
      new FileSinkDesc(new Path(TMPDIR + File.separator + "mapplan2.out"), Utilities.defaultTd, false));
  // "cat" script: (key,value) in, (key,value) out, error schema "key".
  ScriptDesc scriptDesc = new ScriptDesc("cat",
      PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"), TextRecordWriter.class,
      PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"), TextRecordReader.class,
      TextRecordReader.class, PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key"));
  Operator<ScriptDesc> scriptOp = OperatorFactory.get(scriptDesc, fileSink);
  Operator<FilterDesc> filterOp = OperatorFactory.get(getTestFilterDesc("key"), scriptOp);
  addMapWork(mr, src, "a", filterOp);
}
Aggregations