
Example 11 with FilterDesc

Use of org.apache.hadoop.hive.ql.plan.FilterDesc in project hive by apache.

From the class HiveFilterVisitor, the method visit:

/**
 * TODO: 1) isSamplingPred 2) sampleDesc 3) isSortedFilter.
 */
@Override
OpAttr visit(HiveFilter filterRel) throws SemanticException {
    // Translate the input RelNode first; the new FilterOperator will hang off its operator tree.
    OpAttr inputOpAf = hiveOpConverter.dispatch(filterRel.getInput());
    if (LOG.isDebugEnabled()) {
        LOG.debug("Translating operator rel#" + filterRel.getId() + ":" + filterRel.getRelTypeName() + " with row type: [" + filterRel.getRowType() + "]");
    }
    // Convert the Calcite RexNode condition into Hive's ExprNodeDesc representation.
    ExprNodeDesc filCondExpr = filterRel.getCondition().accept(new ExprNodeConverter(inputOpAf.tabAlias, filterRel.getInput().getRowType(), inputOpAf.vcolsInCalcite, filterRel.getCluster().getTypeFactory(), true));
    FilterDesc filDesc = new FilterDesc(filCondExpr, false);
    ArrayList<ColumnInfo> cinfoLst = HiveOpConverterUtils.createColInfos(inputOpAf.inputs.get(0));
    // Attach the FilterOperator as a child of the translated input; filtering leaves the row schema unchanged.
    FilterOperator filOp = (FilterOperator) OperatorFactory.getAndMakeChild(filDesc, new RowSchema(cinfoLst), inputOpAf.inputs.get(0));
    if (LOG.isDebugEnabled()) {
        LOG.debug("Generated " + filOp + " with row schema: [" + filOp.getSchema() + "]");
    }
    return inputOpAf.clone(filOp);
}
Also used: FilterDesc (org.apache.hadoop.hive.ql.plan.FilterDesc), FilterOperator (org.apache.hadoop.hive.ql.exec.FilterOperator), RowSchema (org.apache.hadoop.hive.ql.exec.RowSchema), ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo), OpAttr (org.apache.hadoop.hive.ql.optimizer.calcite.translator.opconventer.HiveOpConverter.OpAttr), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), ExprNodeConverter (org.apache.hadoop.hive.ql.optimizer.calcite.translator.ExprNodeConverter)
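
The core pattern in this visitor is compact enough to lift out: wrap the translated predicate in a FilterDesc and attach a FilterOperator under the parent with OperatorFactory.getAndMakeChild. A minimal sketch, assuming the predicate and the parent operator (with its column infos) are already in hand; the class and method names here are hypothetical:

import java.util.ArrayList;

import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.OperatorFactory;
import org.apache.hadoop.hive.ql.exec.RowSchema;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc;

final class FilterAttachSketch {

    static FilterOperator attachFilter(ExprNodeDesc predicate, ArrayList<ColumnInfo> parentColInfos, Operator<?> parent) {
        // The second constructor argument is isSamplingPred; an ordinary WHERE
        // predicate is not a sampling predicate, hence false.
        FilterDesc filterDesc = new FilterDesc(predicate, false);
        // A filter only drops rows, so the child reuses the parent's row schema.
        return (FilterOperator) OperatorFactory.getAndMakeChild(filterDesc, new RowSchema(parentColInfos), parent);
    }
}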

Example 12 with FilterDesc

Use of org.apache.hadoop.hive.ql.plan.FilterDesc in project hive by apache.

From the class TestSharedWorkOptimizer, the method getFilterOp:

private Operator<? extends OperatorDesc> getFilterOp(int constVal) {
    // Predicate is just the given constant; the boolean marks it as a sampling predicate.
    ExprNodeDesc pred = new ExprNodeConstantDesc(constVal);
    FilterDesc fd = new FilterDesc(pred, true);
    Operator<? extends OperatorDesc> op = OperatorFactory.get(cCtx, fd);
    return op;
}
Also used: FilterDesc (org.apache.hadoop.hive.ql.plan.FilterDesc), ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
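
A hedged usage sketch for the helper above: the descriptor keeps the predicate, so a test can pull it back out of the generated operator. Written in the same fragment style as the example, assuming the test's CompilationOpContext cCtx; the constant 42 is arbitrary:

// Constant predicate, as in getFilterOp above.
ExprNodeDesc pred = new ExprNodeConstantDesc(42);
// The boolean argument flags the predicate as a sampling predicate.
FilterDesc fd = new FilterDesc(pred, true);
Operator<? extends OperatorDesc> op = OperatorFactory.get(cCtx, fd);
// The operator hands its descriptor back, predicate included.
ExprNodeDesc recovered = ((FilterDesc) op.getConf()).getPredicate();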

Example 13 with FilterDesc

Use of org.apache.hadoop.hive.ql.plan.FilterDesc in project hive by apache.

From the class TestVectorFilterOperator, the method getAVectorFilterOperator:

private VectorFilterOperator getAVectorFilterOperator() throws HiveException {
    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Long.class, "col1", "table", false);
    List<String> columns = new ArrayList<String>();
    columns.add("col1");
    FilterDesc fdesc = new FilterDesc();
    fdesc.setPredicate(col1Expr);
    VectorFilterDesc vectorDesc = new VectorFilterDesc();
    // Build the row-mode operator first; the Vectorizer then swaps in its vectorized counterpart.
    Operator<? extends OperatorDesc> filterOp = OperatorFactory.get(new CompilationOpContext(), fdesc);
    VectorizationContext vc = new VectorizationContext("name", columns);
    return (VectorFilterOperator) Vectorizer.vectorizeFilterOperator(filterOp, vc, vectorDesc);
}
Also used: VectorFilterDesc (org.apache.hadoop.hive.ql.plan.VectorFilterDesc), FilterDesc (org.apache.hadoop.hive.ql.plan.FilterDesc), CompilationOpContext (org.apache.hadoop.hive.ql.CompilationOpContext), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ArrayList (java.util.ArrayList)
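
A sketch of how such a vectorized filter is typically driven through the standard Operator lifecycle; the Configuration and the VectorizedRowBatch named batch are assumptions, and the batch would need a column vector backing col1:

// Hypothetical driver; 'batch' is an org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch.
VectorFilterOperator vfo = getAVectorFilterOperator();
vfo.initialize(new Configuration(), null);
// A vectorized filter rewrites the batch's selected vector in place,
// keeping only the rows for which the predicate holds.
vfo.process(batch, 0);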

Example 14 with FilterDesc

Use of org.apache.hadoop.hive.ql.plan.FilterDesc in project hive by apache.

From the class TestExecDriver, the method populateMapRedPlan2:

@SuppressWarnings("unchecked")
private void populateMapRedPlan2(Table src) throws Exception {
    ArrayList<String> outputColumns = new ArrayList<String>();
    for (int i = 0; i < 2; i++) {
        outputColumns.add("_col" + i);
    }
    // map-side work
    Operator<ReduceSinkDesc> op1 = OperatorFactory.get(ctx, PlanUtils.getReduceSinkDesc(Utilities.makeList(getStringColumn("key")), Utilities.makeList(getStringColumn("key"), getStringColumn("value")), outputColumns, false, -1, 1, -1, AcidUtils.Operation.NOT_ACID, NullOrdering.NULLS_LAST));
    addMapWork(mr, src, "a", op1);
    ReduceWork rWork = new ReduceWork();
    rWork.setNumReduceTasks(Integer.valueOf(1));
    rWork.setKeyDesc(op1.getConf().getKeySerializeInfo());
    rWork.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
    mr.setReduceWork(rWork);
    // reduce-side work (assembled sink-first: file sink, then filter, then select)
    Operator<FileSinkDesc> op4 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(TMPDIR + File.separator + "mapredplan2.out"), Utilities.defaultTd, false));
    Operator<FilterDesc> op3 = OperatorFactory.get(getTestFilterDesc("0"), op4);
    List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>();
    cols.add(getStringColumn(Utilities.ReduceField.KEY + ".reducesinkkey" + 0));
    cols.add(getStringColumn(Utilities.ReduceField.VALUE.toString() + "." + outputColumns.get(1)));
    Operator<SelectDesc> op2 = OperatorFactory.get(new SelectDesc(cols, outputColumns), op3);
    rWork.setReducer(op2);
}
Also used: Path (org.apache.hadoop.fs.Path), FileSinkDesc (org.apache.hadoop.hive.ql.plan.FileSinkDesc), ArrayList (java.util.ArrayList), ReduceWork (org.apache.hadoop.hive.ql.plan.ReduceWork), FilterDesc (org.apache.hadoop.hive.ql.plan.FilterDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), SelectDesc (org.apache.hadoop.hive.ql.plan.SelectDesc), ReduceSinkDesc (org.apache.hadoop.hive.ql.plan.ReduceSinkDesc)
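
The getTestFilterDesc helper is referenced but not shown in this excerpt. One plausible reconstruction, not necessarily the actual helper: build a comparison predicate over a string column with ExprNodeGenericFuncDesc.newInstance and wrap it in a FilterDesc. The less-than UDF and the '100' literal are assumptions for illustration:

private FilterDesc getTestFilterDesc(String column) throws Exception {
    // Predicate sketch: column < '100', a generic UDF call over two children.
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, column, "", false));
    children.add(new ExprNodeConstantDesc("100"));
    ExprNodeDesc pred = ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPLessThan(), children);
    return new FilterDesc(pred, false);
}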

Example 15 with FilterDesc

Use of org.apache.hadoop.hive.ql.plan.FilterDesc in project hive by apache.

From the class TestExecDriver, the method populateMapPlan2:

@SuppressWarnings("unchecked")
private void populateMapPlan2(Table src) throws Exception {
    // Chain (assembled sink-first): filter -> script ("cat") -> file sink.
    Operator<FileSinkDesc> op3 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(TMPDIR + File.separator + "mapplan2.out"), Utilities.defaultTd, false));
    Operator<ScriptDesc> op2 = OperatorFactory.get(new ScriptDesc("cat", PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"), TextRecordWriter.class, PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"), TextRecordReader.class, TextRecordReader.class, PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key")), op3);
    Operator<FilterDesc> op1 = OperatorFactory.get(getTestFilterDesc("key"), op2);
    addMapWork(mr, src, "a", op1);
}
Also used: Path (org.apache.hadoop.fs.Path), ScriptDesc (org.apache.hadoop.hive.ql.plan.ScriptDesc), FilterDesc (org.apache.hadoop.hive.ql.plan.FilterDesc), FileSinkDesc (org.apache.hadoop.hive.ql.plan.FileSinkDesc)
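
Note the wiring convention used by both TestExecDriver plans: OperatorFactory.get(desc, ops) makes the trailing operators children of the new one, so chains are assembled sink-first. A minimal sketch with hypothetical descriptor variables:

// Built from the sink upward: file sink first, then the filter that feeds it.
Operator<FileSinkDesc> sink = OperatorFactory.get(ctx, fileSinkDesc);
Operator<FilterDesc> filter = OperatorFactory.get(filterDesc, sink);
// 'filter' is now the upstream end; rows it passes flow into 'sink'.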

Aggregations

FilterDesc (org.apache.hadoop.hive.ql.plan.FilterDesc): 33
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 23
FilterOperator (org.apache.hadoop.hive.ql.exec.FilterOperator): 16
RowSchema (org.apache.hadoop.hive.ql.exec.RowSchema): 14
ArrayList (java.util.ArrayList): 13
ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator): 13
TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator): 13
Operator (org.apache.hadoop.hive.ql.exec.Operator): 11
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 9
GroupByOperator (org.apache.hadoop.hive.ql.exec.GroupByOperator): 8
UnionOperator (org.apache.hadoop.hive.ql.exec.UnionOperator): 8
JoinOperator (org.apache.hadoop.hive.ql.exec.JoinOperator): 7
SelectOperator (org.apache.hadoop.hive.ql.exec.SelectOperator): 7
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 7
List (java.util.List): 6
Path (org.apache.hadoop.fs.Path): 5
LimitOperator (org.apache.hadoop.hive.ql.exec.LimitOperator): 5
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 5
AbstractMapJoinOperator (org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator): 4
AppMasterEventOperator (org.apache.hadoop.hive.ql.exec.AppMasterEventOperator): 4