Search in sources:

Example 11 with TableScanDesc

Use of org.apache.hadoop.hive.ql.plan.TableScanDesc in the Apache Hive project.

From the class TestRuntimeStatsPersistence, method getTsOp.

/**
 * Builds a table scan operator over a dummy table whose filter expression
 * embeds the given integer constant, so scans created with different values
 * of {@code i} differ only in that constant.
 *
 * @param i constant folded into the scan's filter expression
 * @return a new table scan operator carrying the generated filter
 */
private Operator<TableScanDesc> getTsOp(int i) {
    Table table = new Table("db", "table");
    TableScanDesc scanDesc = new TableScanDesc("alias", table);
    // Filter arguments: the distinguishing constant plus a fixed column reference.
    List<ExprNodeDesc> filterArgs = Lists.newArrayList(
        new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, Integer.valueOf(i)),
        new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "c1", "aa", false));
    ExprNodeGenericFuncDesc filter =
        new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, udf, filterArgs);
    scanDesc.setFilterExpr(filter);
    return OperatorFactory.get(cCtx, scanDesc);
}
Also used : ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) Table(org.apache.hadoop.hive.ql.metadata.Table) TableScanDesc(org.apache.hadoop.hive.ql.plan.TableScanDesc) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc)

Example 12 with TableScanDesc

Use of org.apache.hadoop.hive.ql.plan.TableScanDesc in the Apache Hive project.

From the class SharedWorkOptimizer, method compareOperator.

/**
 * Structural equality check used to decide whether two operators can share work.
 * ReduceSink and TableScan operators are compared field-by-field here; every
 * other operator kind delegates to {@code Operator#logicalEquals}.
 *
 * @param pctx parse context, used to look up pruned partitions for table scans
 * @param op1 first operator
 * @param op2 second operator
 * @return true when the two operators are considered structurally equal
 * @throws SemanticException if partition pruning lookup fails
 */
private static boolean compareOperator(ParseContext pctx, Operator<?> op1, Operator<?> op2) throws SemanticException {
    // Different concrete operator classes can never be equal.
    if (!op1.getClass().getName().equals(op2.getClass().getName())) {
        return false;
    }
    // TODO: move this to logicalEquals
    if (op1 instanceof ReduceSinkOperator) {
        ReduceSinkDesc rsConf1 = ((ReduceSinkOperator) op1).getConf();
        ReduceSinkDesc rsConf2 = ((ReduceSinkOperator) op2).getConf();
        return StringUtils.equals(rsConf1.getKeyColString(), rsConf2.getKeyColString())
            && StringUtils.equals(rsConf1.getValueColsString(), rsConf2.getValueColsString())
            && StringUtils.equals(rsConf1.getParitionColsString(), rsConf2.getParitionColsString())
            && rsConf1.getTag() == rsConf2.getTag()
            && StringUtils.equals(rsConf1.getOrder(), rsConf2.getOrder())
            && StringUtils.equals(rsConf1.getNullOrder(), rsConf2.getNullOrder())
            && rsConf1.getTopN() == rsConf2.getTopN()
            && canDeduplicateReduceTraits(rsConf1, rsConf2);
    }
    // TODO: move this to logicalEquals
    if (op1 instanceof TableScanOperator) {
        TableScanOperator scan1 = (TableScanOperator) op1;
        TableScanOperator scan2 = (TableScanOperator) op2;
        TableScanDesc tsConf1 = scan1.getConf();
        TableScanDesc tsConf2 = scan2.getConf();
        Table table1 = tsConf1.getTableMetadata();
        Table table2 = tsConf2.getTableMetadata();
        return StringUtils.equals(table1.getFullyQualifiedName(), table2.getFullyQualifiedName())
            && tsConf1.getNeededColumns().equals(tsConf2.getNeededColumns())
            && StringUtils.equals(tsConf1.getFilterExprString(), tsConf2.getFilterExprString())
            && pctx.getPrunedPartitions(scan1).getPartitions().equals(pctx.getPrunedPartitions(scan2).getPartitions())
            && tsConf1.getRowLimit() == tsConf2.getRowLimit()
            && Objects.equals(tsConf1.getIncludedBuckets(), tsConf2.getIncludedBuckets())
            && Objects.equals(tsConf1.getOpProps(), tsConf2.getOpProps());
    }
    return op1.logicalEquals(op2);
}
Also used : TableScanOperator(org.apache.hadoop.hive.ql.exec.TableScanOperator) Table(org.apache.hadoop.hive.ql.metadata.Table) ReduceSinkOperator(org.apache.hadoop.hive.ql.exec.ReduceSinkOperator) TableScanDesc(org.apache.hadoop.hive.ql.plan.TableScanDesc) ReduceSinkDesc(org.apache.hadoop.hive.ql.plan.ReduceSinkDesc)

Example 13 with TableScanDesc

Use of org.apache.hadoop.hive.ql.plan.TableScanDesc in the Apache Hive project.

From the class HiveInputFormat, method pushAsOf.

/**
 * Copies the table scan's AS OF (time-travel) settings into the job
 * configuration: a timestamp is parsed in the session-local time zone and
 * stored as epoch millis; a version string is passed through unchanged.
 *
 * @param jobConf job configuration receiving the AS OF properties
 * @param ts table scan operator whose descriptor may carry AS OF settings
 */
protected static void pushAsOf(Configuration jobConf, TableScanOperator ts) {
    TableScanDesc scanDesc = ts.getConf();
    String asOfTimestamp = scanDesc.getAsOfTimestamp();
    if (asOfTimestamp != null) {
        // Resolve the local time zone, falling back to a fresh HiveConf when
        // no session is active.
        SessionState ss = SessionState.get();
        ZoneId zone = ss == null ? new HiveConf().getLocalTimeZone() : ss.getConf().getLocalTimeZone();
        TimestampTZ parsed = TimestampTZUtil.parse(PlanUtils.stripQuotes(asOfTimestamp), zone);
        jobConf.set(TableScanDesc.AS_OF_TIMESTAMP, Long.toString(parsed.toEpochMilli()));
    }
    String asOfVersion = scanDesc.getAsOfVersion();
    if (asOfVersion != null) {
        jobConf.set(TableScanDesc.AS_OF_VERSION, asOfVersion);
    }
}
Also used : TimestampTZ(org.apache.hadoop.hive.common.type.TimestampTZ) ZoneId(java.time.ZoneId) TableScanDesc(org.apache.hadoop.hive.ql.plan.TableScanDesc) HiveConf(org.apache.hadoop.hive.conf.HiveConf)

Example 14 with TableScanDesc

Use of org.apache.hadoop.hive.ql.plan.TableScanDesc in the Apache Hive project.

From the class DotExporter, method nodeLabel.

/**
 * Renders the dot-graph label for an operator: its node name plus, for table
 * scans and filters, a short summary row taken from the operator's descriptor.
 *
 * @param n operator to label
 * @return the label text as a vertical box of rows
 */
private String nodeLabel(Operator<?> n) {
    List<String> rows = new ArrayList<>();
    rows.add(nodeName0(n));
    if (n instanceof TableScanOperator) {
        TableScanDesc scanConf = ((TableScanOperator) n).getConf();
        rows.add(vBox(scanConf.getTableName(), scanConf.getAlias()));
    }
    if (n instanceof FilterOperator) {
        FilterDesc filterConf = ((FilterOperator) n).getConf();
        rows.add(vBox("filter:", escape(filterConf.getPredicateString())));
    }
    return vBox(rows);
}
Also used : FilterOperator(org.apache.hadoop.hive.ql.exec.FilterOperator) FilterDesc(org.apache.hadoop.hive.ql.plan.FilterDesc) TableScanOperator(org.apache.hadoop.hive.ql.exec.TableScanOperator) ArrayList(java.util.ArrayList) TableScanDesc(org.apache.hadoop.hive.ql.plan.TableScanDesc)

Example 15 with TableScanDesc

Use of org.apache.hadoop.hive.ql.plan.TableScanDesc in the Apache Hive project.

From the class Vectorizer, method vectorizeTableScanOperator.

/**
 * Attaches a fresh vector descriptor to the given table scan operator and
 * records the projected output columns from the vectorization context; the
 * operator instance itself is returned unchanged.
 *
 * @param tableScanOp table scan operator to annotate (must carry a TableScanDesc)
 * @param vContext vectorization context providing the projected column ids
 * @return the same operator instance, now carrying a VectorTableScanDesc
 * @throws HiveException declared for interface compatibility
 */
public static Operator<? extends OperatorDesc> vectorizeTableScanOperator(Operator<? extends OperatorDesc> tableScanOp, VectorizationContext vContext) throws HiveException {
    TableScanDesc scanDesc = (TableScanDesc) tableScanOp.getConf();
    VectorTableScanDesc vectorDesc = new VectorTableScanDesc();
    scanDesc.setVectorDesc(vectorDesc);
    Integer[] projected = vContext.getProjectedColumns().toArray(new Integer[0]);
    vectorDesc.setProjectedOutputColumns(ArrayUtils.toPrimitive(projected));
    return tableScanOp;
}
Also used : TableScanDesc(org.apache.hadoop.hive.ql.plan.TableScanDesc) VectorTableScanDesc(org.apache.hadoop.hive.ql.plan.VectorTableScanDesc) VectorTableScanDesc(org.apache.hadoop.hive.ql.plan.VectorTableScanDesc)

Aggregations

TableScanDesc (org.apache.hadoop.hive.ql.plan.TableScanDesc)28 ArrayList (java.util.ArrayList)12 TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator)12 Table (org.apache.hadoop.hive.ql.metadata.Table)8 ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)7 ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc)7 ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo)6 HashMap (java.util.HashMap)5 LinkedHashMap (java.util.LinkedHashMap)4 RowSchema (org.apache.hadoop.hive.ql.exec.RowSchema)4 ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc)4 Serializable (java.io.Serializable)3 List (java.util.List)3 Map (java.util.Map)3 Path (org.apache.hadoop.fs.Path)3 HiveConf (org.apache.hadoop.hive.conf.HiveConf)3 FileSinkOperator (org.apache.hadoop.hive.ql.exec.FileSinkOperator)3 Operator (org.apache.hadoop.hive.ql.exec.Operator)3 ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator)3 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)3