Usage of org.apache.hadoop.hive.ql.plan.TableScanDesc in the Apache Hive project: class TestRuntimeStatsPersistence, method getTsOp.
/**
 * Builds a TableScanOperator over a dummy "db.table" whose filter expression is
 * {@code udf(i, aa.c1)}; the integer constant makes each generated scan distinct.
 */
private Operator<TableScanDesc> getTsOp(int i) {
Table table = new Table("db", "table");
TableScanDesc scanDesc = new TableScanDesc("alias", /*+ cCtx.nextOperatorId()*/
table);
// Filter children: the constant i plus column aa.c1.
List<ExprNodeDesc> children = Lists.newArrayList(
    new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, Integer.valueOf(i)),
    new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "c1", "aa", false));
scanDesc.setFilterExpr(new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, udf, children));
return OperatorFactory.get(cCtx, scanDesc);
}
Usage of org.apache.hadoop.hive.ql.plan.TableScanDesc in the Apache Hive project: class SharedWorkOptimizer, method compareOperator.
/**
 * Structural equality check between two operators, used to decide whether
 * their work can be shared. ReduceSink and TableScan operators are compared
 * field by field here; every other operator type delegates to
 * {@link Operator#logicalEquals}.
 *
 * @param pctx parse context, used to look up pruned partitions for table scans
 * @param op1 first operator
 * @param op2 second operator
 * @return true if the two operators are considered equivalent
 * @throws SemanticException if partition pruning lookup fails
 */
private static boolean compareOperator(ParseContext pctx, Operator<?> op1, Operator<?> op2) throws SemanticException {
// Different operator classes can never be equivalent.
if (!op1.getClass().getName().equals(op2.getClass().getName())) {
return false;
}
// TODO: move this to logicalEquals
if (op1 instanceof ReduceSinkOperator) {
ReduceSinkDesc op1Conf = ((ReduceSinkOperator) op1).getConf();
ReduceSinkDesc op2Conf = ((ReduceSinkOperator) op2).getConf();
// NOTE: getParitionColsString is the (misspelled) name of the actual Hive API.
return StringUtils.equals(op1Conf.getKeyColString(), op2Conf.getKeyColString())
    && StringUtils.equals(op1Conf.getValueColsString(), op2Conf.getValueColsString())
    && StringUtils.equals(op1Conf.getParitionColsString(), op2Conf.getParitionColsString())
    && op1Conf.getTag() == op2Conf.getTag()
    && StringUtils.equals(op1Conf.getOrder(), op2Conf.getOrder())
    && StringUtils.equals(op1Conf.getNullOrder(), op2Conf.getNullOrder())
    && op1Conf.getTopN() == op2Conf.getTopN()
    && canDeduplicateReduceTraits(op1Conf, op2Conf);
}
// TODO: move this to logicalEquals
if (op1 instanceof TableScanOperator) {
TableScanOperator tsOp1 = (TableScanOperator) op1;
TableScanOperator tsOp2 = (TableScanOperator) op2;
TableScanDesc op1Conf = tsOp1.getConf();
TableScanDesc op2Conf = tsOp2.getConf();
Table tableMeta1 = op1Conf.getTableMetadata();
Table tableMeta2 = op2Conf.getTableMetadata();
// Table scans must read the same table slice: same table, columns, filter,
// pruned partitions, row limit, bucket selection, and operator properties.
return StringUtils.equals(tableMeta1.getFullyQualifiedName(), tableMeta2.getFullyQualifiedName())
    && op1Conf.getNeededColumns().equals(op2Conf.getNeededColumns())
    && StringUtils.equals(op1Conf.getFilterExprString(), op2Conf.getFilterExprString())
    && pctx.getPrunedPartitions(tsOp1).getPartitions().equals(pctx.getPrunedPartitions(tsOp2).getPartitions())
    && op1Conf.getRowLimit() == op2Conf.getRowLimit()
    && Objects.equals(op1Conf.getIncludedBuckets(), op2Conf.getIncludedBuckets())
    && Objects.equals(op1Conf.getOpProps(), op2Conf.getOpProps());
}
return op1.logicalEquals(op2);
}
Usage of org.apache.hadoop.hive.ql.plan.TableScanDesc in the Apache Hive project: class HiveInputFormat, method pushAsOf.
/**
 * Copies the scan's time-travel ("AS OF") settings into the job configuration:
 * an AS OF timestamp is parsed and stored as epoch millis, an AS OF version is
 * passed through verbatim.
 */
protected static void pushAsOf(Configuration jobConf, TableScanOperator ts) {
TableScanDesc scanDesc = ts.getConf();
String asOfTimestamp = scanDesc.getAsOfTimestamp();
if (asOfTimestamp != null) {
// No active session: fall back to a fresh HiveConf's local time zone.
SessionState session = SessionState.get();
ZoneId zone = (session == null)
    ? new HiveConf().getLocalTimeZone()
    : session.getConf().getLocalTimeZone();
TimestampTZ parsed = TimestampTZUtil.parse(PlanUtils.stripQuotes(asOfTimestamp), zone);
jobConf.set(TableScanDesc.AS_OF_TIMESTAMP, String.valueOf(parsed.toEpochMilli()));
}
String asOfVersion = scanDesc.getAsOfVersion();
if (asOfVersion != null) {
jobConf.set(TableScanDesc.AS_OF_VERSION, asOfVersion);
}
}
Usage of org.apache.hadoop.hive.ql.plan.TableScanDesc in the Apache Hive project: class DotExporter, method nodeLabel.
/**
 * Renders the DOT label for an operator node: the operator's name plus, for
 * table scans and filters, a short summary of their configuration.
 */
private String nodeLabel(Operator<?> n) {
List<String> parts = new ArrayList<String>();
parts.add(nodeName0(n));
if (n instanceof TableScanOperator) {
TableScanDesc tsConf = ((TableScanOperator) n).getConf();
parts.add(vBox(tsConf.getTableName(), tsConf.getAlias()));
}
if (n instanceof FilterOperator) {
FilterDesc filterConf = ((FilterOperator) n).getConf();
parts.add(vBox("filter:", escape(filterConf.getPredicateString())));
}
return vBox(parts);
}
Usage of org.apache.hadoop.hive.ql.plan.TableScanDesc in the Apache Hive project: class Vectorizer, method vectorizeTableScanOperator.
/**
 * Marks a table scan as vectorized by attaching a VectorTableScanDesc carrying
 * the projected output column ids from the vectorization context; the operator
 * instance itself is returned unchanged.
 */
public static Operator<? extends OperatorDesc> vectorizeTableScanOperator(Operator<? extends OperatorDesc> tableScanOp, VectorizationContext vContext) throws HiveException {
TableScanDesc scanDesc = (TableScanDesc) tableScanOp.getConf();
VectorTableScanDesc vectorDesc = new VectorTableScanDesc();
// Unbox the projected column ids into the primitive array the desc expects.
int[] projectedColumns =
    ArrayUtils.toPrimitive(vContext.getProjectedColumns().toArray(new Integer[0]));
vectorDesc.setProjectedOutputColumns(projectedColumns);
scanDesc.setVectorDesc(vectorDesc);
return tableScanOp;
}
Aggregations