Use of org.apache.hadoop.hive.ql.stats.IStatsProcessor in project hive by apache.
Class StatsTask, method initialize:
@Override
public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx, CompilationOpContext opContext) {
  super.initialize(queryState, queryPlan, ctx, opContext);
  // Basic stats run first, so their processor is inserted at the head of the list.
  if (work.getBasicStatsWork() != null) {
    BasicStatsTask task = new BasicStatsTask(conf, work.getBasicStatsWork());
    task.followedColStats = work.hasColStats();
    processors.add(0, task);
  } else if (work.isFooterScan()) {
    // Footer-scan mode computes basic stats without launching a job.
    BasicStatsNoJobTask t = new BasicStatsNoJobTask(conf, work.getBasicStatsNoJobWork());
    processors.add(0, t);
  }
  // Column stats, if requested, run after the basic stats processor.
  if (work.hasColStats()) {
    processors.add(new ColStatsProcessor(work.getColStats(), conf));
  }
  for (IStatsProcessor p : processors) {
    p.initialize(opContext);
  }
}
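
The processors list is typed as IStatsProcessor, so the basic and column stats implementations share one small contract. Inferred from the call sites in initialize and execute (p.initialize, task.setDpPartSpecs, task.process), the interface looks roughly like the sketch below; the actual declaration in Hive may differ in details such as checked exceptions or additional members.

import java.util.Collection;
import org.apache.hadoop.hive.ql.CompilationOpContext;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;

// Sketch of the IStatsProcessor contract, inferred from the call sites
// in StatsTask above; not the verbatim Hive source.
public interface IStatsProcessor {

  // One-time setup before the task runs.
  void initialize(CompilationOpContext opContext);

  // Dynamic-partition specs collected during query execution.
  void setDpPartSpecs(Collection<Partition> dpPartSpecs);

  // Performs the stats work against the given table; 0 means success.
  int process(Hive db, Table tbl) throws Exception;
}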
Class StatsTask, method execute:
@Override
public int execute(DriverContext driverContext) {
  // Nothing to do when the query is only being explained/analyzed.
  if (driverContext.getCtx().getExplainAnalyze() == AnalyzeState.RUNNING) {
    return 0;
  }
  // Aggregating basic stats and a footer scan are mutually exclusive.
  if (work.isAggregating() && work.isFooterScan()) {
    throw new RuntimeException("Can not have both basic stats work and stats no job work!");
  }
  int ret = 0;
  try {
    if (work.isFooterScan()) {
      work.getBasicStatsNoJobWork().setPartitions(work.getPartitions());
    }
    Hive db = getHive();
    Table tbl = getTable(db);
    // Run each processor in order; the first non-zero return code aborts the chain.
    for (IStatsProcessor task : processors) {
      task.setDpPartSpecs(dpPartSpecs);
      ret = task.process(db, tbl);
      if (ret != 0) {
        return ret;
      }
    }
  } catch (Exception e) {
    LOG.error("Failed to run stats task", e);
    return 1;
  }
  return 0;
}
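
As a usage illustration, a processor that merely logs the table it is handed could plug into the loop above as follows. This is a minimal hypothetical sketch, not part of Hive; the class name and behavior are invented, and only the callback order is taken from the StatsTask code shown above.

import java.util.Collection;
import org.apache.hadoop.hive.ql.CompilationOpContext;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;

// Hypothetical example: a no-op processor showing where each
// IStatsProcessor callback fires in StatsTask's lifecycle.
public class LoggingStatsProcessor implements IStatsProcessor {
  private Collection<Partition> dpPartSpecs;

  @Override
  public void initialize(CompilationOpContext opContext) {
    // Called from StatsTask.initialize, once per processor.
  }

  @Override
  public void setDpPartSpecs(Collection<Partition> dpPartSpecs) {
    // Called from StatsTask.execute, just before process.
    this.dpPartSpecs = dpPartSpecs;
  }

  @Override
  public int process(Hive db, Table tbl) throws Exception {
    // Returning non-zero here would make StatsTask.execute stop and
    // propagate the code; returning 0 lets the remaining processors run.
    System.out.println("stats pass over table: " + tbl.getTableName());
    return 0;
  }
}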