Use of org.apache.hadoop.hive.ql.plan.mapper.PlanMapper in project Hive by Apache:
the class ReExecDriver, method run().
/**
 * Executes the current query, re-running it (up to the configured re-execution
 * limit) whenever a re-execution plugin decides a retry could succeed, or when
 * "explain reoptimization" forces a second pass to show the re-optimized plan.
 *
 * @return the {@link CommandProcessorResponse} of the last execution attempt
 */
@Override
public CommandProcessorResponse run() {
  executionIndex = 0;
  // One initial execution plus the configured number of allowed re-executions.
  int maxExecutions = 1 + coreDriver.getConf().getIntVar(ConfVars.HIVE_QUERY_MAX_REEXECUTION_COUNT);
  while (true) {
    executionIndex++;
    // Let every plugin observe/prepare before each attempt.
    for (IReExecutionPlugin p : plugins) {
      p.beforeExecute(executionIndex, explainReOptimization);
    }
    coreDriver.getContext().setExecutionIndex(executionIndex);
    LOG.info("Execution #{} of query", executionIndex);
    CommandProcessorResponse cpr = coreDriver.run();
    // "explain reoptimization" always re-executes exactly once after the first run.
    boolean shouldReExecute = explainReOptimization && executionIndex == 1;
    // Otherwise only retry failed runs, and only if some plugin votes for a retry.
    shouldReExecute |= cpr.getResponseCode() != 0 && shouldReExecute();
    if (executionIndex >= maxExecutions || !shouldReExecute) {
      return cpr;
    }
    LOG.info("Preparing to re-execute query");
    prepareToReExecute();
    PlanMapper oldPlanMapper = coreDriver.getPlanMapper();
    CommandProcessorResponse compileResponse = coreDriver.compileAndRespond(currentQuery);
    if (compileResponse.failed()) {
      // FIXME: somehow record that re-execution compilation has failed, even though
      // the query had been successfully compiled before.
      return compileResponse;
    }
    PlanMapper newPlanMapper = coreDriver.getPlanMapper();
    // Plugins may veto the retry after seeing the newly compiled plan.
    if (!explainReOptimization && !shouldReExecuteAfterCompile(oldPlanMapper, newPlanMapper)) {
      // FIXME: retain the old error, or create a new one?
      return cpr;
    }
  }
}
Use of org.apache.hadoop.hive.ql.plan.mapper.PlanMapper in project Hive by Apache:
the class ReOptimizePlugin, method prepareToReExecute().
/**
 * Arms stats collection for the next attempt and feeds the runtime statistics
 * captured by the previous execution back into the driver.
 */
@Override
public void prepareToReExecute() {
  // Reset the retry flag; a plugin decision is needed again for any further retry.
  retryPossible = false;
  statsReaderHook.setCollectOnSuccess(true);
  PlanMapper planMapper = coreDriver.getContext().getPlanMapper();
  coreDriver.setRuntimeStatsSource(new SimpleRuntimeStatsSource(planMapper));
}
Use of org.apache.hadoop.hive.ql.plan.mapper.PlanMapper in project Hive by Apache:
the class TestCounterMapping, method testUsageOfRuntimeInfo().
/**
 * Verifies that injecting the runtime statistics captured during a first
 * execution into a fresh driver changes the estimated row count of the
 * filter operator in the second plan.
 */
@Test
public void testUsageOfRuntimeInfo() throws ParseException {
  String query = "select sum(u) from tu where u>1";

  // First run: plain execution; capture its plan.
  IDriver driver = createDriver();
  PlanMapper firstPlan = getMapperForQuery(driver, query);
  List<FilterOperator> firstFilters = firstPlan.getAll(FilterOperator.class);
  firstFilters.sort(OPERATOR_ID_COMPARATOR.reversed());
  FilterOperator firstFilter = firstFilters.get(0);

  // Second run: same query on a new driver, with the first run's stats injected.
  driver = createDriver();
  ((ReExecDriver) driver).setRuntimeStatsSource(new SimpleRuntimeStatsSource(firstPlan));
  PlanMapper secondPlan = getMapperForQuery(driver, query);
  List<FilterOperator> secondFilters = secondPlan.getAll(FilterOperator.class);
  secondFilters.sort(OPERATOR_ID_COMPARATOR.reversed());
  FilterOperator secondFilter = secondFilters.get(0);

  assertEquals("original check", 7, firstFilter.getStatistics().getNumRows());
  assertEquals("optimized check", 6, secondFilter.getStatistics().getNumRows());
}
Use of org.apache.hadoop.hive.ql.plan.mapper.PlanMapper in project Hive by Apache:
the class TestCounterMapping, method getMapperForQuery().
/**
 * Runs {@code query} on {@code driver}, asserts that the run succeeded, and
 * returns the plan mapper of the resulting driver context.
 */
private PlanMapper getMapperForQuery(IDriver driver, String query) {
  int responseCode = driver.run(query).getResponseCode();
  assertEquals("Checking command success", 0, responseCode);
  return driver.getContext().getPlanMapper();
}
Use of org.apache.hadoop.hive.ql.plan.mapper.PlanMapper in project Hive by Apache:
the class TestReOptimization, method testExplainSupport().
/**
 * Checks that "explain reoptimization" output contains both the original and
 * the runtime-annotated table scan entries.
 */
@Test
public void testExplainSupport() throws Exception {
  IDriver driver = createDriver("overlay,reoptimize");
  String query = "explain reoptimization select 1 from tu join tv on (tu.id_uv=tv.id_uv) where u<10 and v>1";
  // Run for its side effect (executes the query and asserts success);
  // the returned PlanMapper is not needed here, so the unused local was dropped.
  getMapperForQuery(driver, query);
  List<String> res = new ArrayList<>();
  List<String> res1 = new ArrayList<>();
  while (driver.getResults(res1)) {
    // NOTE(review): assumes getResults replaces the contents of res1 on each
    // call — confirm, otherwise rows would be duplicated into res.
    res.addAll(res1);
  }
  assertEquals("2TS", 2, res.stream().filter(line -> line.contains("TS_")).count());
  assertEquals("2TS(runtime)", 2, res.stream().filter(line -> line.contains("TS") && line.contains("runtime")).count());
}
Aggregations