Usage example of org.apache.hadoop.hive.ql.plan.mapper.PlanMapper in the Apache Hive project: the prepareToReExecute method of the ReOptimizePlugin class.
@Override
public void prepareToReExecute() {
  // Collect runtime statistics even when the next execution succeeds.
  statsReaderHook.setCollectOnSuccess(true);
  // Hand the plan mapper of the failed run back to the driver so the
  // re-compilation can consult the gathered runtime statistics.
  coreDriver.setRuntimeStatsSource(
      new SimpleRuntimeStatsSource(coreDriver.getContext().getPlanMapper()));
  retryPossible = false;
}
Usage example of org.apache.hadoop.hive.ql.plan.mapper.PlanMapper in the Apache Hive project: the run method of the RuntimeStatsPersistenceCheckerHook class.
@Override
public void run(HookContext hookContext) throws Exception {
  // Verifies that every operator-tree signature survives a persistence
  // round-trip: the re-loaded signature must compare equal to the original.
  PlanMapper pm = ((PrivateHookContext) hookContext).getContext().getPlanMapper();
  List<OpTreeSignature> sigs = pm.getAll(OpTreeSignature.class);
  for (OpTreeSignature sig : sigs) {
    try {
      // One round-trip per signature; the previous version iterated twice
      // and re-serialized every signature a second time for no benefit.
      OpTreeSignature sig2 = persistenceLoop(sig, OpTreeSignature.class);
      // Field-level check first — proveEquals yields a precise diagnostic
      // about which signature component diverged.
      sig.getSig().proveEquals(sig2.getSig());
      // Then verify the top-level equals contract as well.
      if (!sig.equals(sig2)) {
        throw new RuntimeException("signature mismatch");
      }
    } catch (Exception e) {
      // Wrap with the offending signature so the failure is attributable.
      throw new RuntimeException("while checking the signature of: " + sig.getSig(), e);
    }
  }
  // Parameterized logging: the message is only built when debug is enabled.
  LOG.debug("signature checked: {}", sigs.size());
}
Usage example of org.apache.hadoop.hive.ql.plan.mapper.PlanMapper in the Apache Hive project: the run method of the ReExecDriver class.
@Override
// Runs the query, retrying (re-compiling and re-executing) up to the configured
// maximum when a registered IReExecutionPlugin decides a retry is worthwhile.
// Returns the last successful response, or rethrows the last failure.
public CommandProcessorResponse run() throws CommandProcessorException {
executionIndex = 0;
// Total attempts = 1 initial execution + the configured re-execution budget.
int maxExecutions = 1 + coreDriver.getConf().getIntVar(ConfVars.HIVE_QUERY_MAX_REEXECUTION_COUNT);
while (true) {
executionIndex++;
// Notify every plugin that an execution attempt is starting.
for (IReExecutionPlugin p : plugins) {
p.beforeExecute(executionIndex, explainReOptimization);
}
coreDriver.getContext().setExecutionIndex(executionIndex);
LOG.info("Execution #{} of query", executionIndex);
// Exactly one of cpr/cpe is non-null after the attempt:
// cpr on success, cpe on failure.
CommandProcessorResponse cpr = null;
CommandProcessorException cpe = null;
try {
cpr = coreDriver.run();
} catch (CommandProcessorException e) {
cpe = e;
}
// Snapshot the plan mapper of the attempt that just finished; plugins
// compare it against the re-compiled plan further below.
PlanMapper oldPlanMapper = coreDriver.getPlanMapper();
boolean success = cpr != null;
plugins.forEach(p -> p.afterExecute(oldPlanMapper, success));
// Re-execute when: (a) we are explaining re-optimization and this was the
// first pass, or (b) the attempt failed and some plugin votes to retry.
boolean shouldReExecute = explainReOptimization && executionIndex == 1;
shouldReExecute |= cpr == null && plugins.stream().anyMatch(p -> p.shouldReExecute(executionIndex));
// Stop when the retry budget is exhausted or nobody wants another attempt.
if (executionIndex >= maxExecutions || !shouldReExecute) {
if (cpr != null) {
return cpr;
} else {
throw cpe;
}
}
LOG.info("Preparing to re-execute query");
plugins.forEach(IReExecutionPlugin::prepareToReExecute);
try {
coreDriver.compileAndRespond(currentQuery);
} catch (CommandProcessorException e) {
// NOTE(review): the query compiled successfully on an earlier attempt, so
// a recompilation failure here is unexpected.
LOG.error("Recompilation of the query failed; this is unexpected.");
// FIXME: somehow place pointers that re-execution compilation have failed; the query have been successfully compiled before?
throw e;
}
PlanMapper newPlanMapper = coreDriver.getPlanMapper();
// Give plugins a post-compile veto: if the new plan is not expected to do
// better, return the result of the last attempt instead of re-running.
if (!explainReOptimization && !plugins.stream().anyMatch(p -> p.shouldReExecuteAfterCompile(executionIndex, oldPlanMapper, newPlanMapper))) {
LOG.info("re-running the query would probably not yield better results; returning with last error");
// FIXME: retain old error; or create a new one?
// NOTE(review): cpr may be null here when the last attempt failed —
// presumably callers tolerate that; verify against call sites.
return cpr;
}
}
}
Usage example of org.apache.hadoop.hive.ql.plan.mapper.PlanMapper in the Apache Hive project: the testDifferentFiltersAreNotMatched method of the TestOperatorCmp class.
@Test
public void testDifferentFiltersAreNotMatched() throws ParseException, CommandProcessorException {
  // Two queries identical except for the filter constant must not have
  // their FilterOperators matched against each other.
  IDriver hiveDriver = createDriver();
  PlanMapper mapperA = getMapperForQuery(hiveDriver, "select u from tu where id_uv = 1 group by u");
  PlanMapper mapperB = getMapperForQuery(hiveDriver, "select u from tu where id_uv = 2 group by u");
  assertHelper(AssertHelperOp.NOT_SAME, mapperA, mapperB, FilterOperator.class);
}
Usage example of org.apache.hadoop.hive.ql.plan.mapper.PlanMapper in the Apache Hive project: the testInConversion method of the TestCounterMapping class.
@Test
public void testInConversion() throws ParseException, CommandProcessorException {
  // Raise the point-lookup threshold so the IN clause is rewritten into
  // a disjunction of equality predicates instead of a point lookup.
  HiveConf hiveConf = env_setup.getTestCtx().hiveConf;
  hiveConf.setIntVar(ConfVars.HIVEPOINTLOOKUPOPTIMIZERMIN, 10);

  IDriver hiveDriver = createDriver();
  PlanMapper planMapper =
      getMapperForQuery(hiveDriver, "explain select sum(id_uv) from tu where u in (1,2) group by u");

  // Inspect the signature of the first filter operator and verify the
  // rewritten predicate string.
  List<FilterOperator> filterOps = planMapper.getAll(FilterOperator.class);
  OpTreeSignature filterSignature = planMapper.lookup(OpTreeSignature.class, filterOps.get(0));
  Object predicate = filterSignature.getSig().getSigMap().get("getPredicateString");
  assertEquals("((u = 1) or (u = 2)) (type: boolean)", predicate);
}
Aggregations