Search in sources:

Example 16 with PlanMapper

Use of org.apache.hadoop.hive.ql.plan.mapper.PlanMapper in project hive by apache.

The class ReOptimizePlugin, method prepareToReExecute.

@Override
public void prepareToReExecute() {
    // From now on, collect operator statistics even on successful runs so the
    // next execution can consume them.
    statsReaderHook.setCollectOnSuccess(true);
    // Feed the runtime stats recorded in the current plan mapper back into the
    // driver as the stats source for the re-planned query.
    coreDriver.setRuntimeStatsSource(
        new SimpleRuntimeStatsSource(coreDriver.getContext().getPlanMapper()));
    retryPossible = false;
}
Also used : SimpleRuntimeStatsSource(org.apache.hadoop.hive.ql.plan.mapper.SimpleRuntimeStatsSource) PlanMapper(org.apache.hadoop.hive.ql.plan.mapper.PlanMapper)

Example 17 with PlanMapper

Use of org.apache.hadoop.hive.ql.plan.mapper.PlanMapper in project hive by apache.

The class RuntimeStatsPersistenceCheckerHook, method run.

@Override
public void run(HookContext hookContext) throws Exception {
    // Verifies that every operator-tree signature in the current plan survives a
    // round-trip through persistence: the deserialized copy must prove equal and
    // compare equal to the original.
    PlanMapper pm = ((PrivateHookContext) hookContext).getContext().getPlanMapper();
    List<OpTreeSignature> sigs = pm.getAll(OpTreeSignature.class);
    for (OpTreeSignature sig : sigs) {
        try {
            // Round-trip each signature once and run both checks on the result;
            // previously two separate loops serialized every signature twice.
            OpTreeSignature sig2 = persistenceLoop(sig, OpTreeSignature.class);
            // proveEquals throws with a detailed reason on the first mismatching field.
            sig.getSig().proveEquals(sig2.getSig());
            if (!sig.equals(sig2)) {
                throw new RuntimeException("signature mismatch");
            }
        } catch (Exception e) {
            throw new RuntimeException("while checking the signature of: " + sig.getSig(), e);
        }
    }
    // Parameterized logging avoids the string concatenation when debug is disabled.
    LOG.debug("signature checked: {}", sigs.size());
}
Also used : OpTreeSignature(org.apache.hadoop.hive.ql.optimizer.signature.OpTreeSignature) PlanMapper(org.apache.hadoop.hive.ql.plan.mapper.PlanMapper) IOException(java.io.IOException)

Example 18 with PlanMapper

Use of org.apache.hadoop.hive.ql.plan.mapper.PlanMapper in project hive by apache.

The class ReExecDriver, method run.

@Override
public CommandProcessorResponse run() throws CommandProcessorException {
    // Runs the query, and — when a plugin decides a retry may help — recompiles and
    // re-executes it, up to HIVE_QUERY_MAX_REEXECUTION_COUNT additional attempts.
    executionIndex = 0;
    int maxExecutions = 1 + coreDriver.getConf().getIntVar(ConfVars.HIVE_QUERY_MAX_REEXECUTION_COUNT);
    while (true) {
        executionIndex++;
        for (IReExecutionPlugin p : plugins) {
            p.beforeExecute(executionIndex, explainReOptimization);
        }
        coreDriver.getContext().setExecutionIndex(executionIndex);
        LOG.info("Execution #{} of query", executionIndex);
        CommandProcessorResponse cpr = null;
        CommandProcessorException cpe = null;
        try {
            cpr = coreDriver.run();
        } catch (CommandProcessorException e) {
            // Retain the failure; it is rethrown if no (successful) re-execution happens.
            cpe = e;
        }
        PlanMapper oldPlanMapper = coreDriver.getPlanMapper();
        boolean success = cpr != null;
        plugins.forEach(p -> p.afterExecute(oldPlanMapper, success));
        // EXPLAIN REOPTIMIZATION always re-runs once; otherwise only a failed run
        // for which some plugin votes to retry is re-executed.
        boolean shouldReExecute = explainReOptimization && executionIndex == 1;
        shouldReExecute |= cpr == null && plugins.stream().anyMatch(p -> p.shouldReExecute(executionIndex));
        if (executionIndex >= maxExecutions || !shouldReExecute) {
            if (cpr != null) {
                return cpr;
            } else {
                throw cpe;
            }
        }
        LOG.info("Preparing to re-execute query");
        plugins.forEach(IReExecutionPlugin::prepareToReExecute);
        try {
            coreDriver.compileAndRespond(currentQuery);
        } catch (CommandProcessorException e) {
            LOG.error("Recompilation of the query failed; this is unexpected.");
            // FIXME: somehow place pointers that re-execution compilation have failed; the query have been successfully compiled before?
            throw e;
        }
        PlanMapper newPlanMapper = coreDriver.getPlanMapper();
        if (!explainReOptimization && plugins.stream().noneMatch(p -> p.shouldReExecuteAfterCompile(executionIndex, oldPlanMapper, newPlanMapper))) {
            LOG.info("re-running the query would probably not yield better results; returning with last error");
            // This branch is only reachable when the run failed (cpr == null, cpe != null):
            // shouldReExecute required cpr == null once explainReOptimization is false.
            // Rethrow the retained error instead of returning a null response.
            throw cpe;
        }
    }
}
Also used : CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) CommandProcessorResponse(org.apache.hadoop.hive.ql.processors.CommandProcessorResponse) PlanMapper(org.apache.hadoop.hive.ql.plan.mapper.PlanMapper)

Example 19 with PlanMapper

Use of org.apache.hadoop.hive.ql.plan.mapper.PlanMapper in project hive by apache.

The class TestOperatorCmp, method testDifferentFiltersAreNotMatched.

@Test
public void testDifferentFiltersAreNotMatched() throws ParseException, CommandProcessorException {
    IDriver driver = createDriver();
    // The two plans differ only in the filter constant (1 vs 2), so their
    // FilterOperators must not be matched as equivalent.
    PlanMapper mapperA = getMapperForQuery(driver, "select u from tu where id_uv = 1 group by u");
    PlanMapper mapperB = getMapperForQuery(driver, "select u from tu where id_uv = 2 group by u");
    assertHelper(AssertHelperOp.NOT_SAME, mapperA, mapperB, FilterOperator.class);
}
Also used : PlanMapper(org.apache.hadoop.hive.ql.plan.mapper.PlanMapper) IDriver(org.apache.hadoop.hive.ql.IDriver) Test(org.junit.Test)

Example 20 with PlanMapper

Use of org.apache.hadoop.hive.ql.plan.mapper.PlanMapper in project hive by apache.

The class TestCounterMapping, method testInConversion.

@Test
public void testInConversion() throws ParseException, CommandProcessorException {
    HiveConf conf = env_setup.getTestCtx().hiveConf;
    // Raise the point-lookup threshold above the IN-list size so the optimizer
    // rewrites the IN clause into a disjunction of equality predicates.
    conf.setIntVar(ConfVars.HIVEPOINTLOOKUPOPTIMIZERMIN, 10);
    IDriver driver = createDriver();
    PlanMapper planMapper =
        getMapperForQuery(driver, "explain select sum(id_uv) from tu where u in (1,2) group by u");
    List<FilterOperator> filterOps = planMapper.getAll(FilterOperator.class);
    OpTreeSignature filterSig = planMapper.lookup(OpTreeSignature.class, filterOps.get(0));
    Object pred = filterSig.getSig().getSigMap().get("getPredicateString");
    assertEquals("((u = 1) or (u = 2)) (type: boolean)", pred);
}
Also used : FilterOperator(org.apache.hadoop.hive.ql.exec.FilterOperator) OpTreeSignature(org.apache.hadoop.hive.ql.optimizer.signature.OpTreeSignature) PlanMapper(org.apache.hadoop.hive.ql.plan.mapper.PlanMapper) IDriver(org.apache.hadoop.hive.ql.IDriver) HiveConf(org.apache.hadoop.hive.conf.HiveConf) Test(org.junit.Test)

Aggregations

PlanMapper (org.apache.hadoop.hive.ql.plan.mapper.PlanMapper)26 IDriver (org.apache.hadoop.hive.ql.IDriver)17 Test (org.junit.Test)17 FilterOperator (org.apache.hadoop.hive.ql.exec.FilterOperator)12 OpTreeSignature (org.apache.hadoop.hive.ql.optimizer.signature.OpTreeSignature)5 OperatorStats (org.apache.hadoop.hive.ql.stats.OperatorStats)4 Statistics (org.apache.hadoop.hive.ql.plan.Statistics)3 EquivGroup (org.apache.hadoop.hive.ql.plan.mapper.PlanMapper.EquivGroup)3 HiveFilter (org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFilter)2 CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse)2 Ignore (org.junit.Ignore)2 IOException (java.io.IOException)1 HiveConf (org.apache.hadoop.hive.conf.HiveConf)1 CommonJoinOperator (org.apache.hadoop.hive.ql.exec.CommonJoinOperator)1 ColStatistics (org.apache.hadoop.hive.ql.plan.ColStatistics)1 SimpleRuntimeStatsSource (org.apache.hadoop.hive.ql.plan.mapper.SimpleRuntimeStatsSource)1 StatsSource (org.apache.hadoop.hive.ql.plan.mapper.StatsSource)1 CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException)1 ReExecDriver (org.apache.hadoop.hive.ql.reexec.ReExecDriver)1