Search in sources :

Example 81 with CommandProcessorException

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in the Apache Hive project.

This example is taken from the class TestMetastoreVersion, method testVersionMatching.

/**
 * Test that, once the correct schema version has been recorded in the metastore,
 * Hive works with schema verification enabled.
 * <p>
 * First runs a query against a fresh metastore with verification disabled (the query
 * is expected to fail — presumably because no schema has been populated yet; confirm
 * against the test's setup), then records the expected schema version and checks that
 * the same query succeeds.
 * @throws Exception on any unexpected test failure
 */
@Test
public void testVersionMatching() throws Exception {
    System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false");
    hiveConf = new HiveConf(this.getClass());
    SessionState.start(new CliSessionState(hiveConf));
    driver = DriverFactory.newDriver(hiveConf);
    try {
        driver.run("show tables");
        assert false;
    } catch (CommandProcessorException e) {
    // this is expected — the query must fail on the unpopulated metastore
    }
    // Reset the cached verification result, then record the correct schema version
    // and build a fresh driver against a new conf.
    ObjectStore.setSchemaVerified(false);
    // NOTE(review): this setBoolVar is applied to a conf instance that is replaced on
    // the very next line; verification for the new conf appears to come from elsewhere
    // (system property / ObjectStore state) — confirm intent.
    hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION, true);
    hiveConf = new HiveConf(this.getClass());
    setVersion(hiveConf, metastoreSchemaInfo.getHiveSchemaVersion());
    driver = DriverFactory.newDriver(hiveConf);
    // With the matching version recorded, the query must now succeed.
    driver.run("show tables");
}
Also used : CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) HiveConf(org.apache.hadoop.hive.conf.HiveConf) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) Test(org.junit.Test)

Example 82 with CommandProcessorException

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in the Apache Hive project.

This example is taken from the class CliDriver, method processLocalCmd.

/**
 * Executes a command locally, either through an {@code IDriver} (full queries:
 * run, fetch results, print them to the session output stream) or through a
 * plain {@code CommandProcessor} (e.g. SET/DFS-style commands).
 *
 * @param cmd  the command text to execute
 * @param proc the processor chosen for this command; a no-op if {@code null}
 * @param ss   the CLI session providing output streams and the session id
 * @return the processor's response; a default (empty) response when
 *         {@code proc} is {@code null}
 * @throws CommandProcessorException if the command fails or fetching results
 *         raises an {@code IOException} (rethrown as response code 1)
 */
CommandProcessorResponse processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) throws CommandProcessorException {
    boolean escapeCRLF = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CLI_PRINT_ESCAPE_CRLF);
    CommandProcessorResponse response = new CommandProcessorResponse();
    if (proc != null) {
        if (proc instanceof IDriver) {
            IDriver qp = (IDriver) proc;
            PrintStream out = ss.out;
            long start = System.currentTimeMillis();
            if (ss.getIsVerbose()) {
                out.println(cmd);
            }
            // Set HDFS CallerContext to queryId and reset back to sessionId after the query is done
            ShimLoader.getHadoopShims().setHadoopQueryContext(qp.getQueryState().getQueryId());
            try {
                response = qp.run(cmd);
            } catch (CommandProcessorException e) {
                // On failure, release the driver and restore the session caller
                // context before propagating.
                qp.close();
                ShimLoader.getHadoopShims().setHadoopSessionContext(ss.getSessionId());
                throw e;
            }
            // query has run capture the time
            long end = System.currentTimeMillis();
            double timeTaken = (end - start) / 1000.0;
            // Fetch buffer, refilled by each getResults() call below.
            ArrayList<String> res = new ArrayList<>();
            printHeader(qp, out);
            // print the results
            int counter = 0;
            try {
                if (out instanceof FetchConverter) {
                    ((FetchConverter) out).fetchStarted();
                }
                while (qp.getResults(res)) {
                    for (String r : res) {
                        if (escapeCRLF) {
                            r = EscapeCRLFHelper.escapeCRLF(r);
                        }
                        out.println(r);
                    }
                    counter += res.size();
                    res.clear();
                    // Stop fetching if the output stream has failed (e.g. broken pipe).
                    if (out.checkError()) {
                        break;
                    }
                }
            } catch (IOException e) {
                console.printError("Failed with exception " + e.getClass().getName() + ":" + e.getMessage(), "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
                throw new CommandProcessorException(1);
            } finally {
                // Always close the driver, restore the caller context, and finish
                // the fetch/print bookkeeping, even on error.
                qp.close();
                ShimLoader.getHadoopShims().setHadoopSessionContext(ss.getSessionId());
                if (out instanceof FetchConverter) {
                    ((FetchConverter) out).fetchFinished();
                }
                console.printInfo("Time taken: " + timeTaken + " seconds" + (counter == 0 ? "" : ", Fetched: " + counter + " row(s)"));
            }
        } else {
            // Non-driver processors receive the command with its first token stripped.
            String firstToken = tokenizeCmd(cmd.trim())[0];
            String cmd_1 = getFirstCmd(cmd.trim(), firstToken.length());
            if (ss.getIsVerbose()) {
                ss.out.println(firstToken + " " + cmd_1);
            }
            try {
                CommandProcessorResponse res = proc.run(cmd_1);
                if (res.getMessage() != null) {
                    console.printInfo(res.getMessage());
                }
                return res;
            } catch (CommandProcessorException e) {
                ss.out.println("Query returned non-zero code: " + e.getResponseCode() + ", cause: " + e.getMessage());
                throw e;
            }
        }
    }
    return response;
}
Also used : CachingPrintStream(org.apache.hadoop.hive.common.io.CachingPrintStream) PrintStream(java.io.PrintStream) CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) CommandProcessorResponse(org.apache.hadoop.hive.ql.processors.CommandProcessorResponse) ArrayList(java.util.ArrayList) IOException(java.io.IOException) FetchConverter(org.apache.hadoop.hive.common.io.FetchConverter) IDriver(org.apache.hadoop.hive.ql.IDriver)

Example 83 with CommandProcessorException

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in the Apache Hive project.

This example is taken from the class CliDriver, method run.

/**
 * Entry point for the Hive CLI: parses the command line, initializes logging,
 * session streams and session state, then delegates to {@code executeDriver}.
 *
 * @param args raw command line arguments
 * @return 0 on success; 1 or 2 when option processing fails (stage 1 / stage 2),
 *         3 when the session streams cannot be set up with UTF-8, or the failing
 *         command's response code
 * @throws Exception on unexpected initialization failures
 */
public int run(String[] args) throws Exception {
    OptionsProcessor oproc = new OptionsProcessor();
    // Stage 1 parses the raw command line before any session exists.
    if (!oproc.process_stage1(args)) {
        return 1;
    }
    // NOTE: It is critical to do this here so that log4j is reinitialized
    // before any of the other core hive classes are loaded
    boolean logInitFailed = false;
    String logInitDetailMessage;
    try {
        logInitDetailMessage = LogUtils.initHiveLog4j();
    } catch (LogInitializationException e) {
        // Remember the failure; it is reported once the output streams exist.
        logInitFailed = true;
        logInitDetailMessage = e.getMessage();
    }
    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
        // Wire session streams to stdout/stderr with explicit UTF-8 encoding.
        ss.out = new SessionStream(System.out, true, StandardCharsets.UTF_8.name());
        ss.info = new SessionStream(System.err, true, StandardCharsets.UTF_8.name());
        ss.err = new CachingPrintStream(System.err, true, StandardCharsets.UTF_8.name());
    } catch (UnsupportedEncodingException e) {
        return 3;
    }
    // Stage 2 applies options that require an initialized session state.
    if (!oproc.process_stage2(ss)) {
        return 2;
    }
    if (!ss.getIsSilent()) {
        if (logInitFailed) {
            System.err.println(logInitDetailMessage);
        } else {
            SessionState.getConsole().printInfo(logInitDetailMessage);
        }
    }
    // set all properties specified via command line
    HiveConf conf = ss.getConf();
    for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
        conf.set((String) item.getKey(), (String) item.getValue());
        ss.getOverriddenConfigurations().put((String) item.getKey(), (String) item.getValue());
    }
    // read prompt configuration and substitute variables.
    prompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT);
    prompt = new VariableSubstitution(new HiveVariableSource() {

        @Override
        public Map<String, String> getHiveVariable() {
            return SessionState.get().getHiveVariables();
        }
    }).substitute(conf, prompt);
    // prompt2 is the continuation-line prompt, padded to the same width.
    prompt2 = spacesForString(prompt);
    if (HiveConf.getBoolVar(conf, ConfVars.HIVE_CLI_TEZ_SESSION_ASYNC)) {
        // Start the session in a fire-and-forget manner. When the asynchronously initialized parts of
        // the session are needed, the corresponding getters and other methods will wait as needed.
        SessionState.beginStart(ss, console);
    } else {
        SessionState.start(ss);
    }
    ss.updateThreadName();
    // Initialize metadata provider class and trimmer
    CalcitePlanner.warmup();
    // Create views registry
    HiveMaterializedViewsRegistry.get().init();
    // init metastore client cache
    if (HiveConf.getBoolVar(conf, ConfVars.MSC_CACHE_ENABLED)) {
        HiveMetaStoreClientWithLocalCache.init(conf);
    }
    // execute cli driver work
    try {
        executeDriver(ss, conf, oproc);
        return 0;
    } catch (CommandProcessorException e) {
        // Surface the failing command's response code as the CLI exit code.
        return e.getResponseCode();
    } finally {
        // Always restore the thread name and close the session, even on failure.
        ss.resetThreadName();
        ss.close();
    }
}
Also used : SessionState(org.apache.hadoop.hive.ql.session.SessionState) CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) VariableSubstitution(org.apache.hadoop.hive.conf.VariableSubstitution) HiveVariableSource(org.apache.hadoop.hive.conf.HiveVariableSource) UnsupportedEncodingException(java.io.UnsupportedEncodingException) CachingPrintStream(org.apache.hadoop.hive.common.io.CachingPrintStream) SessionStream(org.apache.hadoop.hive.common.io.SessionStream) LogInitializationException(org.apache.hadoop.hive.common.LogUtils.LogInitializationException) HiveConf(org.apache.hadoop.hive.conf.HiveConf) Map(java.util.Map)

Example 84 with CommandProcessorException

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in the Apache Hive project.

This example is taken from the class TestScheduledQueryIntegration, method testScheduledQueryExecutionImpersonation.

@Test
public void testScheduledQueryExecutionImpersonation() throws ParseException, Exception {
    // Make the scheduled-query executor poll and report quickly so the test
    // does not wait for the default intervals.
    envSetup.getTestCtx().hiveConf.setVar(HiveConf.ConfVars.HIVE_SCHEDULED_QUERIES_EXECUTOR_IDLE_SLEEP_TIME, "1s");
    envSetup.getTestCtx().hiveConf.setVar(HiveConf.ConfVars.HIVE_SCHEDULED_QUERIES_EXECUTOR_PROGRESS_REPORT_INTERVAL, "1s");
    setupAuthorization();
    try (ScheduledQueryExecutionService executor = ScheduledQueryExecutionService.startScheduledQueryExecutorService(envSetup.getTestCtx().hiveConf)) {
        runAsUser("user1", "create scheduled query s1 cron '* * * * * ? *' defined as create table tx1 as select 12 as i", true);
        // Leave the executor running long enough for the schedule to fire.
        Thread.sleep(20000);
    }
    // The schedule ran as user1, so user1 owns tx1 and can read it.
    runAsUser("user1", "select * from tx1");
    // A different user must not be allowed to drop the table.
    try {
        runAsUser("user2", "drop table tx1");
        fail("should have failed");
    } catch (CommandProcessorException expected) {
        assertEquals(40000, expected.getResponseCode());
    }
    // The owning user, however, can drop it.
    runAsUser("user1", "drop table tx1");
}
Also used : CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) ScheduledQueryExecutionService(org.apache.hadoop.hive.ql.scheduled.ScheduledQueryExecutionService) Test(org.junit.Test)

Example 85 with CommandProcessorException

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in the Apache Hive project.

This example is taken from the class HiveCommandOperation, method runInternal.

/**
 * Runs a non-SQL Hive command (e.g. SET/DFS-style statements) through the bound
 * command processor, recording the result schema and message, and transitions
 * the operation state to RUNNING, then FINISHED or ERROR.
 *
 * @throws HiveSQLException if the processor reports a failure or any other
 *         error occurs while executing the command
 */
@Override
public void runInternal() throws HiveSQLException {
    setState(OperationState.RUNNING);
    try {
        String command = getStatement().trim();
        // Split the trimmed command (not the raw statement field) so leading
        // whitespace cannot produce an empty first token and a wrong argument
        // substring below.
        String[] tokens = command.split("\\s");
        // Everything after the command keyword is passed to the processor.
        String commandArgs = command.substring(tokens[0].length()).trim();
        CommandProcessorResponse response = commandProcessor.run(commandArgs);
        Schema schema = response.getSchema();
        if (schema != null) {
            setHasResultSet(true);
            resultSchema = new TableSchema(schema);
        } else {
            setHasResultSet(false);
            resultSchema = new TableSchema();
        }
        if (response.getMessage() != null) {
            log.info(response.getMessage());
        }
    } catch (CommandProcessorException e) {
        // Processor-reported failures carry a response code; convert to SQL error.
        setState(OperationState.ERROR);
        throw toSQLException("Error while processing statement", e);
    } catch (Exception e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException("Error running query: " + e.toString(), e);
    }
    setState(OperationState.FINISHED);
}
Also used : CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) TableSchema(org.apache.hive.service.cli.TableSchema) CommandProcessorResponse(org.apache.hadoop.hive.ql.processors.CommandProcessorResponse) TableSchema(org.apache.hive.service.cli.TableSchema) Schema(org.apache.hadoop.hive.metastore.api.Schema) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) IOException(java.io.IOException) FileNotFoundException(java.io.FileNotFoundException) CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) UnsupportedEncodingException(java.io.UnsupportedEncodingException)

Aggregations

CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException)85 Test (org.junit.Test)42 IOException (java.io.IOException)14 CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse)14 Driver (org.apache.hadoop.hive.ql.Driver)12 ArrayList (java.util.ArrayList)10 HiveConf (org.apache.hadoop.hive.conf.HiveConf)10 QTestProcessExecResult (org.apache.hadoop.hive.ql.QTestProcessExecResult)9 Path (org.apache.hadoop.fs.Path)8 FileSystem (org.apache.hadoop.fs.FileSystem)7 CliSessionState (org.apache.hadoop.hive.cli.CliSessionState)6 File (java.io.File)5 IDriver (org.apache.hadoop.hive.ql.IDriver)5 FileNotFoundException (java.io.FileNotFoundException)4 LockException (org.apache.hadoop.hive.ql.lockmgr.LockException)4 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)4 UnsupportedEncodingException (java.io.UnsupportedEncodingException)3 Map (java.util.Map)3 Nullable (javax.annotation.Nullable)3 Database (org.apache.hadoop.hive.metastore.api.Database)3