Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache:
class TestMetastoreVersion, method testVersionMatching.
/**
 * Test that with verification enabled, hive works when the correct schema is already populated.
 *
 * First runs with verification off (schema not yet populated) and expects the query to fail,
 * then populates the correct schema version, re-enables verification, and expects success.
 *
 * @throws Exception on unexpected test failure
 */
@Test
public void testVersionMatching() throws Exception {
  // Phase 1: verification disabled, schema version not yet recorded — query must fail.
  System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false");
  hiveConf = new HiveConf(this.getClass());
  SessionState.start(new CliSessionState(hiveConf));
  driver = DriverFactory.newDriver(hiveConf);
  try {
    driver.run("show tables");
    // FIX: 'assert false' is silently skipped unless the JVM runs with -ea,
    // so a missing exception would let the test pass. Fail unconditionally instead.
    throw new AssertionError("Expected driver.run to throw CommandProcessorException");
  } catch (CommandProcessorException e) {
    // this is expected
  }
  // Phase 2: record the correct schema version, turn verification on — query must succeed.
  ObjectStore.setSchemaVerified(false);
  hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION, true);
  hiveConf = new HiveConf(this.getClass());
  setVersion(hiveConf, metastoreSchemaInfo.getHiveSchemaVersion());
  driver = DriverFactory.newDriver(hiveConf);
  driver.run("show tables");
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache:
class CliDriver, method processLocalCmd.
// Executes a single command with the given processor and prints its results to the session
// output stream. IDriver commands additionally fetch and print result rows with timing info;
// other processors are delegated to directly. Throws CommandProcessorException on failure.
CommandProcessorResponse processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) throws CommandProcessorException {
boolean escapeCRLF = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CLI_PRINT_ESCAPE_CRLF);
CommandProcessorResponse response = new CommandProcessorResponse();
if (proc != null) {
if (proc instanceof IDriver) {
// SQL-like commands: run the query, then stream results to the output.
IDriver qp = (IDriver) proc;
PrintStream out = ss.out;
long start = System.currentTimeMillis();
if (ss.getIsVerbose()) {
out.println(cmd);
}
// Set HDFS CallerContext to queryId and reset back to sessionId after the query is done
ShimLoader.getHadoopShims().setHadoopQueryContext(qp.getQueryState().getQueryId());
try {
response = qp.run(cmd);
} catch (CommandProcessorException e) {
// On failure, release the driver and restore the session caller-context before rethrowing.
qp.close();
ShimLoader.getHadoopShims().setHadoopSessionContext(ss.getSessionId());
throw e;
}
// query has run capture the time
long end = System.currentTimeMillis();
double timeTaken = (end - start) / 1000.0;
ArrayList<String> res = new ArrayList<String>();
printHeader(qp, out);
// print the results
int counter = 0;
try {
if (out instanceof FetchConverter) {
((FetchConverter) out).fetchStarted();
}
// Fetch batches of rows until exhausted, optionally escaping CR/LF in each row.
while (qp.getResults(res)) {
for (String r : res) {
if (escapeCRLF) {
r = EscapeCRLFHelper.escapeCRLF(r);
}
out.println(r);
}
counter += res.size();
res.clear();
// Stop fetching if the output stream has hit an error (e.g. broken pipe).
if (out.checkError()) {
break;
}
}
} catch (IOException e) {
console.printError("Failed with exception " + e.getClass().getName() + ":" + e.getMessage(), "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
throw new CommandProcessorException(1);
} finally {
// Always close the driver, restore the session caller-context, finish the fetch
// (if converting), and report timing — even when result printing failed.
qp.close();
ShimLoader.getHadoopShims().setHadoopSessionContext(ss.getSessionId());
if (out instanceof FetchConverter) {
((FetchConverter) out).fetchFinished();
}
console.printInfo("Time taken: " + timeTaken + " seconds" + (counter == 0 ? "" : ", Fetched: " + counter + " row(s)"));
}
} else {
// Non-driver processors (e.g. set/dfs/add): strip the command word and run the rest.
String firstToken = tokenizeCmd(cmd.trim())[0];
String cmd_1 = getFirstCmd(cmd.trim(), firstToken.length());
if (ss.getIsVerbose()) {
ss.out.println(firstToken + " " + cmd_1);
}
try {
CommandProcessorResponse res = proc.run(cmd_1);
if (res.getMessage() != null) {
console.printInfo(res.getMessage());
}
return res;
} catch (CommandProcessorException e) {
ss.out.println("Query returned non-zero code: " + e.getResponseCode() + ", cause: " + e.getMessage());
throw e;
}
}
}
return response;
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache:
class CliDriver, method run.
// CLI entry point: parses options, initializes logging and the session, applies command-line
// properties, warms up the planner and metastore caches, then executes the driver work.
// Returns 0 on success or a non-zero exit code describing the failure stage.
public int run(String[] args) throws Exception {
OptionsProcessor oproc = new OptionsProcessor();
if (!oproc.process_stage1(args)) {
// Exit code 1: invalid stage-1 command-line options.
return 1;
}
// NOTE: It is critical to do this here so that log4j is reinitialized
// before any of the other core hive classes are loaded
boolean logInitFailed = false;
String logInitDetailMessage;
try {
logInitDetailMessage = LogUtils.initHiveLog4j();
} catch (LogInitializationException e) {
// Remember the failure; it is reported later once we know the silent setting.
logInitFailed = true;
logInitDetailMessage = e.getMessage();
}
CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
ss.in = System.in;
try {
// Wrap the standard streams as UTF-8 session streams (err is cached for later retrieval).
ss.out = new SessionStream(System.out, true, StandardCharsets.UTF_8.name());
ss.info = new SessionStream(System.err, true, StandardCharsets.UTF_8.name());
ss.err = new CachingPrintStream(System.err, true, StandardCharsets.UTF_8.name());
} catch (UnsupportedEncodingException e) {
// Exit code 3: UTF-8 unsupported (should not happen on a standard JVM).
return 3;
}
if (!oproc.process_stage2(ss)) {
// Exit code 2: invalid stage-2 options (session-level arguments).
return 2;
}
if (!ss.getIsSilent()) {
if (logInitFailed) {
System.err.println(logInitDetailMessage);
} else {
SessionState.getConsole().printInfo(logInitDetailMessage);
}
}
// set all properties specified via command line
HiveConf conf = ss.getConf();
for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
conf.set((String) item.getKey(), (String) item.getValue());
ss.getOverriddenConfigurations().put((String) item.getKey(), (String) item.getValue());
}
// read prompt configuration and substitute variables.
prompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT);
prompt = new VariableSubstitution(new HiveVariableSource() {
@Override
public Map<String, String> getHiveVariable() {
return SessionState.get().getHiveVariables();
}
}).substitute(conf, prompt);
prompt2 = spacesForString(prompt);
if (HiveConf.getBoolVar(conf, ConfVars.HIVE_CLI_TEZ_SESSION_ASYNC)) {
// Start the session in a fire-and-forget manner. When the asynchronously initialized parts of
// the session are needed, the corresponding getters and other methods will wait as needed.
SessionState.beginStart(ss, console);
} else {
SessionState.start(ss);
}
ss.updateThreadName();
// Initialize metadata provider class and trimmer
CalcitePlanner.warmup();
// Create views registry
HiveMaterializedViewsRegistry.get().init();
// init metastore client cache
if (HiveConf.getBoolVar(conf, ConfVars.MSC_CACHE_ENABLED)) {
HiveMetaStoreClientWithLocalCache.init(conf);
}
// execute cli driver work
try {
executeDriver(ss, conf, oproc);
return 0;
} catch (CommandProcessorException e) {
// Propagate the processor's response code as the process exit code.
return e.getResponseCode();
} finally {
// Always restore the thread name and close the session, success or failure.
ss.resetThreadName();
ss.close();
}
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache:
class TestScheduledQueryIntegration, method testScheduledQueryExecutionImpersonation.
/**
 * Verifies that scheduled queries run as the scheduling user: the table created by the
 * schedule is owned by user1, so user2 cannot drop it (authorization error 40000) while
 * user1 can.
 *
 * @throws Exception on unexpected test failure
 */
@Test
public void testScheduledQueryExecutionImpersonation() throws Exception {
  // FIX: the original 'throws ParseException, Exception' clause was redundant —
  // Exception already subsumes ParseException.
  envSetup.getTestCtx().hiveConf.setVar(HiveConf.ConfVars.HIVE_SCHEDULED_QUERIES_EXECUTOR_IDLE_SLEEP_TIME, "1s");
  envSetup.getTestCtx().hiveConf.setVar(HiveConf.ConfVars.HIVE_SCHEDULED_QUERIES_EXECUTOR_PROGRESS_REPORT_INTERVAL, "1s");
  setupAuthorization();
  try (ScheduledQueryExecutionService schqS = ScheduledQueryExecutionService.startScheduledQueryExecutorService(envSetup.getTestCtx().hiveConf)) {
    runAsUser("user1", "create scheduled query s1 cron '* * * * * ? *' defined as create table tx1 as select 12 as i", true);
    // Give the executor time to fire the schedule and materialize tx1.
    Thread.sleep(20000);
  }
  // table exists - and owner is able to select from it
  runAsUser("user1", "select * from tx1");
  // other user can't drop it
  try {
    runAsUser("user2", "drop table tx1");
    fail("should have failed");
  } catch (CommandProcessorException cpe) {
    // 40000 is the authorization-failure response code.
    assertEquals(40000, cpe.getResponseCode());
  }
  // but the owner can drop it
  runAsUser("user1", "drop table tx1");
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache:
class HiveCommandOperation, method runInternal.
/**
 * Runs the stored command through the command processor: strips the leading command word,
 * executes the remainder, records the result schema (if any), and transitions the operation
 * state to FINISHED or ERROR.
 *
 * @throws HiveSQLException if the processor reports a failure or an unexpected error occurs
 */
@Override
public void runInternal() throws HiveSQLException {
  setState(OperationState.RUNNING);
  try {
    String command = getStatement().trim();
    // FIX: tokenize the trimmed 'command' rather than the raw 'statement' field —
    // with leading whitespace, split("\\s") on the untrimmed statement produces an
    // empty first token and the substring below computes the wrong argument string.
    String[] tokens = command.split("\\s");
    // Everything after the command word becomes the processor's arguments.
    String commandArgs = command.substring(tokens[0].length()).trim();
    CommandProcessorResponse response = commandProcessor.run(commandArgs);
    Schema schema = response.getSchema();
    if (schema != null) {
      setHasResultSet(true);
      resultSchema = new TableSchema(schema);
    } else {
      setHasResultSet(false);
      resultSchema = new TableSchema();
    }
    if (response.getMessage() != null) {
      log.info(response.getMessage());
    }
  } catch (CommandProcessorException e) {
    setState(OperationState.ERROR);
    throw toSQLException("Error while processing statement", e);
  } catch (Exception e) {
    setState(OperationState.ERROR);
    throw new HiveSQLException("Error running query: " + e.toString(), e);
  }
  setState(OperationState.FINISHED);
}
Aggregations