Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.
Example from the class TestReplicationScenariosAcidTables, method testDumpAcidTableWithPartitionDirMissing.
@Test
public void testDumpAcidTableWithPartitionDirMissing() throws Throwable {
    // Dumping an ACID table whose partition directory was removed from the
    // filesystem out-of-band must fail with FILE_NOT_FOUND, not succeed silently.
    String dbName = testName.getMethodName();
    primary.run("CREATE DATABASE " + dbName + " WITH DBPROPERTIES ( '" + SOURCE_OF_REPLICATION + "' = '1,2,3')")
        .run("CREATE TABLE " + dbName + ".normal (a int) PARTITIONED BY (part int)"
            + " STORED AS ORC TBLPROPERTIES ('transactional'='true')")
        .run("INSERT INTO " + dbName + ".normal partition (part= 124) values (1)");
    // Locate the partition directory under the warehouse root.
    Path path = new Path(primary.warehouseRoot, dbName.toLowerCase() + ".db");
    path = new Path(path, "normal");
    path = new Path(path, "part=124");
    FileSystem fs = path.getFileSystem(conf);
    // Recursive delete: the directory holds the delta files written by the
    // INSERT above, and the single-argument FileSystem.delete(Path) is deprecated.
    fs.delete(path, true);
    try {
        primary.runCommand("REPL DUMP " + dbName + " with ('hive.repl.dump.include.acid.tables' = 'true')");
        // Assert.fail instead of "assert false": java asserts are disabled
        // unless the JVM runs with -ea, which would mask a missing exception.
        Assert.fail("REPL DUMP should have failed because the partition directory is missing");
    } catch (CommandProcessorException e) {
        // JUnit convention: expected value first, actual value second.
        Assert.assertEquals(ErrorMsg.FILE_NOT_FOUND.getErrorCode(), e.getResponseCode());
    }
    // Clean up so subsequent tests start from a known state.
    primary.run("DROP TABLE " + dbName + ".normal");
    primary.run("drop database " + dbName);
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.
Example from the class TestSemanticAnalyzerHookLoading, method testHookLoading.
@Test
public void testHookLoading() throws Exception {
    // Verify that a configured semantic-analyzer hook is loaded and can both
    // veto a statement and annotate a table it allows to be created.
    HiveConf conf = new HiveConf(this.getClass());
    conf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, DummySemanticAnalyzerHook.class.getName());
    conf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    SessionState.start(conf);
    // Clean slate in case a previous run left the table behind.
    run("drop table testDL", conf);
    try {
        // The hook is expected to reject CTAS statements with error code 40000.
        run("create table testDL (a int) as select * from tbl2", conf);
        // Explicit throw instead of "assert false": java asserts are disabled
        // unless the JVM runs with -ea, which would mask a missing exception.
        // AssertionError is not caught by the CommandProcessorException handler below.
        throw new AssertionError("CTAS should have been rejected by the semantic analyzer hook");
    } catch (CommandProcessorException e) {
        assertEquals(40000, e.getResponseCode());
    }
    run("create table testDL (a int)", conf);
    // The hook's post-analyze step should have stamped these table properties.
    Map<String, String> params = Hive.get(conf).getTable(Warehouse.DEFAULT_DATABASE_NAME, "testDL").getParameters();
    assertEquals(DummyCreateTableHook.class.getName(), params.get("createdBy"));
    assertEquals("Open Source rocks!!", params.get("Message"));
    run("drop table testDL", conf);
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.
Example from the class TestCliDriverMethods, method testThatCliDriverDoesNotStripComments.
// Test that CliDriver does not strip comments starting with '--'.
// A shell command ("!ls ...") containing "--" must reach the shell intact and
// fail with an option error, proving the "--..." suffix was not stripped.
@Test
public void testThatCliDriverDoesNotStripComments() throws Exception {
// We need to overwrite System.out and System.err as that is what is used in ShellCmdExecutor
// So save old values...
PrintStream oldOut = System.out;
PrintStream oldErr = System.err;
// Capture stdout and stderr
ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
SessionStream out = new SessionStream(dataOut);
System.setOut(out);
ByteArrayOutputStream dataErr = new ByteArrayOutputStream();
SessionStream err = new SessionStream(dataErr);
System.setErr(err);
// Point the session's streams at the same capture buffers.
CliSessionState ss = new CliSessionState(new HiveConf());
ss.out = out;
ss.err = err;
// Save output, as you cannot print it while System.out and System.err are redirected.
String message;
String errors;
try {
CliSessionState.start(ss);
CliDriver cliDriver = new CliDriver();
// issue a command with bad options
cliDriver.processCmd("!ls --abcdefghijklmnopqrstuvwxyz123456789");
assertTrue("Comments with '--; should not have been stripped, so command should fail", false);
} catch (CommandProcessorException e) {
// this is expected to happen
} finally {
// restore System.out and System.err
System.setOut(oldOut);
System.setErr(oldErr);
}
// Decode the captured output only after the real streams are restored.
message = dataOut.toString("UTF-8");
errors = dataErr.toString("UTF-8");
// The shell's complaint about the unknown option proves "--..." reached it.
assertTrue("Comments with '--; should not have been stripped," + " so we should have got an error in the output: '" + errors + "'.", errors.contains("option"));
// message kept around in for debugging
assertNotNull(message);
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.
Example from the class CliDriver, method executeDriver.
/**
 * Execute the cli work: run an inline -e command or a -f script if one was
 * given, otherwise enter the interactive read-eval-print loop.
 *
 * @param ss CliSessionState of the CLI driver
 * @param conf HiveConf for the driver session
 * @param oproc Operation processor of the CLI invocation
 * @return response of the last command executed
 * @throws CommandProcessorException with code 3 if the -f script file cannot be opened
 * @throws Exception on other processing failures
 */
private CommandProcessorResponse executeDriver(CliSessionState ss, HiveConf conf, OptionsProcessor oproc) throws Exception {
    CliDriver cli = new CliDriver();
    cli.setHiveVariables(oproc.getHiveVariables());
    // use the specified database if specified
    cli.processSelectDatabase(ss);
    // Execute -i init files (always in silent mode)
    cli.processInitFiles(ss);
    // -e <query>: run the single command string and exit.
    if (ss.execString != null) {
        return cli.processLine(ss.execString);
    }
    // -f <file>: run the script file and exit.
    try {
        if (ss.fileName != null) {
            return cli.processFile(ss.fileName);
        }
    } catch (FileNotFoundException e) {
        System.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
        throw new CommandProcessorException(3);
    }
    if ("mr".equals(HiveConf.getVar(conf, ConfVars.HIVE_EXECUTION_ENGINE))) {
        console.printInfo(HiveConf.generateMrDeprecationWarning());
    }
    setupConsoleReader();
    // Interactive loop: accumulate continuation lines in 'prefix' until a
    // statement is terminated by an unescaped ';', then execute it.
    String line;
    CommandProcessorResponse response = new CommandProcessorResponse();
    StringBuilder prefix = new StringBuilder();
    String curDB = getFormattedDb(conf, ss);
    String curPrompt = prompt + curDB;
    String dbSpaces = spacesForString(curDB);
    while ((line = reader.readLine(curPrompt + "> ")) != null) {
        // Join continuation lines with a newline, preserving the original layout.
        if (!prefix.toString().equals("")) {
            prefix.append('\n');
        }
        // Skip whole-line comments typed at the prompt.
        if (line.trim().startsWith("--")) {
            continue;
        }
        if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
            line = prefix + line;
            response = cli.processLine(line, true);
            prefix.setLength(0); // stray empty statement (';') removed here
            // The command may have switched databases; refresh the prompt.
            curDB = getFormattedDb(conf, ss);
            curPrompt = prompt + curDB;
            dbSpaces = dbSpaces.length() == curDB.length() ? dbSpaces : spacesForString(curDB);
        } else {
            // Statement not finished yet: buffer the line and show the
            // continuation prompt, padded to line up with the primary prompt.
            prefix.append(line);
            curPrompt = prompt2 + dbSpaces;
            continue;
        }
    }
    return response;
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.
Example from the class CliDriver, method processLine.
/**
 * Processes a line of semicolon separated commands.
 *
 * @param line
 * The commands to process
 * @param allowInterrupting
 * When true the function will handle SIG_INT (Ctrl+C) by interrupting the processing and
 * returning to the prompt
 * @return the response of the last command executed
 * @throws CommandProcessorException if a command fails and error-ignoring is not configured
 */
public CommandProcessorResponse processLine(String line, boolean allowInterrupting) throws CommandProcessorException {
SignalHandler oldSignal = null;
Signal interruptSignal = null;
if (allowInterrupting) {
// Remember all threads that were running at the time we started line processing.
// Hook up the custom Ctrl+C handler while processing this line
interruptSignal = new Signal("INT");
oldSignal = Signal.handle(interruptSignal, new SignalHandler() {
// True once the first Ctrl+C has been seen; a second one kills the JVM.
private boolean interruptRequested;
@Override
public void handle(Signal signal) {
boolean initialRequest = !interruptRequested;
interruptRequested = true;
// Kill the VM on second ctrl+c
if (!initialRequest) {
console.printInfo("Exiting the JVM");
System.exit(127);
}
// Interrupt the CLI thread to stop the current statement and return
// to prompt
console.printInfo("Interrupting... Be patient, this might take some time.");
console.printInfo("Press Ctrl+C again to kill JVM");
// First, kill any running MR jobs
HadoopJobExecHelper.killRunningJobs();
TezJobExecHelper.killRunningJobs();
HiveInterruptUtils.interrupt();
}
});
}
try {
CommandProcessorResponse lastRet = new CommandProcessorResponse();
CommandProcessorResponse ret;
// we can not use "split" function directly as ";" may be quoted
List<String> commands = splitSemiColon(line);
StringBuilder command = new StringBuilder();
for (String oneCmd : commands) {
// A trailing backslash escapes the semicolon: re-attach ";" and keep
// accumulating into the same command.
if (StringUtils.endsWith(oneCmd, "\\")) {
command.append(StringUtils.chop(oneCmd) + ";");
continue;
} else {
command.append(oneCmd);
}
// Skip empty fragments (e.g. from consecutive semicolons).
if (StringUtils.isBlank(command.toString())) {
continue;
}
try {
ret = processCmd(command.toString());
lastRet = ret;
} catch (CommandProcessorException e) {
// With hive.cli.errors.ignore set, keep going after a failed command;
// otherwise propagate the failure to the caller.
boolean ignoreErrors = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIIGNOREERRORS);
if (!ignoreErrors) {
throw e;
}
} finally {
// Reset the buffer for the next command whether or not this one failed.
command.setLength(0);
}
}
return lastRet;
} finally {
// Once we are done processing the line, restore the old handler
if (oldSignal != null && interruptSignal != null) {
Signal.handle(interruptSignal, oldSignal);
}
}
}
Aggregations