Use of org.apache.hadoop.hive.common.io.SessionStream in project hive by apache.
The class CliDriver, method run.
public int run(String[] args) throws Exception {
  OptionsProcessor oproc = new OptionsProcessor();
  if (!oproc.process_stage1(args)) {
    return 1;
  }

  // NOTE: It is critical to do this here so that log4j is reinitialized
  // before any of the other core hive classes are loaded
  boolean logInitFailed = false;
  String logInitDetailMessage;
  try {
    logInitDetailMessage = LogUtils.initHiveLog4j();
  } catch (LogInitializationException e) {
    logInitFailed = true;
    logInitDetailMessage = e.getMessage();
  }

  CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
  ss.in = System.in;
  try {
    ss.out = new SessionStream(System.out, true, StandardCharsets.UTF_8.name());
    ss.info = new SessionStream(System.err, true, StandardCharsets.UTF_8.name());
    ss.err = new CachingPrintStream(System.err, true, StandardCharsets.UTF_8.name());
  } catch (UnsupportedEncodingException e) {
    return 3;
  }

  if (!oproc.process_stage2(ss)) {
    return 2;
  }

  if (!ss.getIsSilent()) {
    if (logInitFailed) {
      System.err.println(logInitDetailMessage);
    } else {
      SessionState.getConsole().printInfo(logInitDetailMessage);
    }
  }

  // set all properties specified via command line
  HiveConf conf = ss.getConf();
  for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
    conf.set((String) item.getKey(), (String) item.getValue());
    ss.getOverriddenConfigurations().put((String) item.getKey(), (String) item.getValue());
  }

  // read prompt configuration and substitute variables.
  prompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT);
  prompt = new VariableSubstitution(new HiveVariableSource() {
    @Override
    public Map<String, String> getHiveVariable() {
      return SessionState.get().getHiveVariables();
    }
  }).substitute(conf, prompt);
  prompt2 = spacesForString(prompt);

  if (HiveConf.getBoolVar(conf, ConfVars.HIVE_CLI_TEZ_SESSION_ASYNC)) {
    // Start the session in a fire-and-forget manner. When the asynchronously initialized parts of
    // the session are needed, the corresponding getters and other methods will wait as needed.
    SessionState.beginStart(ss, console);
  } else {
    SessionState.start(ss);
  }

  ss.updateThreadName();

  // Initialize metadata provider class and trimmer
  CalcitePlanner.warmup();
  // Create views registry
  HiveMaterializedViewsRegistry.get().init();

  // init metastore client cache
  if (HiveConf.getBoolVar(conf, ConfVars.MSC_CACHE_ENABLED)) {
    HiveMetaStoreClientWithLocalCache.init(conf);
  }

  // execute cli driver work
  try {
    executeDriver(ss, conf, oproc);
    return 0;
  } catch (CommandProcessorException e) {
    return e.getResponseCode();
  } finally {
    ss.resetThreadName();
    ss.close();
  }
}
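The return value of run is the CLI's exit status: 1 and 2 for failures in the two option-processing stages, 3 when the console encoding cannot be set, 0 on success, and otherwise the response code of a CommandProcessorException. A minimal launcher sketch that surfaces these codes, mirroring what CliDriver's own main method does:

// Minimal launcher sketch: run the CLI and propagate its return code as the
// process exit status. This mirrors CliDriver.main; anything beyond the
// straight pass-through of args is illustrative only.
public static void main(String[] args) throws Exception {
  int ret = new CliDriver().run(args);
  System.exit(ret);
}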
Use of org.apache.hadoop.hive.common.io.SessionStream in project hive by apache.
The class TestCliDriverMethods, method headerPrintingTestDriver.
/**
 * Runs a mocked CliDriver against the given schema and captures the output
 * that would have been printed to the user.
 *
 * @param mockSchema
 *          the Schema the mocked driver should report for the query
 * @return the stream that would have received the user-visible output
 * @throws CommandProcessorException
 *           if command processing fails (not expected in these tests)
 */
private PrintStream headerPrintingTestDriver(Schema mockSchema) throws CommandProcessorException {
  CliDriver cliDriver = new CliDriver();

  // We want the driver to try to print the header...
  Configuration conf = mock(Configuration.class);
  when(conf.getBoolean(eq(ConfVars.HIVE_CLI_PRINT_HEADER.varname), anyBoolean())).thenReturn(true);
  cliDriver.setConf(conf);

  IDriver proc = mock(IDriver.class);
  CommandProcessorResponse cpr = mock(CommandProcessorResponse.class);
  QueryState queryState = new QueryState.Builder().withGenerateNewQueryId(true).build();
  when(proc.run(anyString())).thenReturn(cpr);
  when(proc.getQueryState()).thenReturn(queryState);

  // and then see what happens based on the provided schema
  when(proc.getSchema()).thenReturn(mockSchema);

  CliSessionState mockSS = mock(CliSessionState.class);
  SessionStream mockOut = mock(SessionStream.class);
  mockSS.out = mockOut;

  cliDriver.processLocalCmd("use default;", proc, mockSS);
  return mockOut;
}
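A test using this helper might look like the sketch below: stub a one-column schema and verify the column name reached the session's output stream. The column name and the assertion that headers are emitted via print() are illustrative assumptions, not taken from this page.

// Hypothetical usage of headerPrintingTestDriver: stub a schema with a single
// column and check that its name was printed as a header. The field name
// ("col1") and the print() verification are assumptions for illustration.
@Test
public void testHeaderIsPrintedWhenSchemaPresent() throws CommandProcessorException {
  Schema mockSchema = mock(Schema.class);
  List<FieldSchema> fieldSchemas = new ArrayList<>();
  fieldSchemas.add(new FieldSchema("col1", "string", "test column"));
  when(mockSchema.getFieldSchemas()).thenReturn(fieldSchemas);

  PrintStream out = headerPrintingTestDriver(mockSchema);

  // The header row should contain the column name.
  verify(out, times(1)).print("col1");
}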
Use of org.apache.hadoop.hive.common.io.SessionStream in project hive by apache.
The class HCatCli, method main.
@SuppressWarnings("static-access")
public static void main(String[] args) {
  try {
    LogUtils.initHiveLog4j();
  } catch (LogInitializationException e) {
  }

  LOG = LoggerFactory.getLogger(HCatCli.class);

  CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
  ss.in = System.in;
  try {
    ss.out = new SessionStream(System.out, true, "UTF-8");
    ss.err = new SessionStream(System.err, true, "UTF-8");
  } catch (UnsupportedEncodingException e) {
    System.exit(1);
  }

  HiveConf conf = ss.getConf();
  HiveConf.setVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());

  String engine = HiveConf.getVar(conf, ConfVars.HIVE_EXECUTION_ENGINE);
  final String MR_ENGINE = "mr";
  if (!MR_ENGINE.equalsIgnoreCase(engine)) {
    HiveConf.setVar(conf, ConfVars.HIVE_EXECUTION_ENGINE, MR_ENGINE);
    LOG.info("Forcing " + ConfVars.HIVE_EXECUTION_ENGINE + " to " + MR_ENGINE);
  }

  Options options = new Options();
  // -e 'quoted-query-string'
  options.addOption(OptionBuilder.hasArg().withArgName("exec")
      .withDescription("hcat command given from command line").create('e'));
  // -f <query-file>
  options.addOption(OptionBuilder.hasArg().withArgName("file")
      .withDescription("hcat commands in file").create('f'));
  // -g
  options.addOption(OptionBuilder.hasArg().withArgName("group")
      .withDescription("group for the db/table specified in CREATE statement").create('g'));
  // -p
  options.addOption(OptionBuilder.hasArg().withArgName("perms")
      .withDescription("permissions for the db/table specified in CREATE statement").create('p'));
  // -D
  options.addOption(OptionBuilder.hasArgs(2).withArgName("property=value").withValueSeparator()
      .withDescription("use hadoop value for given property").create('D'));
  // [-h|--help]
  options.addOption(new Option("h", "help", false, "Print help information"));

  Parser parser = new GnuParser();
  CommandLine cmdLine = null;
  try {
    cmdLine = parser.parse(options, args);
  } catch (ParseException e) {
    printUsage(options, System.err);
    // Note, we print to System.err instead of ss.err, because if we can't parse our
    // commandline, we haven't even begun, and therefore cannot be expected to have
    // reasonably constructed or started the SessionState.
    System.exit(1);
  }

  // -D : process these first, so that we can instantiate SessionState appropriately.
  setConfProperties(conf, cmdLine.getOptionProperties("D"));

  // -h
  if (cmdLine.hasOption('h')) {
    printUsage(options, ss.out);
    sysExit(ss, 0);
  }

  // -e
  String execString = (String) cmdLine.getOptionValue('e');
  // -f
  String fileName = (String) cmdLine.getOptionValue('f');
  if (execString != null && fileName != null) {
    ss.err.println("The '-e' and '-f' options cannot be specified simultaneously");
    printUsage(options, ss.err);
    sysExit(ss, 1);
  }

  // -p
  String perms = (String) cmdLine.getOptionValue('p');
  if (perms != null) {
    validatePermissions(ss, conf, perms);
  }

  // -g
  String grp = (String) cmdLine.getOptionValue('g');
  if (grp != null) {
    conf.set(HCatConstants.HCAT_GROUP, grp);
  }

  // Now that the properties are in, we can instantiate SessionState.
  SessionState.start(ss);

  // all done parsing, let's run stuff!
  if (execString != null) {
    // remove the leading and trailing quotes. hcatalog can miss on some cases.
    if (execString.length() > 1 && execString.startsWith("\"") && execString.endsWith("\"")) {
      execString = execString.substring(1, execString.length() - 1);
    }
    sysExit(ss, processLine(execString));
  }

  try {
    if (fileName != null) {
      sysExit(ss, processFile(fileName));
    }
  } catch (FileNotFoundException e) {
    ss.err.println("Input file not found. (" + e.getMessage() + ")");
    sysExit(ss, 1);
  } catch (IOException e) {
    ss.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
    sysExit(ss, 1);
  }
  // neither -e nor -f was given: print usage and exit with an error
  printUsage(options, ss.err);
  sysExit(ss, 1);
}
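The setConfProperties helper invoked for the -D options is not shown on this page. A plausible sketch, under the assumption that it simply copies each parsed key=value pair into the HiveConf before SessionState is started:

// Sketch of the setConfProperties helper used above (assumption: it copies
// each -D property, as parsed by commons-cli, straight into the HiveConf).
private static void setConfProperties(HiveConf conf, Properties props) {
  for (Map.Entry<Object, Object> e : props.entrySet()) {
    conf.set((String) e.getKey(), (String) e.getValue());
  }
}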
Use of org.apache.hadoop.hive.common.io.SessionStream in project hive by apache.
The class HiveCommandOperation, method setupSessionIO.
private void setupSessionIO(SessionState sessionState) {
  try {
    log.info("Putting temp output to file " + sessionState.getTmpOutputFile()
        + " and error output to file " + sessionState.getTmpErrOutputFile());
    // hive server's session input stream is not used
    sessionState.in = null;
    // open a per-session file in auto-flush mode for writing temp results and tmp error output
    sessionState.out =
        new SessionStream(new FileOutputStream(sessionState.getTmpOutputFile()), true, StandardCharsets.UTF_8.name());
    sessionState.err =
        new SessionStream(new FileOutputStream(sessionState.getTmpErrOutputFile()), true, StandardCharsets.UTF_8.name());
  } catch (IOException e) {
    log.error("Error in creating temp output file", e);
    // Close file streams to avoid resource leaking
    ServiceUtils.cleanup(log, parentSession.getSessionState().out, parentSession.getSessionState().err);
    try {
      sessionState.in = null;
      sessionState.out = new SessionStream(System.out, true, StandardCharsets.UTF_8.name());
      sessionState.err = new SessionStream(System.err, true, StandardCharsets.UTF_8.name());
    } catch (UnsupportedEncodingException ee) {
      // log the encoding failure itself, not the earlier IOException
      log.error("Error creating PrintStream", ee);
      sessionState.out = null;
      sessionState.err = null;
    }
  }
}
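The ServiceUtils.cleanup call in the error path above closes the previous session streams before falling back to the process streams. Its body is not shown here; a sketch in the spirit of the familiar quiet-close idiom (the exact signature and logging level are assumptions):

// Quiet-close helper sketch in the style of ServiceUtils.cleanup: close each
// stream and log failures rather than propagating them.
public static void cleanup(Logger log, java.io.Closeable... closeables) {
  for (java.io.Closeable c : closeables) {
    if (c != null) {
      try {
        c.close();
      } catch (IOException e) {
        log.debug("Exception in closing " + c, e);
      }
    }
  }
}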