Use of org.apache.hadoop.hive.common.LogUtils.LogInitializationException in the project SQLWindowing (by hbutani): class WindowingHiveCliDriver, method run.
/**
 * Command-line entry point for the windowing-enabled Hive CLI: parses options,
 * reinitializes log4j, wires up the CliSessionState, optionally connects to a
 * remote Hive server, installs the windowing extensions, and then executes
 * either -e/-f input or an interactive read-eval-print loop.
 *
 * @param args raw command-line arguments
 * @return process exit code: 0 on success, 1/2 for option-parsing failures,
 *         3 for stream-encoding or input-file errors, otherwise the result of
 *         the last processed command line
 * @throws Exception propagated from session setup or command processing
 */
public static int run(String[] args) throws Exception {
OptionsProcessor oproc = new OptionsProcessor();
if (!oproc.process_stage1(args)) {
return 1;
}
// NOTE: It is critical to do this here so that log4j is reinitialized
// before any of the other core hive classes are loaded
boolean logInitFailed = false;
String logInitDetailMessage;
try {
logInitDetailMessage = LogUtils.initHiveLog4j();
} catch (LogInitializationException e) {
// Remember the failure; it is reported later, once we know whether the
// session is running in silent mode.
logInitFailed = true;
logInitDetailMessage = e.getMessage();
}
// Wire the session streams to the process streams, forcing UTF-8 output.
CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
ss.in = System.in;
try {
ss.out = new PrintStream(System.out, true, "UTF-8");
ss.info = new PrintStream(System.err, true, "UTF-8");
ss.err = new CachingPrintStream(System.err, true, "UTF-8");
} catch (UnsupportedEncodingException e) {
return 3;
}
if (!oproc.process_stage2(ss)) {
return 2;
}
// Report the deferred log4j initialization outcome unless running silent.
if (!ss.getIsSilent()) {
if (logInitFailed) {
System.err.println(logInitDetailMessage);
} else {
SessionState.getConsole().printInfo(logInitDetailMessage);
}
}
// set all properties specified via command line
HiveConf conf = ss.getConf();
for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
conf.set((String) item.getKey(), (String) item.getValue());
ss.getOverriddenConfigurations().put((String) item.getKey(), (String) item.getValue());
}
SessionState.start(ss);
// connect to Hive Server
if (ss.getHost() != null) {
ss.connect();
if (ss.isRemoteMode()) {
// Prefix the prompt with host:port and make the continuation prompt
// a run of spaces of the same length so input lines stay aligned.
prompt = "[" + ss.getHost() + ':' + ss.getPort() + "] " + prompt;
char[] spaces = new char[prompt.length()];
Arrays.fill(spaces, ' ');
prompt2 = new String(spaces);
}
}
// CLI remote mode is a thin client: only load auxJars in local mode
if (!ss.isRemoteMode() && !ShimLoader.getHadoopShims().usesJobShell()) {
// hadoop-20 and above - we need to augment classpath using hiveconf
// components
// see also: code in ExecDriver.java
ClassLoader loader = conf.getClassLoader();
String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
if (StringUtils.isNotBlank(auxJars)) {
loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
}
conf.setClassLoader(loader);
Thread.currentThread().setContextClassLoader(loader);
}
WindowingHiveCliDriver cli = new WindowingHiveCliDriver();
cli.setHiveVariables(oproc.getHiveVariables());
// use the specified database if specified
cli.processSelectDatabase(ss);
// Execute -i init files (always in silent mode)
cli.processInitFiles(ss);
// Install the windowing extensions before any query is processed.
cli.setupWindowing();
// -e: execute the single command string and exit with its result.
if (ss.execString != null) {
return cli.processLine(ss.execString);
}
// -f: execute the script file and exit with its result.
try {
if (ss.fileName != null) {
return cli.processFile(ss.fileName);
}
} catch (FileNotFoundException e) {
System.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
return 3;
}
// Interactive mode: set up JLine with command completion.
ConsoleReader reader = new ConsoleReader();
reader.setBellEnabled(false);
// true)));
for (Completor completor : getCommandCompletor()) {
reader.addCompletor(completor);
}
String line;
// Persist command history in ~/.hivehistory when the home directory exists.
final String HISTORYFILE = ".hivehistory";
String historyDirectory = System.getProperty("user.home");
try {
if ((new File(historyDirectory)).exists()) {
String historyFile = historyDirectory + File.separator + HISTORYFILE;
reader.setHistory(new History(new File(historyFile)));
} else {
System.err.println("WARNING: Directory for Hive history file: " + historyDirectory + " does not exist. History will not be available during this session.");
}
} catch (Exception e) {
// History is a convenience; continue the session without it.
System.err.println("WARNING: Encountered an error while trying to initialize Hive's " + "history file. History will not be available during this session.");
System.err.println(e.getMessage());
}
int ret = 0;
// 'prefix' accumulates the lines of a multi-line statement until a line
// ending in an unescaped ';' completes the command.
String prefix = "";
String curDB = getFormattedDb(conf, ss);
String curPrompt = prompt + curDB;
String dbSpaces = spacesForString(curDB);
while ((line = reader.readLine(curPrompt + "> ")) != null) {
if (!prefix.equals("")) {
prefix += '\n';
}
if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
line = prefix + line;
ret = cli.processLine(line, true);
prefix = "";
// The command may have switched databases; refresh the prompt text.
curDB = getFormattedDb(conf, ss);
curPrompt = prompt + curDB;
dbSpaces = dbSpaces.length() == curDB.length() ? dbSpaces : spacesForString(curDB);
} else {
// Statement continues on the next line; show the continuation prompt.
prefix = prefix + line;
curPrompt = prompt2 + dbSpaces;
continue;
}
}
ss.close();
return ret;
}
Use of org.apache.hadoop.hive.common.LogUtils.LogInitializationException in the project hive (by apache): class HCatCli, method main.
/**
 * HCatalog CLI entry point: initializes logging and the session state, forces
 * the MR execution engine, parses command-line options, and dispatches to
 * -e (command string) or -f (script file) execution. This method terminates
 * the JVM via System.exit/sysExit rather than returning normally.
 *
 * @param args raw command-line arguments
 */
@SuppressWarnings("static-access")
public static void main(String[] args) {
try {
LogUtils.initHiveLog4j();
} catch (LogInitializationException e) {
// Best-effort: a logging-init failure should not prevent the CLI from running.
}
// Logger is created only after log4j has (possibly) been reinitialized above.
LOG = LoggerFactory.getLogger(HCatCli.class);
CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
ss.in = System.in;
try {
ss.out = new SessionStream(System.out, true, "UTF-8");
ss.err = new SessionStream(System.err, true, "UTF-8");
} catch (UnsupportedEncodingException e) {
System.exit(1);
}
HiveConf conf = ss.getConf();
// HCatalog requires its own semantic analyzer hook and the MR engine.
HiveConf.setVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());
String engine = HiveConf.getVar(conf, ConfVars.HIVE_EXECUTION_ENGINE);
final String MR_ENGINE = "mr";
if (!MR_ENGINE.equalsIgnoreCase(engine)) {
HiveConf.setVar(conf, ConfVars.HIVE_EXECUTION_ENGINE, MR_ENGINE);
LOG.info("Forcing " + ConfVars.HIVE_EXECUTION_ENGINE + " to " + MR_ENGINE);
}
Options options = new Options();
// -e 'quoted-query-string'
options.addOption(OptionBuilder.hasArg().withArgName("exec").withDescription("hcat command given from command line").create('e'));
// -f <query-file>
options.addOption(OptionBuilder.hasArg().withArgName("file").withDescription("hcat commands in file").create('f'));
// -g
options.addOption(OptionBuilder.hasArg().withArgName("group").withDescription("group for the db/table specified in CREATE statement").create('g'));
// -p
options.addOption(OptionBuilder.hasArg().withArgName("perms").withDescription("permissions for the db/table specified in CREATE statement").create('p'));
// -D
options.addOption(OptionBuilder.hasArgs(2).withArgName("property=value").withValueSeparator().withDescription("use hadoop value for given property").create('D'));
// [-h|--help]
options.addOption(new Option("h", "help", false, "Print help information"));
Parser parser = new GnuParser();
CommandLine cmdLine = null;
try {
cmdLine = parser.parse(options, args);
} catch (ParseException e) {
printUsage(options, System.err);
// Note, we print to System.err instead of ss.err, because if we can't parse our
// commandline, we haven't even begun, and therefore cannot be expected to have
// reasonably constructed or started the SessionState.
System.exit(1);
}
// -D : process these first, so that we can instantiate SessionState appropriately.
setConfProperties(conf, cmdLine.getOptionProperties("D"));
// -h
if (cmdLine.hasOption('h')) {
printUsage(options, ss.out);
sysExit(ss, 0);
}
// -e
String execString = (String) cmdLine.getOptionValue('e');
// -f
String fileName = (String) cmdLine.getOptionValue('f');
// -e and -f are mutually exclusive input sources.
if (execString != null && fileName != null) {
ss.err.println("The '-e' and '-f' options cannot be specified simultaneously");
printUsage(options, ss.err);
sysExit(ss, 1);
}
// -p
String perms = (String) cmdLine.getOptionValue('p');
if (perms != null) {
validatePermissions(ss, conf, perms);
}
// -g
String grp = (String) cmdLine.getOptionValue('g');
if (grp != null) {
conf.set(HCatConstants.HCAT_GROUP, grp);
}
// Now that the properties are in, we can instantiate SessionState.
SessionState.start(ss);
// all done parsing, let's run stuff!
if (execString != null) {
// remove the leading and trailing quotes. hcatalog can miss on some cases.
if (execString.length() > 1 && execString.startsWith("\"") && execString.endsWith("\"")) {
execString = execString.substring(1, execString.length() - 1);
}
sysExit(ss, processLine(execString));
}
try {
if (fileName != null) {
sysExit(ss, processFile(fileName));
}
} catch (FileNotFoundException e) {
ss.err.println("Input file not found. (" + e.getMessage() + ")");
sysExit(ss, 1);
} catch (IOException e) {
ss.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
sysExit(ss, 1);
}
// Neither -e nor -f was given: print usage and exit with an error code.
// -h
printUsage(options, ss.err);
sysExit(ss, 1);
}
Use of org.apache.hadoop.hive.common.LogUtils.LogInitializationException in the project hive (by apache): class TestHiveHistory, method testSimpleQuery.
/**
 * Verifies that running a simple query with session history enabled produces
 * a Hive history file containing exactly one job entry, one task entry, and
 * a recorded task count of 1 for the query.
 */
@Test
public void testSimpleQuery() {
// Constructing LineageInfo forces its class to load early; kept from the
// original test — presumably needed before other core classes load. TODO confirm.
new LineageInfo();
try {
// before any of the other core hive classes are loaded
try {
LogUtils.initHiveLog4j();
} catch (LogInitializationException e) {
// Best-effort: the test can proceed without reinitialized logging.
}
HiveConf hconf = new HiveConf(SessionState.class);
hconf.setBoolVar(ConfVars.HIVE_SESSION_HISTORY_ENABLED, true);
CliSessionState ss = new CliSessionState(hconf);
ss.in = System.in;
try {
ss.out = new SessionStream(System.out, true, "UTF-8");
ss.err = new SessionStream(System.err, true, "UTF-8");
} catch (UnsupportedEncodingException e) {
// Fail this test instead of calling System.exit, which would kill the
// entire test-runner JVM and abort unrelated tests.
fail("UTF-8 encoding unsupported for session streams: " + e.getMessage());
}
SessionState.start(ss);
String cmd = "select a.key+1 from src a";
// NOTE(review): the driver is built from the class-level 'conf', not the
// 'hconf' configured above; history appears to be enabled via the started
// SessionState. Verify this is intentional.
IDriver d = DriverFactory.newDriver(conf);
d.run(cmd);
HiveHistoryViewer hv = new HiveHistoryViewer(SessionState.get().getHiveHistory().getHistFileName());
Map<String, QueryInfo> jobInfoMap = hv.getJobInfoMap();
Map<String, TaskInfo> taskInfoMap = hv.getTaskInfoMap();
if (jobInfoMap.size() != 1) {
fail("jobInfo Map size not 1");
}
if (taskInfoMap.size() != 1) {
// Fixed copy-paste error: this check is for the task map, not the job map.
fail("taskInfo Map size not 1");
}
cmd = (String) jobInfoMap.keySet().toArray()[0];
QueryInfo ji = jobInfoMap.get(cmd);
if (!ji.hm.get(Keys.QUERY_NUM_TASKS.name()).equals("1")) {
fail("Wrong number of tasks");
}
} catch (Exception e) {
// Surface the cause in the failure message instead of a bare "Failed".
e.printStackTrace();
fail("Failed: " + e.getMessage());
}
}
Use of org.apache.hadoop.hive.common.LogUtils.LogInitializationException in the project hive (by apache): class CliDriver, method run.
/**
 * Hive CLI entry point: parses command-line options, reinitializes log4j,
 * wires up the CliSessionState, builds the prompt from configuration,
 * starts the session (optionally asynchronously for Tez), warms up planner
 * and metadata caches, and hands off to executeDriver.
 *
 * @param args raw command-line arguments
 * @return process exit code: 0 on success, 1/2 for option-parsing failures,
 *         3 for stream-encoding errors, otherwise the response code of the
 *         CommandProcessorException thrown by executeDriver
 * @throws Exception propagated from session setup or driver execution
 */
public int run(String[] args) throws Exception {
OptionsProcessor oproc = new OptionsProcessor();
if (!oproc.process_stage1(args)) {
return 1;
}
// NOTE: It is critical to do this here so that log4j is reinitialized
// before any of the other core hive classes are loaded
boolean logInitFailed = false;
String logInitDetailMessage;
try {
logInitDetailMessage = LogUtils.initHiveLog4j();
} catch (LogInitializationException e) {
// Remember the failure; it is reported later, once we know whether the
// session is running in silent mode.
logInitFailed = true;
logInitDetailMessage = e.getMessage();
}
// Wire the session streams to the process streams, forcing UTF-8 output.
CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
ss.in = System.in;
try {
ss.out = new SessionStream(System.out, true, StandardCharsets.UTF_8.name());
ss.info = new SessionStream(System.err, true, StandardCharsets.UTF_8.name());
ss.err = new CachingPrintStream(System.err, true, StandardCharsets.UTF_8.name());
} catch (UnsupportedEncodingException e) {
return 3;
}
if (!oproc.process_stage2(ss)) {
return 2;
}
// Report the deferred log4j initialization outcome unless running silent.
if (!ss.getIsSilent()) {
if (logInitFailed) {
System.err.println(logInitDetailMessage);
} else {
SessionState.getConsole().printInfo(logInitDetailMessage);
}
}
// set all properties specified via command line
HiveConf conf = ss.getConf();
for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
conf.set((String) item.getKey(), (String) item.getValue());
ss.getOverriddenConfigurations().put((String) item.getKey(), (String) item.getValue());
}
// read prompt configuration and substitute variables.
prompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT);
prompt = new VariableSubstitution(new HiveVariableSource() {
@Override
public Map<String, String> getHiveVariable() {
return SessionState.get().getHiveVariables();
}
}).substitute(conf, prompt);
// Continuation prompt is a same-length run of spaces so input stays aligned.
prompt2 = spacesForString(prompt);
if (HiveConf.getBoolVar(conf, ConfVars.HIVE_CLI_TEZ_SESSION_ASYNC)) {
// Start the session in a fire-and-forget manner. When the asynchronously initialized parts of
// the session are needed, the corresponding getters and other methods will wait as needed.
SessionState.beginStart(ss, console);
} else {
SessionState.start(ss);
}
ss.updateThreadName();
// Initialize metadata provider class and trimmer
CalcitePlanner.warmup();
// Create views registry
HiveMaterializedViewsRegistry.get().init();
// init metastore client cache
if (HiveConf.getBoolVar(conf, ConfVars.MSC_CACHE_ENABLED)) {
HiveMetaStoreClientWithLocalCache.init(conf);
}
// execute cli driver work
try {
executeDriver(ss, conf, oproc);
return 0;
} catch (CommandProcessorException e) {
// Map command-processing failures onto the process exit code.
return e.getResponseCode();
} finally {
// Always restore the thread name and close the session, even on failure.
ss.resetThreadName();
ss.close();
}
}
Use of org.apache.hadoop.hive.common.LogUtils.LogInitializationException in the project hive (by apache): class ClearDanglingScratchDir, method main.
/**
 * Command-line entry point for the scratch-directory cleanup tool. Parses the
 * options, prints help on -h, honors -r (dry run), -v (verbose) and -s
 * (explicit scratch root), then runs the cleanup.
 *
 * @param args raw command-line arguments
 * @throws Exception propagated from option parsing or the cleanup run
 */
public static void main(String[] args) throws Exception {
// Best-effort log4j setup; a failure here is non-fatal for this tool.
try {
LogUtils.initHiveLog4j();
} catch (LogInitializationException ignored) {
// Proceed with whatever logging configuration is already in place.
}
Options options = createOptions();
CommandLine cmdLine = new GnuParser().parse(options, args);
// -h: print usage and exit without doing any work.
if (cmdLine.hasOption('h')) {
new HelpFormatter().printHelp("cleardanglingscratchdir" + " (clear scratch dir left behind by dead HiveCli or HiveServer2)", options);
return;
}
// -r: report what would be removed, but do not delete anything.
boolean dryRun = cmdLine.hasOption("r");
if (dryRun) {
SessionState.getConsole().printInfo("dry-run mode on");
}
// -v: verbose output.
boolean verbose = cmdLine.hasOption("v");
HiveConf conf = new HiveConf();
// -s overrides the scratch root; otherwise fall back to the configured one.
String rootHDFSDir = cmdLine.hasOption("s") ? cmdLine.getOptionValue("s") : HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR);
ClearDanglingScratchDir cleaner = new ClearDanglingScratchDir(dryRun, verbose, true, rootHDFSDir, conf);
cleaner.run();
}
Aggregations