Use of org.apache.hadoop.hive.cli.CliSessionState in project SQLWindowing by hbutani: the class WindowingHiveCliDriver, method run.
/**
 * CLI entry point: parses command-line options, initializes log4j and the Hive
 * session state, optionally connects to a remote Hive server, sets up
 * windowing support, and then either executes a one-shot command string /
 * script file or enters an interactive read-eval loop.
 *
 * @param args raw command-line arguments
 * @return process exit code: 0 on success, 1 if stage-1 option parsing fails,
 *         2 if stage-2 option parsing fails, 3 on encoding or input-file errors
 * @throws Exception propagated from session connect/processing steps
 */
public static int run(String[] args) throws Exception {
    OptionsProcessor oproc = new OptionsProcessor();
    if (!oproc.process_stage1(args)) {
        return 1;
    }
    // NOTE: It is critical to do this here so that log4j is reinitialized
    // before any of the other core hive classes are loaded
    boolean logInitFailed = false;
    String logInitDetailMessage;
    try {
        logInitDetailMessage = LogUtils.initHiveLog4j();
    } catch (LogInitializationException e) {
        logInitFailed = true;
        logInitDetailMessage = e.getMessage();
    }
    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
        // Force UTF-8 on all session streams; stderr goes through a
        // CachingPrintStream so errors can be replayed later.
        ss.out = new PrintStream(System.out, true, "UTF-8");
        ss.info = new PrintStream(System.err, true, "UTF-8");
        ss.err = new CachingPrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        return 3;
    }
    if (!oproc.process_stage2(ss)) {
        return 2;
    }
    // Report how log4j initialization went, unless running silent.
    if (!ss.getIsSilent()) {
        if (logInitFailed) {
            System.err.println(logInitDetailMessage);
        } else {
            SessionState.getConsole().printInfo(logInitDetailMessage);
        }
    }
    // set all properties specified via command line
    HiveConf conf = ss.getConf();
    for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
        conf.set((String) item.getKey(), (String) item.getValue());
        ss.getOverriddenConfigurations().put((String) item.getKey(), (String) item.getValue());
    }
    SessionState.start(ss);
    // connect to Hive Server
    if (ss.getHost() != null) {
        ss.connect();
        if (ss.isRemoteMode()) {
            // Prefix the prompt with "[host:port] " and pad the continuation
            // prompt (prompt2) to the same width so wrapped lines align.
            // NOTE(review): prompt/prompt2 are statics declared outside this
            // view (presumably inherited from CliDriver) — confirm.
            prompt = "[" + ss.getHost() + ':' + ss.getPort() + "] " + prompt;
            char[] spaces = new char[prompt.length()];
            Arrays.fill(spaces, ' ');
            prompt2 = new String(spaces);
        }
    }
    // CLI remote mode is a thin client: only load auxJars in local mode
    if (!ss.isRemoteMode() && !ShimLoader.getHadoopShims().usesJobShell()) {
        // hadoop-20 and above - we need to augment classpath using hiveconf
        // components
        // see also: code in ExecDriver.java
        ClassLoader loader = conf.getClassLoader();
        String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
        if (StringUtils.isNotBlank(auxJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        }
        conf.setClassLoader(loader);
        Thread.currentThread().setContextClassLoader(loader);
    }
    WindowingHiveCliDriver cli = new WindowingHiveCliDriver();
    cli.setHiveVariables(oproc.getHiveVariables());
    // use the specified database if specified
    cli.processSelectDatabase(ss);
    // Execute -i init files (always in silent mode)
    cli.processInitFiles(ss);
    cli.setupWindowing();
    // One-shot execution modes: -e <query string> or -f <script file>.
    if (ss.execString != null) {
        return cli.processLine(ss.execString);
    }
    try {
        if (ss.fileName != null) {
            return cli.processFile(ss.fileName);
        }
    } catch (FileNotFoundException e) {
        System.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
        return 3;
    }
    // Interactive mode: set up jline with tab-completion.
    ConsoleReader reader = new ConsoleReader();
    reader.setBellEnabled(false);
    for (Completor completor : getCommandCompletor()) {
        reader.addCompletor(completor);
    }
    String line;
    // Persist command history to ~/.hivehistory; history is best-effort and
    // any failure only disables it for this session.
    final String HISTORYFILE = ".hivehistory";
    String historyDirectory = System.getProperty("user.home");
    try {
        if ((new File(historyDirectory)).exists()) {
            String historyFile = historyDirectory + File.separator + HISTORYFILE;
            reader.setHistory(new History(new File(historyFile)));
        } else {
            System.err.println("WARNING: Directory for Hive history file: " + historyDirectory + " does not exist. History will not be available during this session.");
        }
    } catch (Exception e) {
        System.err.println("WARNING: Encountered an error while trying to initialize Hive's " + "history file. History will not be available during this session.");
        System.err.println(e.getMessage());
    }
    int ret = 0;
    // prefix accumulates a multi-line statement until a terminating ";".
    String prefix = "";
    String curDB = getFormattedDb(conf, ss);
    String curPrompt = prompt + curDB;
    String dbSpaces = spacesForString(curDB);
    while ((line = reader.readLine(curPrompt + "> ")) != null) {
        if (!prefix.equals("")) {
            prefix += '\n';
        }
        // A line ending in ";" (but not an escaped "\;") completes the
        // statement; otherwise keep buffering and show the continuation prompt.
        if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
            line = prefix + line;
            ret = cli.processLine(line, true);
            prefix = "";
            // Refresh the prompt in case the current database changed.
            curDB = getFormattedDb(conf, ss);
            curPrompt = prompt + curDB;
            dbSpaces = dbSpaces.length() == curDB.length() ? dbSpaces : spacesForString(curDB);
        } else {
            prefix = prefix + line;
            curPrompt = prompt2 + dbSpaces;
            continue;
        }
    }
    ss.close();
    return ret;
}
Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache: the class TestDbNotificationListener, method connectToMetastore.
/**
 * One-time suite setup: configures a metastore with the DbNotificationListener
 * (transactional) and a mock event listener, shortens the notification
 * cleaner's sleep interval via reflection, then opens a CLI session, a
 * metastore client, a driver, a message deserializer, and the replication-v1
 * compatibility rule.
 *
 * @throws Exception if reflection lookup or any client/driver setup fails
 */
@SuppressWarnings("rawtypes")
@BeforeClass
public static void connectToMetastore() throws Exception {
    HiveConf conf = new HiveConf();
    conf.setVar(HiveConf.ConfVars.METASTORE_TRANSACTIONAL_EVENT_LISTENERS,
        DbNotificationListener.class.getName());
    conf.setVar(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS,
        MockMetaStoreEventListener.class.getName());
    conf.setVar(HiveConf.ConfVars.METASTORE_EVENT_DB_LISTENER_TTL, EVENTS_TTL + "s");
    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
    conf.setBoolVar(HiveConf.ConfVars.FIRE_EVENTS_FOR_DML, true);
    conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
    conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
        DummyRawStoreFailEvent.class.getName());
    // The cleaner thread is a nested class of DbNotificationListener; locate
    // it by name and shorten its static sleepTime so tests run quickly.
    Class listenerClass =
        Class.forName("org.apache.hive.hcatalog.listener.DbNotificationListener");
    for (Class nested : listenerClass.getDeclaredClasses()) {
        if (nested.getName().endsWith("CleanerThread")) {
            Field sleepTime = nested.getDeclaredField("sleepTime");
            sleepTime.setAccessible(true);
            // Static field, hence the null receiver; value is milliseconds.
            sleepTime.set(null, CLEANUP_SLEEP_TIME * 1000);
        }
    }
    conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
        "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
    SessionState.start(new CliSessionState(conf));
    msClient = new HiveMetaStoreClient(conf);
    driver = DriverFactory.newDriver(conf);
    md = MessageFactory.getInstance().getDeserializer();
    bcompat = new ReplicationV1CompatRule(msClient, conf,
        testsToSkipForReplV1BackwardCompatTesting);
}
Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache: the class TestSequenceFileReadWrite, method setup.
/**
 * Per-test setup: creates a unique scratch directory, configures an embedded
 * Hive session whose warehouse lives under that directory, generates a small
 * three-row CSV input file, and starts a local-mode Pig server.
 *
 * @throws Exception if test-data creation or any Hive/Pig setup step fails
 * @throws RuntimeException if the warehouse directory cannot be created
 */
@Before
public void setup() throws Exception {
    // Unique per-run scratch dir under the system temp directory.
    dataDir = new File(System.getProperty("java.io.tmpdir") + File.separator
        + TestSequenceFileReadWrite.class.getCanonicalName() + "-" + System.currentTimeMillis());
    warehouseDir = HCatUtil.makePathASafeFileName(dataDir + File.separator + "warehouse");
    inputFileName = HCatUtil.makePathASafeFileName(dataDir + File.separator + "input.data");
    // Build the HiveConf once (the original constructed it twice; the first
    // instance was immediately overwritten and therefore dead code).
    hiveConf = new HiveConf(this.getClass());
    // Disable pre/post exec hooks and concurrency for a hermetic test session.
    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, warehouseDir);
    hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
        "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
    driver = DriverFactory.newDriver(hiveConf);
    SessionState.start(new CliSessionState(hiveConf));
    if (!(new File(warehouseDir).mkdirs())) {
        throw new RuntimeException("Could not create " + warehouseDir);
    }
    // Test data: rows of the form "<i>,a<i>,b<i>".
    int numRows = 3;
    input = new String[numRows];
    for (int i = 0; i < numRows; i++) {
        String col1 = "a" + i;
        String col2 = "b" + i;
        input[i] = i + "," + col1 + "," + col2;
    }
    HcatTestUtils.createTestDataFile(inputFileName, input);
    server = new PigServer(ExecType.LOCAL);
}
Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache: the class TestMetaStoreMetrics, method before.
/**
 * One-time suite setup: enables metastore metrics, starts a metastore, and
 * opens a CLI session plus a driver against it.
 *
 * @throws Exception if the metastore fails to start
 */
@BeforeClass
public static void before() throws Exception {
    hiveConf = new HiveConf(TestMetaStoreMetrics.class);
    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_METRICS, true);
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
    final String sqlStdAuthorizer =
        "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory";
    hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, sqlStdAuthorizer);
    // Opens one HMS connection.
    MetaStoreTestUtils.startMetaStoreWithRetry(hiveConf);
    // Opens a second HMS connection (via Hive.get()).
    SessionState.start(new CliSessionState(hiveConf));
    driver = DriverFactory.newDriver(hiveConf);
}
Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache: the class TestMetastoreVersion, method testVersionCompatibility.
/**
 * Stores a higher schema version in the metastore and verifies that Hive
 * still works with the compatible version.
 *
 * @throws Exception on any session, driver, or metastore failure
 */
public void testVersionCompatibility() throws Exception {
    // Phase 1: with schema verification off, run a query so the session and
    // metastore come up normally.
    System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false");
    hiveConf = new HiveConf(this.getClass());
    SessionState.start(new CliSessionState(hiveConf));
    driver = DriverFactory.newDriver(hiveConf);
    driver.run("show tables");
    // Phase 2: enable verification, record a higher (but compatible) version,
    // and confirm queries still succeed.
    System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "true");
    hiveConf = new HiveConf(this.getClass());
    setVersion(hiveConf, "3.9000.0");
    SessionState.start(new CliSessionState(hiveConf));
    driver = DriverFactory.newDriver(hiveConf);
    CommandProcessorResponse response = driver.run("show tables");
    assertEquals(0, response.getResponseCode());
}
Aggregations