use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.
the class TestReplicationScenarios method setUpBeforeClass.
// If verifySetup is set to true, all the test setup we do will perform additional
// verifications as well, which is useful to verify that our setup occurred
// correctly when developing and debugging tests. These verifications, however,
// do not test any new functionality for replication and thus are not relevant
// to testing replication itself. For steady state, we want this to be false.
@BeforeClass
public static void setUpBeforeClass() throws Exception {
  hconf = new HiveConf(TestReplicationScenarios.class);
  String metastoreUri = System.getProperty("test." + HiveConf.ConfVars.METASTOREURIS.varname);
  if (metastoreUri != null) {
    hconf.setVar(HiveConf.ConfVars.METASTOREURIS, metastoreUri);
    useExternalMS = true;
    return;
  }
  // Turn on the DB notification listener on the metastore.
  hconf.setVar(HiveConf.ConfVars.METASTORE_TRANSACTIONAL_EVENT_LISTENERS, DBNOTIF_LISTENER_CLASSNAME);
  hconf.setBoolVar(HiveConf.ConfVars.REPLCMENABLED, true);
  hconf.setBoolVar(HiveConf.ConfVars.FIRE_EVENTS_FOR_DML, true);
  hconf.setVar(HiveConf.ConfVars.REPLCMDIR, TEST_PATH + "/cmroot/");
  msPort = MetaStoreUtils.startMetaStore(hconf);
  hconf.setVar(HiveConf.ConfVars.REPLDIR, TEST_PATH + "/hrepl/");
  hconf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + msPort);
  hconf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
  hconf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
  hconf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
  hconf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
  System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
  System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
  Path testPath = new Path(TEST_PATH);
  FileSystem fs = FileSystem.get(testPath.toUri(), hconf);
  fs.mkdirs(testPath);
  driver = new Driver(hconf);
  SessionState.start(new CliSessionState(hconf));
  metaStoreClient = new HiveMetaStoreClient(hconf);
}
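Nothing in this snippet releases what it starts. A minimal @AfterClass counterpart is sketched below; it assumes the driver, metaStoreClient, and session fields set above are the resources to release, which is an assumption about this test class rather than its actual code:

@AfterClass
public static void tearDownAfterClass() throws Exception {
  // Assumed teardown; the real TestReplicationScenarios may clean up differently.
  if (metaStoreClient != null) {
    metaStoreClient.close();
  }
  if (driver != null) {
    driver.close();
  }
  SessionState ss = SessionState.get();
  if (ss != null) {
    ss.close();  // closes the CliSessionState started in setUpBeforeClass
  }
}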
use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.
the class FolderPermissionBase method baseSetup.
public static void baseSetup() throws Exception {
  MiniDFSShim dfs = ShimLoader.getHadoopShims().getMiniDfs(conf, 4, true, null);
  fs = dfs.getFileSystem();
  baseDfsDir = new Path(new Path(fs.getUri()), "/base");
  fs.mkdirs(baseDfsDir);
  warehouseDir = new Path(baseDfsDir, "warehouse");
  fs.mkdirs(warehouseDir);
  conf.setVar(ConfVars.METASTOREWAREHOUSE, warehouseDir.toString());
  // Assuming the tests are run either in C: or D: drive on Windows!
  dataFileDir = conf.get("test.data.files").replace('\\', '/').replace("c:", "").replace("C:", "").replace("D:", "").replace("d:", "");
  dataFilePath = new Path(dataFileDir, "kv1.txt");
  // Set up scratch directory.
  Path scratchDir = new Path(baseDfsDir, "scratchdir");
  conf.setVar(HiveConf.ConfVars.SCRATCHDIR, scratchDir.toString());
  // Set Hive conf vars.
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  conf.setBoolVar(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS, true);
  conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
  int port = MetaStoreUtils.findFreePort();
  MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
  SessionState.start(new CliSessionState(conf));
  driver = new Driver(conf);
  setupDataTable();
}
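setupDataTable() is not shown on this page. A plausible sketch, assuming it uses the Driver field to create a partitioned text table and load the kv1.txt file located above; the table name and DDL are illustrative, not the actual test code:

private static void setupDataTable() throws Exception {
  // Hypothetical DDL; the real FolderPermissionBase helper may differ.
  driver.run("DROP TABLE IF EXISTS mysrc");
  driver.run("CREATE TABLE mysrc (key STRING, value STRING) "
      + "PARTITIONED BY (part1 STRING, part2 STRING) STORED AS TEXTFILE");
  driver.run("LOAD DATA LOCAL INPATH '" + dataFilePath + "' "
      + "INTO TABLE mysrc PARTITION (part1='1', part2='1')");
}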
use of org.apache.hadoop.hive.cli.CliSessionState in project phoenix by apache.
the class HiveTestUtil method startSessionState.
private CliSessionState startSessionState() throws IOException {
  HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
      "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator");
  String execEngine = conf.get("hive.execution.engine");
  conf.set("hive.execution.engine", "mr");
  CliSessionState ss = new CliSessionState(conf);
  assert ss != null;
  ss.in = System.in;
  ss.out = System.out;
  ss.err = System.out;
  SessionState oldSs = SessionState.get();
  if (oldSs != null && clusterType == MiniClusterType.tez) {
    oldSs.close();
  }
  if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
    oldSs.out.close();
  }
  SessionState.start(ss);
  isSessionStateStarted = true;
  conf.set("hive.execution.engine", execEngine);
  return ss;
}
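Saving and restoring hive.execution.engine means the session itself is initialized against plain MapReduce even when the test run targets another engine. A hedged usage sketch; the caller and the Driver wiring here are illustrative, not taken from HiveTestUtil:

// Hypothetical caller; assumes conf is the same HiveConf used above.
CliSessionState ss = startSessionState();
Driver driver = new Driver(conf);
driver.run("SHOW TABLES");  // executes in the session started above
ss.out.flush();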
use of org.apache.hadoop.hive.cli.CliSessionState in project phoenix by apache.
the class HiveTestUtil method cliInit.
public String cliInit(String tname, boolean recreate) throws Exception {
  if (recreate) {
    cleanUp();
    createSources();
  }
  HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
      "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator");
  Utilities.clearWorkMap();
  CliSessionState ss = new CliSessionState(conf);
  assert ss != null;
  ss.in = System.in;
  String outFileExtension = getOutFileExtension(tname);
  String stdoutName = null;
  if (outDir != null) {
    File qf = new File(outDir, tname);
    stdoutName = qf.getName().concat(outFileExtension);
  } else {
    stdoutName = tname + outFileExtension;
  }
  File outf = new File(logDir, stdoutName);
  OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
  if (qSortQuerySet.contains(tname)) {
    ss.out = new SortPrintStream(fo, "UTF-8");
  } else if (qHashQuerySet.contains(tname)) {
    ss.out = new DigestPrintStream(fo, "UTF-8");
  } else if (qSortNHashQuerySet.contains(tname)) {
    ss.out = new SortAndDigestPrintStream(fo, "UTF-8");
  } else {
    ss.out = new PrintStream(fo, true, "UTF-8");
  }
  ss.err = new CachingPrintStream(fo, true, "UTF-8");
  ss.setIsSilent(true);
  SessionState oldSs = SessionState.get();
  if (oldSs != null && clusterType == MiniClusterType.tez) {
    oldSs.close();
  }
  if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
    oldSs.out.close();
  }
  SessionState.start(ss);
  cliDriver = new CliDriver();
  cliDriver.processInitFiles(ss);
  return outf.getAbsolutePath();
}
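After cliInit, a q-file test typically pushes statements through the CliDriver and compares the file at the returned path against an expected output. A hedged sketch of that flow; the query and failure handling are illustrative, not taken from the Phoenix harness:

// Hypothetical test body.
String outPath = cliInit("join1", true);
int rc = cliDriver.processLine("SELECT COUNT(*) FROM src;");
if (rc != 0) {
  throw new AssertionError("query failed; output at " + outPath);
}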
use of org.apache.hadoop.hive.cli.CliSessionState in project ambrose by twitter.
the class AmbroseHiveFinishHook method getLastCmd.
private String getLastCmd() {
  CliSessionState cliss = (CliSessionState) SessionState.get();
  Scanner scanner = null;
  try {
    scanner = new Scanner(new File(cliss.fileName));
  } catch (FileNotFoundException e) {
    LOG.error("Can't find Hive script", e);
  }
  if (scanner == null) {
    return null;
  }
  // Split the script on ';' and keep the last non-empty, non-comment statement.
  Pattern delim = Pattern.compile(";");
  scanner.useDelimiter(delim);
  String lastLine = null;
  while (scanner.hasNext()) {
    String line = StringUtils.trim(scanner.next().replaceAll("\\n", ""));
    if (line.length() != 0 && !line.startsWith("--")) {
      lastLine = line;
    }
  }
  scanner.close();
  return lastLine;
}
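The statement-splitting behavior is easy to check in isolation. A self-contained demo that mirrors the Scanner logic above on an in-memory script; plain java.util.Scanner and String.trim stand in for the script file and commons-lang StringUtils.trim:

import java.util.Scanner;

public class LastCmdDemo {
  public static void main(String[] args) {
    String script = "CREATE TABLE t (x INT);\n-- a comment;\nSELECT * FROM t;";
    Scanner scanner = new Scanner(script).useDelimiter(";");
    String lastLine = null;
    while (scanner.hasNext()) {
      // Same normalization as getLastCmd: strip newlines, trim, skip "--" comments.
      String line = scanner.next().replaceAll("\\n", "").trim();
      if (line.length() != 0 && !line.startsWith("--")) {
        lastLine = line;
      }
    }
    scanner.close();
    System.out.println(lastLine);  // prints: SELECT * FROM t
  }
}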