Use of org.apache.hadoop.hive.cli.CliDriver in project hive by apache: class JavaAction, method main.
public static void main(String[] args) throws Exception {
  HiveConf conf = new HiveConf();
  // Pull in the Oozie action configuration handed to this launcher process.
  conf.addResource(new Path("file:///", System.getProperty("oozie.action.conf.xml")));
  // Run HCatalog's semantic analyzer hook and use SASL for the metastore connection.
  conf.setVar(ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());
  conf.setBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL, true);
  // Start a CLI session and run the command passed as the first argument.
  SessionState.start(new CliSessionState(conf));
  new CliDriver().processLine(args[0]);
}
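Note that CliDriver.processLine returns a status code instead of throwing on query failure, so a launcher that cares about success should inspect it. A minimal sketch of that variant (the class name and the exit-code propagation are our additions, not part of the original snippet):

import org.apache.hadoop.hive.cli.CliDriver;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;

public class JavaActionWithExitCode {
  public static void main(String[] args) throws Exception {
    SessionState.start(new CliSessionState(new HiveConf()));
    int rc = new CliDriver().processLine(args[0]);
    if (rc != 0) {
      System.exit(rc); // non-zero status signals failure to the launching process
    }
  }
}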
Use of org.apache.hadoop.hive.cli.CliDriver in project hive by apache: class QTestUtil, method cliInit.
public String cliInit(String tname, boolean recreate) throws Exception {
  if (recreate) {
    cleanUp(tname);
    createSources(tname);
  }
  HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
      "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
  Utilities.clearWorkMap(conf);
  CliSessionState ss = createSessionState();
  assert ss != null;
  ss.in = System.in;

  // Derive the per-test output file name from the query file name.
  String outFileExtension = getOutFileExtension(tname);
  String stdoutName = null;
  if (outDir != null) {
    // TODO: why is this needed?
    File qf = new File(outDir, tname);
    stdoutName = qf.getName().concat(outFileExtension);
  } else {
    stdoutName = tname + outFileExtension;
  }

  // Redirect session stdout to the log file, normalizing it (sorted, digested,
  // or both) for tests whose output order is nondeterministic.
  File outf = new File(logDir, stdoutName);
  OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
  if (qSortQuerySet.contains(tname)) {
    ss.out = new SortPrintStream(fo, "UTF-8");
  } else if (qHashQuerySet.contains(tname)) {
    ss.out = new DigestPrintStream(fo, "UTF-8");
  } else if (qSortNHashQuerySet.contains(tname)) {
    ss.out = new SortAndDigestPrintStream(fo, "UTF-8");
  } else {
    ss.out = new PrintStream(fo, true, "UTF-8");
  }
  ss.err = new CachingPrintStream(fo, true, "UTF-8");
  ss.setIsSilent(true);

  SessionState oldSs = SessionState.get();
  boolean canReuseSession = !qNoSessionReuseQuerySet.contains(tname);
  if (oldSs != null && canReuseSession && clusterType.getCoreClusterType() == CoreClusterType.TEZ) {
    // Copy the tezSessionState from the old CliSessionState.
    tezSessionState = oldSs.getTezSession();
    oldSs.setTezSession(null);
    ss.setTezSession(tezSessionState);
    oldSs.close();
  }
  if (oldSs != null && clusterType.getCoreClusterType() == CoreClusterType.SPARK) {
    sparkSession = oldSs.getSparkSession();
    ss.setSparkSession(sparkSession);
    oldSs.setSparkSession(null);
    oldSs.close();
  }
  if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
    oldSs.out.close();
  }
  SessionState.start(ss);

  cliDriver = new CliDriver();
  if (tname.equals("init_file.q")) {
    ss.initFiles.add(AbstractCliConfig.HIVE_ROOT + "/data/scripts/test_init_file.sql");
  }
  cliDriver.processInitFiles(ss);
  return outf.getAbsolutePath();
}
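cliInit hands back the absolute path of the redirected output file so the test harness can later compare it against a stored golden output file. A minimal, self-contained sketch of such a comparison (the class, the matchesGolden helper, and the line-for-line diff semantics are illustrative assumptions, not QTestUtil's real verification code):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;

public class GoldenFileCheck {
  // Returns true when the actual output matches the stored golden file line for line.
  static boolean matchesGolden(String actualPath, String goldenPath) throws IOException {
    List<String> actual = Files.readAllLines(Paths.get(actualPath));
    List<String> golden = Files.readAllLines(Paths.get(goldenPath));
    return actual.equals(golden);
  }
}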
Use of org.apache.hadoop.hive.cli.CliDriver in project hive by apache: class QTestUtil, method createSources.
public void createSources(String tname) throws Exception {
  boolean canReuseSession = (tname == null) || !qNoSessionReuseQuerySet.contains(tname);
  if (!isSessionStateStarted) {
    startSessionState(canReuseSession);
  }
  if (cliDriver == null) {
    cliDriver = new CliDriver();
  }
  cliDriver.processLine("set test.data.dir=" + testFiles + ";");

  File scriptFile = new File(this.initScript);
  if (!scriptFile.isFile()) {
    LOG.info("No init script detected. Skipping.");
    return;
  }

  // Run the whole init script in one processLine call; a non-zero result fails the setup.
  conf.setBoolean("hive.test.init.phase", true);
  String initCommands = readEntireFileIntoString(scriptFile);
  LOG.info("Initial setup (" + initScript + "):\n" + initCommands);
  int result = cliDriver.processLine(initCommands);
  LOG.info("Result from cliDriver.processLine in createSources=" + result);
  if (result != 0) {
    Assert.fail("Failed during createSources processLine with code=" + result);
  }
  conf.setBoolean("hive.test.init.phase", false);
}
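createSources leans on readEntireFileIntoString to load the whole init script before handing it to processLine, which splits the text on semicolons itself. A plausible sketch of that helper (the wrapper class and the UTF-8 choice are our assumptions; QTestUtil's actual implementation may differ):

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

public class ScriptReader {
  // Read the whole script into one String; CliDriver.processLine splits it on ';'.
  static String readEntireFileIntoString(File f) throws IOException {
    return new String(Files.readAllBytes(f.toPath()), StandardCharsets.UTF_8);
  }
}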
Use of org.apache.hadoop.hive.cli.CliDriver in project hive by apache: class QTestUtil, method cleanUp.
public void cleanUp(String tname) throws Exception {
  boolean canReuseSession = (tname == null) || !qNoSessionReuseQuerySet.contains(tname);
  if (!isSessionStateStarted) {
    startSessionState(canReuseSession);
  }
  // Allow developers to keep test artifacts around for debugging.
  if (System.getenv(QTEST_LEAVE_FILES) != null) {
    return;
  }
  clearTablesCreatedDuringTests();
  clearUDFsCreatedDuringTests();
  clearKeysCreatedInTests();

  File cleanupFile = new File(cleanupScript);
  if (cleanupFile.isFile()) {
    String cleanupCommands = readEntireFileIntoString(cleanupFile);
    LOG.info("Cleanup (" + cleanupScript + "):\n" + cleanupCommands);
    if (cliDriver == null) {
      cliDriver = new CliDriver();
    }
    SessionState.get().getConf().setBoolean("hive.test.shutdown.phase", true);
    int result = cliDriver.processLine(cleanupCommands);
    if (result != 0) {
      LOG.error("Failed during cleanup processLine with code={}. Ignoring", result);
      // TODO Convert this to an Assert.fail once HIVE-14682 is fixed
    }
    SessionState.get().getConf().setBoolean("hive.test.shutdown.phase", false);
  } else {
    LOG.info("No cleanup script detected. Skipping.");
  }

  // Delete any contents in the warehouse dir, but keep the directory itself.
  Path p = new Path(testWarehouse);
  FileSystem fs = p.getFileSystem(conf);
  try {
    FileStatus[] ls = fs.listStatus(p);
    for (int i = 0; (ls != null) && (i < ls.length); i++) {
      fs.delete(ls[i].getPath(), true);
    }
  } catch (FileNotFoundException e) {
    // Best effort; the warehouse dir may not exist yet.
  }
  // TODO: Clean up all the other paths that are created.
  FunctionRegistry.unregisterTemporaryUDF("test_udaf");
  FunctionRegistry.unregisterTemporaryUDF("test_error");
}
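Note that the warehouse wipe deletes each child of testWarehouse rather than the directory itself, so the warehouse root survives for the next test run. The same pattern in isolation (the class and method names here are illustrative; the FileSystem calls are the ones used above):

import java.io.FileNotFoundException;
import java.io.IOException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WarehouseWipe {
  // Best-effort delete of every child of dir, keeping dir itself in place.
  static void clearChildren(FileSystem fs, Path dir) throws IOException {
    FileStatus[] ls;
    try {
      ls = fs.listStatus(dir);
    } catch (FileNotFoundException e) {
      return; // nothing to clean
    }
    for (FileStatus s : ls) {
      fs.delete(s.getPath(), true); // recursive delete of the child
    }
  }
}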
Use of org.apache.hadoop.hive.cli.CliDriver in project phoenix by apache: class HiveTestUtil, method cliInit.
public String cliInit(String tname, boolean recreate) throws Exception {
  if (recreate) {
    cleanUp();
    createSources();
  }
  HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
      "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator");
  Utilities.clearWorkMap();
  CliSessionState ss = new CliSessionState(conf);
  assert ss != null;
  ss.in = System.in;

  String outFileExtension = getOutFileExtension(tname);
  String stdoutName = null;
  if (outDir != null) {
    File qf = new File(outDir, tname);
    stdoutName = qf.getName().concat(outFileExtension);
  } else {
    stdoutName = tname + outFileExtension;
  }

  // Same output-normalization ladder as Hive's QTestUtil above.
  File outf = new File(logDir, stdoutName);
  OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
  if (qSortQuerySet.contains(tname)) {
    ss.out = new SortPrintStream(fo, "UTF-8");
  } else if (qHashQuerySet.contains(tname)) {
    ss.out = new DigestPrintStream(fo, "UTF-8");
  } else if (qSortNHashQuerySet.contains(tname)) {
    ss.out = new SortAndDigestPrintStream(fo, "UTF-8");
  } else {
    ss.out = new PrintStream(fo, true, "UTF-8");
  }
  ss.err = new CachingPrintStream(fo, true, "UTF-8");
  ss.setIsSilent(true);

  SessionState oldSs = SessionState.get();
  if (oldSs != null && clusterType == MiniClusterType.tez) {
    oldSs.close();
  }
  if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
    oldSs.out.close();
  }
  SessionState.start(ss);
  cliDriver = new CliDriver();
  cliDriver.processInitFiles(ss);
  return outf.getAbsolutePath();
}
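Both cliInit variants share one session lifecycle: capture the old SessionState, close the output stream it owns, start a fresh CliSessionState, then replay init files via CliDriver.processInitFiles. A stripped-down sketch of that lifecycle (the standalone wrapper and the init-script path are illustrative; the calls themselves mirror the snippets above):

import org.apache.hadoop.hive.cli.CliDriver;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;

public class SessionLifecycle {
  public static void main(String[] args) throws Exception {
    CliSessionState ss = new CliSessionState(new HiveConf());
    ss.in = System.in;
    // Register an init script; processInitFiles will run it before the first query.
    ss.initFiles.add("/path/to/test_init_file.sql"); // illustrative path

    // Close the previous session's stdout if this process owns it, then swap sessions.
    SessionState oldSs = SessionState.get();
    if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
      oldSs.out.close();
    }
    SessionState.start(ss);

    new CliDriver().processInitFiles(ss);
  }
}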