Use of org.apache.hadoop.hive.cli.CliSessionState in the Apache Hive project: class TestMetastoreVersion, method testVersionMatching.
/**
 * Test that with verification enabled, hive works when the correct schema is already populated.
 *
 * First runs with schema verification disabled and no recorded version, which is expected to
 * make the driver fail; then records the correct schema version, enables verification, and
 * expects the driver to succeed.
 *
 * @throws Exception on unexpected setup or query failure
 */
@Test
public void testVersionMatching() throws Exception {
  // Phase 1: verification off, no schema version recorded — the query must fail.
  System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false");
  hiveConf = new HiveConf(this.getClass());
  SessionState.start(new CliSessionState(hiveConf));
  driver = DriverFactory.newDriver(hiveConf);
  try {
    driver.run("show tables");
    // Throw AssertionError explicitly rather than `assert false`: a bare assert is a
    // no-op unless the JVM runs with -ea, which would let a broken run pass silently.
    // AssertionError is an Error, so it is not swallowed by the catch below.
    throw new AssertionError("expected 'show tables' to fail before the schema version is set");
  } catch (CommandProcessorException e) {
    // this is expected
  }
  // Phase 2: record the correct schema version and turn verification on — must succeed.
  ObjectStore.setSchemaVerified(false);
  hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION, true);
  hiveConf = new HiveConf(this.getClass());
  setVersion(hiveConf, metastoreSchemaInfo.getHiveSchemaVersion());
  driver = DriverFactory.newDriver(hiveConf);
  driver.run("show tables");
}
Use of org.apache.hadoop.hive.cli.CliSessionState in the Apache Hive project: class BaseReplicationScenariosAcidTables, method prepareInc2AcidData.
/**
 * Loads the second round of incremental ACID test data into the given database
 * by running a fixed sequence of statements through a fresh driver session.
 */
void prepareInc2AcidData(String dbName, HiveConf hiveConf) throws Throwable {
  IDriver driver = DriverFactory.newDriver(hiveConf);
  SessionState.start(new CliSessionState(hiveConf));
  String[] statements = {
      "use " + dbName,
      "insert into t1 values (3)",
      "insert into t5 values (4444)"
  };
  for (String statement : statements) {
    runUsingDriver(driver, statement);
  }
}
Use of org.apache.hadoop.hive.cli.CliSessionState in the Apache Hive project: class TestHiveHistory, method testQueryloglocParentDirNotExist.
/**
 * HIVE-2654: constructing the history implementation must create the query-log
 * location even when its parent directory does not exist.
 */
@Test
public void testQueryloglocParentDirNotExist() throws Exception {
  String parentTmpDir = tmpdir + "/HIVE2654";
  Path parentDirPath = new Path(parentTmpDir);
  try {
    fs.delete(parentDirPath, true);
  } catch (Exception ignored) {
    // Best-effort pre-clean: the directory may not exist yet.
  }
  try {
    String actualDir = parentTmpDir + "/test";
    HiveConf conf = new HiveConf(SessionState.class);
    conf.set(HiveConf.ConfVars.HIVEHISTORYFILELOC.toString(), actualDir);
    SessionState ss = new CliSessionState(conf);
    // Constructed for its side effect: HiveHistoryImpl is expected to create the
    // configured history directory (including the missing parent).
    HiveHistory hiveHistory = new HiveHistoryImpl(ss);
    Path actualPath = new Path(actualDir);
    if (!fs.exists(actualPath)) {
      fail("Query log location path does not exist: " + actualPath.toString());
    }
  } finally {
    try {
      fs.delete(parentDirPath, true);
    } catch (Exception ignored) {
      // Best-effort cleanup on exit.
    }
  }
}
Use of org.apache.hadoop.hive.cli.CliSessionState in the Apache Hive project: class TestHiveHistory, method testSimpleQuery.
/**
 * Check history file output for this query: running one simple select must record
 * exactly one job and one task, with the expected task count in the query info.
 */
@Test
public void testSimpleQuery() {
  new LineageInfo();
  try {
    // before any of the other core hive classes are loaded
    try {
      LogUtils.initHiveLog4j();
    } catch (LogInitializationException ignored) {
      // Log4j initialization is best-effort in tests.
    }
    HiveConf hconf = new HiveConf(SessionState.class);
    hconf.setBoolVar(ConfVars.HIVE_SESSION_HISTORY_ENABLED, true);
    CliSessionState ss = new CliSessionState(hconf);
    ss.in = System.in;
    try {
      ss.out = new SessionStream(System.out, true, "UTF-8");
      ss.err = new SessionStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
      System.exit(3);
    }
    SessionState.start(ss);
    String cmd = "select a.key+1 from src a";
    // NOTE(review): the driver is built from the class-level `conf`, not the local
    // `hconf` configured above (session history is enabled only on `hconf`) —
    // confirm this is intentional.
    IDriver d = DriverFactory.newDriver(conf);
    d.run(cmd);
    HiveHistoryViewer hv = new HiveHistoryViewer(SessionState.get().getHiveHistory().getHistFileName());
    Map<String, QueryInfo> jobInfoMap = hv.getJobInfoMap();
    Map<String, TaskInfo> taskInfoMap = hv.getTaskInfoMap();
    if (jobInfoMap.size() != 1) {
      fail("jobInfo Map size not 1");
    }
    if (taskInfoMap.size() != 1) {
      // Fixed copy-pasted message: this check is about the task map, not the job map.
      fail("taskInfo Map size not 1");
    }
    cmd = (String) jobInfoMap.keySet().toArray()[0];
    QueryInfo ji = jobInfoMap.get(cmd);
    if (!ji.hm.get(Keys.QUERY_NUM_TASKS.name()).equals("1")) {
      fail("Wrong number of tasks");
    }
  } catch (Exception e) {
    e.printStackTrace();
    // Include the exception in the failure message so the cause survives into the report.
    fail("Failed: " + e);
  }
}
Use of org.apache.hadoop.hive.cli.CliSessionState in the Apache Hive project: class TestCommands, method setUpBeforeClass.
/**
 * One-time suite setup: boots a metastore server, configures authorization and
 * replication settings, prepares a unique warehouse directory, and wires up the
 * shared driver, session, and HCat client.
 */
@BeforeClass
public static void setUpBeforeClass() throws Exception {
  TestHCatClient.startMetaStoreServer();
  hconf = TestHCatClient.getConf();
  hconf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, "");
  hconf.set(HiveConf.ConfVars.REPL_RUN_DATA_COPY_TASKS_ON_TARGET.varname, "false");
  hconf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  // Unique per-run warehouse path so concurrent/repeated runs never collide.
  TEST_PATH = System.getProperty("test.warehouse.dir", "/tmp") + Path.SEPARATOR
      + TestCommands.class.getCanonicalName() + "-" + System.currentTimeMillis();
  Path warehousePath = new Path(TEST_PATH);
  FileSystem warehouseFs = FileSystem.get(warehousePath.toUri(), hconf);
  warehouseFs.mkdirs(warehousePath);
  driver = DriverFactory.newDriver(hconf);
  SessionState.start(new CliSessionState(hconf));
  client = HCatClient.create(hconf);
}
Aggregations