Use of org.apache.hadoop.hive.ql.processors.CommandProcessorResponse in project hive by apache: class TestAcidOnTez, method runStatementOnDriver.
/**
 * Run a statement with a customized HiveConf and return the fetched result rows.
 */
private List<String> runStatementOnDriver(String stmt, HiveConf conf) throws Exception {
  Driver driver = new Driver(conf);
  CommandProcessorResponse cpr = driver.run(stmt);
  if (cpr.getResponseCode() != 0) {
    throw new RuntimeException(stmt + " failed: " + cpr);
  }
  List<String> rs = new ArrayList<String>();
  driver.getResults(rs);
  return rs;
}
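For context, a minimal sketch of how such a helper might be called from a test. The table name acidTbl and the base hiveConf field are assumptions made for illustration; they are not part of the snippet above.

// copy the test's base conf (assumed field) and tweak it for this one statement
HiveConf perStatementConf = new HiveConf(hiveConf);
perStatementConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
// run the statement with the customized conf and inspect the fetched rows
List<String> rows = runStatementOnDriver("select a, b from acidTbl order by a", perStatementConf);
Assert.assertFalse("expected at least one row", rows.isEmpty());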
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorResponse in project hive by apache: class TestDDLWithRemoteMetastoreSecondNamenode, method executeQuery.
private void executeQuery(String query) throws CommandNeedRetryException {
  CommandProcessorResponse result = driver.run(query);
  assertNotNull("driver.run() was expected to return a result for query: " + query, result);
  assertEquals("Execution of (" + query + ") failed with exit status: " + result.getResponseCode()
      + ", error: " + result.getErrorMessage(), 0, result.getResponseCode());
}
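As a usage illustration, an assertion-based helper like this lets a test chain statements without per-call checks. The table name and namenode URI below are made up for the example and are not taken from the test class.

executeQuery("DROP TABLE IF EXISTS tbl_second_nn");
// hypothetical: place the table's data on a second namenode via an explicit LOCATION
executeQuery("CREATE TABLE tbl_second_nn (id INT, name STRING) "
    + "LOCATION 'hdfs://namenode2:8020/user/hive/warehouse/tbl_second_nn'");

Any failing statement fails the test immediately, with the exit status and error message carried in the assertion message.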
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorResponse in project hive by apache: class TestReplicationScenarios, method run.
private static boolean run(String cmd, boolean errorOnFail) throws RuntimeException {
  boolean success = false;
  try {
    CommandProcessorResponse ret = driver.run(cmd);
    success = (ret.getException() == null);
    if (!success) {
      LOG.warn("Error {} : {} running [{}].", ret.getErrorCode(), ret.getErrorMessage(), cmd);
    }
  } catch (CommandNeedRetryException e) {
    if (errorOnFail) {
      throw new RuntimeException(e);
    } else {
      LOG.warn(e.getMessage(), e);
      // do nothing else
    }
  }
  return success;
}
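A short sketch of how callers might combine the boolean result with errorOnFail; the table name is hypothetical.

// a failed statement is only logged here; the caller inspects the returned boolean
run("DROP TABLE IF EXISTS repl_src", false);
// with errorOnFail=true, a CommandNeedRetryException is rethrown instead of being swallowed
boolean created = run("CREATE TABLE repl_src (a INT)", true);
Assert.assertTrue("table creation should have succeeded", created);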
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorResponse in project hive by apache: class TestSemanticAnalyzerHookLoading, method testHookLoading.
public void testHookLoading() throws Exception {
  HiveConf conf = new HiveConf(this.getClass());
  conf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, DummySemanticAnalyzerHook.class.getName());
  conf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
  SessionState.start(conf);
  Driver driver = new Driver(conf);
  driver.run("drop table testDL");
  // this CTAS statement is expected to fail, surfacing a non-zero response code
  CommandProcessorResponse resp = driver.run("create table testDL (a int) as select * from tbl2");
  assertEquals(40000, resp.getResponseCode());
  // a plain CREATE TABLE should succeed with no error message
  resp = driver.run("create table testDL (a int)");
  assertEquals(0, resp.getResponseCode());
  assertNull(resp.getErrorMessage());
  // the create-table hook stamps extra parameters on the new table, checked below
  Map<String, String> params = Hive.get(conf).getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "testDL").getParameters();
  assertEquals(DummyCreateTableHook.class.getName(), params.get("createdBy"));
  assertEquals("Open Source rocks!!", params.get("Message"));
  driver.run("drop table testDL");
}
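For reference, a bare-bones hook skeleton in the style this test exercises. This is not the DummySemanticAnalyzerHook used above; it is a sketch that assumes the older hook API (AbstractSemanticAnalyzerHook with preAnalyze/postAnalyze) that this code base appears to target.

import java.io.Serializable;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class NoOpSemanticAnalyzerHook extends AbstractSemanticAnalyzerHook {
  @Override
  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException {
    // throwing a SemanticException here surfaces as a non-zero response code from driver.run()
    return ast;
  }

  @Override
  public void postAnalyze(HiveSemanticAnalyzerHookContext context, List<Task<? extends Serializable>> rootTasks) throws SemanticException {
    // no-op; a real hook could inspect or adjust the generated tasks here
  }
}

A class like this would be wired in the same way the test does it, by setting ConfVars.SEMANTIC_ANALYZER_HOOK to the hook's class name before starting the session.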
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorResponse in project hive by apache: class FolderPermissionBase, method testInsertStaticDualPartition.
@Test
public void testInsertStaticDualPartition() throws Exception {
  String tableName = "dualstaticpart";
  CommandProcessorResponse ret = driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 string, part2 string)");
  Assert.assertEquals(0, ret.getResponseCode());
  assertExistence(warehouseDir + "/" + tableName);
  setPermission(warehouseDir + "/" + tableName);
  // insert into test
  ret = driver.run("insert into table " + tableName + " partition(part1='1', part2='1') select key,value from mysrc where part1='1' and part2='1'");
  Assert.assertEquals(0, ret.getResponseCode());
  verifyPermission(warehouseDir + "/" + tableName);
  verifyPermission(warehouseDir + "/" + tableName + "/part1=1");
  verifyPermission(warehouseDir + "/" + tableName + "/part1=1/part2=1");
  Assert.assertTrue(listStatus(warehouseDir + "/" + tableName + "/part1=1/part2=1").size() > 0);
  for (String child : listStatus(warehouseDir + "/" + tableName + "/part1=1/part2=1")) {
    verifyPermission(child);
  }
  // insert overwrite test
  setPermission(warehouseDir + "/" + tableName, 1);
  setPermission(warehouseDir + "/" + tableName + "/part1=1", 1);
  setPermission(warehouseDir + "/" + tableName + "/part1=1/part2=1", 1);
  ret = driver.run("insert overwrite table " + tableName + " partition(part1='1', part2='1') select key,value from mysrc where part1='1' and part2='1'");
  Assert.assertEquals(0, ret.getResponseCode());
  verifyPermission(warehouseDir + "/" + tableName, 1);
  verifyPermission(warehouseDir + "/" + tableName + "/part1=1", 1);
  verifyPermission(warehouseDir + "/" + tableName + "/part1=1/part2=1", 1);
  Assert.assertTrue(listStatus(warehouseDir + "/" + tableName + "/part1=1/part2=1").size() > 0);
  for (String child : listStatus(warehouseDir + "/" + tableName + "/part1=1/part2=1")) {
    verifyPermission(child, 1);
  }
}
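The permission helpers (setPermission, verifyPermission, listStatus, assertExistence) are defined on the surrounding FolderPermissionBase class. A rough sketch of what the listing helpers could look like on top of the Hadoop FileSystem API is shown below; it assumes a FileSystem field named fs, and the real implementations may differ.

private List<String> listStatus(String path) throws Exception {
  // collect the fully qualified paths of the direct children of the given directory
  List<String> children = new ArrayList<String>();
  for (FileStatus status : fs.listStatus(new Path(path))) {
    children.add(status.getPath().toString());
  }
  return children;
}

private void assertExistence(String path) throws Exception {
  Assert.assertTrue("expected path to exist: " + path, fs.exists(new Path(path)));
}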