use of org.apache.hadoop.hive.ql.processors.CommandProcessorResponse in project hive by apache.
the class AbstractHCatLoaderTest method executeStatementOnDriver.
/**
 * Execute a Hive CLI statement and fail with an IOException if the driver returns a non-zero response code.
 * @param cmd arbitrary statement to execute
 * @param driver Hive driver to run the statement with
 */
static void executeStatementOnDriver(String cmd, Driver driver) throws IOException, CommandNeedRetryException {
  LOG.debug("Executing: " + cmd);
  CommandProcessorResponse cpr = driver.run(cmd);
  if (cpr.getResponseCode() != 0) {
    throw new IOException("Failed to execute \"" + cmd + "\". Driver returned " + cpr.getResponseCode()
        + " Error: " + cpr.getErrorMessage());
  }
}
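For context, a minimal sketch of how such a helper is typically invoked from a test; the table name and statements below are hypothetical, assuming a Driver already configured against a test metastore:

  // hypothetical setup statements run through the helper; any failure surfaces as an IOException
  executeStatementOnDriver("drop table if exists junit_example", driver);
  executeStatementOnDriver("create table junit_example (id int, msg string) stored as orc", driver);
  executeStatementOnDriver("insert into junit_example values (1, 'hello')", driver);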
use of org.apache.hadoop.hive.ql.processors.CommandProcessorResponse in project hive by apache.
the class TestHCatLoaderComplexSchema method createTable.
private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
  String createTable = "create table " + tablename + "(" + schema + ") ";
  if ((partitionedBy != null) && (!partitionedBy.trim().isEmpty())) {
    createTable = createTable + "partitioned by (" + partitionedBy + ") ";
  }
  createTable = createTable + "stored as " + storageFormat;
  LOG.info("Creating table:\n {}", createTable);
  CommandProcessorResponse result = driver.run(createTable);
  int retCode = result.getResponseCode();
  if (retCode != 0) {
    throw new IOException("Failed to create table. [" + createTable + "], return code from hive driver : ["
        + retCode + " " + result.getErrorMessage() + "]");
  }
}
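A minimal usage sketch, assuming the test's storageFormat field is set; the table and column names are hypothetical:

  // unpartitioned table
  createTable("junit_simple", "id int, msg string", null);
  // partitioned table; the partition column goes into the PARTITIONED BY clause, not the main schema
  createTable("junit_parted", "id int, msg string", "dt string");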
use of org.apache.hadoop.hive.ql.processors.CommandProcessorResponse in project hive by apache.
the class AbstractHCatStorerTest method testDateCharTypes.
/**
 * Create a data file with datatypes added in 0.13. Read it with Pig and use Pig + HCatStorer to
 * write to a Hive table. Then read it back using both Pig and Hive and make sure the results match.
 */
@Test
public void testDateCharTypes() throws Exception {
  final String tblName = "junit_date_char";
  AbstractHCatLoaderTest.dropTable(tblName, driver);
  AbstractHCatLoaderTest.createTable(tblName,
      "id int, char5 char(5), varchar10 varchar(10), dec52 decimal(5,2)", null, driver, storageFormat);
  int NUM_ROWS = 5;
  String[] rows = new String[NUM_ROWS];
  for (int i = 0; i < NUM_ROWS; i++) {
    // since the file is read by Pig, the values must be in a format that Pig understands,
    // otherwise Pig turns the value into NULL on read
    rows[i] = i + "\txxxxx\tyyy\t" + 5.2;
  }
  HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, rows);
  LOG.debug("File=" + INPUT_FILE_NAME);
  // dumpFile(INPUT_FILE_NAME);
  PigServer server = createPigServer(true);
  int queryNumber = 1;
  logAndRegister(server, "A = load '" + INPUT_FILE_NAME
      + "' as (id:int, char5:chararray, varchar10:chararray, dec52:bigdecimal);", queryNumber++);
  logAndRegister(server, "store A into '" + tblName + "' using " + HCatStorer.class.getName() + "();",
      queryNumber++);
  logAndRegister(server, "B = load '" + tblName + "' using " + HCatLoader.class.getName() + "();", queryNumber);
  CommandProcessorResponse cpr = driver.run("select * from " + tblName);
  LOG.debug("cpr.respCode=" + cpr.getResponseCode() + " cpr.errMsg=" + cpr.getErrorMessage());
  List l = new ArrayList();
  driver.getResults(l);
  LOG.debug("Dumping rows via SQL from " + tblName);
  /*
   * Unfortunately Timestamp.toString() adjusts the value for the local TZ and 't' is a String, thus
   * the timestamp in 't' doesn't match rawData.
   */
  for (Object t : l) {
    LOG.debug(t == null ? null : t.toString());
  }
  Iterator<Tuple> itr = server.openIterator("B");
  int numRowsRead = 0;
  while (itr.hasNext()) {
    Tuple t = itr.next();
    StringBuilder rowFromPig = new StringBuilder();
    for (int i = 0; i < t.size(); i++) {
      rowFromPig.append(t.get(i)).append("\t");
    }
    rowFromPig.setLength(rowFromPig.length() - 1);
    assertEquals("Comparing Pig to Raw data", rows[numRowsRead], rowFromPig.toString());
    // see comment at "Dumping rows via SQL..." for why this doesn't work (for all types)
    // assertEquals("Comparing Pig to Hive", rowFromPig.toString(), l.get(numRowsRead));
    numRowsRead++;
  }
  assertEquals("Expected " + NUM_ROWS + " rows; got " + numRowsRead + " file=" + INPUT_FILE_NAME,
      NUM_ROWS, numRowsRead);
}
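For reference, the driver.run(...) / getResults(...) pair used above can stand on its own as a success check. A minimal sketch, assuming it runs inside a test method that declares throws Exception and uses the table created by this test:

  CommandProcessorResponse resp = driver.run("select * from junit_date_char");
  // a non-zero response code carries the error message from the failed statement
  assertEquals("query failed: " + resp.getErrorMessage(), 0, resp.getResponseCode());
  List results = new ArrayList();
  driver.getResults(results);       // one entry per fetched result row
  assertEquals(5, results.size());  // the test wrote NUM_ROWS = 5 rows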
use of org.apache.hadoop.hive.ql.processors.CommandProcessorResponse in project hive by apache.
the class TestCompactor method executeStatementOnDriver.
/**
 * Execute a Hive CLI statement and fail with an IOException if the driver returns a non-zero response code.
 * @param cmd arbitrary statement to execute
 * @param driver Hive driver to run the statement with
 */
static void executeStatementOnDriver(String cmd, Driver driver) throws IOException, CommandNeedRetryException {
  LOG.debug("Executing: " + cmd);
  CommandProcessorResponse cpr = driver.run(cmd);
  if (cpr.getResponseCode() != 0) {
    throw new IOException("Failed to execute \"" + cmd + "\". Driver returned: " + cpr);
  }
}
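A common variant of this helper (a sketch, not part of the class above) returns the CommandProcessorResponse instead of throwing, so tests that expect a statement to fail can assert on the response code directly:

  // hypothetical helper: run a statement and hand the raw response back to the caller
  static CommandProcessorResponse runStatement(String cmd, Driver driver) throws CommandNeedRetryException {
    LOG.debug("Executing: " + cmd);
    CommandProcessorResponse cpr = driver.run(cmd);
    if (cpr.getResponseCode() != 0) {
      LOG.warn("Statement failed (" + cpr.getResponseCode() + "): " + cpr.getErrorMessage());
    }
    return cpr;
  }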
use of org.apache.hadoop.hive.ql.processors.CommandProcessorResponse in project hive by apache.
the class TestClientSideAuthorizationProvider method testSimplePrivileges.
public void testSimplePrivileges() throws Exception {
  String dbName = getTestDbName();
  String tblName = getTestTableName();
  String userName = ugi.getUserName();
  allowCreateDatabase(userName);
  CommandProcessorResponse ret = driver.run("create database " + dbName);
  assertEquals(0, ret.getResponseCode());
  Database db = msc.getDatabase(dbName);
  String dbLocn = db.getLocationUri();
  disallowCreateDatabase(userName);
  validateCreateDb(db, dbName);
  disallowCreateInDb(dbName, userName, dbLocn);
  driver.run("use " + dbName);
  ret = driver.run(String.format("create table %s (a string) partitioned by (b string)", tblName));
  // failure from not having permissions to create table
  assertNoPrivileges(ret);
  allowCreateInDb(dbName, userName, dbLocn);
  driver.run("use " + dbName);
  ret = driver.run(String.format("create table %s (a string) partitioned by (b string)", tblName));
  // now it succeeds.
  assertEquals(0, ret.getResponseCode());
  Table tbl = msc.getTable(dbName, tblName);
  validateCreateTable(tbl, tblName, dbName);
  String fakeUser = "mal";
  List<String> fakeGroupNames = new ArrayList<String>();
  fakeGroupNames.add("groupygroup");
  InjectableDummyAuthenticator.injectUserName(fakeUser);
  InjectableDummyAuthenticator.injectGroupNames(fakeGroupNames);
  InjectableDummyAuthenticator.injectMode(true);
  allowSelectOnTable(tbl.getTableName(), fakeUser, tbl.getSd().getLocation());
  ret = driver.run(String.format("select * from %s limit 10", tblName));
  assertEquals(0, ret.getResponseCode());
  ret = driver.run(String.format("create table %s (a string) partitioned by (b string)", tblName + "mal"));
  assertNoPrivileges(ret);
  disallowCreateInTbl(tbl.getTableName(), userName, tbl.getSd().getLocation());
  ret = driver.run("alter table " + tblName + " add partition (b='2011')");
  assertNoPrivileges(ret);
  InjectableDummyAuthenticator.injectMode(false);
  allowCreateInTbl(tbl.getTableName(), userName, tbl.getSd().getLocation());
  ret = driver.run("alter table " + tblName + " add partition (b='2011')");
  assertEquals(0, ret.getResponseCode());
  allowDropOnTable(tblName, userName, tbl.getSd().getLocation());
  allowDropOnDb(dbName, userName, db.getLocationUri());
  driver.run("drop database if exists " + getTestDbName() + " cascade");
}
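The inject-fake-user sequence in the middle of this test can be factored into a small helper. A sketch, reusing only the InjectableDummyAuthenticator and driver.run calls shown above (the method name is hypothetical):

  // temporarily impersonate another user, run one statement, then restore the real authenticator
  private CommandProcessorResponse runAs(String user, List<String> groups, String stmt) throws Exception {
    InjectableDummyAuthenticator.injectUserName(user);
    InjectableDummyAuthenticator.injectGroupNames(groups);
    InjectableDummyAuthenticator.injectMode(true);
    try {
      return driver.run(stmt);
    } finally {
      InjectableDummyAuthenticator.injectMode(false);
    }
  }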