use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.
the class QTestUtil method cleanupFromFile.
private void cleanupFromFile() throws IOException {
  File cleanupFile = new File(cleanupScript);
  if (cleanupFile.isFile()) {
    String cleanupCommands = FileUtils.readFileToString(cleanupFile);
    LOG.info("Cleanup (" + cleanupScript + "):\n" + cleanupCommands);
    try {
      getCliDriver().processLine(cleanupCommands);
    } catch (CommandProcessorException e) {
      LOG.error("Failed during cleanup processLine with code={}. Ignoring", e.getResponseCode());
      // TODO Convert this to an Assert.fail once HIVE-14682 is fixed
    }
  } else {
    LOG.info("No cleanup script detected. Skipping.");
  }
}
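The catch block above deliberately downgrades a failed cleanup to a log entry. A minimal sketch of the same idea as a reusable helper, assuming a CliDriver instance and an SLF4J-style logger; the helper name runQuietly is hypothetical, while processLine and getResponseCode are taken from the snippet above:

private boolean runQuietly(CliDriver cliDriver, String command) {
  try {
    cliDriver.processLine(command);
    return true;
  } catch (CommandProcessorException e) {
    // The exception carries the processor's response code; log it and continue.
    LOG.warn("Command failed with code={}: {}", e.getResponseCode(), e.getMessage());
    return false;
  }
}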
use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.
the class QTestDatasetHandler method unloadDataset.
public boolean unloadDataset(String table, CliDriver cliDriver) throws Exception {
  try {
    CommandProcessorResponse result = cliDriver.processLine("drop table " + table);
    LOG.info("Result from cliDriver.processLine in unloadDataset=" + result);
  } catch (CommandProcessorException e) {
    Assert.fail("Failed during unloadDataset processLine: " + e);
  }
  return true;
}
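A hypothetical usage sketch, assuming handler is an initialized QTestDatasetHandler and cliDriver an active CliDriver; the table names are illustrative, and the enclosing method must declare throws Exception:

// Tear down several dataset tables with the same fail-fast handling.
for (String table : new String[] { "src", "src1", "alltypesorc" }) {
  handler.unloadDataset(table, cliDriver);
}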
use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.
the class AbstractHCatStorerTest method testDateCharTypes.
/**
 * Create a data file with datatypes added in 0.13. Read it with Pig and use Pig + HCatStorer to
 * write to a Hive table. Then read it using Pig and Hive and make sure the results match.
 */
@Test
public void testDateCharTypes() throws Exception {
  final String tblName = "junit_date_char";
  AbstractHCatLoaderTest.dropTable(tblName, driver);
  AbstractHCatLoaderTest.createTableDefaultDB(tblName, "id int, char5 char(5), varchar10 varchar(10), dec52 decimal(5,2)", null, driver, storageFormat);
  int NUM_ROWS = 5;
  String[] rows = new String[NUM_ROWS];
  for (int i = 0; i < NUM_ROWS; i++) {
    // Since the file is read by Pig, the values must be in a format that Pig
    // understands; otherwise Pig will turn the value into NULL on read.
    rows[i] = i + "\txxxxx\tyyy\t" + 5.2;
  }
  HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, rows);
  LOG.debug("File=" + INPUT_FILE_NAME);
  // dumpFile(INPUT_FILE_NAME);
  PigServer server = createPigServer(true);
  int queryNumber = 1;
  logAndRegister(server, "A = load '" + INPUT_FILE_NAME + "' as (id:int, char5:chararray, varchar10:chararray, dec52:bigdecimal);", queryNumber++);
  logAndRegister(server, "store A into '" + tblName + "' using " + HCatStorer.class.getName() + "();", queryNumber++);
  logAndRegister(server, "B = load '" + tblName + "' using " + HCatLoader.class.getName() + "();", queryNumber);
  try {
    driver.run("select * from " + tblName);
  } catch (CommandProcessorException e) {
    LOG.debug("cpr.respCode=" + e.getResponseCode() + " cpr.errMsg=" + e.getMessage());
  }
  List l = new ArrayList();
  driver.getResults(l);
  LOG.debug("Dumping rows via SQL from " + tblName);
  /*
   * Unfortunately Timestamp.toString() adjusts the value for the local TZ, and 't' is a String,
   * so the timestamp in 't' doesn't match rawData.
   */
  for (Object t : l) {
    LOG.debug(t == null ? null : t.toString());
  }
  Iterator<Tuple> itr = server.openIterator("B");
  int numRowsRead = 0;
  while (itr.hasNext()) {
    Tuple t = itr.next();
    StringBuilder rowFromPig = new StringBuilder();
    for (int i = 0; i < t.size(); i++) {
      rowFromPig.append(t.get(i)).append("\t");
    }
    rowFromPig.setLength(rowFromPig.length() - 1);
    assertEquals("Comparing Pig to Raw data", rows[numRowsRead], rowFromPig.toString());
    // See the comment at "Dumping rows via SQL..." for why this doesn't work (for all types):
    // assertEquals("Comparing Pig to Hive", rowFromPig.toString(), l.get(numRowsRead));
    numRowsRead++;
  }
  assertEquals("Expected " + NUM_ROWS + " rows; got " + numRowsRead + " file=" + INPUT_FILE_NAME, NUM_ROWS, numRowsRead);
}
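For reference, a condensed sketch of the run-and-fetch pattern used in the middle of this test, assuming a driver with the same run/getResults API shown above; tableName is a placeholder and checked exceptions are left to the caller:

List<String> results = new ArrayList<>();
try {
  driver.run("select * from " + tableName);
} catch (CommandProcessorException e) {
  // Both the response code and the error message survive on the exception.
  LOG.debug("query failed: code={} msg={}", e.getResponseCode(), e.getMessage());
}
driver.getResults(results); // one string per fetched row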
use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.
the class TestMetastoreVersion method testMetastoreVersion.
/**
 * Test that with schema verification disabled and version recording enabled, Hive populates the
 * schema and version correctly.
 * @throws Exception
 */
@Test
public void testMetastoreVersion() throws Exception {
  // let the schema and version be auto created
  System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false");
  System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION_RECORD_VERSION.toString(), "true");
  hiveConf = new HiveConf(this.getClass());
  SessionState.start(new CliSessionState(hiveConf));
  driver = DriverFactory.newDriver(hiveConf);
  try {
    driver.run("show tables");
    fail("Expected the first query to fail before the schema is created");
  } catch (CommandProcessorException e) {
    // this is expected
  }
  // correct version stored by the metastore during startup
  assertEquals(metastoreSchemaInfo.getHiveSchemaVersion(), getVersion(hiveConf));
  setVersion(hiveConf, "foo");
  assertEquals("foo", getVersion(hiveConf));
}
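Both this test and testVersionMisMatch below expect the failure via a try block and an explicit fail. A sketch of the same check using JUnit 4.13+ Assert.assertThrows, assuming the same driver instance; this is an alternative form, not how the Hive test is actually written:

// assertThrows returns the thrown exception for further checks.
CommandProcessorException e =
    Assert.assertThrows(CommandProcessorException.class, () -> driver.run("show tables"));
// e.getResponseCode() / e.getMessage() remain available if needed.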
use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.
the class TestMetastoreVersion method testVersionMisMatch.
/**
 * Store a garbage version in the metastore and verify that Hive fails when verification is on.
 * @throws Exception
 */
@Test
public void testVersionMisMatch() throws Exception {
  System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false");
  hiveConf = new HiveConf(this.getClass());
  SessionState.start(new CliSessionState(hiveConf));
  driver = DriverFactory.newDriver(hiveConf);
  driver.run("show tables");
  ObjectStore.setSchemaVerified(false);
  System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "true");
  hiveConf = new HiveConf(this.getClass());
  setVersion(hiveConf, "fooVersion");
  SessionState.start(new CliSessionState(hiveConf));
  driver = DriverFactory.newDriver(hiveConf);
  try {
    driver.run("show tables");
    fail("Expected a version mismatch failure");
  } catch (CommandProcessorException e) {
    // this is expected
  }
}