Use of org.apache.hadoop.hive.metastore.HiveMetaException in project hive by apache:
the class MetastoreSchemaTool, method run.
/**
 * Command-line entry point for the metastore schema tool.
 * <p>
 * Parses {@code args}, validates the mandatory {@code -dbType} option, configures a
 * {@link MetastoreSchemaTool} from the remaining options, and dispatches to the requested
 * action (info / init / upgrade / validate / createUser).
 *
 * @param args command-line arguments; see {@code initOptions} for the supported options
 * @return 0 on success, 1 on failure (including an explicit {@code -help} request, which
 *         historically also returns 1), or the value of {@code usage()} on bad arguments
 */
public static int run(String[] args) {
  LOG.debug("Going to run command: " + StringUtils.join(args, " "));
  CommandLineParser parser = new GnuParser();
  Options cmdLineOptions = new Options();
  // Argument handling
  initOptions(cmdLineOptions);
  CommandLine line;
  try {
    line = parser.parse(cmdLineOptions, args);
  } catch (ParseException e) {
    logAndPrintToError("HiveSchemaTool:Parsing failed. Reason: " + e.getLocalizedMessage());
    return usage(cmdLineOptions);
  }
  assert line != null;
  if (line.hasOption("help")) {
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("schemaTool", cmdLineOptions);
    // NOTE(review): historical behavior returns 1 even for an explicit help request;
    // preserved for script compatibility.
    return 1;
  }
  if (!line.hasOption("dbType")) {
    logAndPrintToError("no dbType supplied");
    return usage(cmdLineOptions);
  }
  String dbType = line.getOptionValue("dbType");
  if (!isSupportedDbType(dbType)) {
    logAndPrintToError("Unsupported dbType " + dbType);
    return usage(cmdLineOptions);
  }
  System.setProperty(ConfVars.SCHEMA_VERIFICATION.toString(), "true");
  try {
    MetastoreSchemaTool schemaTool = new MetastoreSchemaTool(dbType);
    configureSchemaTool(schemaTool, line);
    return executeCommand(schemaTool, line, cmdLineOptions);
  } catch (HiveMetaException e) {
    reportFailure(e, line);
    return 1;
  }
}

/** Returns true if {@code dbType} names one of the databases the schema tool supports. */
private static boolean isSupportedDbType(String dbType) {
  return dbType.equalsIgnoreCase(HiveSchemaHelper.DB_DERBY)
      || dbType.equalsIgnoreCase(HiveSchemaHelper.DB_MSSQL)
      || dbType.equalsIgnoreCase(HiveSchemaHelper.DB_MYSQL)
      // DB_POSTGRACE is the (historically misspelled) Postgres constant in HiveSchemaHelper.
      || dbType.equalsIgnoreCase(HiveSchemaHelper.DB_POSTGRACE)
      || dbType.equalsIgnoreCase(HiveSchemaHelper.DB_ORACLE);
}

/**
 * Copies connection, credential, and behavior options from the parsed command line onto the
 * tool, falling back to the metastore configuration for user name and password.
 *
 * @throws HiveMetaException if the metastore password cannot be read from configuration
 */
private static void configureSchemaTool(MetastoreSchemaTool schemaTool, CommandLine line)
    throws HiveMetaException {
  if (line.hasOption("userName")) {
    schemaTool.setUserName(line.getOptionValue("userName"));
  } else {
    schemaTool.setUserName(
        MetastoreConf.getVar(schemaTool.getConf(), ConfVars.CONNECTION_USER_NAME));
  }
  if (line.hasOption("passWord")) {
    schemaTool.setPassWord(line.getOptionValue("passWord"));
  } else {
    try {
      schemaTool.setPassWord(MetastoreConf.getPassword(schemaTool.getConf(), ConfVars.PWD));
    } catch (IOException err) {
      // Wrap so the caller's single HiveMetaException handler reports it uniformly.
      throw new HiveMetaException("Error getting metastore password", err);
    }
  }
  if (line.hasOption("hiveUser")) {
    schemaTool.setHiveUser(line.getOptionValue("hiveUser"));
  }
  if (line.hasOption("hivePassword")) {
    schemaTool.setHivePasswd(line.getOptionValue("hivePassword"));
  }
  if (line.hasOption("hiveDb")) {
    schemaTool.setHiveDb(line.getOptionValue("hiveDb"));
  }
  if (line.hasOption("url")) {
    schemaTool.setUrl(line.getOptionValue("url"));
  }
  if (line.hasOption("driver")) {
    schemaTool.setDriver(line.getOptionValue("driver"));
  }
  if (line.hasOption("dryRun")) {
    schemaTool.setDryRun(true);
  }
  if (line.hasOption("verbose")) {
    schemaTool.setVerbose(true);
  }
  if (line.hasOption("dbOpts")) {
    schemaTool.setDbOpts(line.getOptionValue("dbOpts"));
  }
  // Validation servers are only meaningful together with -validate.
  if (line.hasOption("validate") && line.hasOption("servers")) {
    schemaTool.setValidationServers(line.getOptionValue("servers"));
  }
}

/**
 * Dispatches to the schema-tool action selected on the command line.
 *
 * @return the process exit code: 0 on success, the result of {@code doValidate()} for
 *         {@code -validate}, or {@code usage()} when no valid action was supplied
 * @throws HiveMetaException if the chosen action fails
 */
private static int executeCommand(MetastoreSchemaTool schemaTool, CommandLine line,
    Options cmdLineOptions) throws HiveMetaException {
  if (line.hasOption("info")) {
    schemaTool.showInfo();
  } else if (line.hasOption("upgradeSchema")) {
    schemaTool.doUpgrade();
  } else if (line.hasOption("upgradeSchemaFrom")) {
    schemaTool.doUpgrade(line.getOptionValue("upgradeSchemaFrom"));
  } else if (line.hasOption("initSchema")) {
    schemaTool.doInit();
  } else if (line.hasOption("initSchemaTo")) {
    schemaTool.doInit(line.getOptionValue("initSchemaTo"));
  } else if (line.hasOption("validate")) {
    // Validation returns its own exit code and intentionally skips the completion banner.
    return schemaTool.doValidate();
  } else if (line.hasOption("createUser")) {
    schemaTool.doCreateUser();
  } else {
    logAndPrintToError("no valid option supplied");
    return usage(cmdLineOptions);
  }
  System.out.println("schemaTool completed");
  return 0;
}

/** Logs a schema-tool failure, including the underlying cause and SQL error code if any. */
private static void reportFailure(HiveMetaException e, CommandLine line) {
  logAndPrintToError(e.getMessage());
  if (e.getCause() != null) {
    Throwable t = e.getCause();
    logAndPrintToError("Underlying cause: " + t.getClass().getName() + " : " + t.getMessage());
    // Consistently test the already-extracted cause (the original tested e.getCause()
    // but cast t — equivalent, but confusing to read).
    if (t instanceof SQLException) {
      logAndPrintToError("SQL Error code: " + ((SQLException) t).getErrorCode());
    }
  }
  if (line.hasOption("verbose")) {
    e.printStackTrace();
  } else {
    logAndPrintToError("Use --verbose for detailed stacktrace.");
  }
  logAndPrintToError("*** schemaTool failed ***");
}
Use of org.apache.hadoop.hive.metastore.HiveMetaException in project hive by apache:
the class TestSchemaToolForMetastore, method testSchemaUpgrade.
/**
 * Tests the full schema upgrade path: initializes an old (1.2.0) schema, confirms version
 * verification rejects it, then upgrades to the latest version and confirms that
 * pre-upgrade scripts were run (errors in them ignored) and that verification now passes.
 */
@Test
public void testSchemaUpgrade() throws Exception {
  boolean foundException = false;
  // Initialize 1.2.0 schema
  schemaTool.doInit("1.2.0");
  // verify that driver fails due to older version schema
  try {
    schemaTool.verifySchemaVersion();
  } catch (HiveMetaException e) {
    // Expected to fail due to old schema
    foundException = true;
  }
  if (!foundException) {
    throw new Exception("Hive operations shouldn't pass with older version schema");
  }
  // Generate dummy pre-upgrade script with errors
  String invalidPreUpgradeScript = writeDummyPreUpgradeScript(0, "upgrade-2.3.0-to-3.0.0.derby.sql", "foo bar;");
  // Generate dummy pre-upgrade scripts with valid SQL
  String validPreUpgradeScript0 = writeDummyPreUpgradeScript(1, "upgrade-2.3.0-to-3.0.0.derby.sql", "CREATE TABLE schema_test0 (id integer);");
  String validPreUpgradeScript1 = writeDummyPreUpgradeScript(2, "upgrade-2.3.0-to-3.0.0.derby.sql", "CREATE TABLE schema_test1 (id integer);");
  schemaTool.setVerbose(true);
  // Capture system out and err. Remember the real streams so they can be restored even if
  // the upgrade throws — the original left the JVM's stdout/stderr pointing at these
  // buffers, silently swallowing the console output of every subsequent test.
  PrintStream originalOut = System.out;
  PrintStream originalErr = System.err;
  OutputStream stderr = new ByteArrayOutputStream();
  OutputStream stdout = new ByteArrayOutputStream();
  try (PrintStream errPrintStream = new PrintStream(stderr);
       PrintStream outPrintStream = new PrintStream(stdout)) {
    System.setErr(errPrintStream);
    System.setOut(outPrintStream);
    // Upgrade schema from 1.2.0 to latest
    schemaTool.doUpgrade("1.2.0");
  } finally {
    // Restore the real streams; try-with-resources has flushed and closed the captures,
    // so the buffers below contain everything that was written.
    System.setOut(originalOut);
    System.setErr(originalErr);
  }
  LOG.info("stdout is " + stdout.toString());
  LOG.info("stderr is " + stderr.toString());
  // Verify that the schemaTool ran pre-upgrade scripts and ignored errors: the failing
  // script is reported on stderr, the valid ones only on stdout.
  Assert.assertTrue(stderr.toString().contains(invalidPreUpgradeScript));
  Assert.assertTrue(stderr.toString().contains("foo"));
  Assert.assertFalse(stderr.toString().contains(validPreUpgradeScript0));
  Assert.assertFalse(stderr.toString().contains(validPreUpgradeScript1));
  Assert.assertTrue(stdout.toString().contains(validPreUpgradeScript0));
  Assert.assertTrue(stdout.toString().contains(validPreUpgradeScript1));
  // Verify that driver works fine with latest schema
  schemaTool.verifySchemaVersion();
}
Use of org.apache.hadoop.hive.metastore.HiveMetaException in project hive by apache:
the class TestSchemaToolForMetastore, method validateMetastoreDbPropertiesTable.
/**
 * Validates that the METASTORE_DB_PROPERTIES table passes schema-table validation and that
 * inserting the same property key twice is rejected by the unique-key constraint.
 *
 * @throws HiveMetaException if schema table validation fails unexpectedly
 * @throws IOException if the test script cannot be generated
 */
private void validateMetastoreDbPropertiesTable() throws HiveMetaException, IOException {
  boolean isValid = schemaTool.validateSchemaTables(conn);
  Assert.assertTrue(isValid);
  // adding same property key twice should throw unique key constraint violation exception
  String[] scripts = new String[] {
      "insert into METASTORE_DB_PROPERTIES values ('guid', 'test-uuid-1', 'dummy uuid 1')",
      "insert into METASTORE_DB_PROPERTIES values ('guid', 'test-uuid-2', 'dummy uuid 2')" };
  File scriptFile = generateTestScript(scripts);
  Exception ex = null;
  try {
    schemaTool.runSqlLine(scriptFile.getPath());
  } catch (Exception iox) {
    ex = iox;
  }
  // instanceof already yields false for null, so the original's extra null check was
  // redundant; the message makes a failure diagnosable instead of a bare assertion error.
  Assert.assertTrue("Expected duplicate-key insert to fail with an IOException but got: " + ex,
      ex instanceof IOException);
}
Aggregations