Use of org.apache.hadoop.hive.metastore.HiveMetaException in project hive by apache:
the method checkMetaStoreDBLocation of the class SchemaToolTaskValidate.
/**
 * Verifies that every database location recorded in the metastore points at one of the
 * accepted default filesystem servers.
 *
 * @param conn open JDBC connection to the metastore backing database
 * @param defaultServers URIs of the filesystem servers considered valid
 * @return {@code true} when every database location is valid, {@code false} otherwise
 * @throws HiveMetaException if the location query cannot be executed
 */
private boolean checkMetaStoreDBLocation(Connection conn, URI[] defaultServers) throws HiveMetaException {
  String query = schemaTool.quote(QUERY_DB_LOCATION);
  int invalidCount = 0;
  try (Statement statement = conn.createStatement();
       ResultSet rs = statement.executeQuery(query)) {
    while (rs.next()) {
      // Column 3 holds the location; columns 2/1 hold the display name and numeric id.
      String location = rs.getString(3);
      String database = getNameOrID(rs, 2, 1);
      boolean valid = checkLocation("Database " + database, location, defaultServers);
      if (!valid) {
        invalidCount++;
      }
    }
  } catch (SQLException e) {
    throw new HiveMetaException("Failed to get DB Location Info.", e);
  }
  return invalidCount == 0;
}
Use of org.apache.hadoop.hive.metastore.HiveMetaException in project hive by apache:
the method validateSchemaVersions of the class SchemaToolTaskValidate.
/**
 * Checks that the schema version stored in the metastore database is compatible with the
 * version this Hive distribution expects.
 *
 * <p>Known incompatibility failures (recognized by their exception message text, which is
 * the contract exposed by assertCompatibleVersion/getMetaStoreSchemaVersion) are reported
 * to the console and turned into a {@code false} result; anything else is rethrown.
 *
 * @return {@code true} if the versions are compatible, {@code false} on a known mismatch
 * @throws HiveMetaException for unexpected failures while reading the version
 */
boolean validateSchemaVersions() throws HiveMetaException {
  System.out.println("Validating schema version");
  try {
    String expectedVersion = schemaTool.getMetaStoreSchemaInfo().getHiveSchemaVersion();
    MetaStoreConnectionInfo info = schemaTool.getConnectionInfo(false);
    String actualVersion = schemaTool.getMetaStoreSchemaInfo().getMetaStoreSchemaVersion(info);
    schemaTool.assertCompatibleVersion(expectedVersion, actualVersion);
  } catch (HiveMetaException hme) {
    String msg = hme.getMessage();
    boolean knownMismatch =
        msg.contains("Metastore schema version is not compatible")
        || msg.contains("Multiple versions were found in metastore")
        || msg.contains("Could not find version info in metastore VERSION table");
    if (!knownMismatch) {
      throw hme;
    }
    System.err.println(msg);
    System.out.println("[FAIL]\n");
    return false;
  }
  System.out.println("[SUCCESS]\n");
  return true;
}
Use of org.apache.hadoop.hive.metastore.HiveMetaException in project hive by apache:
the method execute of the class SchemaToolTaskUpgrade.
/**
 * Upgrades the metastore schema from {@code fromVersion} to the version shipped with this
 * Hive distribution by running each upgrade script in order. In dry-run mode the scripts
 * are listed but not executed. After the upgrade the resulting version is re-verified.
 *
 * @throws HiveMetaException if any upgrade script fails; the metastore may then be left
 *         in an inconsistent state
 */
@Override
void execute() throws HiveMetaException {
  ensureFromVersion();

  String targetVersion = schemaTool.getMetaStoreSchemaInfo().getHiveSchemaVersion();
  if (targetVersion.equals(fromVersion)) {
    // Already current — nothing to run.
    System.out.println("No schema upgrade required from version " + fromVersion);
    return;
  }

  // Determine the ordered script chain for this upgrade path.
  List<String> scripts = schemaTool.getMetaStoreSchemaInfo().getUpgradeScripts(fromVersion);
  schemaTool.testConnectionToMetastore();
  System.out.println("Starting upgrade metastore schema from version " + fromVersion + " to " + targetVersion);

  String scriptDir = schemaTool.getMetaStoreSchemaInfo().getMetaStoreScriptDir();
  try {
    for (String script : scripts) {
      System.out.println("Upgrade script " + script);
      if (schemaTool.isDryRun()) {
        continue;  // dry run: report only, do not touch the database
      }
      runPreUpgrade(scriptDir, script);
      schemaTool.execSql(scriptDir, script);
      System.out.println("Completed " + script);
    }
  } catch (IOException e) {
    throw new HiveMetaException("Upgrade FAILED! Metastore state would be inconsistent !!", e);
  }

  // Re-validate the schema version now that the scripts have run.
  schemaTool.verifySchemaVersion();
}
Use of org.apache.hadoop.hive.metastore.HiveMetaException in project hive by apache:
the method getConnectionToMetastore of the class HiveSchemaHelper.
/**
 * Get JDBC connection to metastore db
 * @param userName metastore connection username
 * @param password metastore connection password
 * @param url Metastore URL. If null will be read from config file.
 * @param driver Driver class. If null will be read from config file.
 * @param printInfo print connection parameters
 * @param conf hive config object
 * @param schema the schema to create the connection for; may be null
 * @return metastore connection object
 * @throws org.apache.hadoop.hive.metastore.HiveMetaException
 */
public static Connection getConnectionToMetastore(String userName, String password, String url, String driver, boolean printInfo, Configuration conf, String schema) throws HiveMetaException {
  try {
    // Fall back to the configured values when url/driver are not supplied explicitly.
    url = url == null ? getValidConfVar(MetastoreConf.ConfVars.CONNECT_URL_KEY, conf) : url;
    driver = driver == null ? getValidConfVar(MetastoreConf.ConfVars.CONNECTION_DRIVER, conf) : driver;
    if (printInfo) {
      logAndPrintToStdout("Metastore connection URL:\t " + url);
      logAndPrintToStdout("Metastore connection Driver :\t " + driver);
      logAndPrintToStdout("Metastore connection User:\t " + userName);
      // Only reveal the password in test mode, never in production output.
      if (MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST)) {
        logAndPrintToStdout("Metastore connection Password:\t " + password);
      }
    }
    if ((userName == null) || userName.isEmpty()) {
      throw new HiveMetaException("UserName empty ");
    }
    // load required JDBC driver
    Class.forName(driver);
    // Connect using the JDBC URL and user/pass from conf
    Connection conn = DriverManager.getConnection(url, userName, password);
    if (schema != null) {
      try {
        conn.setSchema(schema);
      } catch (SQLException e) {
        // BUG FIX: previously the freshly opened connection was leaked when
        // setSchema failed; close it before propagating to the outer handler.
        try {
          conn.close();
        } catch (SQLException closeEx) {
          LOG.warn("Failed to close connection after setSchema failure", closeEx);
        }
        throw e;
      }
    }
    return conn;
  } catch (IOException | SQLException e) {
    throw new HiveMetaException("Failed to get schema version.", e);
  } catch (ClassNotFoundException e) {
    LOG.error("Unable to find driver class", e);
    throw new HiveMetaException("Failed to load driver", e);
  }
}
Use of org.apache.hadoop.hive.metastore.HiveMetaException in project hive by apache:
the method execute of the class SchemaToolTaskCreateCatalog.
/**
 * Creates a new catalog entry in the metastore database. When {@code ifNotExists} is set
 * and the catalog already exists, the task is a no-op. All work runs in a single
 * transaction that is rolled back on any failure.
 *
 * @throws HiveMetaException if the catalog cannot be added
 */
@Override
void execute() throws HiveMetaException {
  System.out.println("Create catalog " + catName + " at location " + location);
  Connection conn = schemaTool.getConnectionToMetastore(true);
  boolean success = false;
  try {
    conn.setAutoCommit(false);
    try (Statement stmt = conn.createStatement()) {
      // Checking existence up front is more reliable than attempting to parse the error
      // message from the SQLException. (FIX: "then" -> "than" typo in original comment.)
      if (ifNotExists && catalogExists(stmt)) {
        // FIX: mark success so the finally block does not issue a pointless rollback
        // on a connection that performed no writes.
        success = true;
        return;
      }
      int catNum = getNextCatalogId(stmt);
      addCatalog(conn, stmt, catNum);
      success = true;
    }
  } catch (SQLException e) {
    throw new HiveMetaException("Failed to add catalog", e);
  } finally {
    try {
      if (!success) {
        conn.rollback();
      }
    } catch (SQLException e) {
      // Not really much we can do here.
      LOG.error("Failed to rollback, everything will probably go bad from here.", e);
    } finally {
      // FIX: the connection was previously never closed (resource leak).
      // getConnectionToMetastore opens a fresh connection per call, so closing here
      // is safe — TODO confirm no caller expects the connection to stay open.
      try {
        conn.close();
      } catch (SQLException e) {
        LOG.error("Failed to close metastore connection", e);
      }
    }
  }
}
Aggregations