Search in sources:

Example 16 with HiveMetaException

use of org.apache.hadoop.hive.metastore.HiveMetaException in project hive by apache.

The method doUpgrade of the class HiveSchemaTool.

/**
 * Perform metastore schema upgrade
 *
 * @param fromSchemaVer
 *          Existing version of the metastore. If null, then read from the metastore
 * @throws HiveMetaException
 *           if running the upgrade scripts fails, or if the post-upgrade
 *           schema version verification fails
 */
public void doUpgrade(String fromSchemaVer) throws HiveMetaException {
    if (metaStoreSchemaInfo.getHiveSchemaVersion().equals(fromSchemaVer)) {
        // Already at the target version; nothing to upgrade.
        System.out.println("No schema upgrade required from version " + fromSchemaVer);
        return;
    }
    // Find the list of scripts to execute for this upgrade
    List<String> upgradeScripts = metaStoreSchemaInfo.getUpgradeScripts(fromSchemaVer);
    testConnectionToMetastore();
    System.out.println("Starting upgrade metastore schema from version " + fromSchemaVer + " to " + metaStoreSchemaInfo.getHiveSchemaVersion());
    String scriptDir = metaStoreSchemaInfo.getMetaStoreScriptDir();
    try {
        for (String scriptFile : upgradeScripts) {
            System.out.println("Upgrade script " + scriptFile);
            // In dryRun mode only the script names are printed; nothing is executed.
            if (!dryRun) {
                // Run the pre-upgrade hook (if any) before the script itself.
                runPreUpgrade(scriptDir, scriptFile);
                runBeeLine(scriptDir, scriptFile);
                System.out.println("Completed " + scriptFile);
            }
        }
    } catch (IOException eIO) {
        throw new HiveMetaException("Upgrade FAILED! Metastore state would be inconsistent !!", eIO);
    }
    // Re-validate the schema version after the upgrade completes
    verifySchemaVersion();
}
Also used : HiveMetaException(org.apache.hadoop.hive.metastore.HiveMetaException) IOException(java.io.IOException)

Example 17 with HiveMetaException

use of org.apache.hadoop.hive.metastore.HiveMetaException in project hive by apache.

The method checkMetaStoreDBLocation of the class HiveSchemaTool.

/**
 * Validates the location URI of every database registered in the metastore
 * against the set of default filesystem servers.
 *
 * @param conn open JDBC connection to the metastore backing database
 * @param defaultServers servers considered valid location roots
 * @return true when every database location passes validation
 * @throws HiveMetaException if the DBS table cannot be queried
 */
private boolean checkMetaStoreDBLocation(Connection conn, URI[] defaultServers) throws HiveMetaException {
    // Some backing databases require quoted identifiers in SQL.
    final String query;
    if (needsQuotedIdentifier) {
        query = "select dbt.\"DB_ID\", dbt.\"NAME\", dbt.\"DB_LOCATION_URI\" from \"DBS\" dbt order by dbt.\"DB_ID\" ";
    } else {
        query = "select dbt.DB_ID, dbt.NAME, dbt.DB_LOCATION_URI from DBS dbt order by dbt.DB_ID";
    }
    int invalidCount = 0;
    try (Statement stmt = conn.createStatement();
        ResultSet res = stmt.executeQuery(query)) {
        while (res.next()) {
            String location = res.getString(3);
            // Prefer the database name for reporting, falling back to its ID.
            String dbLabel = getNameOrID(res, 2, 1);
            if (!checkLocation("Database " + dbLabel, location, defaultServers)) {
                invalidCount++;
            }
        }
    } catch (SQLException e) {
        throw new HiveMetaException("Failed to get DB Location Info.", e);
    }
    return invalidCount == 0;
}
Also used : SQLException(java.sql.SQLException) PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) ResultSet(java.sql.ResultSet) HiveMetaException(org.apache.hadoop.hive.metastore.HiveMetaException)

Example 18 with HiveMetaException

use of org.apache.hadoop.hive.metastore.HiveMetaException in project hive by apache.

The method main of the class HiveSchemaTool.

/**
 * Command-line entry point for the schema tool. Parses options, configures a
 * HiveSchemaTool instance, and dispatches to the requested action
 * (info / init / upgrade / validate). Exits with status 1 on failure.
 */
public static void main(String[] args) {
    CommandLineParser parser = new GnuParser();
    CommandLine line = null;
    String dbType = null;
    String metaDbType = null;
    String schemaVer = null;
    Options cmdLineOptions = new Options();
    // Argument handling
    initOptions(cmdLineOptions);
    try {
        line = parser.parse(cmdLineOptions, args);
    } catch (ParseException e) {
        System.err.println("HiveSchemaTool:Parsing failed.  Reason: " + e.getLocalizedMessage());
        // NOTE(review): code below assumes printAndExit terminates the JVM;
        // if it ever returns, 'line' is still null here and the hasOption
        // calls below would throw NPE — confirm printAndExit calls System.exit.
        printAndExit(cmdLineOptions);
    }
    if (line.hasOption("help")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("schemaTool", cmdLineOptions);
        return;
    }
    // dbType is mandatory and must be one of the supported backing databases.
    if (line.hasOption("dbType")) {
        dbType = line.getOptionValue("dbType");
        if ((!dbType.equalsIgnoreCase(HiveSchemaHelper.DB_DERBY) && !dbType.equalsIgnoreCase(HiveSchemaHelper.DB_HIVE) && !dbType.equalsIgnoreCase(HiveSchemaHelper.DB_MSSQL) && !dbType.equalsIgnoreCase(HiveSchemaHelper.DB_MYSQL) && !dbType.equalsIgnoreCase(HiveSchemaHelper.DB_POSTGRACE) && !dbType.equalsIgnoreCase(HiveSchemaHelper.DB_ORACLE))) {
            System.err.println("Unsupported dbType " + dbType);
            printAndExit(cmdLineOptions);
        }
    } else {
        System.err.println("no dbType supplied");
        printAndExit(cmdLineOptions);
    }
    // metaDbType is only meaningful (and then required) when dbType is "hive".
    if (line.hasOption("metaDbType")) {
        metaDbType = line.getOptionValue("metaDbType");
        if (!dbType.equals(HiveSchemaHelper.DB_HIVE)) {
            System.err.println("metaDbType only supported for dbType = hive");
            printAndExit(cmdLineOptions);
        }
        if (!metaDbType.equalsIgnoreCase(HiveSchemaHelper.DB_DERBY) && !metaDbType.equalsIgnoreCase(HiveSchemaHelper.DB_MSSQL) && !metaDbType.equalsIgnoreCase(HiveSchemaHelper.DB_MYSQL) && !metaDbType.equalsIgnoreCase(HiveSchemaHelper.DB_POSTGRACE) && !metaDbType.equalsIgnoreCase(HiveSchemaHelper.DB_ORACLE)) {
            System.err.println("Unsupported metaDbType " + metaDbType);
            printAndExit(cmdLineOptions);
        }
    } else if (dbType.equalsIgnoreCase(HiveSchemaHelper.DB_HIVE)) {
        System.err.println("no metaDbType supplied");
        printAndExit(cmdLineOptions);
    }
    // Force schema verification on for the tool's own metastore access.
    System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.varname, "true");
    try {
        HiveSchemaTool schemaTool = new HiveSchemaTool(dbType, metaDbType);
        // Credentials: command-line options win; otherwise fall back to config.
        if (line.hasOption("userName")) {
            schemaTool.setUserName(line.getOptionValue("userName"));
        } else {
            schemaTool.setUserName(schemaTool.getHiveConf().get(ConfVars.METASTORE_CONNECTION_USER_NAME.varname));
        }
        if (line.hasOption("passWord")) {
            schemaTool.setPassWord(line.getOptionValue("passWord"));
        } else {
            try {
                // Resolve the password through the Hadoop credential provider API.
                schemaTool.setPassWord(ShimLoader.getHadoopShims().getPassword(schemaTool.getHiveConf(), HiveConf.ConfVars.METASTOREPWD.varname));
            } catch (IOException err) {
                throw new HiveMetaException("Error getting metastore password", err);
            }
        }
        if (line.hasOption("url")) {
            schemaTool.setUrl(line.getOptionValue("url"));
        }
        if (line.hasOption("driver")) {
            schemaTool.setDriver(line.getOptionValue("driver"));
        }
        if (line.hasOption("dryRun")) {
            schemaTool.setDryRun(true);
        }
        if (line.hasOption("verbose")) {
            schemaTool.setVerbose(true);
        }
        if (line.hasOption("dbOpts")) {
            schemaTool.setDbOpts(line.getOptionValue("dbOpts"));
        }
        // "servers" is only consulted when validation was requested.
        if (line.hasOption("validate") && line.hasOption("servers")) {
            schemaTool.setValidationServers(line.getOptionValue("servers"));
        }
        // Dispatch on the requested action; exactly one action option is honored,
        // checked in this fixed priority order.
        if (line.hasOption("info")) {
            schemaTool.showInfo();
        } else if (line.hasOption("upgradeSchema")) {
            schemaTool.doUpgrade();
        } else if (line.hasOption("upgradeSchemaFrom")) {
            schemaVer = line.getOptionValue("upgradeSchemaFrom");
            schemaTool.doUpgrade(schemaVer);
        } else if (line.hasOption("initSchema")) {
            schemaTool.doInit();
        } else if (line.hasOption("initSchemaTo")) {
            schemaVer = line.getOptionValue("initSchemaTo");
            schemaTool.doInit(schemaVer);
        } else if (line.hasOption("validate")) {
            schemaTool.doValidate();
        } else {
            System.err.println("no valid option supplied");
            printAndExit(cmdLineOptions);
        }
    } catch (HiveMetaException e) {
        // Report the failure, its root cause, and (with --verbose) the stack trace.
        System.err.println(e);
        if (e.getCause() != null) {
            Throwable t = e.getCause();
            System.err.println("Underlying cause: " + t.getClass().getName() + " : " + t.getMessage());
            if (e.getCause() instanceof SQLException) {
                System.err.println("SQL Error code: " + ((SQLException) t).getErrorCode());
            }
        }
        if (line.hasOption("verbose")) {
            e.printStackTrace();
        } else {
            System.err.println("Use --verbose for detailed stacktrace.");
        }
        System.err.println("*** schemaTool failed ***");
        System.exit(1);
    }
    System.out.println("schemaTool completed");
}
Also used : HelpFormatter(org.apache.commons.cli.HelpFormatter) Options(org.apache.commons.cli.Options) CommandLine(org.apache.commons.cli.CommandLine) SQLException(java.sql.SQLException) GnuParser(org.apache.commons.cli.GnuParser) HiveMetaException(org.apache.hadoop.hive.metastore.HiveMetaException) CommandLineParser(org.apache.commons.cli.CommandLineParser) ParseException(org.apache.commons.cli.ParseException) IOException(java.io.IOException)

Example 19 with HiveMetaException

use of org.apache.hadoop.hive.metastore.HiveMetaException in project hive by apache.

The method checkMetaStoreSkewedColumnsLocation of the class HiveSchemaTool.

/**
 * Validates the location of every skewed-column value mapping
 * (SKEWED_COL_VALUE_LOC_MAP) against the set of default filesystem servers.
 * Rows are scanned in batches of {@code rtnSize} keyed by STRING_LIST_ID_KID.
 *
 * Fix: the original closed the Statement/ResultSet/PreparedStatement manually,
 * leaking them whenever a SQLException was thrown mid-loop; all JDBC resources
 * are now managed with try-with-resources.
 *
 * @param conn open JDBC connection to the metastore backing database
 * @param defaultServers servers considered valid location roots
 * @return true when every skewed-column location passes validation
 * @throws HiveMetaException if the backing database cannot be queried
 */
private boolean checkMetaStoreSkewedColumnsLocation(Connection conn, URI[] defaultServers) throws HiveMetaException {
    String skewedColLoc, skewedColIDRange;
    int numOfInvalid = 0;
    // Some backing databases require quoted identifiers in SQL.
    if (needsQuotedIdentifier) {
        skewedColIDRange = "select max(\"STRING_LIST_ID_KID\"), min(\"STRING_LIST_ID_KID\") from \"SKEWED_COL_VALUE_LOC_MAP\" ";
    } else {
        skewedColIDRange = "select max(STRING_LIST_ID_KID), min(STRING_LIST_ID_KID) from SKEWED_COL_VALUE_LOC_MAP";
    }
    if (needsQuotedIdentifier) {
        skewedColLoc = "select t.\"TBL_NAME\", t.\"TBL_ID\", sk.\"STRING_LIST_ID_KID\", sk.\"LOCATION\", db.\"NAME\", db.\"DB_ID\" " + " from \"TBLS\" t, \"SDS\" s, \"DBS\" db, \"SKEWED_COL_VALUE_LOC_MAP\" sk " + "where sk.\"SD_ID\" = s.\"SD_ID\" and s.\"SD_ID\" = t.\"SD_ID\" and t.\"DB_ID\" = db.\"DB_ID\" and " + "sk.\"STRING_LIST_ID_KID\" >= ? and sk.\"STRING_LIST_ID_KID\" <= ? order by t.\"TBL_ID\" ";
    } else {
        skewedColLoc = "select t.TBL_NAME, t.TBL_ID, sk.STRING_LIST_ID_KID, sk.LOCATION, db.NAME, db.DB_ID from TBLS t, SDS s, DBS db, SKEWED_COL_VALUE_LOC_MAP sk " + "where sk.SD_ID = s.SD_ID and s.SD_ID = t.SD_ID and t.DB_ID = db.DB_ID and sk.STRING_LIST_ID_KID >= ? and sk.STRING_LIST_ID_KID <= ? order by t.TBL_ID ";
    }
    long maxID = 0, minID = 0;
    long rtnSize = 2000;
    try {
        // First determine the ID range to batch over.
        try (Statement stmt = conn.createStatement();
            ResultSet res = stmt.executeQuery(skewedColIDRange)) {
            if (res.next()) {
                maxID = res.getLong(1);
                minID = res.getLong(2);
            }
        }
        try (PreparedStatement pStmt = conn.prepareStatement(skewedColLoc)) {
            while (minID <= maxID) {
                pStmt.setLong(1, minID);
                pStmt.setLong(2, minID + rtnSize);
                try (ResultSet res = pStmt.executeQuery()) {
                    while (res.next()) {
                        String locValue = res.getString(4);
                        String entity = "Database " + getNameOrID(res, 5, 6) + ", Table " + getNameOrID(res, 1, 2) + ", String list " + res.getString(3);
                        if (!checkLocation(entity, locValue, defaultServers)) {
                            numOfInvalid++;
                        }
                    }
                }
                minID += rtnSize + 1;
            }
        }
    } catch (SQLException e) {
        throw new HiveMetaException("Failed to get skewed columns location info.", e);
    }
    return numOfInvalid == 0;
}
Also used : SQLException(java.sql.SQLException) PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) ResultSet(java.sql.ResultSet) HiveMetaException(org.apache.hadoop.hive.metastore.HiveMetaException) PreparedStatement(java.sql.PreparedStatement)

Example 20 with HiveMetaException

use of org.apache.hadoop.hive.metastore.HiveMetaException in project hive by apache.

The method getConnectionToMetastore of the class HiveSchemaHelper.

/**
 * Get JDBC connection to metastore db
 * @param userName metastore connection username
 * @param password metastore connection password
 * @param url Metastore URL.  If null will be read from config file.
 * @param driver Driver class.  If null will be read from config file.
 * @param printInfo print connection parameters
 * @param conf hive config object
 * @param schema the schema to create the connection for
 * @return metastore connection object
 * @throws org.apache.hadoop.hive.metastore.HiveMetaException
 *           if the connection parameters are invalid, the driver class cannot
 *           be loaded, or the connection cannot be established
 */
public static Connection getConnectionToMetastore(String userName, String password, String url, String driver, boolean printInfo, Configuration conf, String schema) throws HiveMetaException {
    try {
        // Fall back to the configured values when url/driver are not supplied.
        url = url == null ? getValidConfVar(MetastoreConf.ConfVars.CONNECTURLKEY, conf) : url;
        driver = driver == null ? getValidConfVar(MetastoreConf.ConfVars.CONNECTION_DRIVER, conf) : driver;
        if (printInfo) {
            logAndPrintToStdout("Metastore connection URL:\t " + url);
            logAndPrintToStdout("Metastore Connection Driver :\t " + driver);
            logAndPrintToStdout("Metastore connection User:\t " + userName);
            // Only echo the password in test mode; never in production runs.
            if (MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST)) {
                logAndPrintToStdout("Metastore connection Password:\t " + password);
            }
        }
        if ((userName == null) || userName.isEmpty()) {
            throw new HiveMetaException("UserName empty ");
        }
        // load required JDBC driver
        Class.forName(driver);
        // Connect using the JDBC URL and user/pass from conf
        Connection conn = DriverManager.getConnection(url, userName, password);
        if (schema != null) {
            try {
                conn.setSchema(schema);
            } catch (SQLException e) {
                // Fix: don't leak the connection when schema selection fails.
                try {
                    conn.close();
                } catch (SQLException ignored) {
                    // best-effort close; propagate the original failure
                }
                throw e;
            }
        }
        return conn;
    } catch (IOException | SQLException e) {
        // Fix: message previously said "Failed to get schema version." — a
        // copy-paste from another method; this path is connection acquisition.
        throw new HiveMetaException("Failed to get connection to metastore.", e);
    } catch (ClassNotFoundException e) {
        LOG.error("Unable to find driver class", e);
        throw new HiveMetaException("Failed to load driver", e);
    }
}
Also used : SQLException(java.sql.SQLException) Connection(java.sql.Connection) HiveMetaException(org.apache.hadoop.hive.metastore.HiveMetaException) IOException(java.io.IOException)

Aggregations

HiveMetaException (org.apache.hadoop.hive.metastore.HiveMetaException)28 SQLException (java.sql.SQLException)21 IOException (java.io.IOException)15 ResultSet (java.sql.ResultSet)14 PreparedStatement (java.sql.PreparedStatement)12 Statement (java.sql.Statement)12 File (java.io.File)5 Connection (java.sql.Connection)4 ParseException (org.apache.commons.cli.ParseException)4 ImmutableMap (com.google.common.collect.ImmutableMap)2 ByteArrayOutputStream (java.io.ByteArrayOutputStream)2 OutputStream (java.io.OutputStream)2 PrintStream (java.io.PrintStream)2 DatabaseMetaData (java.sql.DatabaseMetaData)2 ArrayList (java.util.ArrayList)2 CommandLine (org.apache.commons.cli.CommandLine)2 CommandLineParser (org.apache.commons.cli.CommandLineParser)2 GnuParser (org.apache.commons.cli.GnuParser)2 HelpFormatter (org.apache.commons.cli.HelpFormatter)2 Options (org.apache.commons.cli.Options)2