
Example 21 with Connection

use of java.sql.Connection in project hive by apache.

the class HiveSchemaTool method validateSchemaTables.

boolean validateSchemaTables(Connection conn) throws HiveMetaException {
    String version = null;
    ResultSet rs = null;
    DatabaseMetaData metadata = null;
    List<String> dbTables = new ArrayList<String>();
    List<String> schemaTables = new ArrayList<String>();
    List<String> subScripts = new ArrayList<String>();
    Connection hmsConn = getConnectionToMetastore(false);
    System.out.println("Validating metastore schema tables");
    try {
        version = getMetaStoreSchemaVersion(hmsConn);
    } catch (HiveMetaException he) {
        System.err.println("Failed to determine schema version from Hive Metastore DB," + he.getMessage());
        LOG.debug("Failed to determine schema version from Hive Metastore DB," + he.getMessage());
        return false;
    }
    // re-open the hms connection
    hmsConn = getConnectionToMetastore(false);
    LOG.debug("Validating tables in the schema for version " + version);
    try {
        metadata = conn.getMetaData();
        String[] types = { "TABLE" };
        rs = metadata.getTables(null, null, "%", types);
        String table = null;
        while (rs.next()) {
            table = rs.getString("TABLE_NAME");
            dbTables.add(table.toLowerCase());
            LOG.debug("Found table " + table + " in HMS dbstore");
        }
    } catch (SQLException e) {
        throw new HiveMetaException("Failed to retrieve schema tables from Hive Metastore DB," + e.getMessage());
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                throw new HiveMetaException("Failed to close resultset", e);
            }
        }
    }
    // parse the schema file to determine the tables that are expected to exist
    // we are using the Oracle schema because it is simpler to parse: no quotes, backticks, etc.
    String baseDir = new File(metaStoreSchemaInfo.getMetaStoreScriptDir()).getParent();
    String schemaFile = baseDir + "/" + dbType + "/hive-schema-" + version + "." + dbType + ".sql";
    try {
        LOG.debug("Parsing schema script " + schemaFile);
        subScripts.addAll(findCreateTable(schemaFile, schemaTables));
        while (subScripts.size() > 0) {
            schemaFile = baseDir + "/" + dbType + "/" + subScripts.remove(0);
            LOG.debug("Parsing subscript " + schemaFile);
            subScripts.addAll(findCreateTable(schemaFile, schemaTables));
        }
    } catch (Exception e) {
        System.err.println("Exception in parsing schema file. Cause:" + e.getMessage());
        System.out.println("Schema table validation failed!!!");
        return false;
    }
    LOG.debug("Schema tables:[ " + Arrays.toString(schemaTables.toArray()) + " ]");
    LOG.debug("DB tables:[ " + Arrays.toString(dbTables.toArray()) + " ]");
    // now diff the lists
    int schemaSize = schemaTables.size();
    schemaTables.removeAll(dbTables);
    if (schemaTables.size() > 0) {
        System.out.println("Table(s) [ " + Arrays.toString(schemaTables.toArray()) + " ] are missing from the metastore database schema.");
        System.out.println("Schema table validation failed!!!");
        return false;
    } else {
        System.out.println("Succeeded in schema table validation.");
        return true;
    }
}
Also used : SQLException(java.sql.SQLException) ResultSet(java.sql.ResultSet) ArrayList(java.util.ArrayList) Connection(java.sql.Connection) HiveMetaException(org.apache.hadoop.hive.metastore.HiveMetaException) DatabaseMetaData(java.sql.DatabaseMetaData) File(java.io.File) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) IOException(java.io.IOException) ParseException(org.apache.commons.cli.ParseException)
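
For quick reference, a minimal standalone sketch of the DatabaseMetaData.getTables pattern the method above relies on. The in-memory Derby JDBC URL is a placeholder assumption (HiveSchemaTool obtains its connection from getConnectionToMetastore instead), and the Derby driver is assumed to be on the classpath.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;

public class ListTables {
    public static void main(String[] args) throws Exception {
        // Placeholder JDBC URL; HiveSchemaTool gets its connection from the metastore config instead.
        try (Connection conn = DriverManager.getConnection("jdbc:derby:memory:demo;create=true")) {
            DatabaseMetaData metadata = conn.getMetaData();
            List<String> tables = new ArrayList<>();
            // Same call shape as above: all catalogs/schemas, any table name, TABLE type only.
            try (ResultSet rs = metadata.getTables(null, null, "%", new String[] { "TABLE" })) {
                while (rs.next()) {
                    tables.add(rs.getString("TABLE_NAME").toLowerCase());
                }
            }
            System.out.println("Found tables: " + tables);
        }
    }
}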

Example 22 with Connection

use of java.sql.Connection in project hive by apache.

the class CompactionTxnHandler method purgeCompactionHistory.

/**
   * For any given compactable entity (partition; table if not partitioned) the history of compactions
   * may look like "sssfffaaasffss", for example.  The idea is to retain the tail (most recent) of the
   * history such that a configurable number of each type of state is present.  Any other entries
   * can be purged.  This scheme has the advantage of always retaining the last failure/success even if
   * it's not recent.
   * @throws MetaException
   */
@Override
@RetrySemantics.SafeToRetry
public void purgeCompactionHistory() throws MetaException {
    Connection dbConn = null;
    Statement stmt = null;
    ResultSet rs = null;
    List<Long> deleteSet = new ArrayList<>();
    RetentionCounters rc = null;
    try {
        try {
            dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
            stmt = dbConn.createStatement();
            /* cc_id is monotonically increasing, so for any entity the rows sort in order of compaction
               history; thus this query groups by entity and, within each group, sorts most recent first. */
            rs = stmt.executeQuery("select cc_id, cc_database, cc_table, cc_partition, cc_state from " + "COMPLETED_COMPACTIONS order by cc_database, cc_table, cc_partition, cc_id desc");
            String lastCompactedEntity = null;
            /* In each group, walk from the most recent entry and count occurrences of each state type.
             * Once enough of a given state have been counted to satisfy the retention policy, delete all
             * other instances of that state. */
            while (rs.next()) {
                CompactionInfo ci = new CompactionInfo(rs.getLong(1), rs.getString(2), rs.getString(3), rs.getString(4), rs.getString(5).charAt(0));
                if (!ci.getFullPartitionName().equals(lastCompactedEntity)) {
                    lastCompactedEntity = ci.getFullPartitionName();
                    rc = new RetentionCounters(conf.getIntVar(HiveConf.ConfVars.COMPACTOR_HISTORY_RETENTION_ATTEMPTED), getFailedCompactionRetention(), conf.getIntVar(HiveConf.ConfVars.COMPACTOR_HISTORY_RETENTION_SUCCEEDED));
                }
                checkForDeletion(deleteSet, ci, rc);
            }
            close(rs);
            if (deleteSet.size() <= 0) {
                return;
            }
            List<String> queries = new ArrayList<String>();
            StringBuilder prefix = new StringBuilder();
            StringBuilder suffix = new StringBuilder();
            prefix.append("delete from COMPLETED_COMPACTIONS where ");
            suffix.append("");
            TxnUtils.buildQueryWithINClause(conf, queries, prefix, suffix, deleteSet, "cc_id", false, false);
            for (String query : queries) {
                LOG.debug("Going to execute update <" + query + ">");
                int count = stmt.executeUpdate(query);
                LOG.debug("Removed " + count + " records from COMPLETED_COMPACTIONS");
            }
            dbConn.commit();
        } catch (SQLException e) {
            rollbackDBConn(dbConn);
            checkRetryable(dbConn, e, "purgeCompactionHistory()");
            throw new MetaException("Unable to connect to transaction database " + StringUtils.stringifyException(e));
        } finally {
            close(rs, stmt, dbConn);
        }
    } catch (RetryException ex) {
        purgeCompactionHistory();
    }
}
Also used : SQLException(java.sql.SQLException) PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) Connection(java.sql.Connection) ArrayList(java.util.ArrayList) ResultSet(java.sql.ResultSet) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
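
The retention scheme described in the javadoc can be illustrated outside the metastore: walk each entity's history from most recent to oldest, keep entries of a state only while that state's retention quota lasts, and mark everything else for deletion. A simplified, self-contained sketch follows; the record type, quotas, and sample data are illustrative stand-ins, not the actual CompactionInfo/RetentionCounters classes.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RetentionDemo {
    // Illustrative stand-in for a COMPLETED_COMPACTIONS row: id, entity key, state char.
    record Entry(long id, String entity, char state) {}

    // Return the ids to purge, keeping at most quota.get(state) most-recent entries per state per entity.
    static List<Long> idsToPurge(List<Entry> newestFirst, Map<Character, Integer> quota) {
        List<Long> deleteSet = new ArrayList<>();
        String lastEntity = null;
        Map<Character, Integer> remaining = null;
        for (Entry e : newestFirst) {               // rows already sorted by entity, most recent first
            if (!e.entity().equals(lastEntity)) {   // new entity: reset the per-state counters
                lastEntity = e.entity();
                remaining = new HashMap<>(quota);
            }
            int left = remaining.getOrDefault(e.state(), 0);
            if (left > 0) {
                remaining.put(e.state(), left - 1); // retain this entry, consume quota
            } else {
                deleteSet.add(e.id());              // quota for this state exhausted: purge
            }
        }
        return deleteSet;
    }

    public static void main(String[] args) {
        List<Entry> history = List.of(
            new Entry(14, "db.t1", 's'), new Entry(13, "db.t1", 's'), new Entry(12, "db.t1", 'f'),
            new Entry(11, "db.t1", 's'), new Entry(10, "db.t1", 'a'), new Entry(9, "db.t1", 'f'));
        // e.g. keep 2 succeeded, 1 failed, 1 attempted per entity
        System.out.println(idsToPurge(history, Map.of('s', 2, 'f', 1, 'a', 1)));  // prints [11, 9]
    }
}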

Example 23 with Connection

use of java.sql.Connection in project hive by apache.

the class TxnDbUtil method cleanDb.

public static void cleanDb() throws Exception {
    int retryCount = 0;
    while (++retryCount <= 3) {
        boolean success = true;
        Connection conn = null;
        Statement stmt = null;
        try {
            conn = getConnection();
            stmt = conn.createStatement();
            // We want to try these, whether they succeed or fail.
            try {
                stmt.execute("DROP INDEX HL_TXNID_INDEX");
            } catch (SQLException e) {
                if (!("42X65".equals(e.getSQLState()) && 30000 == e.getErrorCode())) {
                    //42X65/30000 means the index doesn't exist
                    LOG.error("Unable to drop index HL_TXNID_INDEX " + e.getMessage() + "State=" + e.getSQLState() + " code=" + e.getErrorCode() + " retryCount=" + retryCount);
                    success = false;
                }
            }
            success &= dropTable(stmt, "TXN_COMPONENTS", retryCount);
            success &= dropTable(stmt, "COMPLETED_TXN_COMPONENTS", retryCount);
            success &= dropTable(stmt, "TXNS", retryCount);
            success &= dropTable(stmt, "NEXT_TXN_ID", retryCount);
            success &= dropTable(stmt, "HIVE_LOCKS", retryCount);
            success &= dropTable(stmt, "NEXT_LOCK_ID", retryCount);
            success &= dropTable(stmt, "COMPACTION_QUEUE", retryCount);
            success &= dropTable(stmt, "NEXT_COMPACTION_QUEUE_ID", retryCount);
            success &= dropTable(stmt, "COMPLETED_COMPACTIONS", retryCount);
            success &= dropTable(stmt, "AUX_TABLE", retryCount);
            success &= dropTable(stmt, "WRITE_SET", retryCount);
        } finally {
            closeResources(conn, stmt, null);
        }
        if (success) {
            return;
        }
    }
}
Also used : SQLException(java.sql.SQLException) PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) Connection(java.sql.Connection)
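
The per-table cleanup above relies on a private dropTable helper (not shown). Its essential behavior, dropping a table and treating "does not exist" as success, can be sketched as follows; the 42Y55 SQLState check is an assumption about how a missing table surfaces under Derby, and the real helper may detect it differently.

import java.sql.SQLException;
import java.sql.Statement;

final class DropUtil {
    /**
     * Best-effort DROP TABLE: returns true if the table was dropped or did not exist,
     * false on any other failure. The SQLState check (42Y55, Derby's "DROP TABLE ...
     * does not exist") is an assumption; the actual dropTable helper may match differently.
     */
    static boolean dropTableIfExists(Statement stmt, String table) {
        try {
            stmt.execute("DROP TABLE " + table);
            return true;
        } catch (SQLException e) {
            if ("42Y55".equals(e.getSQLState())) {
                return true;                        // table already absent: treat as success
            }
            System.err.println("Unable to drop " + table + ": " + e.getMessage()
                + " state=" + e.getSQLState() + " code=" + e.getErrorCode());
            return false;
        }
    }
}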

Example 24 with Connection

use of java.sql.Connection in project hive by apache.

the class TxnDbUtil method countQueryAgent.

/**
   * Utility method used to run COUNT queries like "select count(*) from ..." against metastore tables.
   * @param countQuery the count query text
   * @return the count returned by the query, or 0 if the result set is empty
   * @throws Exception
   */
public static int countQueryAgent(String countQuery) throws Exception {
    Connection conn = null;
    Statement stmt = null;
    ResultSet rs = null;
    try {
        conn = getConnection();
        stmt = conn.createStatement();
        rs = stmt.executeQuery(countQuery);
        if (!rs.next()) {
            return 0;
        }
        return rs.getInt(1);
    } finally {
        closeResources(conn, stmt, rs);
    }
}
Also used : PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) Connection(java.sql.Connection) ResultSet(java.sql.ResultSet)
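
A hypothetical usage sketch: a test-side helper built on countQueryAgent that checks the transaction tables are empty. The helper name and assertion style are illustrative assumptions, not taken from Hive's test code.

    // Illustrative helper built on countQueryAgent; table names are from the metastore transaction schema.
    static void assertEmptyTxnTables() throws Exception {
        int openTxns = TxnDbUtil.countQueryAgent("select count(*) from TXNS");
        int locks = TxnDbUtil.countQueryAgent("select count(*) from HIVE_LOCKS");
        if (openTxns != 0 || locks != 0) {
            throw new AssertionError("expected empty TXNS/HIVE_LOCKS, found "
                + openTxns + " txns and " + locks + " locks");
        }
    }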

Example 25 with Connection

use of java.sql.Connection in project hive by apache.

the class TxnDbUtil method queryToString.

public static String queryToString(String query, boolean includeHeader) throws Exception {
    Connection conn = null;
    Statement stmt = null;
    ResultSet rs = null;
    StringBuilder sb = new StringBuilder();
    try {
        conn = getConnection();
        stmt = conn.createStatement();
        rs = stmt.executeQuery(query);
        ResultSetMetaData rsmd = rs.getMetaData();
        if (includeHeader) {
            for (int colPos = 1; colPos <= rsmd.getColumnCount(); colPos++) {
                sb.append(rsmd.getColumnName(colPos)).append("   ");
            }
            sb.append('\n');
        }
        while (rs.next()) {
            for (int colPos = 1; colPos <= rsmd.getColumnCount(); colPos++) {
                sb.append(rs.getObject(colPos)).append("   ");
            }
            sb.append('\n');
        }
    } finally {
        closeResources(conn, stmt, rs);
    }
    return sb.toString();
}
Also used : ResultSetMetaData(java.sql.ResultSetMetaData) PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) Connection(java.sql.Connection) ResultSet(java.sql.ResultSet)
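
A hypothetical debugging usage of queryToString, dumping one of the metastore transaction tables with its header row; the table chosen here is just an example.

    // Print the current COMPACTION_QUEUE contents, including column headers.
    System.out.println(TxnDbUtil.queryToString("select * from COMPACTION_QUEUE", true));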

Aggregations

Connection (java.sql.Connection)6326 PreparedStatement (java.sql.PreparedStatement)2793 ResultSet (java.sql.ResultSet)2657 Test (org.junit.Test)2455 SQLException (java.sql.SQLException)2267 Properties (java.util.Properties)1188 Statement (java.sql.Statement)1078 PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection)689 ArrayList (java.util.ArrayList)397 BaseConnectionlessQueryTest (org.apache.phoenix.query.BaseConnectionlessQueryTest)232 DataSource (javax.sql.DataSource)211 BaseTest (org.apache.phoenix.query.BaseTest)201 CallableStatement (java.sql.CallableStatement)192 IOException (java.io.IOException)158 Reader (java.io.Reader)144 DatabaseMetaData (java.sql.DatabaseMetaData)144 SqlSessionFactoryBuilder (org.apache.ibatis.session.SqlSessionFactoryBuilder)134 HashMap (java.util.HashMap)123 ScriptRunner (org.apache.ibatis.jdbc.ScriptRunner)114 Timestamp (java.sql.Timestamp)113