Search in sources :

Example 1 with DatabaseMetaData

Use of java.sql.DatabaseMetaData in project hadoop by apache.

From the class DBInputFormat, method setConf.

/**
 * {@inheritDoc}
 *
 * Builds the {@link DBConfiguration} from the job configuration, opens a
 * JDBC connection, and caches the upper-cased database product name
 * (consulted elsewhere to generate vendor-specific SQL). Because
 * {@code setConf} cannot declare checked exceptions, any failure is
 * rethrown as an unchecked {@link RuntimeException} with the original
 * exception preserved as the cause.
 */
public void setConf(Configuration conf) {
    dbConf = new DBConfiguration(conf);
    try {
        this.connection = createConnection();
        DatabaseMetaData dbMeta = connection.getMetaData();
        this.dbProductName = StringUtils.toUpperCase(dbMeta.getDatabaseProductName());
    } catch (Exception ex) {
        // Don't leak a half-initialized connection if the metadata lookup
        // fails after createConnection() succeeded.
        if (this.connection != null) {
            try {
                this.connection.close();
            } catch (Exception ignored) {
                // best-effort cleanup; the original failure is what matters
            }
            this.connection = null;
        }
        throw new RuntimeException(ex);
    }
    tableName = dbConf.getInputTableName();
    fieldNames = dbConf.getInputFieldNames();
    conditions = dbConf.getInputConditions();
}
Also used : DatabaseMetaData(java.sql.DatabaseMetaData) SQLException(java.sql.SQLException) IOException(java.io.IOException)

Example 2 with DatabaseMetaData

Use of java.sql.DatabaseMetaData in project hive by apache.

From the class HiveSchemaTool, method validateSchemaTables.

/**
 * Validates that every table defined by the schema scripts for the
 * metastore's recorded schema version actually exists in the metastore
 * database.
 *
 * @param conn open JDBC connection to the metastore database whose tables
 *             are being validated
 * @return {@code true} if all expected tables are present, {@code false}
 *         if the schema version cannot be determined, the schema scripts
 *         cannot be parsed, or tables are missing
 * @throws HiveMetaException if the table list cannot be read from the DB
 */
boolean validateSchemaTables(Connection conn) throws HiveMetaException {
    String version = null;
    ResultSet rs = null;
    DatabaseMetaData metadata = null;
    List<String> dbTables = new ArrayList<String>();
    List<String> schemaTables = new ArrayList<String>();
    List<String> subScripts = new ArrayList<String>();
    Connection hmsConn = getConnectionToMetastore(false);
    System.out.println("Validating metastore schema tables");
    try {
        version = getMetaStoreSchemaVersion(hmsConn);
    } catch (HiveMetaException he) {
        System.err.println("Failed to determine schema version from Hive Metastore DB," + he.getMessage());
        LOG.debug("Failed to determine schema version from Hive Metastore DB," + he.getMessage());
        return false;
    }
    // re-open the hms connection
    // NOTE(review): the first hmsConn is replaced here without being closed,
    // and the re-opened connection is never used below — this looks like a
    // connection leak; confirm whether the re-open is still required.
    hmsConn = getConnectionToMetastore(false);
    LOG.debug("Validating tables in the schema for version " + version);
    try {
        metadata = conn.getMetaData();
        String[] types = { "TABLE" };
        rs = metadata.getTables(null, null, "%", types);
        String table = null;
        while (rs.next()) {
            table = rs.getString("TABLE_NAME");
            // lower-case so the later diff against the names parsed from the
            // schema scripts is case-insensitive
            dbTables.add(table.toLowerCase());
            LOG.debug("Found table " + table + " in HMS dbstore");
        }
    } catch (SQLException e) {
        // keep the SQLException as the cause instead of flattening it into
        // the message, so the full stack trace survives
        throw new HiveMetaException("Failed to retrieve schema tables from Hive Metastore DB," + e.getMessage(), e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                throw new HiveMetaException("Failed to close resultset", e);
            }
        }
    }
    // parse the schema file to determine the tables that are expected to exist
    // we are using oracle schema because it is simpler to parse, no quotes or backticks etc
    String baseDir = new File(metaStoreSchemaInfo.getMetaStoreScriptDir()).getParent();
    String schemaFile = baseDir + "/" + dbType + "/hive-schema-" + version + "." + dbType + ".sql";
    try {
        LOG.debug("Parsing schema script " + schemaFile);
        subScripts.addAll(findCreateTable(schemaFile, schemaTables));
        // a schema script may include sub-scripts, which may in turn include
        // more sub-scripts; drain the work list until none remain
        while (subScripts.size() > 0) {
            schemaFile = baseDir + "/" + dbType + "/" + subScripts.remove(0);
            LOG.debug("Parsing subscript " + schemaFile);
            subScripts.addAll(findCreateTable(schemaFile, schemaTables));
        }
    } catch (Exception e) {
        System.err.println("Exception in parsing schema file. Cause:" + e.getMessage());
        System.out.println("Schema table validation failed!!!");
        return false;
    }
    LOG.debug("Schema tables:[ " + Arrays.toString(schemaTables.toArray()) + " ]");
    LOG.debug("DB tables:[ " + Arrays.toString(dbTables.toArray()) + " ]");
    // now diff the lists: whatever remains in schemaTables after the
    // removeAll is expected by the scripts but absent from the database
    schemaTables.removeAll(dbTables);
    if (schemaTables.size() > 0) {
        System.out.println("Table(s) [ " + Arrays.toString(schemaTables.toArray()) + " ] are missing from the metastore database schema.");
        System.out.println("Schema table validation failed!!!");
        return false;
    } else {
        System.out.println("Succeeded in schema table validation.");
        return true;
    }
}
Also used : SQLException(java.sql.SQLException) ResultSet(java.sql.ResultSet) ArrayList(java.util.ArrayList) Connection(java.sql.Connection) HiveMetaException(org.apache.hadoop.hive.metastore.HiveMetaException) DatabaseMetaData(java.sql.DatabaseMetaData) File(java.io.File) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) HiveMetaException(org.apache.hadoop.hive.metastore.HiveMetaException) SQLException(java.sql.SQLException) IOException(java.io.IOException) ParseException(org.apache.commons.cli.ParseException)

Example 3 with DatabaseMetaData

Use of java.sql.DatabaseMetaData in project hive by apache.

From the class cbo_rp_TestJdbcDriver2, method setUpBeforeClass.

@BeforeClass
public static void setUpBeforeClass() throws SQLException, ClassNotFoundException {
    Class.forName(driverName);
    Connection con1 = getConnection("default");
    // try/finally blocks guarantee the connection, statement and result set
    // are closed even when an intermediate JDBC call throws, so a failing
    // setup does not leak server-side resources into the other tests.
    try {
        System.setProperty(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname, "verbose");
        Statement stmt1 = con1.createStatement();
        try {
            assertNotNull("Statement is null", stmt1);
            stmt1.execute("set hive.support.concurrency = false");
            DatabaseMetaData metadata = con1.getMetaData();
            // Drop databases created by other test cases
            ResultSet databaseRes = metadata.getSchemas();
            try {
                while (databaseRes.next()) {
                    String db = databaseRes.getString(1);
                    if (!db.equals("default")) {
                        System.err.println("Dropping database " + db);
                        stmt1.execute("DROP DATABASE " + db + " CASCADE");
                    }
                }
            } finally {
                databaseRes.close();
            }
        } finally {
            stmt1.close();
        }
    } finally {
        con1.close();
    }
}
Also used : PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) Connection(java.sql.Connection) ResultSet(java.sql.ResultSet) DatabaseMetaData(java.sql.DatabaseMetaData) BeforeClass(org.junit.BeforeClass)

Example 4 with DatabaseMetaData

Use of java.sql.DatabaseMetaData in project hive by apache.

From the class cbo_rp_TestJdbcDriver2, method testImportedKeys.

/**
 * Tests {@link DatabaseMetaData#getImportedKeys}: Hive currently returns an
 * empty result set, but the result must still expose the 14 columns the
 * JDBC specification mandates for getImportedKeys.
 * @throws SQLException on JDBC access error
 */
@Test
public void testImportedKeys() throws SQLException {
    DatabaseMetaData dbmd = con.getMetaData();
    assertNotNull(dbmd);
    // currently getImportedKeys always returns an empty resultset for Hive
    ResultSet res = dbmd.getImportedKeys(null, null, null);
    try {
        ResultSetMetaData md = res.getMetaData();
        // expected value goes first so failure messages read correctly
        assertEquals(14, md.getColumnCount());
        assertFalse(res.next());
    } finally {
        res.close();
    }
}
Also used : ResultSetMetaData(java.sql.ResultSetMetaData) ResultSet(java.sql.ResultSet) DatabaseMetaData(java.sql.DatabaseMetaData) Test(org.junit.Test)

Example 5 with DatabaseMetaData

Use of java.sql.DatabaseMetaData in project hive by apache.

From the class TestJdbcMetadataApiAuth, method testMetaApiDisAllowed.

/**
 * Call the HS2 metadata api's with authorizer disallowing those calls.
 * Each metadata API is expected to fail with a SQLException whose message
 * contains the authorizer's denial error; any other exception type is a
 * test failure. Note that fail() throws AssertionError (an Error, not an
 * Exception), so a call that unexpectedly succeeds is not swallowed by the
 * catch blocks below.
 * @throws Exception
 */
@Test
public void testMetaApiDisAllowed() throws Exception {
    TestAuthValidator.allowActions = false;
    Connection hs2Conn = getConnection("user1");
    DatabaseMetaData dbmetadata = hs2Conn.getMetaData();
    try {
        dbmetadata.getCatalogs();
        fail("HiveAccessControlException expected");
    } catch (SQLException e) {
        assertErrorContains(e, TestAuthValidator.DENIED_ERR);
    } catch (Exception e) {
        // include the unexpected exception so the real cause is reported
        fail("HiveAccessControlException expected, got " + e);
    }
    try {
        dbmetadata.getSchemas();
        fail("HiveAccessControlException expected");
    } catch (SQLException e) {
        assertErrorContains(e, TestAuthValidator.DENIED_ERR);
    } catch (Exception e) {
        fail("HiveAccessControlException expected, got " + e);
    }
    try {
        dbmetadata.getTypeInfo();
        fail("HiveAccessControlException expected");
    } catch (SQLException e) {
        assertErrorContains(e, TestAuthValidator.DENIED_ERR);
    } catch (Exception e) {
        fail("HiveAccessControlException expected, got " + e);
    }
    try {
        dbmetadata.getTables(null, "default", "t%", null);
        fail("HiveAccessControlException expected");
    } catch (SQLException e) {
        assertErrorContains(e, TestAuthValidator.DENIED_ERR);
    } catch (Exception e) {
        fail("HiveAccessControlException expected, got " + e);
    }
    try {
        dbmetadata.getTableTypes();
        fail("HiveAccessControlException expected");
    } catch (SQLException e) {
        assertErrorContains(e, TestAuthValidator.DENIED_ERR);
    } catch (Exception e) {
        fail("HiveAccessControlException expected, got " + e);
    }
    try {
        dbmetadata.getColumns(null, "default", "nosuchtable", null);
        fail("HiveAccessControlException expected");
    } catch (SQLException e) {
        assertErrorContains(e, TestAuthValidator.DENIED_ERR);
    } catch (Exception e) {
        fail("HiveAccessControlException expected, got " + e);
    }
    try {
        dbmetadata.getFunctions(null, null, "trim");
        fail("HiveAccessControlException expected");
    } catch (SQLException e) {
        assertErrorContains(e, TestAuthValidator.DENIED_ERR);
    } catch (Exception e) {
        fail("HiveAccessControlException expected, got " + e);
    }
    // release the HS2 connection opened for this test
    hs2Conn.close();
}
Also used : SQLException(java.sql.SQLException) Connection(java.sql.Connection) DatabaseMetaData(java.sql.DatabaseMetaData) SQLException(java.sql.SQLException) HiveAccessControlException(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException) HiveAuthzPluginException(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException) Test(org.junit.Test)

Aggregations

DatabaseMetaData (java.sql.DatabaseMetaData)299 ResultSet (java.sql.ResultSet)176 Connection (java.sql.Connection)139 SQLException (java.sql.SQLException)116 Test (org.junit.Test)81 ResultSetMetaData (java.sql.ResultSetMetaData)41 Statement (java.sql.Statement)36 ArrayList (java.util.ArrayList)33 PreparedStatement (java.sql.PreparedStatement)29 Properties (java.util.Properties)24 PhoenixDatabaseMetaData (org.apache.phoenix.jdbc.PhoenixDatabaseMetaData)16 PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection)15 IOException (java.io.IOException)14 HashMap (java.util.HashMap)11 DataSource (javax.sql.DataSource)9 HashSet (java.util.HashSet)8 List (java.util.List)8 Savepoint (java.sql.Savepoint)6 GargoyleException (com.kyj.fx.voeditor.visual.exceptions.GargoyleException)5 TreeSet (java.util.TreeSet)5