Example usage of java.sql.DatabaseMetaData in the Apache Hadoop project:
class DBInputFormat, method setConf.
/** {@inheritDoc} */
public void setConf(Configuration conf) {
  dbConf = new DBConfiguration(conf);
  try {
    this.connection = createConnection();
    // The product name drives vendor-specific SQL generation elsewhere in
    // this class; upper-case it once here so later comparisons are plain
    // string equality.
    DatabaseMetaData dbMeta = connection.getMetaData();
    this.dbProductName = StringUtils.toUpperCase(dbMeta.getDatabaseProductName());
  } catch (Exception ex) {
    // Wrap with context: a bare RuntimeException(ex) tells the operator
    // nothing about which phase of configuration failed.
    throw new RuntimeException("Failed to create DB connection or read database metadata", ex);
  }
  tableName = dbConf.getInputTableName();
  fieldNames = dbConf.getInputFieldNames();
  conditions = dbConf.getInputConditions();
}
Example usage of java.sql.DatabaseMetaData in the Apache Hive project:
class HiveSchemaTool, method validateSchemaTables.
/**
 * Validates that every table created by the schema scripts for the
 * detected metastore version is actually present in the metastore DB.
 * Returns true on success; returns false (after printing a diagnostic)
 * when the version cannot be determined, the schema file cannot be
 * parsed, or tables are missing.
 *
 * @param conn connection whose metadata is scanned for existing tables
 * @throws HiveMetaException if table names cannot be read from the DB
 *         or the ResultSet cannot be closed
 */
boolean validateSchemaTables(Connection conn) throws HiveMetaException {
String version = null;
ResultSet rs = null;
DatabaseMetaData metadata = null;
List<String> dbTables = new ArrayList<String>();
List<String> schemaTables = new ArrayList<String>();
List<String> subScripts = new ArrayList<String>();
// NOTE(review): hmsConn is opened here and again below, and neither
// instance is visibly closed in this method — confirm whether
// getMetaStoreSchemaVersion closes the connection it is handed,
// otherwise both connections leak.
Connection hmsConn = getConnectionToMetastore(false);
System.out.println("Validating metastore schema tables");
try {
version = getMetaStoreSchemaVersion(hmsConn);
} catch (HiveMetaException he) {
System.err.println("Failed to determine schema version from Hive Metastore DB," + he.getMessage());
LOG.debug("Failed to determine schema version from Hive Metastore DB," + he.getMessage());
return false;
}
// re-open the hms connection
// NOTE(review): this re-opened connection is never used below — the
// table scan uses the conn parameter. Presumably retained for a side
// effect of getConnectionToMetastore; verify before removing.
hmsConn = getConnectionToMetastore(false);
LOG.debug("Validating tables in the schema for version " + version);
try {
// Enumerate all TABLE-type objects visible through the connection's
// metadata; names are lower-cased so the later diff against the
// schema-script table list is case-insensitive.
metadata = conn.getMetaData();
String[] types = { "TABLE" };
rs = metadata.getTables(null, null, "%", types);
String table = null;
while (rs.next()) {
table = rs.getString("TABLE_NAME");
dbTables.add(table.toLowerCase());
LOG.debug("Found table " + table + " in HMS dbstore");
}
} catch (SQLException e) {
throw new HiveMetaException("Failed to retrieve schema tables from Hive Metastore DB," + e.getMessage());
} finally {
if (rs != null) {
try {
rs.close();
} catch (SQLException e) {
throw new HiveMetaException("Failed to close resultset", e);
}
}
}
// parse the schema file to determine the tables that are expected to exist
// we are using oracle schema because it is simpler to parse, no quotes or backticks etc
String baseDir = new File(metaStoreSchemaInfo.getMetaStoreScriptDir()).getParent();
String schemaFile = baseDir + "/" + dbType + "/hive-schema-" + version + "." + dbType + ".sql";
try {
LOG.debug("Parsing schema script " + schemaFile);
// findCreateTable appends expected table names to schemaTables and
// returns any nested sub-scripts, which are parsed breadth-first.
subScripts.addAll(findCreateTable(schemaFile, schemaTables));
while (subScripts.size() > 0) {
schemaFile = baseDir + "/" + dbType + "/" + subScripts.remove(0);
LOG.debug("Parsing subscript " + schemaFile);
subScripts.addAll(findCreateTable(schemaFile, schemaTables));
}
} catch (Exception e) {
System.err.println("Exception in parsing schema file. Cause:" + e.getMessage());
System.out.println("Schema table validation failed!!!");
return false;
}
LOG.debug("Schema tables:[ " + Arrays.toString(schemaTables.toArray()) + " ]");
LOG.debug("DB tables:[ " + Arrays.toString(dbTables.toArray()) + " ]");
// now diff the lists
// Whatever remains in schemaTables after removing everything found in
// the DB is the set of missing tables; extra DB tables are ignored.
int schemaSize = schemaTables.size();
schemaTables.removeAll(dbTables);
if (schemaTables.size() > 0) {
System.out.println("Table(s) [ " + Arrays.toString(schemaTables.toArray()) + " ] are missing from the metastore database schema.");
System.out.println("Schema table validation failed!!!");
return false;
} else {
System.out.println("Succeeded in schema table validation.");
return true;
}
}
Example usage of java.sql.DatabaseMetaData in the Apache Hive project:
class cbo_rp_TestJdbcDriver2, method setUpBeforeClass.
/**
 * One-time test setup: loads the JDBC driver, enables verbose operation
 * logging, disables concurrency support, and drops every database left
 * behind by other test cases so each run starts from only "default".
 *
 * @throws SQLException if any JDBC call fails
 * @throws ClassNotFoundException if the driver class cannot be loaded
 */
@BeforeClass
public static void setUpBeforeClass() throws SQLException, ClassNotFoundException {
  Class.forName(driverName);
  Connection con1 = getConnection("default");
  System.setProperty(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname, "verbose");
  // try-with-resources / try-finally so the Statement, ResultSet and
  // Connection are released even when a DROP fails mid-way (the original
  // leaked all three on any exception).
  try (Statement stmt1 = con1.createStatement()) {
    assertNotNull("Statement is null", stmt1);
    stmt1.execute("set hive.support.concurrency = false");
    DatabaseMetaData metadata = con1.getMetaData();
    // Collect the names first, then drop: executing DDL on the same
    // connection while iterating the live schemas ResultSet is
    // driver-dependent behavior.
    java.util.List<String> toDrop = new java.util.ArrayList<String>();
    try (ResultSet databaseRes = metadata.getSchemas()) {
      while (databaseRes.next()) {
        String db = databaseRes.getString(1);
        if (!db.equals("default")) {
          toDrop.add(db);
        }
      }
    }
    // Drop databases created by other test cases
    for (String db : toDrop) {
      System.err.println("Dropping database " + db);
      stmt1.execute("DROP DATABASE " + db + " CASCADE");
    }
  } finally {
    con1.close();
  }
}
Example usage of java.sql.DatabaseMetaData in the Apache Hive project:
class cbo_rp_TestJdbcDriver2, method testImportedKeys.
/**
 * Tests getImportedKeys(): Hive currently has no foreign-key support, so
 * the call must return an empty result set whose metadata still exposes
 * the 14 columns mandated by the JDBC specification.
 *
 * @throws SQLException on any JDBC failure
 */
@Test
public void testImportedKeys() throws SQLException {
  DatabaseMetaData dbmd = con.getMetaData();
  assertNotNull(dbmd);
  // currently getImportedKeys always returns an empty resultset for Hive;
  // close it deterministically (the original leaked it).
  try (ResultSet res = dbmd.getImportedKeys(null, null, null)) {
    ResultSetMetaData md = res.getMetaData();
    // JUnit convention: expected value first, actual second — the
    // original had them reversed, which garbles the failure message.
    assertEquals(14, md.getColumnCount());
    assertFalse(res.next());
  }
}
Example usage of java.sql.DatabaseMetaData in the Apache Hive project:
class TestJdbcMetadataApiAuth, method testMetaApiDisAllowed.
/** Single HS2 metadata API invocation, used by assertApiDenied below. */
private interface MetadataApiCall {
  void invoke() throws Exception;
}

/**
 * Asserts that the given metadata call is rejected by the authorizer:
 * it must raise a SQLException whose message contains the denial text.
 * Any other outcome (success, or a non-SQL exception) fails the test.
 */
private void assertApiDenied(MetadataApiCall api) {
  try {
    api.invoke();
    fail("HiveAccessControlException expected");
  } catch (SQLException e) {
    // Authorization failures surface to JDBC clients as SQLExceptions
    // carrying the authorizer's denial message.
    assertErrorContains(e, TestAuthValidator.DENIED_ERR);
  } catch (Exception e) {
    fail("HiveAccessControlException expected");
  }
}

/**
 * Call the HS2 metadata api's with authorizer disallowing those calls
 * @throws Exception
 */
@Test
public void testMetaApiDisAllowed() throws Exception {
  TestAuthValidator.allowActions = false;
  Connection hs2Conn = getConnection("user1");
  DatabaseMetaData dbmetadata = hs2Conn.getMetaData();
  // Every metadata entry point must be denied; the shared helper
  // replaces seven identical try/fail/catch blocks.
  assertApiDenied(() -> dbmetadata.getCatalogs());
  assertApiDenied(() -> dbmetadata.getSchemas());
  assertApiDenied(() -> dbmetadata.getTypeInfo());
  assertApiDenied(() -> dbmetadata.getTables(null, "default", "t%", null));
  assertApiDenied(() -> dbmetadata.getTableTypes());
  assertApiDenied(() -> dbmetadata.getColumns(null, "default", "nosuchtable", null));
  assertApiDenied(() -> dbmetadata.getFunctions(null, null, "trim"));
}
Aggregations