Example use of java.sql.Connection in the Apache Groovy project: class Sql, method execute.
/**
 * Executes the given piece of SQL with parameters.
 * Also calls the provided processResults Closure to process any ResultSet or UpdateCount results that executing the SQL might produce.
 * <p>
 * This method supports named and named ordinal parameters.
 * See the class Javadoc for more details.
 * <p>
 * Resource handling is performed automatically where appropriate.
 *
 * @param sql the SQL statement
 * @param params a list of parameters
 * @param processResults a Closure which will be passed two parameters: either {@code true} plus a list of GroovyRowResult values
 * derived from {@code statement.getResultSet()} or {@code false} plus the update count from {@code statement.getUpdateCount()}.
 * The closure will be called for each result produced from executing the SQL.
 * @throws SQLException if a database access error occurs, or if {@code processResults}
 * does not accept exactly two parameters
 * @see #execute(String, Closure)
 * @since 2.3.2
 */
public void execute(String sql, List<Object> params, Closure processResults) throws SQLException {
    // Fail fast on a mis-shaped closure: checking here (rather than inside the
    // result loop) avoids a redundant per-iteration check and also rejects a
    // bad closure even when the SQL happens to produce no results at all.
    if (processResults.getMaximumNumberOfParameters() != 2) {
        throw new SQLException("Incorrect number of parameters for processResults Closure");
    }
    Connection connection = createConnection();
    PreparedStatement statement = null;
    try {
        statement = getPreparedStatement(connection, sql, params);
        boolean isResultSet = statement.execute();
        int updateCount = statement.getUpdateCount();
        // Per JDBC, "no more results" is signalled by execute()/getMoreResults()
        // returning false AND getUpdateCount() returning -1.
        while (isResultSet || updateCount != -1) {
            if (isResultSet) {
                ResultSet resultSet = statement.getResultSet();
                // getResultSet() may return null if the result was consumed or
                // is not a ResultSet; pass null through to the closure in that case.
                List<GroovyRowResult> rowResult = resultSet == null ? null : asList(sql, resultSet);
                processResults.call(isResultSet, rowResult);
            } else {
                processResults.call(isResultSet, updateCount);
            }
            isResultSet = statement.getMoreResults();
            updateCount = statement.getUpdateCount();
        }
    } catch (SQLException e) {
        LOG.warning("Failed to execute: " + sql + " because: " + e.getMessage());
        throw e;
    } finally {
        closeResources(connection, statement);
    }
}
Example use of java.sql.Connection in the Apache Hadoop project: class TestDbClasses, method testOracleDBRecordReader.
/**
 * Tests the SQL query generated by OracleDBRecordReader for a bounded split:
 * the ROWNUM-based pagination wrapper must reflect the split's start/end rows
 * and the configured ORDER BY column.
 */
@Test(timeout = 20000)
public void testOracleDBRecordReader() throws Exception {
    DBInputSplit splitter = new DBInputSplit(1, 10);
    Configuration configuration = new Configuration();
    Connection connect = DriverForTest.getConnection();
    // try/finally so the test connection is not leaked if an assertion
    // or the reader constructor throws.
    try {
        DBConfiguration dbConfiguration = new DBConfiguration(configuration);
        dbConfiguration.setInputOrderBy("Order");
        String[] fields = { "f1", "f2" };
        OracleDBRecordReader<NullDBWritable> recorder = new OracleDBRecordReader<NullDBWritable>(
                splitter, NullDBWritable.class, configuration, connect, dbConfiguration,
                "condition", fields, "table");
        assertEquals("SELECT * FROM (SELECT a.*,ROWNUM dbif_rno FROM ( SELECT f1, f2 FROM table WHERE condition ORDER BY Order ) a WHERE rownum <= 10 ) WHERE dbif_rno > 1", recorder.getSelectQuery());
    } finally {
        connect.close();
    }
}
Example use of java.sql.Connection in the Apache Hive project: class Commands, method getConfInternal.
/**
 * Runs a "set" statement against the current database connection to fetch
 * the configuration values used for variable substitution.
 *
 * @param call if {@code true}, issue "set" via a CallableStatement (e.g. for
 *             HiveServer2 call semantics); otherwise use a plain Statement
 * @return the buffered result rows, or {@code null} if there is no open
 *         connection, the statement produced no results, or an error occurred
 *         (errors are reported through {@code beeLine.error})
 */
private BufferedRows getConfInternal(boolean call) {
    Statement stmnt = null;
    BufferedRows rows = null;
    try {
        boolean hasResults = false;
        DatabaseConnection dbconn = beeLine.getDatabaseConnection();
        Connection conn = (dbconn == null) ? null : dbconn.getConnection();
        if (conn != null) {
            if (call) {
                CallableStatement callable = conn.prepareCall("set");
                stmnt = callable;
                hasResults = callable.execute();
            } else {
                stmnt = beeLine.createStatement();
                hasResults = stmnt.execute("set");
            }
        }
        if (hasResults) {
            // BufferedRows consumes the ResultSet eagerly, so it is safe to
            // close the statement in the finally block below.
            rows = new BufferedRows(beeLine, stmnt.getResultSet());
        }
    } catch (SQLException e) {
        beeLine.error(e);
    } finally {
        if (stmnt != null) {
            try {
                stmnt.close();
            } catch (SQLException cleanupFailure) {
                beeLine.error(cleanupFailure);
            }
        }
    }
    return rows;
}
Example use of java.sql.Connection in the Apache Hive project: class HiveSchemaTool, method showInfo.
/***
 * Print Hive version and schema version, and assert that they are compatible.
 * @throws HiveMetaException if the metastore connection, version lookup,
 *         compatibility check, or connection cleanup fails
 */
public void showInfo() throws HiveMetaException {
    Connection metastoreConn = getConnectionToMetastore(true);
    try {
        String hiveVersion = MetaStoreSchemaInfo.getHiveSchemaVersion();
        String dbVersion = getMetaStoreSchemaVersion(metastoreConn);
        System.out.println("Hive distribution version:\t " + hiveVersion);
        System.out.println("Metastore schema version:\t " + dbVersion);
        assertCompatibleVersion(hiveVersion, dbVersion);
    } finally {
        // Close the connection on all paths, mirroring doValidate(); the
        // original leaked it.
        if (metastoreConn != null) {
            try {
                metastoreConn.close();
            } catch (SQLException e) {
                throw new HiveMetaException("Failed to close metastore connection", e);
            }
        }
    }
}
Example use of java.sql.Connection in the Apache Hive project: class HiveSchemaTool, method doValidate.
/**
 * Runs all metastore validation checks, printing a [SUCCESS]/[FAIL] line per
 * check and an overall status at the end. Exits the JVM with status 1 if any
 * check fails.
 * @throws HiveMetaException if a validation step or connection cleanup fails
 */
public void doValidate() throws HiveMetaException {
    System.out.println("Starting metastore validation\n");
    Connection conn = getConnectionToMetastore(false);
    boolean success = true;
    try {
        // All checks run unconditionally (no short-circuit), so every failure
        // is reported in a single pass; overall success is the AND of them all.
        success &= reportValidationResult(validateSchemaVersions(conn));
        success &= reportValidationResult(validateSequences(conn));
        success &= reportValidationResult(validateSchemaTables(conn));
        success &= reportValidationResult(validateLocations(conn, this.validationServers));
        success &= reportValidationResult(validateColumnNullValues(conn));
    } finally {
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException e) {
                throw new HiveMetaException("Failed to close metastore connection", e);
            }
        }
    }
    System.out.print("Done with metastore validation: ");
    if (!success) {
        System.out.println("[FAIL]");
        System.exit(1);
    } else {
        System.out.println("[SUCCESS]");
    }
}

/**
 * Prints the per-check [SUCCESS]/[FAIL] line (with the trailing blank line the
 * original output format used) and returns the result unchanged.
 */
private static boolean reportValidationResult(boolean passed) {
    System.out.println(passed ? "[SUCCESS]\n" : "[FAIL]\n");
    return passed;
}
Aggregations