Use of java.sql.SQLTimeoutException in project cerberus-source by cerberustesting.
The class SQLService, method queryDatabase.
@Override
public List<String> queryDatabase(String connectionName, String sql, int limit, int defaultTimeOut) throws CerberusEventException {
    List<String> list = null;
    boolean throwEx = false;
    int maxSecurityFetch = 100;
    int nbFetch = 0;
    MessageEvent msg = new MessageEvent(MessageEventEnum.PROPERTY_FAILED_SQL_GENERIC);
    msg.setDescription(msg.getDescription().replace("%JDBC%", "jdbc/" + connectionName));
    try (Connection connection = this.databaseSpring.connect(connectionName);
            PreparedStatement preStat = connection.prepareStatement(sql)) {
        preStat.setQueryTimeout(defaultTimeOut);
        // Cap the number of rows, both via the driver and in the fetch loop below.
        if (limit > 0 && limit < maxSecurityFetch) {
            preStat.setMaxRows(limit);
        } else {
            preStat.setMaxRows(maxSecurityFetch);
        }
        /*
         * Dialect-specific row-limit syntax, kept for reference:
         *   ORACLE     => * WHERE ROWNUM <= limit *
         *   DB2        => * FETCH FIRST limit ROWS ONLY
         *   MYSQL      => * LIMIT 0, limit
         *   SQL SERVER => SELECT TOP limit *
         *   SYBASE     => SET ROWCOUNT limit *
         * if (limit > 0) {
         *     sql.concat(Util.DbLimit(databaseType, limit));
         * }
         */
        try {
            LOG.info("Sending to external Database (queryDatabase) : '" + connectionName + "' SQL '" + sql + "'");
            ResultSet resultSet = preStat.executeQuery();
            list = new ArrayList<String>();
            try {
                // Read at most maxSecurityFetch values from the first column.
                while ((resultSet.next()) && (nbFetch < maxSecurityFetch)) {
                    list.add(resultSet.getString(1));
                    nbFetch++;
                }
            } catch (SQLException exception) {
                LOG.warn("Unable to execute query : " + exception.toString());
            } finally {
                resultSet.close();
            }
        } catch (SQLTimeoutException exception) {
            // The query exceeded the timeout set via setQueryTimeout(); build a timeout message.
            // SQLTimeoutException extends SQLException, so it must be caught first.
            msg = new MessageEvent(MessageEventEnum.PROPERTY_FAILED_SQL_TIMEOUT);
            msg.setDescription(msg.getDescription().replace("%SQL%", sql));
            msg.setDescription(msg.getDescription().replace("%TIMEOUT%", String.valueOf(defaultTimeOut)));
            msg.setDescription(msg.getDescription().replace("%EX%", exception.toString()));
        } catch (SQLException exception) {
            LOG.warn(exception.toString());
            msg = new MessageEvent(MessageEventEnum.PROPERTY_FAILED_SQL_ERROR);
            msg.setDescription(msg.getDescription().replace("%SQL%", sql));
            msg.setDescription(msg.getDescription().replace("%EX%", exception.toString()));
            throwEx = true;
        } finally {
            preStat.close();
        }
    } catch (SQLException exception) {
        LOG.warn(exception.toString());
        msg = new MessageEvent(MessageEventEnum.PROPERTY_FAILED_SQL_ERROR);
        msg.setDescription(msg.getDescription().replace("%SQL%", sql));
        msg.setDescription(msg.getDescription().replace("%EX%", exception.toString()));
        throwEx = true;
    } catch (NullPointerException exception) {
        // TODO check where exception occur
        LOG.warn(exception.toString());
        msg = new MessageEvent(MessageEventEnum.PROPERTY_FAILED_SQL_CANNOTACCESSJDBC);
        msg.setDescription(msg.getDescription().replace("%JDBC%", "jdbc/" + connectionName));
        msg.setDescription(msg.getDescription().replace("%EX%", exception.toString()));
        throwEx = true;
    }
    if (throwEx) {
        throw new CerberusEventException(msg);
    }
    return list;
}
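The method above relies on the driver honoring PreparedStatement.setQueryTimeout and surfacing an expired timeout as java.sql.SQLTimeoutException, which must be caught before its parent SQLException. A minimal, self-contained sketch of that core idiom; the H2 in-memory URL and the query are placeholders, not from the project:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLTimeoutException;

public class QueryTimeoutSketch {
    public static void main(String[] args) throws SQLException {
        // Placeholder URL; any JDBC driver that supports setQueryTimeout will do.
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
                PreparedStatement ps = con.prepareStatement("SELECT 1")) {
            ps.setQueryTimeout(5); // seconds; 0 means no limit
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            } catch (SQLTimeoutException e) {
                // Subclass of SQLException, so this branch must come before any SQLException catch.
                System.err.println("Query exceeded its timeout: " + e);
            }
        }
    }
}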
Use of java.sql.SQLTimeoutException in project hive by apache.
The class HiveStatement, method waitForOperationToComplete.
TGetOperationStatusResp waitForOperationToComplete() throws SQLException {
    TGetOperationStatusResp statusResp = null;
    final TGetOperationStatusReq statusReq = new TGetOperationStatusReq(stmtHandle.get());
    statusReq.setGetProgressUpdate(inPlaceUpdateStream.isPresent());
    // Progress bar is completed if there is nothing to request
    if (inPlaceUpdateStream.isPresent()) {
        inPlaceUpdateStream.get().getEventNotifier().progressBarCompleted();
    }
    LOG.debug("Waiting on operation to complete: Polling operation status");
    // Poll on the operation status until the operation is complete
    do {
        try {
            if (Thread.currentThread().isInterrupted()) {
                throw new SQLException(CLIENT_POLLING_OPSTATUS_INTERRUPTED.getMsg(), CLIENT_POLLING_OPSTATUS_INTERRUPTED.getSQLState());
            }
            /*
             * For an async SQLOperation, GetOperationStatus uses the long-polling approach: it
             * essentially returns after HIVE_SERVER2_LONG_POLLING_TIMEOUT (a server config) expires.
             */
            statusResp = client.GetOperationStatus(statusReq);
            LOG.debug("Status response: {}", statusResp);
            if (!isOperationComplete && inPlaceUpdateStream.isPresent()) {
                inPlaceUpdateStream.get().update(statusResp.getProgressUpdateResponse());
            }
            Utils.verifySuccessWithInfo(statusResp.getStatus());
            if (statusResp.isSetOperationState()) {
                switch (statusResp.getOperationState()) {
                    case CLOSED_STATE:
                    case FINISHED_STATE:
                        isOperationComplete = true;
                        isLogBeingGenerated = false;
                        break;
                    case CANCELED_STATE:
                        // SQLState 01000 -> warning
                        final String errMsg = statusResp.getErrorMessage();
                        final String fullErrMsg = (errMsg == null || errMsg.isEmpty()) ? QUERY_CANCELLED_MESSAGE : QUERY_CANCELLED_MESSAGE + " " + errMsg;
                        throw new SQLException(fullErrMsg, "01000");
                    case TIMEDOUT_STATE:
                        // A server-side timeout surfaces to the JDBC caller as SQLTimeoutException.
                        throw new SQLTimeoutException("Query timed out after " + queryTimeout + " seconds");
                    case ERROR_STATE:
                        // Get the error details from the underlying exception
                        throw new SQLException(statusResp.getErrorMessage(), statusResp.getSqlState(), statusResp.getErrorCode());
                    case UKNOWN_STATE:
                        // (sic) UKNOWN_STATE is the actual constant name in the Thrift-generated enum.
                        throw new SQLException("Unknown query", "HY000");
                    case INITIALIZED_STATE:
                    case PENDING_STATE:
                    case RUNNING_STATE:
                        break;
                }
            }
        } catch (SQLException e) {
            isLogBeingGenerated = false;
            throw e;
        } catch (Exception e) {
            isLogBeingGenerated = false;
            throw new SQLException("Failed to wait for operation to complete", "08S01", e);
        }
    } while (!isOperationComplete);
    // Set the progress bar to completed when the Hive query execution has finished
    if (inPlaceUpdateStream.isPresent()) {
        inPlaceUpdateStream.get().getEventNotifier().progressBarCompleted();
    }
    return statusResp;
}
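Here the timeout is detected server-side (TIMEDOUT_STATE) rather than by the driver, and the client converts it into a SQLTimeoutException; from the caller's point of view it looks the same as a driver-enforced timeout. A hedged sketch of how calling code might tell the two failure kinds apart; the method name runQuery and its parameters are illustrative, not from Hive:

import java.sql.SQLException;
import java.sql.SQLTimeoutException;
import java.sql.Statement;

// Illustrative only: distinguishes a timeout from other query failures.
void runQuery(Statement stmt, String sql, int timeoutSeconds) throws SQLException {
    stmt.setQueryTimeout(timeoutSeconds);
    try {
        stmt.execute(sql);
    } catch (SQLTimeoutException e) {
        // Thrown by HiveStatement when the server reports TIMEDOUT_STATE.
        throw e; // or retry / report, depending on the caller's policy
    } catch (SQLException e) {
        // Any other failure: ERROR_STATE, cancellation (SQLState 01000), transport errors, ...
        throw e;
    }
}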
Use of java.sql.SQLTimeoutException in project ranger by apache.
The class HiveClient, method getDBList.
private List<String> getDBList(String databaseMatching, List<String> dbList) throws HadoopException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> HiveClient getDBList databaseMatching : " + databaseMatching + " ExcludedbList :" + dbList);
    }
    List<String> ret = new ArrayList<String>();
    if (con != null) {
        Statement stat = null;
        ResultSet rs = null;
        String sql = "show databases";
        if (databaseMatching != null && !databaseMatching.isEmpty()) {
            sql = sql + " like \"" + databaseMatching + "\"";
        }
        try {
            stat = con.createStatement();
            rs = stat.executeQuery(sql);
            while (rs.next()) {
                String dbName = rs.getString(1);
                // Skip databases present in the exclusion list.
                if (dbList != null && dbList.contains(dbName)) {
                    continue;
                }
                ret.add(dbName);
            }
        } catch (SQLTimeoutException sqlt) {
            String msgDesc = "Time Out, Unable to execute SQL [" + sql + "].";
            HadoopException hdpException = new HadoopException(msgDesc, sqlt);
            hdpException.generateResponseDataMap(false, getMessage(sqlt), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HiveClient.getDBList() Error : ", sqlt);
            }
            throw hdpException;
        } catch (SQLException sqle) {
            String msgDesc = "Unable to execute SQL [" + sql + "].";
            HadoopException hdpException = new HadoopException(msgDesc, sqle);
            hdpException.generateResponseDataMap(false, getMessage(sqle), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HiveClient.getDBList() Error : ", sqle);
            }
            throw hdpException;
        } finally {
            close(rs);
            close(stat);
        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HiveClient.getDBList(): " + ret);
    }
    return ret;
}
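The close(rs) and close(stat) calls in the finally block refer to private helpers defined elsewhere in HiveClient and not shown in this snippet. A minimal sketch of the quiet-close idiom such helpers typically implement, assuming a LOG field like the one used above; this is an assumption, not the project's actual code:

// Assumed shape of the helpers used in the finally blocks above.
private void close(ResultSet rs) {
    if (rs != null) {
        try {
            rs.close();
        } catch (SQLException e) {
            LOG.debug("Error closing ResultSet", e);
        }
    }
}

private void close(Statement stat) {
    if (stat != null) {
        try {
            stat.close();
        } catch (SQLException e) {
            LOG.debug("Error closing Statement", e);
        }
    }
}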
Use of java.sql.SQLTimeoutException in project ranger by apache.
The class HiveClient, method getTblList.
private List<String> getTblList(String tableNameMatching, List<String> dbList, List<String> tblList) throws HadoopException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> HiveClient getTblList() tableNameMatching : " + tableNameMatching + " ExcludedbList :" + dbList + "ExcludeTableList :" + tblList);
    }
    List<String> ret = new ArrayList<String>();
    if (con != null) {
        Statement stat = null;
        ResultSet rs = null;
        String sql = null;
        try {
            if (dbList != null && !dbList.isEmpty()) {
                for (String db : dbList) {
                    // Switch to the database, then list its tables.
                    sql = "use " + db;
                    try {
                        stat = con.createStatement();
                        stat.execute(sql);
                    } finally {
                        close(stat);
                        stat = null;
                    }
                    sql = "show tables ";
                    if (tableNameMatching != null && !tableNameMatching.isEmpty()) {
                        sql = sql + " like \"" + tableNameMatching + "\"";
                    }
                    try {
                        stat = con.createStatement();
                        rs = stat.executeQuery(sql);
                        while (rs.next()) {
                            String tblName = rs.getString(1);
                            // Skip tables present in the exclusion list.
                            if (tblList != null && tblList.contains(tblName)) {
                                continue;
                            }
                            ret.add(tblName);
                        }
                    } finally {
                        close(rs);
                        close(stat);
                        rs = null;
                        stat = null;
                    }
                }
            }
        } catch (SQLTimeoutException sqlt) {
            String msgDesc = "Time Out, Unable to execute SQL [" + sql + "].";
            HadoopException hdpException = new HadoopException(msgDesc, sqlt);
            hdpException.generateResponseDataMap(false, getMessage(sqlt), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HiveClient.getTblList() Error : ", sqlt);
            }
            throw hdpException;
        } catch (SQLException sqle) {
            String msgDesc = "Unable to execute SQL [" + sql + "].";
            HadoopException hdpException = new HadoopException(msgDesc, sqle);
            hdpException.generateResponseDataMap(false, getMessage(sqle), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HiveClient.getTblList() Error : ", sqle);
            }
            throw hdpException;
        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HiveClient getTblList() " + ret);
    }
    return ret;
}
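A hypothetical invocation from within HiveClient, assuming an open connection and Hive's pattern syntax for "show tables like"; all argument values here are made up for illustration:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Lists tables matching "audit*" in the default and sales databases,
// with an empty table-exclusion list.
List<String> tables = getTblList("audit*", Arrays.asList("default", "sales"), Collections.<String>emptyList());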
Use of java.sql.SQLTimeoutException in project ranger by apache.
The class HiveClient, method getClmList.
private List<String> getClmList(String columnNameMatching, List<String> dbList, List<String> tblList, List<String> colList) throws HadoopException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> HiveClient.getClmList() columnNameMatching: " + columnNameMatching + " dbList :" + dbList + " tblList: " + tblList + " colList: " + colList);
    }
    List<String> ret = new ArrayList<String>();
    if (con != null) {
        String columnNameMatchingRegEx = null;
        if (columnNameMatching != null && !columnNameMatching.isEmpty()) {
            columnNameMatchingRegEx = columnNameMatching;
        }
        Statement stat = null;
        ResultSet rs = null;
        String sql = null;
        if (dbList != null && !dbList.isEmpty() && tblList != null && !tblList.isEmpty()) {
            for (String db : dbList) {
                for (String tbl : tblList) {
                    try {
                        sql = "use " + db;
                        try {
                            stat = con.createStatement();
                            stat.execute(sql);
                        } finally {
                            close(stat);
                        }
                        // "describe <table>" returns one row per column; the first field is the column name.
                        sql = "describe " + tbl;
                        stat = con.createStatement();
                        rs = stat.executeQuery(sql);
                        while (rs.next()) {
                            String columnName = rs.getString(1);
                            // Skip columns present in the exclusion list.
                            if (colList != null && colList.contains(columnName)) {
                                continue;
                            }
                            if (columnNameMatchingRegEx == null) {
                                ret.add(columnName);
                            } else if (FilenameUtils.wildcardMatch(columnName, columnNameMatchingRegEx)) {
                                ret.add(columnName);
                            }
                        }
                    } catch (SQLTimeoutException sqlt) {
                        String msgDesc = "Time Out, Unable to execute SQL [" + sql + "].";
                        HadoopException hdpException = new HadoopException(msgDesc, sqlt);
                        hdpException.generateResponseDataMap(false, getMessage(sqlt), msgDesc + ERR_MSG, null, null);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("<== HiveClient.getClmList() Error : ", sqlt);
                        }
                        throw hdpException;
                    } catch (SQLException sqle) {
                        String msgDesc = "Unable to execute SQL [" + sql + "].";
                        HadoopException hdpException = new HadoopException(msgDesc, sqle);
                        hdpException.generateResponseDataMap(false, getMessage(sqle), msgDesc + ERR_MSG, null, null);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("<== HiveClient.getClmList() Error : ", sqle);
                        }
                        throw hdpException;
                    } finally {
                        close(rs);
                        close(stat);
                    }
                }
            }
        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HiveClient.getClmList() " + ret);
    }
    return ret;
}
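Despite the "RegEx" in the variable name, the column filter uses Apache Commons IO's FilenameUtils.wildcardMatch, which understands only the * and ? wildcards, not full regular expressions. A quick illustration of its semantics; the column names and patterns are made up:

import org.apache.commons.io.FilenameUtils;

// wildcardMatch(candidate, pattern): '*' matches any run of characters, '?' exactly one.
boolean a = FilenameUtils.wildcardMatch("customer_id", "customer*"); // true
boolean b = FilenameUtils.wildcardMatch("customer_id", "cust_id");   // false
boolean c = FilenameUtils.wildcardMatch("col1", "col?");             // true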