Use of java.sql.SQLTimeoutException in project tomcat by apache.
The class TestValidationQueryTimeout, method testLongValidationQueryTime.
@Test
public void testLongValidationQueryTime() throws Exception {
    // use our mock driver
    Connection con = this.datasource.getConnection();
    Statement stmt = null;
    long start = 0, end = 0;
    try {
        stmt = con.createStatement();
        // set the query timeout to 2 sec
        // this keeps this test from slowing things down too much
        stmt.setQueryTimeout(2);
        // assert that our long query takes longer than one second to run
        // this is a requirement for other tests to run properly
        start = System.currentTimeMillis();
        stmt.execute(longQuery);
    } catch (SQLTimeoutException ex) {
        // expected: the mock driver's long query exceeds the 2-second timeout
    } catch (SQLException x) {
        Assert.fail("We should have got a timeout exception.");
    } finally {
        end = System.currentTimeMillis();
        if (stmt != null) {
            stmt.close();
        }
        if (con != null) {
            con.close();
        }
        Assert.assertTrue(start != 0 && end != 0);
        // we're faking it
        // Assert.assertTrue((end - start) > 1000);
    }
}
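For context, the test only works because the mock driver cooperates with setQueryTimeout. The following is a minimal hypothetical sketch of that driver-side behavior, not the Tomcat test suite's actual mock classes; the class and parameter names are invented for illustration.

import java.sql.SQLTimeoutException;

// Hypothetical mock: a statement whose execute() honors the configured
// query timeout by throwing SQLTimeoutException once the simulated query
// outlives it, as a real driver would.
class MockLongQueryStatement {
    private int timeoutSec;

    void setQueryTimeout(int seconds) {
        this.timeoutSec = seconds;
    }

    boolean execute(String sql, long simulatedQueryMillis)
            throws SQLTimeoutException, InterruptedException {
        long timeoutMillis = timeoutSec * 1000L;
        if (timeoutSec > 0 && simulatedQueryMillis > timeoutMillis) {
            // wait only up to the timeout, then give up
            Thread.sleep(timeoutMillis);
            throw new SQLTimeoutException("Query exceeded timeout of " + timeoutSec + " s");
        }
        Thread.sleep(simulatedQueryMillis);
        return true;
    }
}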
Use of java.sql.SQLTimeoutException in project ignite by apache.
The class JdbcThinStatement, method sendFile.
/**
 * Sends a file to the server in batches via multiple {@link JdbcBulkLoadBatchRequest}s.
 *
 * @param cmdRes Result of invoking the COPY command: contains server-parsed
 *     bulk load parameters, such as file name and batch size.
 * @return Bulk load result.
 * @throws SQLException On error.
 */
private JdbcResult sendFile(JdbcBulkLoadAckResult cmdRes, JdbcThinTcpIo stickyIO) throws SQLException {
    String fileName = cmdRes.params().localFileName();
    int batchSize = cmdRes.params().packetSize();
    int batchNum = 0;
    try {
        try (InputStream input = new BufferedInputStream(new FileInputStream(fileName))) {
            byte[] buf = new byte[batchSize];
            int readBytes;
            int timeSpendMillis = 0;
            while ((readBytes = input.read(buf)) != -1) {
                long startTime = System.currentTimeMillis();
                if (readBytes == 0)
                    continue;
                // charge the time spent on the previous batch against the remaining request timeout
                if (reqTimeout != JdbcThinConnection.NO_TIMEOUT)
                    reqTimeout -= timeSpendMillis;
                JdbcResult res = conn.sendRequest(new JdbcBulkLoadBatchRequest(cmdRes.cursorId(), batchNum++,
                    JdbcBulkLoadBatchRequest.CMD_CONTINUE,
                    readBytes == buf.length ? buf : Arrays.copyOf(buf, readBytes)), this, stickyIO).response();
                if (!(res instanceof JdbcQueryExecuteResult))
                    throw new SQLException("Unknown response sent by the server: " + res);
                timeSpendMillis = (int) (System.currentTimeMillis() - startTime);
            }
            if (reqTimeout != JdbcThinConnection.NO_TIMEOUT)
                reqTimeout -= timeSpendMillis;
            return conn.sendRequest(new JdbcBulkLoadBatchRequest(cmdRes.cursorId(), batchNum++,
                JdbcBulkLoadBatchRequest.CMD_FINISHED_EOF), this, stickyIO).response();
        }
    } catch (Exception e) {
        // a timeout must reach the caller unchanged
        if (e instanceof SQLTimeoutException)
            throw (SQLTimeoutException) e;
        // tell the server to abort the bulk load before rethrowing
        try {
            conn.sendRequest(new JdbcBulkLoadBatchRequest(cmdRes.cursorId(), batchNum,
                JdbcBulkLoadBatchRequest.CMD_FINISHED_ERROR), this, stickyIO);
        } catch (SQLException e1) {
            throw new SQLException("Cannot send finalization request: " + e1.getMessage(), e);
        }
        if (e instanceof SQLException)
            throw (SQLException) e;
        else
            throw new SQLException("Failed to read file: '" + fileName + "'", SqlStateCode.INTERNAL_ERROR, e);
    }
}
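The interesting pattern here is the shared timeout budget: each batch's elapsed time is subtracted from the remaining request timeout, so a multi-request upload still respects one overall limit. Below is a generic illustration of that pattern with made-up names; it is not Ignite API, just the bookkeeping idea in isolation.

import java.sql.SQLTimeoutException;

// Sketch: a budget shared across several requests. NO_TIMEOUT and the
// class name are assumptions for this example.
final class TimeoutBudget {
    static final long NO_TIMEOUT = 0;
    private long remainingMillis;

    TimeoutBudget(long totalMillis) {
        this.remainingMillis = totalMillis;
    }

    // charge the elapsed time of one batch against the budget
    void charge(long elapsedMillis) throws SQLTimeoutException {
        if (remainingMillis == NO_TIMEOUT)
            return; // unlimited
        remainingMillis -= elapsedMillis;
        if (remainingMillis <= 0)
            throw new SQLTimeoutException("Overall operation timeout exceeded");
    }
}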
Use of java.sql.SQLTimeoutException in project ignite by apache.
The class JdbcThinConnection, method sendRequest.
/**
 * Sends a request for execution via the corresponding singleIo from {@link #ios} or the sticky singleIo.
 *
 * @param req Request.
 * @param stmt Jdbc thin statement.
 * @param stickyIo Sticky ignite endpoint.
 * @return Server response.
 * @throws SQLException On any error.
 */
JdbcResultWithIo sendRequest(JdbcRequest req, JdbcThinStatement stmt, @Nullable JdbcThinTcpIo stickyIo) throws SQLException {
    RequestTimeoutTask reqTimeoutTask = null;
    acquireMutex();
    try {
        int retryAttemptsLeft = 1;
        Exception lastE = null;
        while (retryAttemptsLeft > 0) {
            JdbcThinTcpIo cliIo = null;
            ensureConnected();
            try {
                cliIo = (stickyIo == null || !stickyIo.connected()) ? cliIo(calculateNodeIds(req)) : stickyIo;
                // schedule a watchdog task that cancels the request once the statement timeout expires
                if (stmt != null && stmt.requestTimeout() != NO_TIMEOUT) {
                    reqTimeoutTask = new RequestTimeoutTask(
                        req instanceof JdbcBulkLoadBatchRequest ? stmt.currentRequestId() : req.requestId(),
                        cliIo, stmt.requestTimeout());
                    qryTimeoutScheduledFut = maintenanceExecutor.scheduleAtFixedRate(reqTimeoutTask, 0,
                        REQUEST_TIMEOUT_PERIOD, TimeUnit.MILLISECONDS);
                }
                JdbcQueryExecuteRequest qryReq = null;
                if (req instanceof JdbcQueryExecuteRequest)
                    qryReq = (JdbcQueryExecuteRequest) req;
                JdbcResponse res = cliIo.sendRequest(req, stmt);
                txIo = res.activeTransaction() ? cliIo : null;
                // a cancellation triggered by the expired watchdog surfaces as a timeout
                if (res.status() == IgniteQueryErrorCode.QUERY_CANCELED && stmt != null &&
                    stmt.requestTimeout() != NO_TIMEOUT && reqTimeoutTask != null && reqTimeoutTask.expired.get()) {
                    int qryTimeout = stmt.getQueryTimeout();
                    throw new SQLTimeoutException(getTimeoutDescription(qryTimeout, cliIo),
                        SqlStateCode.QUERY_CANCELLED, IgniteQueryErrorCode.QUERY_CANCELED);
                } else if (res.status() != ClientListenerResponse.STATUS_SUCCESS)
                    throw new SQLException(res.error(), IgniteQueryErrorCode.codeToSqlState(res.status()), res.status());
                updateAffinityCache(qryReq, res);
                return new JdbcResultWithIo(res.response(), cliIo);
            } catch (SQLException e) {
                if (LOG.isLoggable(Level.FINE))
                    LOG.log(Level.FINE, "Exception during sending an sql request.", e);
                throw e;
            } catch (Exception e) {
                if (LOG.isLoggable(Level.FINE))
                    LOG.log(Level.FINE, "Exception during sending an sql request.", e);
                // the connection is broken: disconnect it so it is skipped during the next processing pass
                if (cliIo != null && cliIo.connected())
                    onDisconnect(cliIo);
                if (e instanceof SocketTimeoutException)
                    throw new SQLException("Connection timed out.", CONNECTION_FAILURE, e);
                else {
                    if (lastE == null) {
                        retryAttemptsLeft = calculateRetryAttemptsCount(stickyIo, req);
                        lastE = e;
                    } else
                        retryAttemptsLeft--;
                }
            }
        }
        throw new SQLException("Failed to communicate with Ignite cluster.", CONNECTION_FAILURE, lastE);
    } finally {
        if (stmt != null && stmt.requestTimeout() != NO_TIMEOUT && reqTimeoutTask != null)
            qryTimeoutScheduledFut.cancel(false);
        releaseMutex();
    }
}
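The timeout machinery here is a watchdog: a scheduled task marks itself expired and asks the server to cancel, and the caller then maps the resulting "query cancelled" status to SQLTimeoutException rather than a plain SQLException. The following is a minimal sketch of that pattern with hypothetical names, not the Ignite driver's actual classes.

import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

// Sketch: the expired flag lets the caller distinguish "cancelled because
// the timeout fired" from "cancelled by the user".
final class RequestWatchdog {
    final AtomicBoolean expired = new AtomicBoolean();

    ScheduledFuture<?> arm(ScheduledExecutorService exec, long timeoutMillis, Runnable cancelRequest) {
        return exec.schedule(() -> {
            expired.set(true);   // remember that the watchdog, not the user, cancelled
            cancelRequest.run(); // ask the server to cancel the in-flight request
        }, timeoutMillis, TimeUnit.MILLISECONDS);
    }
}

// Usage idea: if the response reports a cancellation and watchdog.expired is
// true, throw new java.sql.SQLTimeoutException(...) instead of SQLException.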
Use of java.sql.SQLTimeoutException in project vibur-dbcp by vibur.
The class PoolOperations, method createSQLException.
private SQLException createSQLException(long takenNanos) {
    String poolName = getPoolName(dataSource);
    if (poolService.isTerminated()) {
        return new SQLException(format("Pool %s, the poolService is terminated.", poolName),
            SQLSTATE_POOL_CLOSED_ERROR);
    }
    Hook.GetConnectionTimeout[] onTimeout = ((ConnHooksAccessor) dataSource.getConnHooks()).onTimeout();
    // someone else has interrupted us, so we do not clear the flag
    boolean isInterrupted = Thread.currentThread().isInterrupted();
    if (!isInterrupted && onTimeout.length > 0) {
        TakenConnection[] takenConnections = dataSource.getTakenConnections();
        for (Hook.GetConnectionTimeout hook : onTimeout) {
            hook.on(takenConnections, takenNanos);
        }
    }
    double takenMs = takenNanos * 0.000_001;
    int intTakenMs = (int) Math.round(takenMs);
    return !isInterrupted
        ? new SQLTimeoutException(format("Pool %s, couldn't obtain SQL connection within %.3f ms.",
            poolName, takenMs), SQLSTATE_TIMEOUT_ERROR, intTakenMs)
        : new SQLException(format("Pool %s, interrupted while getting SQL connection, waited for %.3f ms.",
            poolName, takenMs), SQLSTATE_INTERRUPTED_ERROR, intTakenMs);
}
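Note that the rounded wait time (intTakenMs) is passed as the vendor code, so callers can read it back via SQLException.getErrorCode(). A short sketch of how a caller might consume the timeout-versus-interruption distinction this method encodes; the wrapper class and method names are invented for illustration:

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.SQLTimeoutException;
import javax.sql.DataSource;

final class PoolTimeoutDemo {
    static Connection getWithDiagnostics(DataSource ds) throws SQLException {
        try {
            return ds.getConnection();
        } catch (SQLTimeoutException e) {
            // the pool stayed exhausted for the whole wait; the vendor code
            // carries the rounded wait time in ms (intTakenMs above)
            System.err.println("Pool timeout after ~" + e.getErrorCode() + " ms: " + e.getMessage());
            throw e;
        }
        // a plain SQLException (interrupted or terminated pool) propagates as-is
    }
}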
Use of java.sql.SQLTimeoutException in project ranger by apache.
The class PrestoClient, method getColumns.
private List<String> getColumns(String needle, List<String> catalogs, List<String> schemas,
    List<String> tables, List<String> columns) throws HadoopException {
    List<String> ret = new ArrayList<>();
    if (con != null) {
        String regex = null;
        ResultSet rs = null;
        String sql = null;
        Statement stat = null;
        if (needle != null && !needle.isEmpty()) {
            regex = needle;
        }
        if (catalogs != null && !catalogs.isEmpty() && schemas != null && !schemas.isEmpty()
            && tables != null && !tables.isEmpty()) {
            try {
                for (String catalog : catalogs) {
                    for (String schema : schemas) {
                        for (String table : tables) {
                            sql = "SHOW COLUMNS FROM \"" + StringEscapeUtils.escapeSql(catalog) + "\"."
                                + "\"" + StringEscapeUtils.escapeSql(schema) + "\"."
                                + "\"" + StringEscapeUtils.escapeSql(table) + "\"";
                            try {
                                stat = con.createStatement();
                                rs = stat.executeQuery(sql);
                                while (rs.next()) {
                                    String column = rs.getString(1);
                                    // skip columns the caller already knows about
                                    if (columns != null && columns.contains(column)) {
                                        continue;
                                    }
                                    if (regex == null) {
                                        ret.add(column);
                                    } else if (FilenameUtils.wildcardMatch(column, regex)) {
                                        ret.add(column);
                                    }
                                }
                            } finally {
                                close(rs);
                                close(stat);
                                stat = null;
                                rs = null;
                            }
                        }
                    }
                }
            } catch (SQLTimeoutException sqlt) {
                String msgDesc = "Time Out, Unable to execute SQL [" + sql + "].";
                HadoopException hdpException = new HadoopException(msgDesc, sqlt);
                hdpException.generateResponseDataMap(false, getMessage(sqlt), msgDesc + ERR_MSG, null, null);
                if (LOG.isDebugEnabled()) {
                    LOG.debug("<== PrestoClient.getColumns() Error : ", sqlt);
                }
                throw hdpException;
            } catch (SQLException sqle) {
                String msgDesc = "Unable to execute SQL [" + sql + "].";
                HadoopException hdpException = new HadoopException(msgDesc, sqle);
                hdpException.generateResponseDataMap(false, getMessage(sqle), msgDesc + ERR_MSG, null, null);
                if (LOG.isDebugEnabled()) {
                    LOG.debug("<== PrestoClient.getColumns() Error : ", sqle);
                }
                throw hdpException;
            }
        }
    }
    return ret;
}
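The manual close(rs)/close(stat) bookkeeping in the inner loop can also be expressed with try-with-resources, which closes both objects in reverse order even when executeQuery throws (including on SQLTimeoutException). A hedged sketch of that alternative, with an invented helper class; it is not how the Ranger code is written:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

final class FirstColumnReader {
    // runs one query and collects the first column of every row;
    // Statement and ResultSet are closed automatically on any exit path
    static List<String> readFirstColumn(Connection con, String sql) throws SQLException {
        List<String> out = new ArrayList<>();
        try (Statement stat = con.createStatement();
             ResultSet rs = stat.executeQuery(sql)) {
            while (rs.next()) {
                out.add(rs.getString(1));
            }
        }
        return out;
    }
}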