Use of org.apache.hadoop.io.retry.RetryPolicy.RetryAction in project hadoop by apache.
In the class TestWebHDFS, the method testReadRetryExceptionHelper:
private void testReadRetryExceptionHelper(WebHdfsFileSystem fs, Path fn,
    final IOException ex, String msg, boolean shouldAttemptRetry,
    int numTimesTried) throws Exception {
  // Override WebHdfsInputStream#getInputStream so that it returns
  // an input stream that throws the specified exception when read
  // is called.
  FSDataInputStream in = fs.open(fn);
  // Connection is made only when the first read() occurs.
  in.read();
  final WebHdfsInputStream webIn = (WebHdfsInputStream) (in.getWrappedStream());
  final InputStream spyInputStream = spy(webIn.getReadRunner().getInputStream());
  doThrow(ex).when(spyInputStream).read((byte[]) any(), anyInt(), anyInt());
  final WebHdfsFileSystem.ReadRunner rr = spy(webIn.getReadRunner());
  doReturn(spyInputStream).when(rr).initializeInputStream((HttpURLConnection) any());
  rr.setInputStream(spyInputStream);
  webIn.setReadRunner(rr);

  // Override the filesystem's retry policy in order to verify that
  // WebHdfsInputStream is calling shouldRetry for the appropriate
  // exceptions.
  final RetryAction retryAction = new RetryAction(RetryDecision.RETRY);
  final RetryAction failAction = new RetryAction(RetryDecision.FAIL);
  RetryPolicy rp = new RetryPolicy() {
    @Override
    public RetryAction shouldRetry(Exception e, int retries, int failovers,
        boolean isIdempotentOrAtMostOnce) throws Exception {
      attemptedRetry = true;
      if (retries > 3) {
        return failAction;
      } else {
        return retryAction;
      }
    }
  };
  fs.setRetryPolicy(rp);

  // If the retry logic is exercised, attemptedRetry will be true. Some
  // exceptions should exercise the retry logic and others should not.
  // Either way, the value of attemptedRetry should match shouldAttemptRetry.
  attemptedRetry = false;
  try {
    webIn.read();
    fail(msg + ": Read should have thrown exception.");
  } catch (Exception e) {
    assertTrue(e.getMessage().contains(msg));
  }
  assertEquals(msg + ": Read should " + (shouldAttemptRetry ? "" : "not ")
      + "have called shouldRetry. ", attemptedRetry, shouldAttemptRetry);
  verify(rr, times(numTimesTried)).getResponse((HttpURLConnection) any());
  webIn.close();
  in.close();
}
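The anonymous RetryPolicy above shows the core pattern: shouldRetry inspects the failure and returns a RetryAction built from a RetryDecision. As a standalone illustration of the same pattern, here is a minimal sketch of a reusable policy with a fixed back-off. The class name FixedDelayRetryPolicyExample and its parameters are invented for this example, and the two-argument RetryAction(RetryDecision, long) constructor is an assumption about the RetryAction API rather than something taken from the test above.

import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
import org.apache.hadoop.io.retry.RetryPolicy.RetryAction.RetryDecision;

// Hypothetical helper, not part of Hadoop: builds a policy that allows up to
// maxRetries attempts with a fixed delay and fails afterwards, mirroring the
// anonymous policy used in the test above.
public class FixedDelayRetryPolicyExample {
  public static RetryPolicy create(final int maxRetries, final long delayMillis) {
    return new RetryPolicy() {
      @Override
      public RetryAction shouldRetry(Exception e, int retries, int failovers,
          boolean isIdempotentOrAtMostOnce) throws Exception {
        if (retries > maxRetries) {
          return new RetryAction(RetryDecision.FAIL);
        }
        // Assumed two-argument constructor: carries the delay (in milliseconds)
        // the caller should sleep before the next attempt.
        return new RetryAction(RetryDecision.RETRY, delayMillis);
      }
    };
  }
}

The delay carried in the returned RetryAction is what callers such as RetriableCommand.execute() below sleep on (via action.delayMillis) before retrying.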
Use of org.apache.hadoop.io.retry.RetryPolicy.RetryAction in project hadoop by apache.
In the class RetriableCommand, the method execute:
/**
 * The execute() method invokes doExecute() until either:
 *   1. doExecute() succeeds, or
 *   2. the command may no longer be retried (e.g. runs out of retry-attempts).
 * @param arguments The list of arguments for the command.
 * @return Generic "Object" from doExecute(), on success.
 * @throws Exception
 */
public Object execute(Object... arguments) throws Exception {
  Exception latestException;
  int counter = 0;
  while (true) {
    try {
      return doExecute(arguments);
    } catch (Exception exception) {
      LOG.error("Failure in Retriable command: " + description, exception);
      latestException = exception;
    }
    counter++;
    RetryAction action = retryPolicy.shouldRetry(latestException, counter, 0, true);
    if (action.action == RetryPolicy.RetryAction.RetryDecision.RETRY) {
      ThreadUtil.sleepAtLeastIgnoreInterrupts(action.delayMillis);
    } else {
      break;
    }
  }
  throw new IOException("Couldn't run retriable-command: " + description,
      latestException);
}
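The loop above keeps calling doExecute() until the policy stops returning RETRY, sleeping for action.delayMillis between attempts. A minimal sketch of how such a command might be wired up and invoked follows. The subclass name EchoCommand is made up, and the RetriableCommand(String, RetryPolicy) constructor, the protected doExecute(Object...) hook, the org.apache.hadoop.tools.util package, and the RetryPolicies.retryUpToMaximumCountWithFixedSleep factory are assumptions based on this snippet and the usual Hadoop retry API, not verified against a specific Hadoop release.

import java.util.concurrent.TimeUnit;
import org.apache.hadoop.io.retry.RetryPolicies;
import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.tools.util.RetriableCommand;

// Illustrative subclass (EchoCommand is a made-up name): doExecute() performs
// the actual work, while the inherited execute() above wraps it in the retry loop.
public class EchoCommand extends RetriableCommand {
  public EchoCommand(String description, RetryPolicy retryPolicy) {
    super(description, retryPolicy);
  }

  @Override
  protected Object doExecute(Object... arguments) throws Exception {
    // A real command would perform a copy, an RPC call, etc., and may throw.
    return "echo: " + arguments[0];
  }

  public static void main(String[] args) throws Exception {
    // Allow up to three retries, sleeping one second between attempts
    // (assumed RetryPolicies factory method).
    RetryPolicy policy =
        RetryPolicies.retryUpToMaximumCountWithFixedSleep(3, 1, TimeUnit.SECONDS);
    Object result = new EchoCommand("echo command", policy).execute("hello");
    System.out.println(result);
  }
}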