Search in sources :

Example 1 with RetryPolicy

use of org.apache.hadoop.io.retry.RetryPolicy in project hadoop by apache.

The class TestWebHDFS, method testReadRetryExceptionHelper.

/**
 * Exercises WebHdfsInputStream's read-retry path: forces the underlying
 * input stream to throw {@code ex} on read and verifies that the
 * filesystem's RetryPolicy is (or is not) consulted, and how many read
 * attempts were made.
 *
 * @param fs filesystem under test
 * @param fn path of the file to open
 * @param ex exception the spied input stream will throw from read()
 * @param msg expected substring of the message of the exception that
 *            propagates out of read()
 * @param shouldAttemptRetry whether the retry logic is expected to run
 *                           (i.e. shouldRetry is expected to be called)
 * @param numTimesTried expected number of calls to ReadRunner#getResponse
 * @throws Exception on test-infrastructure failure
 */
private void testReadRetryExceptionHelper(WebHdfsFileSystem fs, Path fn, final IOException ex, String msg, boolean shouldAttemptRetry, int numTimesTried) throws Exception {
    // Override WebHdfsInputStream#getInputStream so that it returns
    // an input stream that throws the specified exception when read
    // is called.
    FSDataInputStream in = fs.open(fn);
    // Connection is made only when the first read() occurs.
    in.read();
    final WebHdfsInputStream webIn = (WebHdfsInputStream) (in.getWrappedStream());
    // Spy on the live input stream so the injected exception surfaces
    // from the normal read(byte[], int, int) path.
    final InputStream spyInputStream = spy(webIn.getReadRunner().getInputStream());
    doThrow(ex).when(spyInputStream).read((byte[]) any(), anyInt(), anyInt());
    // Spy on the ReadRunner as well so (a) reconnect attempts also get the
    // throwing stream and (b) getResponse() invocations can be counted.
    final WebHdfsFileSystem.ReadRunner rr = spy(webIn.getReadRunner());
    doReturn(spyInputStream).when(rr).initializeInputStream((HttpURLConnection) any());
    rr.setInputStream(spyInputStream);
    webIn.setReadRunner(rr);
    // Override filesystem's retry policy in order to verify that
    // WebHdfsInputStream is calling shouldRetry for the appropriate
    // exceptions.
    final RetryAction retryAction = new RetryAction(RetryDecision.RETRY);
    final RetryAction failAction = new RetryAction(RetryDecision.FAIL);
    RetryPolicy rp = new RetryPolicy() {

        @Override
        public RetryAction shouldRetry(Exception e, int retries, int failovers, boolean isIdempotentOrAtMostOnce) throws Exception {
            // Record that the retry logic ran; give up after 3 retries so
            // the test terminates.
            attemptedRetry = true;
            if (retries > 3) {
                return failAction;
            } else {
                return retryAction;
            }
        }
    };
    fs.setRetryPolicy(rp);
    // If the retry logic is exercised, attemptedRetry will be true. Some
    // exceptions should exercise the retry logic and others should not.
    // Either way, the value of attemptedRetry should match shouldAttemptRetry.
    attemptedRetry = false;
    try {
        webIn.read();
        fail(msg + ": Read should have thrown exception.");
    } catch (Exception e) {
        assertTrue(e.getMessage().contains(msg));
    }
    assertEquals(msg + ": Read should " + (shouldAttemptRetry ? "" : "not ") + "have called shouldRetry. ", attemptedRetry, shouldAttemptRetry);
    verify(rr, times(numTimesTried)).getResponse((HttpURLConnection) any());
    webIn.close();
    in.close();
}
Also used : FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) WebHdfsInputStream(org.apache.hadoop.hdfs.web.WebHdfsFileSystem.WebHdfsInputStream) InputStream(java.io.InputStream) RetryAction(org.apache.hadoop.io.retry.RetryPolicy.RetryAction) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) WebHdfsInputStream(org.apache.hadoop.hdfs.web.WebHdfsFileSystem.WebHdfsInputStream) RetryPolicy(org.apache.hadoop.io.retry.RetryPolicy) RetriableException(org.apache.hadoop.ipc.RetriableException) SocketException(java.net.SocketException) SocketTimeoutException(java.net.SocketTimeoutException) IOException(java.io.IOException) JSONException(org.codehaus.jettison.json.JSONException) ServletException(javax.servlet.ServletException) URISyntaxException(java.net.URISyntaxException) EOFException(java.io.EOFException) RemoteException(org.apache.hadoop.ipc.RemoteException) AccessControlException(org.apache.hadoop.security.AccessControlException)

Example 2 with RetryPolicy

use of org.apache.hadoop.io.retry.RetryPolicy in project hadoop by apache.

The class TestReuseRpcConnections, method testDefaultRetryPolicyReuseConnections.

@Test(timeout = 60000)
public void testDefaultRetryPolicyReuseConnections() throws Exception {
    // Two policies built from identical, enabled settings should reuse
    // the same connection.
    RetryPolicy first = getDefaultRetryPolicy(true, "10000,2");
    RetryPolicy second = getDefaultRetryPolicy(true, "10000,2");
    verifyRetryPolicyReuseConnections(first, second, RetryPolicies.RETRY_FOREVER);
    // Enabled policies where a third one carries a different specification.
    first = getDefaultRetryPolicy(true, "20000,3");
    second = getDefaultRetryPolicy(true, "20000,3");
    RetryPolicy third = getDefaultRetryPolicy(true, "30000,4");
    verifyRetryPolicyReuseConnections(first, second, third);
    // Disabled policies with identical specifications.
    first = getDefaultRetryPolicy(false, "40000,5");
    second = getDefaultRetryPolicy(false, "40000,5");
    verifyRetryPolicyReuseConnections(first, second, RetryPolicies.RETRY_FOREVER);
    // Disabled policies with differing specifications.
    first = getDefaultRetryPolicy(false, "50000,6");
    second = getDefaultRetryPolicy(false, "60000,7");
    verifyRetryPolicyReuseConnections(first, second, RetryPolicies.RETRY_FOREVER);
    // Enabled policies that differ only in which remote exception is retried.
    first = getDefaultRetryPolicy(true, "70000,8", new RemoteException(RpcNoSuchMethodException.class.getName(), "no such method exception").getClassName());
    second = getDefaultRetryPolicy(true, "70000,8", new RemoteException(PathIOException.class.getName(), "path IO exception").getClassName());
    verifyRetryPolicyReuseConnections(first, second, RetryPolicies.RETRY_FOREVER);
}
Also used : RetryPolicy(org.apache.hadoop.io.retry.RetryPolicy) TestConnectionRetryPolicy(org.apache.hadoop.io.retry.TestConnectionRetryPolicy) Test(org.junit.Test)

Example 3 with RetryPolicy

use of org.apache.hadoop.io.retry.RetryPolicy in project hadoop by apache.

The class NameNodeProxiesClient, method createNonHAProxyWithClientProtocol.

/**
 * Creates a non-HA ClientProtocol proxy to the NameNode at {@code address},
 * optionally wrapped in a retry proxy driven by the client retry settings.
 *
 * @param address NameNode RPC address
 * @param conf configuration supplying retry policy and socket settings
 * @param ugi user the proxy acts as
 * @param withRetries whether to wrap the translator in a RetryProxy
 * @param fallbackToSimpleAuth set when the server forces simple auth
 * @return a ClientProtocol implementation backed by the RPC proxy
 * @throws IOException if the RPC proxy cannot be created
 */
public static ClientProtocol createNonHAProxyWithClientProtocol(InetSocketAddress address, Configuration conf, UserGroupInformation ugi, boolean withRetries, AtomicBoolean fallbackToSimpleAuth) throws IOException {
    RPC.setProtocolEngine(conf, ClientNamenodeProtocolPB.class, ProtobufRpcEngine.class);
    // Default policy from client config; SafeModeException is treated as retriable.
    final RetryPolicy defaultPolicy = RetryUtils.getDefaultRetryPolicy(
            conf,
            HdfsClientConfigKeys.Retry.POLICY_ENABLED_KEY,
            HdfsClientConfigKeys.Retry.POLICY_ENABLED_DEFAULT,
            HdfsClientConfigKeys.Retry.POLICY_SPEC_KEY,
            HdfsClientConfigKeys.Retry.POLICY_SPEC_DEFAULT,
            SafeModeException.class.getName());
    final long version = RPC.getProtocolVersion(ClientNamenodeProtocolPB.class);
    final int rpcTimeout = org.apache.hadoop.ipc.Client.getTimeout(conf);
    ClientNamenodeProtocolPB proxy = RPC.getProtocolProxy(
            ClientNamenodeProtocolPB.class, version, address, ugi, conf,
            NetUtils.getDefaultSocketFactory(conf), rpcTimeout, defaultPolicy,
            fallbackToSimpleAuth).getProxy();
    ClientProtocol translatorProxy = new ClientNamenodeProtocolTranslatorPB(proxy);
    if (!withRetries) {
        return translatorProxy;
    }
    // Wrap in a retry proxy; the empty per-method map means the default
    // policy applies to every method.
    Map<String, RetryPolicy> methodNameToPolicyMap = new HashMap<>();
    return (ClientProtocol) RetryProxy.create(
            ClientProtocol.class,
            new DefaultFailoverProxyProvider<>(ClientProtocol.class, translatorProxy),
            methodNameToPolicyMap,
            defaultPolicy);
}
Also used : ClientNamenodeProtocolPB(org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB) HashMap(java.util.HashMap) ClientNamenodeProtocolTranslatorPB(org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB) SafeModeException(org.apache.hadoop.hdfs.server.namenode.SafeModeException) DefaultFailoverProxyProvider(org.apache.hadoop.io.retry.DefaultFailoverProxyProvider) ClientProtocol(org.apache.hadoop.hdfs.protocol.ClientProtocol) RetryPolicy(org.apache.hadoop.io.retry.RetryPolicy)

Example 4 with RetryPolicy

use of org.apache.hadoop.io.retry.RetryPolicy in project hadoop by apache.

The class NativeS3FileSystem, method createDefaultStore.

/**
 * Builds the default NativeFileSystemStore, wrapping it in a retry proxy
 * so that "storeFile" and "rename" are retried on IOException/S3Exception
 * with a fixed sleep between attempts.
 *
 * @param conf configuration supplying the retry count and sleep time
 * @return a retry-wrapped Jets3t-backed store
 */
private static NativeFileSystemStore createDefaultStore(Configuration conf) {
    NativeFileSystemStore store = new Jets3tNativeFileSystemStore();
    // Fixed-sleep retries, bounded by the configured maximum attempt count.
    final RetryPolicy basePolicy = RetryPolicies.retryUpToMaximumCountWithFixedSleep(
            conf.getInt(S3_NATIVE_MAX_RETRIES_KEY, S3_NATIVE_MAX_RETRIES_DEFAUL),
            conf.getLong(S3_NATIVE_SLEEP_TIME_KEY, S3_NATIVE_SLEEP_TIME_DEFAULT),
            TimeUnit.SECONDS);
    // Only transient I/O and S3 failures are retried; everything else fails fast.
    Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap = new HashMap<>();
    exceptionToPolicyMap.put(IOException.class, basePolicy);
    exceptionToPolicyMap.put(S3Exception.class, basePolicy);
    final RetryPolicy methodPolicy =
            RetryPolicies.retryByException(RetryPolicies.TRY_ONCE_THEN_FAIL, exceptionToPolicyMap);
    // Apply the retry policy to the mutating operations only.
    Map<String, RetryPolicy> methodNameToPolicyMap = new HashMap<>();
    for (String methodName : new String[] { "storeFile", "rename" }) {
        methodNameToPolicyMap.put(methodName, methodPolicy);
    }
    return (NativeFileSystemStore) RetryProxy.create(NativeFileSystemStore.class, store, methodNameToPolicyMap);
}
Also used : HashMap(java.util.HashMap) RetryPolicy(org.apache.hadoop.io.retry.RetryPolicy) FileAlreadyExistsException(org.apache.hadoop.fs.FileAlreadyExistsException) IOException(java.io.IOException) EOFException(java.io.EOFException) FileNotFoundException(java.io.FileNotFoundException) NoSuchAlgorithmException(java.security.NoSuchAlgorithmException)

Example 5 with RetryPolicy

use of org.apache.hadoop.io.retry.RetryPolicy in project hadoop by apache.

The class ServerProxy, method createRetryPolicy.

/**
 * Builds a RetryPolicy for server connections: the listed transient
 * connection-level exceptions are retried with a fixed sleep, and every
 * other exception fails immediately (TRY_ONCE_THEN_FAIL).
 *
 * @param conf configuration to read the wait/interval settings from
 * @param maxWaitTimeStr config key for the maximum total wait time in ms;
 *                       a value of -1 means retry forever
 * @param defMaxWaitTime default maximum wait time in ms
 * @param connectRetryIntervalStr config key for the sleep between retries in ms
 * @param defRetryInterval default retry interval in ms
 * @return a policy retrying transient connection failures with fixed sleep
 * @throws IllegalArgumentException if the configured values are invalid
 */
protected static RetryPolicy createRetryPolicy(Configuration conf, String maxWaitTimeStr, long defMaxWaitTime, String connectRetryIntervalStr, long defRetryInterval) {
    long maxWaitTime = conf.getLong(maxWaitTimeStr, defMaxWaitTime);
    long retryIntervalMS = conf.getLong(connectRetryIntervalStr, defRetryInterval);
    Preconditions.checkArgument((maxWaitTime == -1 || maxWaitTime > 0), "Invalid Configuration. " + maxWaitTimeStr + " should be either" + " positive value or -1.");
    // Bug fix: the original message concatenated the config key directly
    // against "should", producing e.g. "<key>should be a positive value."
    Preconditions.checkArgument(retryIntervalMS > 0, "Invalid Configuration. " + connectRetryIntervalStr + " should be a positive value.");
    // -1 means wait forever; otherwise cap the total wait time.
    final RetryPolicy retryPolicy = (maxWaitTime == -1)
            ? RetryPolicies.retryForeverWithFixedSleep(retryIntervalMS, TimeUnit.MILLISECONDS)
            : RetryPolicies.retryUpToMaximumTimeWithFixedSleep(maxWaitTime, retryIntervalMS, TimeUnit.MILLISECONDS);
    // Only these transient connection-level failures are retried.
    Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap = new HashMap<>();
    exceptionToPolicyMap.put(EOFException.class, retryPolicy);
    exceptionToPolicyMap.put(ConnectException.class, retryPolicy);
    exceptionToPolicyMap.put(NoRouteToHostException.class, retryPolicy);
    exceptionToPolicyMap.put(UnknownHostException.class, retryPolicy);
    exceptionToPolicyMap.put(ConnectTimeoutException.class, retryPolicy);
    exceptionToPolicyMap.put(RetriableException.class, retryPolicy);
    exceptionToPolicyMap.put(SocketException.class, retryPolicy);
    exceptionToPolicyMap.put(NMNotYetReadyException.class, retryPolicy);
    return RetryPolicies.retryByException(RetryPolicies.TRY_ONCE_THEN_FAIL, exceptionToPolicyMap);
}
Also used : HashMap(java.util.HashMap) RetryPolicy(org.apache.hadoop.io.retry.RetryPolicy) ConnectTimeoutException(org.apache.hadoop.net.ConnectTimeoutException) EOFException(java.io.EOFException) UnknownHostException(java.net.UnknownHostException) NMNotYetReadyException(org.apache.hadoop.yarn.exceptions.NMNotYetReadyException) SocketException(java.net.SocketException) RetriableException(org.apache.hadoop.ipc.RetriableException) ConnectException(java.net.ConnectException) NoRouteToHostException(java.net.NoRouteToHostException)

Aggregations

RetryPolicy (org.apache.hadoop.io.retry.RetryPolicy)14 HashMap (java.util.HashMap)5 EOFException (java.io.EOFException)4 IOException (java.io.IOException)4 SocketException (java.net.SocketException)3 RetriableException (org.apache.hadoop.ipc.RetriableException)3 YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration)3 Test (org.junit.Test)3 ConnectException (java.net.ConnectException)2 NoRouteToHostException (java.net.NoRouteToHostException)2 UnknownHostException (java.net.UnknownHostException)2 Private (org.apache.hadoop.classification.InterfaceAudience.Private)2 TestConnectionRetryPolicy (org.apache.hadoop.io.retry.TestConnectionRetryPolicy)2 ConnectTimeoutException (org.apache.hadoop.net.ConnectTimeoutException)2 FileNotFoundException (java.io.FileNotFoundException)1 InputStream (java.io.InputStream)1 InetSocketAddress (java.net.InetSocketAddress)1 SocketTimeoutException (java.net.SocketTimeoutException)1 URISyntaxException (java.net.URISyntaxException)1 NoSuchAlgorithmException (java.security.NoSuchAlgorithmException)1