Search in sources :

Example 11 with SocketTimeoutException

Use of java.net.SocketTimeoutException in the Apache Hadoop project.

From the class TestWebHdfsTimeouts, method testTwoStepWriteConnectTimeout:

/**
 * Exercises the second step of a two-step write: because the redirect
 * target's connection backlog is already consumed, connecting to the
 * redirect location must fail with a connect timeout.
 */
@Test(timeout = TEST_TIMEOUT)
public void testTwoStepWriteConnectTimeout() throws Exception {
    startSingleTemporaryRedirectResponseThread(true);
    OutputStream out = null;
    try {
        out = fs.create(new Path("/file"));
        fail("expected timeout");
    } catch (SocketTimeoutException e) {
        // The message is expected to name the authority we tried to reach.
        String expected = fs.getUri().getAuthority() + ": connect timed out";
        GenericTestUtils.assertExceptionContains(expected, e);
    } finally {
        // Best-effort close; cleanup logs rather than propagates failures.
        IOUtils.cleanup(LOG, out);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) SocketTimeoutException(java.net.SocketTimeoutException) OutputStream(java.io.OutputStream) Test(org.junit.Test)

Example 12 with SocketTimeoutException

Use of java.net.SocketTimeoutException in the Apache Hadoop project.

From the class TestWebHdfsTimeouts, method testRedirectConnectTimeout:

/**
 * Issues a request that gets redirected; connecting to the redirect
 * location must time out because its connection backlog is consumed.
 */
@Test(timeout = TEST_TIMEOUT)
public void testRedirectConnectTimeout() throws Exception {
    startSingleTemporaryRedirectResponseThread(true);
    try {
        fs.getFileChecksum(new Path("/file"));
        fail("expected timeout");
    } catch (SocketTimeoutException e) {
        // The message is expected to name the authority we tried to reach.
        String expected = fs.getUri().getAuthority() + ": connect timed out";
        GenericTestUtils.assertExceptionContains(expected, e);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) SocketTimeoutException(java.net.SocketTimeoutException) Test(org.junit.Test)

Example 13 with SocketTimeoutException

Use of java.net.SocketTimeoutException in the Apache Hadoop project.

From the class TestIPC, method testIpcConnectTimeout:

/**
 * Verifies that the IPC client raises SocketTimeoutException when it
 * cannot connect: the server object is created (to obtain an address)
 * but intentionally never started, so the connect attempt must time out.
 */
@Test(timeout = 60000)
public void testIpcConnectTimeout() throws IOException {
    // Create (but do NOT start) the server, to get a connect timeout.
    Server server = new TestServer(1, true);
    InetSocketAddress addr = NetUtils.getConnectAddress(server);
    // Use a short connect timeout so the failure happens quickly.
    Client.setConnectTimeout(conf, 100);
    Client client = new Client(LongWritable.class, conf);
    try {
        // RPC timeout is twice MIN_SLEEP_TIME; the 100 ms connect timeout
        // should fire first because the server never accepts.
        call(client, new LongWritable(RANDOM.nextLong()), addr, MIN_SLEEP_TIME * 2, conf);
        fail("Expected an exception to have been thrown");
    } catch (SocketTimeoutException e) {
        LOG.info("Get a SocketTimeoutException ", e);
    } finally {
        // Stop the client even if fail()'s AssertionError (or any other
        // unexpected exception) propagates, so its threads do not leak
        // into subsequent tests. The original called stop() only on the
        // success path.
        client.stop();
    }
}
Also used : SocketTimeoutException(java.net.SocketTimeoutException) InetSocketAddress(java.net.InetSocketAddress) LongWritable(org.apache.hadoop.io.LongWritable) Test(org.junit.Test)

Example 14 with SocketTimeoutException

Use of java.net.SocketTimeoutException in the Apache Hadoop project.

From the class TestIPC, method testIpcTimeout:

/**
 * Verifies IPC call timeouts against a running server: a timeout shorter
 * than the handler's processing time must raise SocketTimeoutException,
 * while a generously large timeout must let the call complete.
 */
@Test(timeout = 60000)
public void testIpcTimeout() throws IOException {
    // Start a real server so calls connect but respond slowly.
    Server server = new TestServer(1, true);
    InetSocketAddress addr = NetUtils.getConnectAddress(server);
    server.start();
    Client client = new Client(LongWritable.class, conf);
    try {
        // A timeout below MIN_SLEEP_TIME must trip before the reply.
        try {
            call(client, new LongWritable(RANDOM.nextLong()), addr, MIN_SLEEP_TIME / 2, conf);
            fail("Expected an exception to have been thrown");
        } catch (SocketTimeoutException e) {
            LOG.info("Get a SocketTimeoutException ", e);
        }
        // A timeout bigger than 3 * ping interval plus the sleep must succeed.
        call(client, new LongWritable(RANDOM.nextLong()), addr, 3 * PING_INTERVAL + MIN_SLEEP_TIME, conf);
    } finally {
        // Release client and server resources even on assertion failure;
        // the original leaked both if anything before stop() threw.
        client.stop();
        server.stop();
    }
}
Also used : SocketTimeoutException(java.net.SocketTimeoutException) InetSocketAddress(java.net.InetSocketAddress) LongWritable(org.apache.hadoop.io.LongWritable) Test(org.junit.Test)

Example 15 with SocketTimeoutException

Use of java.net.SocketTimeoutException in the Apache Hadoop project.

From the class TestSaslDataTransfer, method TestPeerFromSocketAndKeyReadTimeout:

/**
 * Verifies that peerFromSocketAndKey honors socket read timeouts: a peer
 * built over a socket whose remote end never participates in the SASL
 * handshake must fail with a read timeout rather than hang.
 */
@Test(timeout = 60000)
public void TestPeerFromSocketAndKeyReadTimeout() throws Exception {
    HdfsConfiguration conf = createSecureConfig("authentication,integrity,privacy");
    AtomicBoolean fallbackToSimpleAuth = new AtomicBoolean(false);
    SaslDataTransferClient saslClient = new SaslDataTransferClient(
        conf, DataTransferSaslUtil.getSaslPropertiesResolver(conf),
        TrustedChannelResolver.getInstance(conf), fallbackToSimpleAuth);
    DatanodeID fakeDatanodeId = new DatanodeID("127.0.0.1", "localhost",
        "beefbeef-beef-beef-beef-beefbeefbeef", 1, 2, 3, 4);
    // Factory that hands back a fixed fake key; the handshake is expected
    // to time out before the key contents ever matter.
    DataEncryptionKeyFactory dataEncKeyFactory = new DataEncryptionKeyFactory() {

        @Override
        public DataEncryptionKey newDataEncryptionKey() {
            return new DataEncryptionKey(123, "456", new byte[8], new byte[8],
                1234567, "fakeAlgorithm");
        }
    };
    ServerSocket listener = null;
    Socket clientSocket = null;
    try {
        // Local server socket that is never serviced, so reads on the
        // client side stall until the timeout (final argument, presumably
        // milliseconds — see peerFromSocketAndKey) fires.
        listener = new ServerSocket(0, -1);
        clientSocket = new Socket(listener.getInetAddress(), listener.getLocalPort());
        Peer peer = DFSUtilClient.peerFromSocketAndKey(saslClient, clientSocket,
            dataEncKeyFactory, new Token(), fakeDatanodeId, 1);
        peer.close();
        Assert.fail("Expected DFSClient#peerFromSocketAndKey to time out.");
    } catch (SocketTimeoutException e) {
        GenericTestUtils.assertExceptionContains("Read timed out", e);
    } finally {
        IOUtils.cleanup(null, clientSocket, listener);
    }
}
Also used : AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) DatanodeID(org.apache.hadoop.hdfs.protocol.DatanodeID) DataEncryptionKey(org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey) SocketTimeoutException(java.net.SocketTimeoutException) Peer(org.apache.hadoop.hdfs.net.Peer) ServerSocket(java.net.ServerSocket) Token(org.apache.hadoop.security.token.Token) HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) Socket(java.net.Socket) ServerSocket(java.net.ServerSocket) Test(org.junit.Test)

Aggregations

SocketTimeoutException (java.net.SocketTimeoutException)369 IOException (java.io.IOException)200 Test (org.junit.Test)91 Socket (java.net.Socket)52 SocketException (java.net.SocketException)46 InputStream (java.io.InputStream)43 ServerSocket (java.net.ServerSocket)42 InetSocketAddress (java.net.InetSocketAddress)38 ConnectException (java.net.ConnectException)34 UnknownHostException (java.net.UnknownHostException)31 OutputStream (java.io.OutputStream)27 MalformedURLException (java.net.MalformedURLException)27 URL (java.net.URL)27 DatagramPacket (java.net.DatagramPacket)25 HttpURLConnection (java.net.HttpURLConnection)23 HashMap (java.util.HashMap)21 File (java.io.File)20 ArrayList (java.util.ArrayList)20 InterruptedIOException (java.io.InterruptedIOException)19 BufferedInputStream (java.io.BufferedInputStream)18