Use of java.net.SocketTimeoutException in project hadoop by apache.
From class TestWebHdfsTimeouts, method testTwoStepWriteConnectTimeout:
/**
 * On the second step of two-step write, expect connect timeout accessing the
 * redirect location, because the connection backlog is consumed.
 */
@Test(timeout = TEST_TIMEOUT)
public void testTwoStepWriteConnectTimeout() throws Exception {
  startSingleTemporaryRedirectResponseThread(true);
  OutputStream os = null;
  try {
    os = fs.create(new Path("/file"));
    fail("expected timeout");
  } catch (SocketTimeoutException e) {
    GenericTestUtils.assertExceptionContains(
        fs.getUri().getAuthority() + ": connect timed out", e);
  } finally {
    IOUtils.cleanup(LOG, os);
  }
}
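Both this test and the next rely on the same setup: the redirect location points at a listener whose accept queue has deliberately been filled, so a fresh connection can never complete the TCP handshake. The following is a minimal, self-contained sketch of that mechanism using plain java.net sockets; it is not the Hadoop test harness, and the exact point at which the backlog fills is OS-dependent (kernels typically round the requested backlog up):

import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;

public class BacklogConnectTimeoutSketch {
  public static void main(String[] args) throws Exception {
    // Listener with a tiny backlog that never calls accept(), so queued
    // connections are never drained.
    try (ServerSocket server = new ServerSocket(0, 1)) {
      InetSocketAddress addr =
          new InetSocketAddress("127.0.0.1", server.getLocalPort());

      // Fill the accept queue. The OS may allow a few more connections
      // than the requested backlog of 1, so keep connecting until an
      // attempt times out. (Filler sockets are intentionally left open
      // so the queue stays full.)
      for (int i = 0; i < 16; i++) {
        Socket filler = new Socket();
        try {
          filler.connect(addr, 1000);
        } catch (SocketTimeoutException e) {
          break; // queue is now full
        }
      }

      // With the backlog consumed, one more connect attempt should fail
      // with "connect timed out" instead of completing.
      try (Socket victim = new Socket()) {
        victim.connect(addr, 1000);
        System.out.println("connected; backlog not yet full on this OS");
      } catch (SocketTimeoutException expected) {
        System.out.println("expected: " + expected.getMessage());
      }
    }
  }
}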
Use of java.net.SocketTimeoutException in project hadoop by apache.
From class TestWebHdfsTimeouts, method testRedirectConnectTimeout:
/**
 * After a redirect, expect connect timeout accessing the redirect location,
 * because the connection backlog is consumed.
 */
@Test(timeout = TEST_TIMEOUT)
public void testRedirectConnectTimeout() throws Exception {
  startSingleTemporaryRedirectResponseThread(true);
  try {
    fs.getFileChecksum(new Path("/file"));
    fail("expected timeout");
  } catch (SocketTimeoutException e) {
    GenericTestUtils.assertExceptionContains(
        fs.getUri().getAuthority() + ": connect timed out", e);
  }
}
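Both WebHDFS tests assert on the exception text via GenericTestUtils.assertExceptionContains; the filesystem authority (host:port) appears in the message because the client rewraps socket exceptions with the remote address. A rough sketch of the check that helper performs (an approximation for readability, not the actual Hadoop source, which also stringifies the full stack trace in its failure message):

// Approximation of GenericTestUtils.assertExceptionContains(String, Throwable);
// the real helper lives in org.apache.hadoop.test.GenericTestUtils.
static void assertExceptionContains(String expected, Throwable t) {
  String msg = t.getMessage();
  if (msg == null || !msg.contains(expected)) {
    throw new AssertionError(
        "Expected to find '" + expected + "' in exception message", t);
  }
}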
Use of java.net.SocketTimeoutException in project hadoop by apache.
From class TestIPC, method testIpcConnectTimeout:
@Test(timeout = 60000)
public void testIpcConnectTimeout() throws IOException {
  // Create the server but intentionally do not start it,
  // so the client runs into a connection timeout.
  Server server = new TestServer(1, true);
  InetSocketAddress addr = NetUtils.getConnectAddress(server);
  // start client with a 100 ms connect timeout
  Client.setConnectTimeout(conf, 100);
  Client client = new Client(LongWritable.class, conf);
  // set the rpc timeout to twice the MIN_SLEEP_TIME
  try {
    call(client, new LongWritable(RANDOM.nextLong()), addr, MIN_SLEEP_TIME * 2, conf);
    fail("Expected an exception to have been thrown");
  } catch (SocketTimeoutException e) {
    LOG.info("Got a SocketTimeoutException ", e);
  }
  client.stop();
}
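The connect timeout here comes from Client.setConnectTimeout(conf, 100), which caps how long the IPC client waits for the TCP handshake against a server that was constructed but never started. The same exception type can be reproduced with a bare Socket; the address below is from the TEST-NET-2 range (RFC 5737) and is normally unroutable, though that is an environment-dependent assumption:

import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketTimeoutException;

public class IpcConnectTimeoutSketch {
  public static void main(String[] args) throws Exception {
    try (Socket s = new Socket()) {
      // TEST-NET-2 address (RFC 5737): the SYN normally goes unanswered,
      // so connect() gives up after the 100 ms timeout. On networks that
      // actively reject this range you would see ConnectException instead.
      s.connect(new InetSocketAddress("198.51.100.1", 8020), 100);
      System.out.println("unexpectedly connected");
    } catch (SocketTimeoutException e) {
      // Same exception type the IPC client surfaces: "connect timed out"
      System.out.println("expected: " + e.getMessage());
    }
  }
}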
Use of java.net.SocketTimeoutException in project hadoop by apache.
From class TestIPC, method testIpcTimeout:
@Test(timeout = 60000)
public void testIpcTimeout() throws IOException {
  // start server
  Server server = new TestServer(1, true);
  InetSocketAddress addr = NetUtils.getConnectAddress(server);
  server.start();
  // start client
  Client client = new Client(LongWritable.class, conf);
  // set the rpc timeout to less than MIN_SLEEP_TIME, so the call times out
  try {
    call(client, new LongWritable(RANDOM.nextLong()), addr, MIN_SLEEP_TIME / 2, conf);
    fail("Expected an exception to have been thrown");
  } catch (SocketTimeoutException e) {
    LOG.info("Got a SocketTimeoutException ", e);
  }
  // with a timeout larger than 3 * PING_INTERVAL, the same call succeeds
  call(client, new LongWritable(RANDOM.nextLong()), addr, 3 * PING_INTERVAL + MIN_SLEEP_TIME, conf);
  client.stop();
}
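This test exercises the read side: the call times out when the rpc timeout is shorter than the server's artificial delay, and succeeds once the timeout comfortably exceeds it (3 * PING_INTERVAL + MIN_SLEEP_TIME). A plain-socket sketch of the same two-phase behavior, using SO_TIMEOUT in place of the IPC rpc timeout:

import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;

public class RpcReadTimeoutSketch {
  public static void main(String[] args) throws Exception {
    try (ServerSocket server = new ServerSocket(0)) {
      // Server thread: accept one connection, sleep past the client's
      // first timeout, then send a one-byte "response".
      Thread responder = new Thread(() -> {
        try (Socket s = server.accept()) {
          Thread.sleep(500);
          s.getOutputStream().write('x');
        } catch (Exception ignored) {
        }
      });
      responder.start();

      try (Socket client = new Socket("127.0.0.1", server.getLocalPort())) {
        client.setSoTimeout(100); // shorter than the server's delay
        try {
          client.getInputStream().read();
          System.out.println("unexpected: response arrived in time");
        } catch (SocketTimeoutException e) {
          System.out.println("short timeout: " + e.getMessage());
        }
        // A generous timeout lets the delayed response through, just as
        // the second call() in the test succeeds.
        client.setSoTimeout(2000);
        System.out.println("retry read: " + client.getInputStream().read());
      }
      responder.join();
    }
  }
}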
Use of java.net.SocketTimeoutException in project hadoop by apache.
From class TestSaslDataTransfer, method TestPeerFromSocketAndKeyReadTimeout:
/**
 * Verifies that peerFromSocketAndKey honors socket read timeouts.
 */
@Test(timeout = 60000)
public void TestPeerFromSocketAndKeyReadTimeout() throws Exception {
  HdfsConfiguration conf = createSecureConfig("authentication,integrity,privacy");
  AtomicBoolean fallbackToSimpleAuth = new AtomicBoolean(false);
  SaslDataTransferClient saslClient = new SaslDataTransferClient(
      conf, DataTransferSaslUtil.getSaslPropertiesResolver(conf),
      TrustedChannelResolver.getInstance(conf), fallbackToSimpleAuth);
  DatanodeID fakeDatanodeId = new DatanodeID("127.0.0.1", "localhost",
      "beefbeef-beef-beef-beef-beefbeefbeef", 1, 2, 3, 4);
  DataEncryptionKeyFactory dataEncKeyFactory = new DataEncryptionKeyFactory() {
    @Override
    public DataEncryptionKey newDataEncryptionKey() {
      return new DataEncryptionKey(123, "456", new byte[8], new byte[8],
          1234567, "fakeAlgorithm");
    }
  };
  ServerSocket serverSocket = null;
  Socket socket = null;
  try {
    // The server never participates in the SASL handshake, so the client's
    // read (with a 1 ms timeout) is expected to time out.
    serverSocket = new ServerSocket(0, -1);
    socket = new Socket(serverSocket.getInetAddress(), serverSocket.getLocalPort());
    Peer peer = DFSUtilClient.peerFromSocketAndKey(saslClient, socket,
        dataEncKeyFactory, new Token(), fakeDatanodeId, 1);
    peer.close();
    Assert.fail("Expected DFSClient#peerFromSocketAndKey to time out.");
  } catch (SocketTimeoutException e) {
    GenericTestUtils.assertExceptionContains("Read timed out", e);
  } finally {
    IOUtils.cleanup(null, socket, serverSocket);
  }
}
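The read timeout here is the final argument to DFSUtilClient.peerFromSocketAndKey (1 ms): the TCP connection completes in the kernel's accept queue, but because the server never participates in the SASL negotiation, the client's first read of the handshake response times out. Distilled to plain sockets (an illustration of the mechanism, not the Hadoop code path):

import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;

public class SaslReadTimeoutSketch {
  public static void main(String[] args) throws Exception {
    // Listener that never accepts and never writes, like the test's server.
    try (ServerSocket server = new ServerSocket(0)) {
      try (Socket client = new Socket("127.0.0.1", server.getLocalPort())) {
        // The TCP handshake completes in the kernel even without accept(),
        // but no peer ever sends a negotiation response to read.
        client.setSoTimeout(1); // 1 ms, matching the test's read timeout
        client.getInputStream().read();
        System.out.println("unexpected: got data");
      } catch (SocketTimeoutException e) {
        System.out.println("expected: " + e.getMessage()); // "Read timed out"
      }
    }
  }
}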