Use of org.apache.hadoop.hdfs.DFSClient in project hadoop by apache: class TestRetryCacheWithHA, method testRemoveXAttr.
@Test(timeout = 60000)
public void testRemoveXAttr() throws Exception {
  DFSClient client = genClientWithDummyHandler();
  AtMostOnceOp op = new RemoveXAttrOp(client, "/removexattr");
  testClientRetryWithFailover(op);
}
Use of org.apache.hadoop.hdfs.DFSClient in project hadoop by apache: class TestRetryCacheWithHA, method testRename.
@Test(timeout = 60000)
public void testRename() throws Exception {
  final DFSClient client = genClientWithDummyHandler();
  AtMostOnceOp op = new RenameOp(client, "/file1", "/file2");
  testClientRetryWithFailover(op);
}
Use of org.apache.hadoop.hdfs.DFSClient in project hadoop by apache: class TestRetryCacheWithHA, method genClientWithDummyHandler.
private DFSClient genClientWithDummyHandler() throws IOException {
  URI nnUri = dfs.getUri();
  FailoverProxyProvider<ClientProtocol> failoverProxyProvider =
      NameNodeProxiesClient.createFailoverProxyProvider(
          conf, nnUri, ClientProtocol.class, true, null);
  // Wrap the provider in the test's dummy handler, failing over on network exceptions.
  InvocationHandler dummyHandler = new DummyRetryInvocationHandler(
      failoverProxyProvider, RetryPolicies.failoverOnNetworkException(
          RetryPolicies.TRY_ONCE_THEN_FAIL, Integer.MAX_VALUE,
          HdfsClientConfigKeys.Failover.SLEEPTIME_BASE_DEFAULT,
          HdfsClientConfigKeys.Failover.SLEEPTIME_MAX_DEFAULT));
  // Build a DFSClient on top of a dynamic proxy backed by that handler.
  ClientProtocol proxy = (ClientProtocol) Proxy.newProxyInstance(
      failoverProxyProvider.getInterface().getClassLoader(),
      new Class[] { ClientProtocol.class }, dummyHandler);
  DFSClient client = new DFSClient(null, proxy, conf, null);
  return client;
}
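The other snippets feed the returned client into the test harness, but any RPC issued on it goes through the dynamic proxy and DummyRetryInvocationHandler. Below is a minimal sketch of my own, not part of TestRetryCacheWithHA; the method name, the path "/retry-cache-demo", and the mkdirs call are illustrative stand-ins for any client call.
@Test(timeout = 60000)
public void testDummyHandlerClientDirectly() throws Exception {
  // Hypothetical example: exercise the proxy-backed client directly.
  final DFSClient client = genClientWithDummyHandler();
  try {
    // The call is dispatched through the proxy built in genClientWithDummyHandler(),
    // so the dummy handler sees it before it reaches the NameNode.
    client.mkdirs("/retry-cache-demo", FsPermission.getDirDefault(), true);
  } finally {
    client.close();
  }
}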
Use of org.apache.hadoop.hdfs.DFSClient in project hadoop by apache: class TestRetryCacheWithHA, method testModifyCacheDirectiveInfo.
@Test(timeout = 60000)
public void testModifyCacheDirectiveInfo() throws Exception {
  DFSClient client = genClientWithDummyHandler();
  AtMostOnceOp op = new ModifyCacheDirectiveInfoOp(client,
      new CacheDirectiveInfo.Builder()
          .setPool("pool")
          .setPath(new Path("/path"))
          .setReplication((short) 1)
          .build(), (short) 555);
  testClientRetryWithFailover(op);
}
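For context, a directive like the one the op modifies can also be built and registered directly through DFSClient. The sketch below is my own illustration, not part of the test, and assumes a cache pool named "pool" already exists on the cluster; CacheFlag is org.apache.hadoop.fs.CacheFlag.
// Illustrative sketch: build a directive like the one above and add it directly.
DFSClient client = genClientWithDummyHandler();
CacheDirectiveInfo directive = new CacheDirectiveInfo.Builder()
    .setPool("pool")
    .setPath(new Path("/path"))
    .setReplication((short) 1)
    .build();
long directiveId = client.addCacheDirective(directive,
    EnumSet.noneOf(CacheFlag.class));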
Use of org.apache.hadoop.hdfs.DFSClient in project hadoop by apache: class TestRetryCacheWithHA, method testUpdatePipeline.
@Test(timeout = 60000)
public void testUpdatePipeline() throws Exception {
  final DFSClient client = genClientWithDummyHandler();
  AtMostOnceOp op = new UpdatePipelineOp(client, "/testfile");
  testClientRetryWithFailover(op);
}