use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache.
the class TestStorageLocationChecker method testTimeoutInCheck.
/**
 * Verify that a {@link StorageLocation#check} timeout is correctly detected
 * as a failure.
 *
 * This is hard to test without a {@link Thread#sleep} call.
 *
 * @throws Exception
 */
@Test(timeout = 300000)
public void testTimeoutInCheck() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  conf.setTimeDuration(DFS_DATANODE_DISK_CHECK_TIMEOUT_KEY, 1, TimeUnit.SECONDS);
  conf.setInt(DFS_DATANODE_FAILED_VOLUMES_TOLERATED_KEY, 1);
  final FakeTimer timer = new FakeTimer();
  // Generate a list of storage locations the first of which sleeps
  // for 2 seconds in its check() routine.
  final List<StorageLocation> locations = makeSlowLocations(2000, 1);
  StorageLocationChecker checker = new StorageLocationChecker(conf, timer);
  try {
    // Check the two locations and ensure that only one of them
    // was filtered out.
    List<StorageLocation> filteredList = checker.check(conf, locations);
    assertThat(filteredList.size(), is(1));
  } finally {
    checker.shutdownAndWait(10, TimeUnit.SECONDS);
  }
}
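The makeSlowLocations helper is not shown on this page. A minimal sketch of how such a helper could be written, assuming Mockito's mock/when/any are available and that StorageLocation#check can be stubbed, might look like the following (the real helper in TestStorageLocationChecker may differ):

// Hypothetical helper: build mocked StorageLocations whose check() sleeps for
// the given durations before reporting HEALTHY, so that a short disk-check
// timeout turns the slow ones into failures.
private List<StorageLocation> makeSlowLocations(long... sleepTimesMs) throws Exception {
  List<StorageLocation> locations = new ArrayList<>();
  for (final long sleepMs : sleepTimesMs) {
    StorageLocation location = mock(StorageLocation.class);
    when(location.check(any())).thenAnswer(invocation -> {
      Thread.sleep(sleepMs);
      return VolumeCheckResult.HEALTHY;
    });
    locations.add(location);
  }
  return locations;
}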
use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache.
the class TestEditLogTailer method getConf.
private static Configuration getConf() {
  Configuration conf = new HdfsConfiguration();
  conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_EDITS_ASYNC_LOGGING, useAsyncEditLog);
  return conf;
}
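A minimal, hypothetical usage of this helper, to make explicit what the flag controls (DFS_NAMENODE_EDITS_ASYNC_LOGGING is the real DFSConfigKeys constant; the surrounding lines are illustrative):

// Read the async-edit-log flag back from the helper's configuration. In
// TestEditLogTailer the returned conf is typically handed to the mini-cluster
// builders so the NameNodes under test log edits synchronously or asynchronously.
Configuration conf = getConf();
boolean asyncEdits = conf.getBoolean(DFSConfigKeys.DFS_NAMENODE_EDITS_ASYNC_LOGGING, false);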
use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache.
the class TestWebHdfsTokens method testSetTokenServiceAndKind.
@Test
public void testSetTokenServiceAndKind() throws Exception {
  MiniDFSCluster cluster = null;
  try {
    final Configuration clusterConf = new HdfsConfiguration(conf);
    SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf);
    clusterConf.setBoolean(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
    // trick the NN into thinking security is enabled w/o it trying
    // to login from a keytab
    UserGroupInformation.setConfiguration(clusterConf);
    cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(0).build();
    cluster.waitActive();
    SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf);
    final WebHdfsFileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(clusterConf, "webhdfs");
    Whitebox.setInternalState(fs, "canRefreshDelegationToken", true);
    URLConnectionFactory factory = new URLConnectionFactory(new ConnectionConfigurator() {
      @Override
      public HttpURLConnection configure(HttpURLConnection conn) throws IOException {
        return conn;
      }
    }) {
      @Override
      public URLConnection openConnection(URL url) throws IOException {
        return super.openConnection(new URL(url + "&service=foo&kind=bar"));
      }
    };
    Whitebox.setInternalState(fs, "connectionFactory", factory);
    Token<?> token1 = fs.getDelegationToken();
    Assert.assertEquals(new Text("bar"), token1.getKind());
    final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKEN;
    Token<DelegationTokenIdentifier> token2 =
        fs.new FsPathResponseRunner<Token<DelegationTokenIdentifier>>(op, null, new RenewerParam(null)) {
          @Override
          Token<DelegationTokenIdentifier> decodeResponse(Map<?, ?> json) throws IOException {
            return JsonUtilClient.toDelegationToken(json);
          }
        }.run();
    Assert.assertEquals(new Text("bar"), token2.getKind());
    Assert.assertEquals(new Text("foo"), token2.getService());
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
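For context, a minimal sketch of fetching and inspecting a WebHDFS delegation token without the URL rewriting used above (the renewer name is illustrative; by default the kind and service come from the NameNode rather than from query parameters):

// Fetch a delegation token through the public FileSystem API and inspect the
// fields the test asserts on; without the injected "&service=foo&kind=bar"
// parameters, kind/service reflect the cluster's defaults.
Token<?> token = fs.getDelegationToken("someRenewer"); // renewer name is illustrative
if (token != null) {
  System.out.println("kind=" + token.getKind() + ", service=" + token.getService());
}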
use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache.
the class TestGetConf method testTool.
/**
 * Tests commands other than {@link Command#NAMENODE}, {@link Command#BACKUP},
 * {@link Command#SECONDARY} and {@link Command#NNRPCADDRESSES}
 */
@Test(timeout = 10000)
public void testTool() throws Exception {
  HdfsConfiguration conf = new HdfsConfiguration(false);
  for (Command cmd : Command.values()) {
    CommandHandler handler = Command.getHandler(cmd.getName());
    if (handler.key != null && !"-confKey".equals(cmd.getName())) {
      // Add the key to the conf and ensure tool returns the right value
      String[] args = { cmd.getName() };
      conf.set(handler.key, "value");
      assertTrue(runTool(conf, args, true).contains("value"));
    }
  }
}
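The runTool helper is not shown on this page. A plausible sketch, assuming GetConf can be constructed with explicit output/error streams (as the captured output above implies), might be:

// Hypothetical helper: run the getconf tool with the given arguments, assert
// the expected success or failure, and return whatever the tool printed.
private String runTool(HdfsConfiguration conf, String[] args, boolean success) throws Exception {
  ByteArrayOutputStream bytes = new ByteArrayOutputStream();
  try (PrintStream out = new PrintStream(bytes, true)) {
    int ret = ToolRunner.run(new GetConf(conf, out, out), args);
    assertEquals(success, ret == 0);
    return bytes.toString();
  }
}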
use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache.
the class TestGetConf method testInvalidArgument.
/**
 * Test invalid argument to the tool
 */
@Test(timeout = 10000)
public void testInvalidArgument() throws Exception {
  HdfsConfiguration conf = new HdfsConfiguration();
  String[] args = { "-invalidArgument" };
  String ret = runTool(conf, args, false);
  assertTrue(ret.contains(GetConf.USAGE));
}
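For completeness, a minimal sketch of invoking the tool directly rather than through the hypothetical runTool helper sketched above (the GetConf constructor and exit-code convention are assumed from the tests on this page):

// Run GetConf through ToolRunner with a bad flag; a non-zero exit code is
// expected, and the tool prints its usage text to its error stream.
int exitCode = ToolRunner.run(new GetConf(new HdfsConfiguration()),
    new String[] { "-invalidArgument" });
assertTrue(exitCode != 0);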