Example usage of org.apache.hadoop.fs.FileSystemTestHelper in the Apache Hadoop project: the startMiniHdfs method of the TestHdfsHelper class.
/**
 * Lazily starts, and caches for reuse, a shared two-datanode MiniDFSCluster
 * configured with simple authentication, ACLs, XAttrs and a JCEKS-backed key
 * provider; it also creates an encryption zone containing one encrypted file.
 *
 * @param conf base configuration; a private copy is made, so the caller's
 *             instance is never mutated
 * @return the singleton {@code MiniDFSCluster} instance
 * @throws Exception if the cluster, the test key, or the test paths cannot
 *                   be created
 */
private static synchronized MiniDFSCluster startMiniHdfs(Configuration conf) throws Exception {
  if (MINI_DFS != null) {
    return MINI_DFS;
  }
  // Point Hadoop's log and data directories under the test root when unset.
  if (System.getProperty("hadoop.log.dir") == null) {
    System.setProperty("hadoop.log.dir",
        new File(TEST_DIR_ROOT, "hadoop-log").getAbsolutePath());
  }
  if (System.getProperty("test.build.data") == null) {
    System.setProperty("test.build.data",
        new File(TEST_DIR_ROOT, "hadoop-data").getAbsolutePath());
  }
  // Work on a private copy so the caller's configuration stays untouched.
  Configuration clusterConf = new Configuration(conf);
  HadoopUsersConfTestHelper.addUserConf(clusterConf);
  clusterConf.set("fs.hdfs.impl.disable.cache", "true");
  clusterConf.set("dfs.block.access.token.enable", "false");
  clusterConf.set("dfs.permissions", "true");
  clusterConf.set("hadoop.security.authentication", "simple");
  clusterConf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true);
  clusterConf.setBoolean(DFSConfigKeys.DFS_NAMENODE_XATTRS_ENABLED_KEY, true);
  // Back encryption keys with a JCEKS keystore file under the test root.
  String jceksPath = JavaKeyStoreProvider.SCHEME_NAME + "://file"
      + new Path(new FileSystemTestHelper().getTestRootDir(), "test.jks").toUri();
  clusterConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH, jceksPath);
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(2).build();
  String testkey = "testkey";
  DFSTestUtil.createKey(testkey, cluster, clusterConf);
  DistributedFileSystem dfs = cluster.getFileSystem();
  // Wire the client to the NameNode's key provider so crypto ops resolve keys.
  dfs.getClient().setKeyProvider(cluster.getNameNode().getNamesystem().getProvider());
  // World-writable staging directories expected by the tests.
  dfs.mkdirs(new Path("/tmp"));
  dfs.mkdirs(new Path("/user"));
  dfs.setPermission(new Path("/tmp"), FsPermission.valueOf("-rwxrwxrwx"));
  dfs.setPermission(new Path("/user"), FsPermission.valueOf("-rwxrwxrwx"));
  dfs.mkdirs(ENCRYPTION_ZONE);
  dfs.createEncryptionZone(ENCRYPTION_ZONE, testkey);
  dfs.create(ENCRYPTED_FILE).close();
  MINI_DFS = cluster;
  return MINI_DFS;
}
Example usage of org.apache.hadoop.fs.FileSystemTestHelper in the Apache Hadoop project: the setUp method of the TestFsVolumeList class.
/**
 * Prepares each test with a mocked dataset, a fresh test root directory and
 * a BlockScanner whose periodic scanning is turned off.
 */
@Before
public void setUp() {
  dataset = mock(FsDatasetImpl.class);
  baseDir = new FileSystemTestHelper().getTestRootDir();
  // A negative scan period disables the periodic block-scanning behavior.
  final Configuration scannerConf = new Configuration();
  scannerConf.setInt(DFSConfigKeys.DFS_DATANODE_SCAN_PERIOD_HOURS_KEY, -1);
  blockScanner = new BlockScanner(null, scannerConf);
}
Example usage of org.apache.hadoop.fs.FileSystemTestHelper in the Apache Hadoop project: the testPBDelimitedWriter method of the TestOfflineImageViewer class.
/**
 * Exercises the delimited PB writer twice: once backed by an in-memory store
 * and once backed by a database file under the test root directory.
 */
@Test
public void testPBDelimitedWriter() throws IOException, InterruptedException {
  // An empty path selects the in-memory db implementation.
  testPBDelimitedWriter("");
  // A non-empty path backs the writer with an on-disk database file.
  final String dbPath = new FileSystemTestHelper().getTestRootDir() + "/delimited.db";
  testPBDelimitedWriter(dbPath);
}
Example usage of org.apache.hadoop.fs.FileSystemTestHelper in the Apache Hadoop project: the setup method of the TestKeyProviderFactory class.
/**
 * Resolves the absolute test root directory used by the key provider tests.
 */
@Before
public void setup() {
  fsHelper = new FileSystemTestHelper();
  // Absolute form avoids surprises from the JVM's current working directory.
  testRootDir = new File(fsHelper.getTestRootDir()).getAbsoluteFile();
}
Example usage of org.apache.hadoop.fs.FileSystemTestHelper in the Apache Hadoop project: the setupFileSystem method of the TestViewFileSystemDelegation class.
/**
 * Registers {@code clazz} as the FileSystem implementation for the scheme of
 * {@code uri}, obtains the resulting {@link FakeFileSystem}, and mounts its
 * absolute test root under {@code /mounts/<scheme>} in the shared view-fs
 * configuration.
 *
 * @param uri   target URI whose scheme identifies the fake file system
 * @param clazz concrete FileSystem implementation class to register; must
 *              resolve to a {@code FakeFileSystem} at runtime
 * @return the {@code FakeFileSystem} instance bound to {@code uri}
 * @throws Exception if the file system cannot be instantiated
 */
static FakeFileSystem setupFileSystem(URI uri, Class<?> clazz) throws Exception {
  // Fixed: parameterized the raw Class type (Class<?> accepts every caller a
  // raw Class did, but no longer suppresses generic type checking file-wide).
  String scheme = uri.getScheme();
  conf.set("fs." + scheme + ".impl", clazz.getName());
  FakeFileSystem fs = (FakeFileSystem) FileSystem.get(uri, conf);
  // Sanity-check that FileSystem.get honored the just-registered scheme.
  assertEquals(uri, fs.getUri());
  Path targetPath = new FileSystemTestHelper().getAbsoluteTestRootPath(fs);
  ConfigUtil.addLink(conf, "/mounts/" + scheme, targetPath.toUri());
  return fs;
}
Aggregations