Use of org.apache.hadoop.fs.FileContext in project elasticsearch (by elastic):
class HdfsBlobStoreContainerTests, method createContext.
@SuppressForbidden(reason = "lesser of two evils (the other being a bunch of JNI/classloader nightmares)")
private FileContext createContext(URI uri) {
    // mirrors HdfsRepository.java behaviour
    Configuration hadoopConfig = new Configuration(true);
    hadoopConfig.setClassLoader(HdfsRepository.class.getClassLoader());
    hadoopConfig.reloadConfiguration();
    // disable file system cache
    hadoopConfig.setBoolean("fs.hdfs.impl.disable.cache", true);
    // set file system to TestingFs to avoid a bunch of security
    // checks, similar to what is done in HdfsTests.java
    hadoopConfig.set("fs.AbstractFileSystem." + uri.getScheme() + ".impl", TestingFs.class.getName());

    // look up the hadoop User(String) constructor reflectively
    Constructor<?> userCtor;
    try {
        userCtor = Class.forName("org.apache.hadoop.security.User").getConstructor(String.class);
        userCtor.setAccessible(true);
    } catch (ClassNotFoundException | NoSuchMethodException e) {
        throw new RuntimeException(e);
    }

    // wrap the current OS user's name in a Subject carrying a single hadoop User principal
    Subject testSubject;
    try {
        Principal userPrincipal = (Principal) userCtor.newInstance(System.getProperty("user.name"));
        testSubject = new Subject(false, Collections.singleton(userPrincipal), Collections.emptySet(), Collections.emptySet());
    } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
        throw new RuntimeException(e);
    }

    // create the FileContext with our user
    return Subject.doAs(testSubject, (PrivilegedAction<FileContext>) () -> {
        try {
            TestingFs fs = (TestingFs) AbstractFileSystem.get(uri, hadoopConfig);
            return FileContext.getFileContext(fs, hadoopConfig);
        } catch (UnsupportedFileSystemException e) {
            throw new RuntimeException(e);
        }
    });
}
Use of org.apache.hadoop.fs.FileContext in project hadoop (by apache):
class TestDefaultContainerExecutor, method createTmpFile.
/**
 * Writes {@code len} random bytes to {@code dst} on the local file system,
 * creating parent directories as needed.
 *
 * @param dst destination path; qualified against the local file context
 * @param r   source of the random bytes
 * @param len number of bytes to generate and write
 * @return the byte array that was written to {@code dst}
 * @throws IOException if the directory or file cannot be created or written
 */
byte[] createTmpFile(Path dst, Random r, int len) throws IOException {
    // use unmodified local context
    FileContext lfs = FileContext.getLocalFSFileContext();
    dst = lfs.makeQualified(dst);
    lfs.mkdir(dst.getParent(), null, true);
    byte[] bytes = new byte[len];
    r.nextBytes(bytes);
    // try-with-resources replaces the manual null-checked finally block and
    // guarantees the stream is closed even when write() throws
    try (FSDataOutputStream out = lfs.create(dst, EnumSet.of(CREATE, OVERWRITE))) {
        out.write(bytes);
    }
    return bytes;
}
Use of org.apache.hadoop.fs.FileContext in project hadoop (by apache):
class TestDefaultContainerExecutor, method testContainerLaunchError.
/**
 * Verifies that a failing container launch surfaces a useful diagnostics
 * message ("No such file or directory") both via {@code logOutput} and via a
 * {@link ContainerDiagnosticsUpdateEvent}, and that {@code launchContainer}
 * returns a non-zero exit code. On Windows a small failing .cmd script stands
 * in for the POSIX launch failure.
 */
@Test
public void testContainerLaunchError() throws IOException, InterruptedException {
    if (Shell.WINDOWS) {
        BASE_TMP_PATH = new Path(new File("target").getAbsolutePath(), TestDefaultContainerExecutor.class.getSimpleName());
    }
    Path localDir = new Path(BASE_TMP_PATH, "localDir");
    List<String> localDirs = new ArrayList<String>();
    localDirs.add(localDir.toString());
    List<String> logDirs = new ArrayList<String>();
    Path logDir = new Path(BASE_TMP_PATH, "logDir");
    logDirs.add(logDir.toString());
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
    conf.set(YarnConfiguration.NM_LOCAL_DIRS, localDir.toString());
    conf.set(YarnConfiguration.NM_LOG_DIRS, logDir.toString());
    FileContext lfs = FileContext.getLocalFSFileContext(conf);
    DefaultContainerExecutor mockExec = spy(new DefaultContainerExecutor(lfs));
    mockExec.setConf(conf);
    // every line the executor logs must carry the expected diagnostics text
    doAnswer(new Answer() {
        @Override
        public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
            String diagnostics = (String) invocationOnMock.getArguments()[0];
            assertTrue("Invalid Diagnostics message: " + diagnostics, diagnostics.contains("No such file or directory"));
            return null;
        }
    }).when(mockExec).logOutput(any(String.class));
    String appSubmitter = "nobody";
    String appId = "APP_ID";
    String containerId = "CONTAINER_ID";
    Container container = mock(Container.class);
    ContainerId cId = mock(ContainerId.class);
    ContainerLaunchContext context = mock(ContainerLaunchContext.class);
    HashMap<String, String> env = new HashMap<String, String>();
    // LANG=C keeps the "No such file or directory" message un-localized
    env.put("LANG", "C");
    when(container.getContainerId()).thenReturn(cId);
    when(container.getLaunchContext()).thenReturn(context);
    try {
        // the diagnostics event delivered to the container must carry the same text
        doAnswer(new Answer() {
            @Override
            public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
                ContainerDiagnosticsUpdateEvent event = (ContainerDiagnosticsUpdateEvent) invocationOnMock.getArguments()[0];
                assertTrue("Invalid Diagnostics message: " + event.getDiagnosticsUpdate(), event.getDiagnosticsUpdate().contains("No such file or directory"));
                return null;
            }
        }).when(container).handle(any(ContainerDiagnosticsUpdateEvent.class));
        when(cId.toString()).thenReturn(containerId);
        when(cId.getApplicationAttemptId()).thenReturn(ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 0));
        when(context.getEnvironment()).thenReturn(env);
        mockExec.createUserLocalDirs(localDirs, appSubmitter);
        mockExec.createUserCacheDirs(localDirs, appSubmitter);
        mockExec.createAppDirs(localDirs, appSubmitter, appId);
        mockExec.createAppLogDirs(appId, logDirs, appSubmitter);
        Path scriptPath = new Path("file:///bin/echo");
        Path tokensPath = new Path("file:///dev/null");
        if (Shell.WINDOWS) {
            File tmp = new File(BASE_TMP_PATH.toString(), "test_echo.cmd");
            // try-with-resources: the writer previously leaked when write() threw
            // before close() was reached.
            // NOTE(review): the two write() calls emit no line separator between
            // the commands, so the script is a single concatenated line —
            // presumably these were println()-style lines originally; confirm the
            // script still fails as intended on Windows.
            try (BufferedWriter output = new BufferedWriter(new FileWriter(tmp))) {
                output.write("Exit 1");
                output.write("Echo No such file or directory 1>&2");
            }
            scriptPath = new Path(tmp.getAbsolutePath());
            tmp = new File(BASE_TMP_PATH.toString(), "tokens");
            tmp.createNewFile();
            tokensPath = new Path(tmp.getAbsolutePath());
        }
        Path workDir = localDir;
        Path pidFile = new Path(workDir, "pid.txt");
        mockExec.init();
        mockExec.activateContainer(cId, pidFile);
        int ret = mockExec.launchContainer(new ContainerStartContext.Builder().setContainer(container).setNmPrivateContainerScriptPath(scriptPath).setNmPrivateTokensPath(tokensPath).setUser(appSubmitter).setAppId(appId).setContainerWorkDir(workDir).setLocalDirs(localDirs).setLogDirs(logDirs).build());
        // the launch must fail
        Assert.assertNotSame(0, ret);
    } finally {
        // clean up as the submitting user regardless of test outcome
        mockExec.deleteAsUser(new DeletionAsUserContext.Builder().setUser(appSubmitter).setSubDir(localDir).build());
        mockExec.deleteAsUser(new DeletionAsUserContext.Builder().setUser(appSubmitter).setSubDir(logDir).build());
    }
}
Use of org.apache.hadoop.fs.FileContext in project hadoop (by apache):
class TestDefaultContainerExecutor, method testDirPermissions.
/**
 * Checks that DefaultContainerExecutor creates the usercache, appcache,
 * filecache and per-app directories with the expected permissions, and that
 * the configurable log-dir permission setting is honoured for a range of
 * octal values.
 */
@Test
public void testDirPermissions() throws Exception {
    deleteTmpFiles();
    final String owner = "somebody";
    final String application = "app_12345_123";
    // expected permissions, derived from the executor's own constants
    final FsPermission expectedUserCache = new FsPermission(DefaultContainerExecutor.USER_PERM);
    final FsPermission expectedAppCache = new FsPermission(DefaultContainerExecutor.APPCACHE_PERM);
    final FsPermission expectedFileCache = new FsPermission(DefaultContainerExecutor.FILECACHE_PERM);
    final FsPermission expectedAppDir = new FsPermission(DefaultContainerExecutor.APPDIR_PERM);
    List<String> localDirList = new ArrayList<String>();
    localDirList.add(new Path(BASE_TMP_PATH, "localDirA").toString());
    localDirList.add(new Path(BASE_TMP_PATH, "localDirB").toString());
    List<String> logDirList = new ArrayList<String>();
    logDirList.add(new Path(BASE_TMP_PATH, "logDirA").toString());
    logDirList.add(new Path(BASE_TMP_PATH, "logDirB").toString());
    Configuration cfg = new Configuration();
    cfg.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
    FileContext localFs = FileContext.getLocalFSFileContext(cfg);
    DefaultContainerExecutor containerExec = new DefaultContainerExecutor(localFs);
    containerExec.setConf(cfg);
    containerExec.init();
    try {
        containerExec.createUserLocalDirs(localDirList, owner);
        containerExec.createUserCacheDirs(localDirList, owner);
        containerExec.createAppDirs(localDirList, owner, application);
        // first pass: the per-user cache dir itself
        for (String localDir : localDirList) {
            Path userCacheDir = new Path(new Path(localDir, ContainerLocalizer.USERCACHE), owner);
            FileStatus status = localFs.getFileStatus(userCacheDir);
            Assert.assertEquals(expectedUserCache, status.getPermission());
        }
        // second pass: appcache, filecache, and the per-app directory beneath it
        for (String localDir : localDirList) {
            Path userCacheDir = new Path(new Path(localDir, ContainerLocalizer.USERCACHE), owner);
            Path appCacheDir = new Path(userCacheDir, ContainerLocalizer.APPCACHE);
            FileStatus status = localFs.getFileStatus(appCacheDir);
            Assert.assertEquals(expectedAppCache, status.getPermission());
            status = localFs.getFileStatus(new Path(userCacheDir, ContainerLocalizer.FILECACHE));
            Assert.assertEquals(expectedFileCache, status.getPermission());
            status = localFs.getFileStatus(new Path(appCacheDir, application));
            Assert.assertEquals(expectedAppDir, status.getPermission());
        }
        // log dirs must pick up whatever permission is configured, per value
        String[] candidatePerms = { "000", "111", "555", "710", "777" };
        for (String candidate : candidatePerms) {
            cfg.set(YarnConfiguration.NM_DEFAULT_CONTAINER_EXECUTOR_LOG_DIRS_PERMISSIONS, candidate);
            containerExec.clearLogDirPermissions();
            FsPermission expectedLogPerm = new FsPermission(containerExec.getLogDirPermissions());
            containerExec.createAppLogDirs(application, logDirList, owner);
            for (String logDir : logDirList) {
                Path appLogDir = new Path(logDir, application);
                FileStatus status = localFs.getFileStatus(appLogDir);
                Assert.assertEquals(expectedLogPerm, status.getPermission());
                // remove so the next candidate permission starts fresh
                localFs.delete(appLogDir, true);
            }
        }
    } finally {
        deleteTmpFiles();
    }
}
Use of org.apache.hadoop.fs.FileContext in project hadoop (by apache):
class TestDefaultContainerExecutor, method testPickDirectory.
/**
 * Exercises DefaultContainerExecutor.pickDirectory: given a random offset and
 * the per-disk available space, the expected disk index is the one whose
 * cumulative-space bucket contains the offset.
 */
@Test
public void testPickDirectory() throws Exception {
    Configuration cfg = new Configuration();
    FileContext localFs = FileContext.getLocalFSFileContext(cfg);
    DefaultContainerExecutor containerExec = new DefaultContainerExecutor(localFs);

    // two equally-sized disks: offsets [0,100) -> disk 0, [100,200) -> disk 1
    long[] twoDisks = { 100L, 100L };
    assertEquals(0, containerExec.pickDirectory(0L, twoDisks));
    assertEquals(0, containerExec.pickDirectory(99L, twoDisks));
    assertEquals(1, containerExec.pickDirectory(100L, twoDisks));
    assertEquals(1, containerExec.pickDirectory(101L, twoDisks));
    assertEquals(1, containerExec.pickDirectory(199L, twoDisks));

    // five unevenly-sized disks; cumulative boundaries: 100, 110, 510, 710, 1060
    long[] fiveDisks = { 100L, 10L, 400L, 200L, 350L };
    assertEquals(0, containerExec.pickDirectory(0L, fiveDisks));
    assertEquals(0, containerExec.pickDirectory(99L, fiveDisks));
    assertEquals(1, containerExec.pickDirectory(100L, fiveDisks));
    assertEquals(1, containerExec.pickDirectory(105L, fiveDisks));
    assertEquals(2, containerExec.pickDirectory(110L, fiveDisks));
    assertEquals(2, containerExec.pickDirectory(259L, fiveDisks));
    assertEquals(3, containerExec.pickDirectory(700L, fiveDisks));
    assertEquals(4, containerExec.pickDirectory(710L, fiveDisks));
    assertEquals(4, containerExec.pickDirectory(910L, fiveDisks));
}
Aggregations