Example 11 with ApplicationAccessType

use of org.apache.hadoop.yarn.api.records.ApplicationAccessType in project hadoop by apache.

In the class TestApplicationClientProtocolRecords, the method testCLCPBImplNullEnv.

/*
   * This test validates the scenario in which the client sets a null value for a
   * particular environment variable.
   *
   */
@Test
public void testCLCPBImplNullEnv() throws IOException {
    Map<String, LocalResource> localResources = Collections.emptyMap();
    Map<String, String> environment = new HashMap<String, String>();
    List<String> commands = Collections.emptyList();
    Map<String, ByteBuffer> serviceData = Collections.emptyMap();
    Credentials containerCreds = new Credentials();
    DataOutputBuffer dob = new DataOutputBuffer();
    containerCreds.writeTokenStorageToStream(dob);
    ByteBuffer containerTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    Map<ApplicationAccessType, String> acls = Collections.emptyMap();
    environment.put("testCLCPBImplNullEnv", null);
    ContainerLaunchContext clc = ContainerLaunchContext.newInstance(localResources, environment, commands, serviceData, containerTokens, acls);
    ContainerLaunchContext clcProto = new ContainerLaunchContextPBImpl(((ContainerLaunchContextPBImpl) clc).getProto());
    Assert.assertEquals("", clcProto.getEnvironment().get("testCLCPBImplNullEnv"));
}
Also used : HashMap(java.util.HashMap) ContainerLaunchContext(org.apache.hadoop.yarn.api.records.ContainerLaunchContext) ByteBuffer(java.nio.ByteBuffer) LocalResource(org.apache.hadoop.yarn.api.records.LocalResource) ApplicationAccessType(org.apache.hadoop.yarn.api.records.ApplicationAccessType) DataOutputBuffer(org.apache.hadoop.io.DataOutputBuffer) Credentials(org.apache.hadoop.security.Credentials) Test(org.junit.Test)
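
The same protobuf round trip can also be used to confirm that ApplicationAccessType ACL entries survive conversion through ContainerLaunchContextPBImpl. The following companion test is only a sketch, not part of the Hadoop sources; it reuses the factory and proto calls from the example above and the public getApplicationACLs() accessor, with hypothetical user names.

@Test
public void testCLCPBImplAclsRoundTrip() throws IOException {
    // Sketch only: verify that VIEW_APP/MODIFY_APP entries survive the proto round trip.
    Map<ApplicationAccessType, String> acls = new HashMap<ApplicationAccessType, String>();
    acls.put(ApplicationAccessType.VIEW_APP, "viewuser");
    acls.put(ApplicationAccessType.MODIFY_APP, "moduser");
    ContainerLaunchContext clc = ContainerLaunchContext.newInstance(
        Collections.<String, LocalResource>emptyMap(),
        Collections.<String, String>emptyMap(),
        Collections.<String>emptyList(),
        Collections.<String, ByteBuffer>emptyMap(),
        null,
        acls);
    ContainerLaunchContext copy = new ContainerLaunchContextPBImpl(((ContainerLaunchContextPBImpl) clc).getProto());
    Assert.assertEquals("viewuser", copy.getApplicationACLs().get(ApplicationAccessType.VIEW_APP));
    Assert.assertEquals("moduser", copy.getApplicationACLs().get(ApplicationAccessType.MODIFY_APP));
}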

Example 12 with ApplicationAccessType

use of org.apache.hadoop.yarn.api.records.ApplicationAccessType in project hadoop by apache.

In the class TestNMLeveldbStateStoreService, the method createContainerRequest.

private StartContainerRequest createContainerRequest(ContainerId containerId) {
    LocalResource lrsrc = LocalResource.newInstance(URL.newInstance("hdfs", "somehost", 12345, "/some/path/to/rsrc"), LocalResourceType.FILE, LocalResourceVisibility.APPLICATION, 123L, 1234567890L);
    Map<String, LocalResource> localResources = new HashMap<String, LocalResource>();
    localResources.put("rsrc", lrsrc);
    Map<String, String> env = new HashMap<String, String>();
    env.put("somevar", "someval");
    List<String> containerCmds = new ArrayList<String>();
    containerCmds.add("somecmd");
    containerCmds.add("somearg");
    Map<String, ByteBuffer> serviceData = new HashMap<String, ByteBuffer>();
    serviceData.put("someservice", ByteBuffer.wrap(new byte[] { 0x1, 0x2, 0x3 }));
    ByteBuffer containerTokens = ByteBuffer.wrap(new byte[] { 0x7, 0x8, 0x9, 0xa });
    Map<ApplicationAccessType, String> acls = new HashMap<ApplicationAccessType, String>();
    acls.put(ApplicationAccessType.VIEW_APP, "viewuser");
    acls.put(ApplicationAccessType.MODIFY_APP, "moduser");
    ContainerLaunchContext clc = ContainerLaunchContext.newInstance(localResources, env, containerCmds, serviceData, containerTokens, acls);
    Resource containerRsrc = Resource.newInstance(1357, 3);
    ContainerTokenIdentifier containerTokenId = new ContainerTokenIdentifier(containerId, "host", "user", containerRsrc, 9876543210L, 42, 2468, Priority.newInstance(7), 13579);
    Token containerToken = Token.newInstance(containerTokenId.getBytes(), ContainerTokenIdentifier.KIND.toString(), "password".getBytes(), "tokenservice");
    return StartContainerRequest.newInstance(clc, containerToken);
}
Also used : HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) Resource(org.apache.hadoop.yarn.api.records.Resource) LocalResource(org.apache.hadoop.yarn.api.records.LocalResource) Token(org.apache.hadoop.yarn.api.records.Token) ContainerLaunchContext(org.apache.hadoop.yarn.api.records.ContainerLaunchContext) ByteBuffer(java.nio.ByteBuffer) LocalResource(org.apache.hadoop.yarn.api.records.LocalResource) ContainerTokenIdentifier(org.apache.hadoop.yarn.security.ContainerTokenIdentifier) ApplicationAccessType(org.apache.hadoop.yarn.api.records.ApplicationAccessType)

Example 13 with ApplicationAccessType

use of org.apache.hadoop.yarn.api.records.ApplicationAccessType in project hadoop by apache.

In the class TestNonAggregatingLogHandler, the method runMockedFailedDirs.

/**
   * Function to run a log handler with directories failing the getFileStatus
   * call. The function accepts the log handler, sets up the mocks to fail with
   * specific exceptions, and ensures the deletion service receives the correct calls.
   * 
   * @param logHandler the logHandler implementation to test
   * 
   * @param appId the application id to use when sending events to the log
   * handler
   * 
   * @param user the user name to use
   * 
   * @param mockDelService a mock of the DeletionService against which the
   * delete calls are verified
   * 
   * @param dirsHandler a spy or mock on the LocalDirsHandlerService used
   * when creating the logHandler. It needs to be a spy so that we can intercept
   * the getLogDirsForCleanup() call.
   * 
   * @param conf the configuration used
   * 
   * @param spylfs a spy on the AbstractFileSystem object used when creating lfs
   * 
   * @param lfs the FileContext object to be used to mock the getFileStatus()
   * calls
   * 
   * @param localLogDirs list of the log dirs to run the test against, must have
   * at least 7 entries
   */
public static void runMockedFailedDirs(LogHandler logHandler, ApplicationId appId, String user, DeletionService mockDelService, LocalDirsHandlerService dirsHandler, Configuration conf, AbstractFileSystem spylfs, FileContext lfs, File[] localLogDirs) throws Exception {
    Map<ApplicationAccessType, String> appAcls = new HashMap<ApplicationAccessType, String>();
    if (localLogDirs.length < 7) {
        throw new IllegalArgumentException("Argument localLogDirs must be at least of length 7");
    }
    Path[] localAppLogDirPaths = new Path[localLogDirs.length];
    for (int i = 0; i < localAppLogDirPaths.length; i++) {
        localAppLogDirPaths[i] = new Path(localLogDirs[i].getAbsolutePath(), appId.toString());
    }
    final List<String> localLogDirPaths = new ArrayList<String>(localLogDirs.length);
    for (int i = 0; i < localLogDirs.length; i++) {
        localLogDirPaths.add(localLogDirs[i].getAbsolutePath());
    }
    // setup mocks
    FsPermission defaultPermission = FsPermission.getDirDefault().applyUMask(lfs.getUMask());
    final FileStatus fs = new FileStatus(0, true, 1, 0, System.currentTimeMillis(), 0, defaultPermission, "", "", new Path(localLogDirs[0].getAbsolutePath()));
    doReturn(fs).when(spylfs).getFileStatus(isA(Path.class));
    doReturn(localLogDirPaths).when(dirsHandler).getLogDirsForCleanup();
    logHandler.handle(new LogHandlerAppStartedEvent(appId, user, null, appAcls));
    // test case where some dirs have the log dir to delete
    // mock some dirs throwing various exceptions
    // verify deletion happens only on the others
    Mockito.doThrow(new FileNotFoundException()).when(spylfs).getFileStatus(eq(localAppLogDirPaths[0]));
    doReturn(fs).when(spylfs).getFileStatus(eq(localAppLogDirPaths[1]));
    Mockito.doThrow(new AccessControlException()).when(spylfs).getFileStatus(eq(localAppLogDirPaths[2]));
    doReturn(fs).when(spylfs).getFileStatus(eq(localAppLogDirPaths[3]));
    Mockito.doThrow(new IOException()).when(spylfs).getFileStatus(eq(localAppLogDirPaths[4]));
    Mockito.doThrow(new UnsupportedFileSystemException("test")).when(spylfs).getFileStatus(eq(localAppLogDirPaths[5]));
    doReturn(fs).when(spylfs).getFileStatus(eq(localAppLogDirPaths[6]));
    logHandler.handle(new LogHandlerAppFinishedEvent(appId));
    testDeletionServiceCall(mockDelService, user, 5000, localAppLogDirPaths[1], localAppLogDirPaths[3], localAppLogDirPaths[6]);
    return;
}
Also used : Path(org.apache.hadoop.fs.Path) FileStatus(org.apache.hadoop.fs.FileStatus) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) FileNotFoundException(java.io.FileNotFoundException) AccessControlException(org.apache.hadoop.security.AccessControlException) IOException(java.io.IOException) LogHandlerAppStartedEvent(org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppStartedEvent) ApplicationAccessType(org.apache.hadoop.yarn.api.records.ApplicationAccessType) LogHandlerAppFinishedEvent(org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppFinishedEvent) UnsupportedFileSystemException(org.apache.hadoop.fs.UnsupportedFileSystemException) FsPermission(org.apache.hadoop.fs.permission.FsPermission)

Example 14 with ApplicationAccessType

use of org.apache.hadoop.yarn.api.records.ApplicationAccessType in project hadoop by apache.

In the class TestLogsCLI, the method uploadContainerLogIntoRemoteDir.

private static void uploadContainerLogIntoRemoteDir(UserGroupInformation ugi, Configuration configuration, List<String> rootLogDirs, NodeId nodeId, ContainerId containerId, Path appDir, FileSystem fs) throws Exception {
    Path path = new Path(appDir, LogAggregationUtils.getNodeString(nodeId) + System.currentTimeMillis());
    AggregatedLogFormat.LogWriter writer = new AggregatedLogFormat.LogWriter(configuration, path, ugi);
    writer.writeApplicationOwner(ugi.getUserName());
    Map<ApplicationAccessType, String> appAcls = new HashMap<ApplicationAccessType, String>();
    appAcls.put(ApplicationAccessType.VIEW_APP, ugi.getUserName());
    writer.writeApplicationACLs(appAcls);
    writer.append(new AggregatedLogFormat.LogKey(containerId), new AggregatedLogFormat.LogValue(rootLogDirs, containerId, UserGroupInformation.getCurrentUser().getShortUserName()));
    writer.close();
}
Also used : Path(org.apache.hadoop.fs.Path) HashMap(java.util.HashMap) ApplicationAccessType(org.apache.hadoop.yarn.api.records.ApplicationAccessType) Matchers.anyString(org.mockito.Matchers.anyString) AggregatedLogFormat(org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat)
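
The ACLs written by writeApplicationACLs() can later be read back from the aggregated log file. A rough read-side sketch (not part of TestLogsCLI), assuming the matching AggregatedLogFormat.LogReader API and reusing the configuration and path variables from the helper above:

AggregatedLogFormat.LogReader reader = new AggregatedLogFormat.LogReader(configuration, path);
try {
    // Owner and ApplicationAccessType ACLs as written by the LogWriter above.
    String owner = reader.getApplicationOwner();
    Map<ApplicationAccessType, String> readAcls = reader.getApplicationAcls();
    String viewAcl = readAcls.get(ApplicationAccessType.VIEW_APP);
} finally {
    reader.close();
}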

Example 15 with ApplicationAccessType

use of org.apache.hadoop.yarn.api.records.ApplicationAccessType in project hadoop by apache.

In the class TestLogsCLI, the method uploadEmptyContainerLogIntoRemoteDir.

private static void uploadEmptyContainerLogIntoRemoteDir(UserGroupInformation ugi, Configuration configuration, List<String> rootLogDirs, NodeId nodeId, ContainerId containerId, Path appDir, FileSystem fs) throws Exception {
    Path path = new Path(appDir, LogAggregationUtils.getNodeString(nodeId) + System.currentTimeMillis());
    AggregatedLogFormat.LogWriter writer = new AggregatedLogFormat.LogWriter(configuration, path, ugi);
    writer.writeApplicationOwner(ugi.getUserName());
    Map<ApplicationAccessType, String> appAcls = new HashMap<ApplicationAccessType, String>();
    appAcls.put(ApplicationAccessType.VIEW_APP, ugi.getUserName());
    writer.writeApplicationACLs(appAcls);
    DataOutputStream out = writer.getWriter().prepareAppendKey(-1);
    new AggregatedLogFormat.LogKey(containerId).write(out);
    out.close();
    out = writer.getWriter().prepareAppendValue(-1);
    new AggregatedLogFormat.LogValue(rootLogDirs, containerId, UserGroupInformation.getCurrentUser().getShortUserName()).write(out, new HashSet<File>());
    out.close();
    writer.close();
}
Also used : Path(org.apache.hadoop.fs.Path) HashMap(java.util.HashMap) DataOutputStream(java.io.DataOutputStream) Matchers.anyString(org.mockito.Matchers.anyString) AggregatedLogFormat(org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat) ApplicationAccessType(org.apache.hadoop.yarn.api.records.ApplicationAccessType) File(java.io.File)

Aggregations

ApplicationAccessType (org.apache.hadoop.yarn.api.records.ApplicationAccessType): 61 usages
Test (org.junit.Test): 31
HashMap (java.util.HashMap): 25
RMApp (org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp): 24
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 23
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 18
ContainerLaunchContext (org.apache.hadoop.yarn.api.records.ContainerLaunchContext): 18
Credentials (org.apache.hadoop.security.Credentials): 17
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 16
ByteBuffer (java.nio.ByteBuffer): 13
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 13
ArrayList (java.util.ArrayList): 12
RMAppAttempt (org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt): 12
MockRM (org.apache.hadoop.yarn.server.resourcemanager.MockRM): 11
MockNM (org.apache.hadoop.yarn.server.resourcemanager.MockNM): 10
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 9
LocalResource (org.apache.hadoop.yarn.api.records.LocalResource): 9
MemoryRMStateStore (org.apache.hadoop.yarn.server.resourcemanager.recovery.MemoryRMStateStore): 9
Path (org.apache.hadoop.fs.Path): 8
ApplicationSubmissionContext (org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext): 8
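
Across these examples the recurring pattern is the same: build a Map<ApplicationAccessType, String> granting view and modify rights to specific users, then attach it to the ContainerLaunchContext that gets submitted, often as the AM container spec of an ApplicationSubmissionContext, as the aggregation above suggests. A condensed sketch of that pattern, assuming the standard Records factory and illustrative user names:

// Minimal sketch (not taken from any single Hadoop test) of the common
// ApplicationAccessType usage: grant view access to one user and modify
// access to another, then attach the ACLs to the launch context.
Map<ApplicationAccessType, String> acls = new HashMap<ApplicationAccessType, String>();
acls.put(ApplicationAccessType.VIEW_APP, "viewuser");
acls.put(ApplicationAccessType.MODIFY_APP, "moduser");

ContainerLaunchContext amContainer = Records.newRecord(ContainerLaunchContext.class);
amContainer.setCommands(Collections.singletonList("some launch command"));
amContainer.setApplicationACLs(acls);

ApplicationSubmissionContext appContext = Records.newRecord(ApplicationSubmissionContext.class);
appContext.setApplicationName("acl-demo");
appContext.setAMContainerSpec(amContainer);
// appContext would then be passed to YarnClient#submitApplication.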