
Example 51 with Credentials

Use of org.apache.hadoop.security.Credentials in project hadoop by apache.

The class ContainerManagerImpl, method startContainerInternal:

@SuppressWarnings("unchecked")
protected void startContainerInternal(ContainerTokenIdentifier containerTokenIdentifier, StartContainerRequest request) throws YarnException, IOException {
    ContainerId containerId = containerTokenIdentifier.getContainerID();
    String containerIdStr = containerId.toString();
    String user = containerTokenIdentifier.getApplicationSubmitter();
    LOG.info("Start request for " + containerIdStr + " by user " + user);
    ContainerLaunchContext launchContext = request.getContainerLaunchContext();
    Credentials credentials = YarnServerSecurityUtils.parseCredentials(launchContext);
    Container container = new ContainerImpl(getConfig(), this.dispatcher, launchContext, credentials, metrics, containerTokenIdentifier, context);
    ApplicationId applicationID = containerId.getApplicationAttemptId().getApplicationId();
    if (context.getContainers().putIfAbsent(containerId, container) != null) {
        NMAuditLogger.logFailure(user, AuditConstants.START_CONTAINER, "ContainerManagerImpl", "Container already running on this node!", applicationID, containerId);
        throw RPCUtil.getRemoteException("Container " + containerIdStr + " already is running on this node!!");
    }
    this.readLock.lock();
    try {
        if (!isServiceStopped()) {
            // Create the application
            // populate the flow context from the launch context if the timeline
            // service v.2 is enabled
            FlowContext flowContext = null;
            if (YarnConfiguration.timelineServiceV2Enabled(getConfig())) {
                String flowName = launchContext.getEnvironment().get(TimelineUtils.FLOW_NAME_TAG_PREFIX);
                String flowVersion = launchContext.getEnvironment().get(TimelineUtils.FLOW_VERSION_TAG_PREFIX);
                String flowRunIdStr = launchContext.getEnvironment().get(TimelineUtils.FLOW_RUN_ID_TAG_PREFIX);
                long flowRunId = 0L;
                if (flowRunIdStr != null && !flowRunIdStr.isEmpty()) {
                    flowRunId = Long.parseLong(flowRunIdStr);
                }
                flowContext = new FlowContext(flowName, flowVersion, flowRunId);
            }
            if (!context.getApplications().containsKey(applicationID)) {
                Application application = new ApplicationImpl(dispatcher, user, flowContext, applicationID, credentials, context);
                if (context.getApplications().putIfAbsent(applicationID, application) == null) {
                    LOG.info("Creating a new application reference for app " + applicationID);
                    LogAggregationContext logAggregationContext = containerTokenIdentifier.getLogAggregationContext();
                    Map<ApplicationAccessType, String> appAcls = container.getLaunchContext().getApplicationACLs();
                    context.getNMStateStore().storeApplication(applicationID, buildAppProto(applicationID, user, credentials, appAcls, logAggregationContext));
                    dispatcher.getEventHandler().handle(new ApplicationInitEvent(applicationID, appAcls, logAggregationContext));
                }
            }
            this.context.getNMStateStore().storeContainer(containerId, containerTokenIdentifier.getVersion(), request);
            dispatcher.getEventHandler().handle(new ApplicationContainerInitEvent(container));
            this.context.getContainerTokenSecretManager().startContainerSuccessful(containerTokenIdentifier);
            NMAuditLogger.logSuccess(user, AuditConstants.START_CONTAINER, "ContainerManagerImpl", applicationID, containerId);
            // TODO launchedContainer misplaced -> doesn't necessarily mean a container
            // launch. A finished Application will not launch containers.
            metrics.launchedContainer();
            metrics.allocateContainer(containerTokenIdentifier.getResource());
        } else {
            throw new YarnException("Container start failed as the NodeManager is " + "in the process of shutting down");
        }
    } finally {
        this.readLock.unlock();
    }
}
Also used : ApplicationImpl(org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationImpl) ApplicationContainerInitEvent(org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationContainerInitEvent) ApplicationInitEvent(org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationInitEvent) ByteString(com.google.protobuf.ByteString) ContainerLaunchContext(org.apache.hadoop.yarn.api.records.ContainerLaunchContext) FlowContext(org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationImpl.FlowContext) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) Container(org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) ApplicationAccessType(org.apache.hadoop.yarn.api.records.ApplicationAccessType) ContainerImpl(org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerImpl) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) Application(org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application) Credentials(org.apache.hadoop.security.Credentials) LogAggregationContext(org.apache.hadoop.yarn.api.records.LogAggregationContext)
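
A minimal sketch of the deserialization step that a helper such as YarnServerSecurityUtils.parseCredentials performs: reading the token-storage bytes carried in the launch context into a Credentials object. The class and method names are illustrative, and it assumes the tokens arrive in Hadoop's standard token-storage format; the upstream implementation may differ in details such as error handling.

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;

// Illustrative helper; not the actual YarnServerSecurityUtils code.
public final class LaunchContextCredentials {

    public static Credentials read(ContainerLaunchContext launchContext) throws IOException {
        Credentials credentials = new Credentials();
        ByteBuffer tokens = launchContext.getTokens();
        if (tokens != null) {
            DataInputByteBuffer buf = new DataInputByteBuffer();
            // duplicate() so the caller's buffer position is left untouched
            buf.reset(tokens.duplicate());
            credentials.readTokenStorageStream(buf);
        }
        return credentials;
    }
}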

Example 52 with Credentials

Use of org.apache.hadoop.security.Credentials in project hadoop by apache.

The class RMWebServices, method createCredentials:

/**
   * Generate a Credentials object from the information in the CredentialsInfo
   * object.
   * 
   * @param credentials
   *          the CredentialsInfo provided by the user.
   * @return the Credentials object built from the provided tokens and secrets.
   */
private Credentials createCredentials(CredentialsInfo credentials) {
    Credentials ret = new Credentials();
    try {
        for (Map.Entry<String, String> entry : credentials.getTokens().entrySet()) {
            Text alias = new Text(entry.getKey());
            Token<TokenIdentifier> token = new Token<TokenIdentifier>();
            token.decodeFromUrlString(entry.getValue());
            ret.addToken(alias, token);
        }
        for (Map.Entry<String, String> entry : credentials.getSecrets().entrySet()) {
            Text alias = new Text(entry.getKey());
            Base64 decoder = new Base64(0, null, true);
            byte[] secret = decoder.decode(entry.getValue());
            ret.addSecretKey(alias, secret);
        }
    } catch (IOException ie) {
        throw new BadRequestException("Could not parse credentials data; exception message = " + ie.getMessage());
    }
    return ret;
}
Also used : TokenIdentifier(org.apache.hadoop.security.token.TokenIdentifier) RMDelegationTokenIdentifier(org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier) Base64(org.apache.commons.codec.binary.Base64) BadRequestException(org.apache.hadoop.yarn.webapp.BadRequestException) Text(org.apache.hadoop.io.Text) InvalidToken(org.apache.hadoop.security.token.SecretManager.InvalidToken) Token(org.apache.hadoop.security.token.Token) DelegationToken(org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.DelegationToken) IOException(java.io.IOException) Map(java.util.Map) HashMap(java.util.HashMap) ConcurrentMap(java.util.concurrent.ConcurrentMap) Credentials(org.apache.hadoop.security.Credentials)
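
For context, the string forms this method decodes can be produced on the client side with the matching encoders: Token.encodeToUrlString() for tokens and a URL-safe commons-codec Base64 for secrets. A short sketch (the class name is illustrative):

import java.io.IOException;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

// Illustrative encoders that mirror the decoding done in createCredentials above.
public final class CredentialsInfoEncoder {

    // URL-safe string that Token.decodeFromUrlString() can read back.
    public static String encodeToken(Token<? extends TokenIdentifier> token) throws IOException {
        return token.encodeToUrlString();
    }

    // URL-safe base64, matching the new Base64(0, null, true) decoder used above.
    public static String encodeSecret(byte[] secret) {
        return new Base64(0, null, true).encodeToString(secret);
    }
}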

Example 53 with Credentials

Use of org.apache.hadoop.security.Credentials in project hadoop by apache.

The class TestYARNRunner, method testGetHSDelegationToken:

@Test(timeout = 20000)
public void testGetHSDelegationToken() throws Exception {
    try {
        Configuration conf = new Configuration();
        // Setup mock service
        InetSocketAddress mockRmAddress = new InetSocketAddress("localhost", 4444);
        Text rmTokenSevice = SecurityUtil.buildTokenService(mockRmAddress);
        InetSocketAddress mockHsAddress = new InetSocketAddress("localhost", 9200);
        Text hsTokenSevice = SecurityUtil.buildTokenService(mockHsAddress);
        // Setup mock rm token
        RMDelegationTokenIdentifier tokenIdentifier = new RMDelegationTokenIdentifier(new Text("owner"), new Text("renewer"), new Text("real"));
        Token<RMDelegationTokenIdentifier> token = new Token<RMDelegationTokenIdentifier>(new byte[0], new byte[0], tokenIdentifier.getKind(), rmTokenSevice);
        token.setKind(RMDelegationTokenIdentifier.KIND_NAME);
        // Setup mock history token
        org.apache.hadoop.yarn.api.records.Token historyToken = org.apache.hadoop.yarn.api.records.Token.newInstance(new byte[0], MRDelegationTokenIdentifier.KIND_NAME.toString(), new byte[0], hsTokenSevice.toString());
        GetDelegationTokenResponse getDtResponse = Records.newRecord(GetDelegationTokenResponse.class);
        getDtResponse.setDelegationToken(historyToken);
        // mock services
        MRClientProtocol mockHsProxy = mock(MRClientProtocol.class);
        doReturn(mockHsAddress).when(mockHsProxy).getConnectAddress();
        doReturn(getDtResponse).when(mockHsProxy).getDelegationToken(any(GetDelegationTokenRequest.class));
        ResourceMgrDelegate rmDelegate = mock(ResourceMgrDelegate.class);
        doReturn(rmTokenSevice).when(rmDelegate).getRMDelegationTokenService();
        ClientCache clientCache = mock(ClientCache.class);
        doReturn(mockHsProxy).when(clientCache).getInitializedHSProxy();
        Credentials creds = new Credentials();
        YARNRunner yarnRunner = new YARNRunner(conf, rmDelegate, clientCache);
        // No HS token if no RM token
        yarnRunner.addHistoryToken(creds);
        verify(mockHsProxy, times(0)).getDelegationToken(any(GetDelegationTokenRequest.class));
        // No HS token if RM token present, but security disabled.
        creds.addToken(new Text("rmdt"), token);
        yarnRunner.addHistoryToken(creds);
        verify(mockHsProxy, times(0)).getDelegationToken(any(GetDelegationTokenRequest.class));
        conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
        UserGroupInformation.setConfiguration(conf);
        creds = new Credentials();
        // No HS token if no RM token, security enabled
        yarnRunner.addHistoryToken(creds);
        verify(mockHsProxy, times(0)).getDelegationToken(any(GetDelegationTokenRequest.class));
        // HS token if RM token present, security enabled
        creds.addToken(new Text("rmdt"), token);
        yarnRunner.addHistoryToken(creds);
        verify(mockHsProxy, times(1)).getDelegationToken(any(GetDelegationTokenRequest.class));
        // No additional call to get HS token if RM and HS token present
        yarnRunner.addHistoryToken(creds);
        verify(mockHsProxy, times(1)).getDelegationToken(any(GetDelegationTokenRequest.class));
    } finally {
        // Back to defaults.
        UserGroupInformation.setConfiguration(new Configuration());
    }
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) GetDelegationTokenResponse(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse) InetSocketAddress(java.net.InetSocketAddress) Text(org.apache.hadoop.io.Text) Token(org.apache.hadoop.security.token.Token) RMDelegationTokenIdentifier(org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier) MRClientProtocol(org.apache.hadoop.mapreduce.v2.api.MRClientProtocol) GetDelegationTokenRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest) Credentials(org.apache.hadoop.security.Credentials) Test(org.junit.Test)
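
The behaviour this test pins down can be summarized as: fetch a JobHistoryServer token only when security is enabled, an RM delegation token is already present, and no HS token has been added yet. The sketch below captures that decision logic under those assumptions; it is not YARNRunner's actual implementation, and HistoryTokenFetcher is a hypothetical stand-in for the MRClientProtocol call.

import java.io.IOException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

// Illustrative helper; YARNRunner.addHistoryToken may be structured differently.
public final class HistoryTokenHelper {

    public static void addHistoryTokenIfNeeded(Credentials creds, Text rmTokenService,
            Text hsTokenService, HistoryTokenFetcher fetcher) throws IOException {
        if (!UserGroupInformation.isSecurityEnabled()) {
            return; // security off: no HS token needed
        }
        if (creds.getToken(rmTokenService) == null) {
            return; // no RM token: the job is not using delegation tokens
        }
        if (creds.getToken(hsTokenService) != null) {
            return; // HS token already present: no extra RPC
        }
        creds.addToken(hsTokenService, fetcher.fetch());
    }

    // Hypothetical callback standing in for the MRClientProtocol getDelegationToken RPC.
    public interface HistoryTokenFetcher {
        Token<? extends TokenIdentifier> fetch() throws IOException;
    }
}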

Example 54 with Credentials

Use of org.apache.hadoop.security.Credentials in project hadoop by apache.

The class TestYarnClient, method testAutomaticTimelineDelegationTokenLoading:

@Test
public void testAutomaticTimelineDelegationTokenLoading() throws Exception {
    Configuration conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
    TimelineDelegationTokenIdentifier timelineDT = new TimelineDelegationTokenIdentifier();
    final Token<TimelineDelegationTokenIdentifier> dToken = new Token<TimelineDelegationTokenIdentifier>(timelineDT.getBytes(), new byte[0], timelineDT.getKind(), new Text());
    // create a mock client
    YarnClientImpl client = spy(new YarnClientImpl() {

        @Override
        TimelineClient createTimelineClient() throws IOException, YarnException {
            timelineClient = mock(TimelineClient.class);
            when(timelineClient.getDelegationToken(any(String.class))).thenReturn(dToken);
            return timelineClient;
        }

        @Override
        protected void serviceStart() throws Exception {
            rmClient = mock(ApplicationClientProtocol.class);
        }

        @Override
        protected void serviceStop() throws Exception {
        }

        @Override
        public ApplicationReport getApplicationReport(ApplicationId appId) {
            ApplicationReport report = mock(ApplicationReport.class);
            when(report.getYarnApplicationState()).thenReturn(YarnApplicationState.RUNNING);
            return report;
        }

        @Override
        public boolean isSecurityEnabled() {
            return true;
        }
    });
    client.init(conf);
    client.start();
    try {
        // when i == 1, timeline DT doesn't exist, need to get one more
        for (int i = 0; i < 2; ++i) {
            ApplicationSubmissionContext context = mock(ApplicationSubmissionContext.class);
            ApplicationId applicationId = ApplicationId.newInstance(0, i + 1);
            when(context.getApplicationId()).thenReturn(applicationId);
            DataOutputBuffer dob = new DataOutputBuffer();
            Credentials credentials = new Credentials();
            if (i == 0) {
                credentials.addToken(client.timelineService, dToken);
            }
            credentials.writeTokenStorageToStream(dob);
            ByteBuffer tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
            ContainerLaunchContext clc = ContainerLaunchContext.newInstance(null, null, null, null, tokens, null);
            when(context.getAMContainerSpec()).thenReturn(clc);
            client.submitApplication(context);
            if (i == 0) {
                // GetTimelineDelegationToken shouldn't be called
                verify(client, never()).getTimelineDelegationToken();
            }
            // Either way, the token should be there
            credentials = new Credentials();
            DataInputByteBuffer dibb = new DataInputByteBuffer();
            tokens = clc.getTokens();
            if (tokens != null) {
                dibb.reset(tokens);
                credentials.readTokenStorageStream(dibb);
                tokens.rewind();
            }
            Collection<Token<? extends TokenIdentifier>> dTokens = credentials.getAllTokens();
            Assert.assertEquals(1, dTokens.size());
            Assert.assertEquals(dToken, dTokens.iterator().next());
        }
    } finally {
        client.stop();
    }
}
Also used : TokenIdentifier(org.apache.hadoop.security.token.TokenIdentifier) TimelineDelegationTokenIdentifier(org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier) CapacitySchedulerConfiguration(org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration) Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) DataInputByteBuffer(org.apache.hadoop.io.DataInputByteBuffer) Token(org.apache.hadoop.security.token.Token) Text(org.apache.hadoop.io.Text) IOException(java.io.IOException) ContainerLaunchContext(org.apache.hadoop.yarn.api.records.ContainerLaunchContext) ByteBuffer(java.nio.ByteBuffer) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) ApplicationNotFoundException(org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException) ApplicationIdNotProvidedException(org.apache.hadoop.yarn.exceptions.ApplicationIdNotProvidedException) ContainerNotFoundException(org.apache.hadoop.yarn.exceptions.ContainerNotFoundException) ApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationReport) TimelineClient(org.apache.hadoop.yarn.client.api.TimelineClient) ApplicationSubmissionContext(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext) DataOutputBuffer(org.apache.hadoop.io.DataOutputBuffer) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) Credentials(org.apache.hadoop.security.Credentials) Test(org.junit.Test)
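
The token-packing pattern the test exercises (serialize the Credentials with a DataOutputBuffer, wrap the bytes in a ByteBuffer, and hand them to the ContainerLaunchContext) can be factored into a small helper. A sketch, with an illustrative class name and the other launch-context fields left null purely for brevity:

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;

// Illustrative helper mirroring the packing done in the test above.
public final class AmLaunchContextBuilder {

    public static ContainerLaunchContext withTokens(Credentials creds) throws IOException {
        DataOutputBuffer dob = new DataOutputBuffer();
        creds.writeTokenStorageToStream(dob);
        ByteBuffer tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
        // localResources, environment, commands, serviceData and ACLs omitted for brevity
        return ContainerLaunchContext.newInstance(null, null, null, null, tokens, null);
    }
}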

Example 55 with Credentials

Use of org.apache.hadoop.security.Credentials in project hadoop by apache.

The class TestResourceLocalizationService, method getMockContainer:

private static Container getMockContainer(ApplicationId appId, int id, String user) throws IOException {
    Container c = mock(Container.class);
    ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(appId, 1);
    ContainerId cId = BuilderUtils.newContainerId(appAttemptId, id);
    when(c.getUser()).thenReturn(user);
    when(c.getContainerId()).thenReturn(cId);
    Credentials creds = new Credentials();
    Token<? extends TokenIdentifier> tk = getToken(id);
    String fingerprint = ResourceLocalizationService.buildTokenFingerprint(tk);
    assertNotNull(fingerprint);
    assertTrue("Expected token fingerprint of 10 hex bytes delimited by space.", fingerprint.matches("^(([0-9a-f]){2} ){9}([0-9a-f]){2}$"));
    creds.addToken(new Text("tok" + id), tk);
    when(c.getCredentials()).thenReturn(creds);
    when(c.toString()).thenReturn(cId.toString());
    when(c.getContainerState()).thenReturn(ContainerState.LOCALIZING);
    return c;
}
Also used : Container(org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) Text(org.apache.hadoop.io.Text) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) Credentials(org.apache.hadoop.security.Credentials)
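
The asserted fingerprint format is ten space-delimited lower-case hex bytes. One way to produce a value in that shape is to format the first ten bytes of a digest over the token identifier, as sketched below; this only illustrates the expected format and is not necessarily how ResourceLocalizationService.buildTokenFingerprint derives its value.

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

// Illustrative only: the digest choice here is an assumption, not the upstream algorithm.
public final class TokenFingerprint {

    public static String of(Token<? extends TokenIdentifier> token) throws NoSuchAlgorithmException {
        byte[] digest = MessageDigest.getInstance("MD5").digest(token.getIdentifier());
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < 10; i++) {
            if (i > 0) {
                sb.append(' ');
            }
            // mask to an int so negative bytes still print as two lower-case hex digits
            sb.append(String.format("%02x", digest[i] & 0xff));
        }
        // matches: ^(([0-9a-f]){2} ){9}([0-9a-f]){2}$
        return sb.toString();
    }
}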

Aggregations

Credentials (org.apache.hadoop.security.Credentials) 238
Test (org.junit.Test) 105
Token (org.apache.hadoop.security.token.Token) 76
Text (org.apache.hadoop.io.Text) 64
IOException (java.io.IOException) 63
Path (org.apache.hadoop.fs.Path) 50
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId) 48
ByteBuffer (java.nio.ByteBuffer) 42
Configuration (org.apache.hadoop.conf.Configuration) 41
DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer) 37
HashMap (java.util.HashMap) 34
InetSocketAddress (java.net.InetSocketAddress) 30
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation) 30
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId) 28
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration) 27
File (java.io.File) 25
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId) 24
ContainerLaunchContext (org.apache.hadoop.yarn.api.records.ContainerLaunchContext) 23
JobConf (org.apache.hadoop.mapred.JobConf) 20
LocalResource (org.apache.hadoop.yarn.api.records.LocalResource) 19